author    Allan Sandfeld Jensen <allan.jensen@qt.io>  2021-05-20 09:47:09 +0200
committer Allan Sandfeld Jensen <allan.jensen@qt.io>  2021-06-07 11:15:42 +0000
commit    189d4fd8fad9e3c776873be51938cd31a42b6177 (patch)
tree      6497caeff5e383937996768766ab3bb2081a40b2 /chromium/media
parent    8bc75099d364490b22f43a7ce366b366c08f4164 (diff)
download  qtwebengine-chromium-189d4fd8fad9e3c776873be51938cd31a42b6177.tar.gz
BASELINE: Update Chromium to 90.0.4430.221
Change-Id: Iff4d9d18d2fcf1a576f3b1f453010f744a232920
Reviewed-by: Allan Sandfeld Jensen <allan.jensen@qt.io>
Diffstat (limited to 'chromium/media')
-rw-r--r--chromium/media/BUILD.gn1
-rw-r--r--chromium/media/DEPS3
-rw-r--r--chromium/media/OWNERS1
-rw-r--r--chromium/media/audio/BUILD.gn12
-rw-r--r--chromium/media/audio/DEPS3
-rw-r--r--chromium/media/audio/OWNERS2
-rw-r--r--chromium/media/audio/android/aaudio.sigs2
-rw-r--r--chromium/media/audio/android/aaudio_output.cc24
-rw-r--r--chromium/media/audio/android/audio_android_unittest.cc3
-rw-r--r--chromium/media/audio/android/audio_manager_android.cc3
-rw-r--r--chromium/media/audio/audio_debug_recording_session_impl.cc2
-rw-r--r--chromium/media/audio/audio_encoders_unittest.cc341
-rw-r--r--chromium/media/audio/audio_features.cc4
-rw-r--r--chromium/media/audio/audio_features.h4
-rw-r--r--chromium/media/audio/audio_input_stream_data_interceptor.h2
-rw-r--r--chromium/media/audio/audio_low_latency_input_output_unittest.cc8
-rw-r--r--chromium/media/audio/audio_manager.cc8
-rw-r--r--chromium/media/audio/audio_manager.h5
-rw-r--r--chromium/media/audio/audio_manager_base.cc2
-rw-r--r--chromium/media/audio/audio_manager_unittest.cc61
-rw-r--r--chromium/media/audio/audio_opus_encoder.cc396
-rw-r--r--chromium/media/audio/audio_opus_encoder.h57
-rw-r--r--chromium/media/audio/audio_output_device.cc17
-rw-r--r--chromium/media/audio/audio_pcm_encoder.cc34
-rw-r--r--chromium/media/audio/audio_pcm_encoder.h32
-rw-r--r--chromium/media/audio/audio_system_test_util.cc3
-rw-r--r--chromium/media/audio/audio_thread_impl.cc2
-rw-r--r--chromium/media/audio/cras/audio_manager_chromeos.cc89
-rw-r--r--chromium/media/audio/cras/audio_manager_chromeos.h13
-rw-r--r--chromium/media/audio/cras/audio_manager_cras.cc59
-rw-r--r--chromium/media/audio/cras/audio_manager_cras.h4
-rw-r--r--chromium/media/audio/cras/cras_input_unittest.cc6
-rw-r--r--chromium/media/audio/cras/cras_unified_unittest.cc6
-rw-r--r--chromium/media/audio/cras/cras_util.cc96
-rw-r--r--chromium/media/audio/cras/cras_util.h11
-rw-r--r--chromium/media/audio/linux/audio_manager_linux.cc4
-rw-r--r--chromium/media/audio/mac/audio_auhal_mac.cc50
-rw-r--r--chromium/media/audio/mac/audio_auhal_mac.h20
-rw-r--r--chromium/media/audio/pulse/audio_manager_pulse.cc2
-rw-r--r--chromium/media/audio/win/audio_low_latency_input_win.cc2
-rw-r--r--chromium/media/audio/win/core_audio_util_win.cc4
-rw-r--r--chromium/media/audio/win/waveout_output_win.cc6
-rw-r--r--chromium/media/base/BUILD.gn22
-rw-r--r--chromium/media/base/OWNERS1
-rw-r--r--chromium/media/base/android/BUILD.gn6
-rw-r--r--chromium/media/base/android/media_drm_bridge.cc2
-rw-r--r--chromium/media/base/android/media_player_bridge.cc83
-rw-r--r--chromium/media/base/android/media_player_bridge.h8
-rw-r--r--chromium/media/base/async_destroy_video_decoder.h5
-rw-r--r--chromium/media/base/audio_block_fifo.cc26
-rw-r--r--chromium/media/base/audio_buffer.cc89
-rw-r--r--chromium/media/base/audio_buffer.h8
-rw-r--r--chromium/media/base/audio_buffer_unittest.cc68
-rw-r--r--chromium/media/base/audio_bus.cc75
-rw-r--r--chromium/media/base/audio_bus.h16
-rw-r--r--chromium/media/base/audio_bus_unittest.cc83
-rw-r--r--chromium/media/base/audio_decoder.h3
-rw-r--r--chromium/media/base/audio_encoder.cc50
-rw-r--r--chromium/media/base/audio_encoder.h104
-rw-r--r--chromium/media/base/audio_fifo.cc6
-rw-r--r--chromium/media/base/audio_latency.cc39
-rw-r--r--chromium/media/base/audio_latency_unittest.cc27
-rw-r--r--chromium/media/base/audio_renderer.h3
-rw-r--r--chromium/media/base/bind_to_current_loop.h115
-rw-r--r--chromium/media/base/bind_to_current_loop_unittest.cc376
-rw-r--r--chromium/media/base/cdm_context.cc4
-rw-r--r--chromium/media/base/cdm_context.h13
-rw-r--r--chromium/media/base/cdm_promise_adapter.cc20
-rw-r--r--chromium/media/base/cdm_promise_adapter.h7
-rw-r--r--chromium/media/base/decoder.cc52
-rw-r--r--chromium/media/base/decoder.h41
-rw-r--r--chromium/media/base/decoder_factory.cc5
-rw-r--r--chromium/media/base/decoder_factory.h8
-rw-r--r--chromium/media/base/decryptor.h11
-rw-r--r--chromium/media/base/eme_constants.h14
-rw-r--r--chromium/media/base/fake_audio_worker.cc2
-rw-r--r--chromium/media/base/fallback_video_decoder.cc107
-rw-r--r--chromium/media/base/fallback_video_decoder.h60
-rw-r--r--chromium/media/base/fallback_video_decoder_unittest.cc166
-rw-r--r--chromium/media/base/ipc/DEPS6
-rw-r--r--chromium/media/base/ipc/media_param_traits_macros.h16
-rw-r--r--chromium/media/base/key_systems.cc37
-rw-r--r--chromium/media/base/key_systems_unittest.cc5
-rw-r--r--chromium/media/base/limits.h4
-rw-r--r--chromium/media/base/logging_override_if_enabled.h12
-rw-r--r--chromium/media/base/mac/color_space_util_mac.h10
-rw-r--r--chromium/media/base/mac/color_space_util_mac.mm97
-rw-r--r--chromium/media/base/media_log_properties.cc2
-rw-r--r--chromium/media/base/media_log_properties.h14
-rw-r--r--chromium/media/base/media_serializers.h17
-rw-r--r--chromium/media/base/media_switches.cc116
-rw-r--r--chromium/media/base/media_switches.h36
-rw-r--r--chromium/media/base/media_track.h10
-rw-r--r--chromium/media/base/mime_util_internal.cc7
-rw-r--r--chromium/media/base/mime_util_unittest.cc10
-rw-r--r--chromium/media/base/mock_filters.cc14
-rw-r--r--chromium/media/base/mock_filters.h46
-rw-r--r--chromium/media/base/null_video_sink.h2
-rw-r--r--chromium/media/base/offloading_audio_encoder.cc76
-rw-r--r--chromium/media/base/offloading_audio_encoder.h62
-rw-r--r--chromium/media/base/offloading_audio_encoder_unittest.cc127
-rw-r--r--chromium/media/base/offloading_video_encoder.cc7
-rw-r--r--chromium/media/base/pipeline.h7
-rw-r--r--chromium/media/base/pipeline_impl.cc37
-rw-r--r--chromium/media/base/pipeline_impl.h5
-rw-r--r--chromium/media/base/pipeline_impl_unittest.cc6
-rw-r--r--chromium/media/base/pipeline_status.cc110
-rw-r--r--chromium/media/base/pipeline_status.h51
-rw-r--r--chromium/media/base/renderer.cc4
-rw-r--r--chromium/media/base/renderer.h3
-rw-r--r--chromium/media/base/renderer_factory_selector.cc1
-rw-r--r--chromium/media/base/sample_rates.cc3
-rw-r--r--chromium/media/base/sample_rates.h1
-rw-r--r--chromium/media/base/shared_memory_pool.cc109
-rw-r--r--chromium/media/base/shared_memory_pool.h77
-rw-r--r--chromium/media/base/shared_memory_pool_unittest.cc57
-rw-r--r--chromium/media/base/silent_sink_suspender.h2
-rw-r--r--chromium/media/base/sinc_resampler.cc3
-rw-r--r--chromium/media/base/status.h56
-rw-r--r--chromium/media/base/status_codes.h59
-rw-r--r--chromium/media/base/status_unittest.cc47
-rw-r--r--chromium/media/base/stream_parser.cc6
-rw-r--r--chromium/media/base/stream_parser.h5
-rw-r--r--chromium/media/base/supported_types.cc35
-rw-r--r--chromium/media/base/supported_types_unittest.cc6
-rw-r--r--chromium/media/base/supported_video_decoder_config.cc (renamed from chromium/media/video/supported_video_decoder_config.cc)2
-rw-r--r--chromium/media/base/supported_video_decoder_config.h (renamed from chromium/media/video/supported_video_decoder_config.h)12
-rw-r--r--chromium/media/base/supported_video_decoder_config_unittest.cc104
-rw-r--r--chromium/media/base/user_input_monitor_unittest.cc136
-rw-r--r--chromium/media/base/user_input_monitor_win.cc85
-rw-r--r--chromium/media/base/video_decoder.cc4
-rw-r--r--chromium/media/base/video_decoder.h10
-rw-r--r--chromium/media/base/video_decoder_config.cc6
-rw-r--r--chromium/media/base/video_decoder_config.h5
-rw-r--r--chromium/media/base/video_encoder.h8
-rw-r--r--chromium/media/base/video_frame.cc84
-rw-r--r--chromium/media/base/video_frame.h12
-rw-r--r--chromium/media/base/video_frame_feedback.h4
-rw-r--r--chromium/media/base/video_frame_layout.cc1
-rw-r--r--chromium/media/base/video_frame_metadata.cc9
-rw-r--r--chromium/media/base/video_frame_metadata.h12
-rw-r--r--chromium/media/base/video_frame_unittest.cc28
-rw-r--r--chromium/media/base/video_types.cc5
-rw-r--r--chromium/media/base/video_types.h4
-rw-r--r--chromium/media/base/video_util.cc555
-rw-r--r--chromium/media/base/video_util.h56
-rw-r--r--chromium/media/base/video_util_unittest.cc26
-rw-r--r--chromium/media/base/win/BUILD.gn5
-rw-r--r--chromium/media/base/win/dxgi_device_manager.cc143
-rw-r--r--chromium/media/base/win/dxgi_device_manager.h76
-rw-r--r--chromium/media/base/win/dxgi_device_scope_handle_unittest.cc24
-rw-r--r--chromium/media/base/win/hresult_status_helper.cc7
-rw-r--r--chromium/media/base/win/mf_helpers.cc36
-rw-r--r--chromium/media/base/win/mf_helpers.h21
-rw-r--r--chromium/media/blink/cache_util.cc8
-rw-r--r--chromium/media/blink/key_system_config_selector.cc47
-rw-r--r--chromium/media/blink/key_system_config_selector.h13
-rw-r--r--chromium/media/blink/key_system_config_selector_unittest.cc118
-rw-r--r--chromium/media/blink/multibuffer_data_source_unittest.cc12
-rw-r--r--chromium/media/blink/resource_multibuffer_data_provider.cc4
-rw-r--r--chromium/media/blink/resource_multibuffer_data_provider_unittest.cc3
-rw-r--r--chromium/media/blink/url_index.cc11
-rw-r--r--chromium/media/blink/url_index.h5
-rw-r--r--chromium/media/blink/url_index_unittest.cc18
-rw-r--r--chromium/media/blink/video_frame_compositor.cc4
-rw-r--r--chromium/media/blink/video_frame_compositor.h2
-rw-r--r--chromium/media/blink/watch_time_reporter_unittest.cc21
-rw-r--r--chromium/media/blink/webcontentdecryptionmodule_impl.cc4
-rw-r--r--chromium/media/blink/webencryptedmediaclient_impl.cc7
-rw-r--r--chromium/media/blink/webencryptedmediaclient_impl.h7
-rw-r--r--chromium/media/blink/webmediaplayer_impl.cc288
-rw-r--r--chromium/media/blink/webmediaplayer_impl.h78
-rw-r--r--chromium/media/blink/webmediaplayer_impl_unittest.cc129
-rw-r--r--chromium/media/blink/webmediasource_impl.cc42
-rw-r--r--chromium/media/blink/webmediasource_impl.h15
-rw-r--r--chromium/media/blink/websourcebuffer_impl.cc17
-rw-r--r--chromium/media/blink/websourcebuffer_impl.h3
-rw-r--r--chromium/media/capabilities/in_memory_video_decode_stats_db_impl.cc12
-rw-r--r--chromium/media/capabilities/in_memory_video_decode_stats_db_unittest.cc11
-rw-r--r--chromium/media/capabilities/video_decode_stats_db.cc5
-rw-r--r--chromium/media/capabilities/video_decode_stats_db.h18
-rw-r--r--chromium/media/capture/BUILD.gn53
-rw-r--r--chromium/media/capture/content/OWNERS2
-rw-r--r--chromium/media/capture/content/android/BUILD.gn2
-rw-r--r--chromium/media/capture/content/android/thread_safe_capture_oracle.cc12
-rw-r--r--chromium/media/capture/mojom/video_capture.mojom5
-rw-r--r--chromium/media/capture/mojom/video_capture_types.mojom10
-rw-r--r--chromium/media/capture/mojom/video_capture_types_mojom_traits.cc21
-rw-r--r--chromium/media/capture/video/DEPS1
-rw-r--r--chromium/media/capture/video/android/BUILD.gn2
-rw-r--r--chromium/media/capture/video/android/video_capture_device_factory_android.cc11
-rw-r--r--chromium/media/capture/video/chromeos/DEPS2
-rw-r--r--chromium/media/capture/video/chromeos/ash/DEPS3
-rw-r--r--chromium/media/capture/video/chromeos/ash/camera_hal_dispatcher_impl.cc (renamed from chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc)302
-rw-r--r--chromium/media/capture/video/chromeos/ash/camera_hal_dispatcher_impl.h283
-rw-r--r--chromium/media/capture/video/chromeos/ash/camera_hal_dispatcher_impl_unittest.cc423
-rw-r--r--chromium/media/capture/video/chromeos/ash/power_manager_client_proxy.cc84
-rw-r--r--chromium/media/capture/video/chromeos/ash/power_manager_client_proxy.h66
-rw-r--r--chromium/media/capture/video/chromeos/camera_3a_controller.cc63
-rw-r--r--chromium/media/capture/video/chromeos/camera_3a_controller.h12
-rw-r--r--chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.cc141
-rw-r--r--chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.h53
-rw-r--r--chromium/media/capture/video/chromeos/camera_app_device_impl.cc87
-rw-r--r--chromium/media/capture/video/chromeos/camera_app_device_impl.h34
-rw-r--r--chromium/media/capture/video/chromeos/camera_app_device_provider_impl.cc25
-rw-r--r--chromium/media/capture/video/chromeos/camera_app_device_provider_impl.h11
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_context.cc5
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_delegate.cc380
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_delegate.h57
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc43
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_delegate.cc170
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_delegate.h45
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc17
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h146
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl_unittest.cc227
-rw-r--r--chromium/media/capture/video/chromeos/camera_metadata_utils.cc7
-rw-r--r--chromium/media/capture/video/chromeos/camera_metadata_utils.h5
-rw-r--r--chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.cc3
-rw-r--r--chromium/media/capture/video/chromeos/mock_camera_module.cc11
-rw-r--r--chromium/media/capture/video/chromeos/mock_camera_module.h12
-rw-r--r--chromium/media/capture/video/chromeos/mock_video_capture_client.cc5
-rw-r--r--chromium/media/capture/video/chromeos/mock_video_capture_client.h5
-rw-r--r--chromium/media/capture/video/chromeos/mojom/camera3.mojom20
-rw-r--r--chromium/media/capture/video/chromeos/mojom/camera_app.mojom10
-rw-r--r--chromium/media/capture/video/chromeos/mojom/camera_common.mojom13
-rw-r--r--chromium/media/capture/video/chromeos/mojom/cros_camera_service.mojom79
-rw-r--r--chromium/media/capture/video/chromeos/request_manager.cc133
-rw-r--r--chromium/media/capture/video/chromeos/request_manager.h27
-rw-r--r--chromium/media/capture/video/chromeos/request_manager_unittest.cc27
-rw-r--r--chromium/media/capture/video/chromeos/stream_buffer_manager.cc31
-rw-r--r--chromium/media/capture/video/chromeos/stream_buffer_manager.h10
-rw-r--r--chromium/media/capture/video/chromeos/token_manager.cc157
-rw-r--r--chromium/media/capture/video/chromeos/token_manager.h72
-rw-r--r--chromium/media/capture/video/chromeos/token_manager_unittest.cc97
-rw-r--r--chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.cc34
-rw-r--r--chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.h5
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_chromeos_delegate.cc282
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_chromeos_delegate.h133
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc288
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h80
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.cc50
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h9
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_jpeg_decoder.h9
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.cc19
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.h9
-rw-r--r--chromium/media/capture/video/create_video_capture_device_factory.cc37
-rw-r--r--chromium/media/capture/video/create_video_capture_device_factory.h9
-rw-r--r--chromium/media/capture/video/fake_video_capture_device.cc1
-rw-r--r--chromium/media/capture/video/file_video_capture_device_factory.cc1
-rw-r--r--chromium/media/capture/video/fuchsia/video_capture_device_factory_fuchsia.cc7
-rw-r--r--chromium/media/capture/video/fuchsia/video_capture_device_fuchsia.cc8
-rw-r--r--chromium/media/capture/video/fuchsia/video_capture_device_fuchsia_test.cc5
-rw-r--r--chromium/media/capture/video/gpu_memory_buffer_utils.cc2
-rw-r--r--chromium/media/capture/video/linux/v4l2_capture_delegate.cc13
-rw-r--r--chromium/media/capture/video/linux/video_capture_device_factory_linux.cc34
-rw-r--r--chromium/media/capture/video/mac/DEPS6
-rw-r--r--chromium/media/capture/video/mac/pixel_buffer_pool_mac_unittest.mm30
-rw-r--r--chromium/media/capture/video/mac/pixel_buffer_transferer_mac_unittest.mm30
-rw-r--r--chromium/media/capture/video/mac/sample_buffer_transformer_mac.cc385
-rw-r--r--chromium/media/capture/video/mac/sample_buffer_transformer_mac.h67
-rw-r--r--chromium/media/capture/video/mac/sample_buffer_transformer_mac_unittest.mm607
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_avfoundation_legacy_mac.mm4
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.h36
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm290
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_avfoundation_mac_unittest.mm424
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_avfoundation_protocol_mac.h15
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h3
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.mm17
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_mac.h5
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_mac.mm15
-rw-r--r--chromium/media/capture/video/mac/video_capture_metrics_mac.h23
-rw-r--r--chromium/media/capture/video/mac/video_capture_metrics_mac.mm88
-rw-r--r--chromium/media/capture/video/mac/video_capture_metrics_mac_unittest.mm87
-rw-r--r--chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc4
-rw-r--r--chromium/media/capture/video/mock_gpu_memory_buffer_manager.h9
-rw-r--r--chromium/media/capture/video/mock_video_capture_device_client.h7
-rw-r--r--chromium/media/capture/video/mock_video_frame_receiver.h12
-rw-r--r--chromium/media/capture/video/video_capture_buffer_pool_impl.cc12
-rw-r--r--chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.cc4
-rw-r--r--chromium/media/capture/video/video_capture_device.cc24
-rw-r--r--chromium/media/capture/video/video_capture_device.h19
-rw-r--r--chromium/media/capture/video/video_capture_device_client.cc91
-rw-r--r--chromium/media/capture/video/video_capture_device_client.h19
-rw-r--r--chromium/media/capture/video/video_capture_device_client_unittest.cc14
-rw-r--r--chromium/media/capture/video/video_capture_device_factory.cc6
-rw-r--r--chromium/media/capture/video/video_capture_device_factory.h4
-rw-r--r--chromium/media/capture/video/video_capture_device_unittest.cc38
-rw-r--r--chromium/media/capture/video/video_capture_metrics.cc180
-rw-r--r--chromium/media/capture/video/video_capture_metrics.h19
-rw-r--r--chromium/media/capture/video/video_capture_metrics_unittest.cc59
-rw-r--r--chromium/media/capture/video/video_capture_system_impl.cc6
-rw-r--r--chromium/media/capture/video/video_frame_receiver.cc36
-rw-r--r--chromium/media/capture/video/video_frame_receiver.h28
-rw-r--r--chromium/media/capture/video/video_frame_receiver_on_task_runner.cc10
-rw-r--r--chromium/media/capture/video/video_frame_receiver_on_task_runner.h8
-rw-r--r--chromium/media/capture/video/win/OWNERS6
-rw-r--r--chromium/media/capture/video/win/d3d_capture_test_utils.cc918
-rw-r--r--chromium/media/capture/video/win/d3d_capture_test_utils.h731
-rw-r--r--chromium/media/capture/video/win/gpu_memory_buffer_tracker.cc151
-rw-r--r--chromium/media/capture/video/win/gpu_memory_buffer_tracker.h54
-rw-r--r--chromium/media/capture/video/win/gpu_memory_buffer_tracker_unittest.cc167
-rw-r--r--chromium/media/capture/video/win/video_capture_buffer_tracker_factory_win.cc37
-rw-r--r--chromium/media/capture/video/win/video_capture_buffer_tracker_factory_win.h35
-rw-r--r--chromium/media/capture/video/win/video_capture_device_factory_win.cc36
-rw-r--r--chromium/media/capture/video/win/video_capture_device_factory_win.h9
-rw-r--r--chromium/media/capture/video/win/video_capture_device_factory_win_unittest.cc100
-rw-r--r--chromium/media/capture/video/win/video_capture_device_mf_win.cc231
-rw-r--r--chromium/media/capture/video/win/video_capture_device_mf_win.h21
-rw-r--r--chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc153
-rw-r--r--chromium/media/capture/video/win/video_capture_dxgi_device_manager.cc92
-rw-r--r--chromium/media/capture/video/win/video_capture_dxgi_device_manager.h50
-rw-r--r--chromium/media/capture/video_capture_types.cc8
-rw-r--r--chromium/media/capture/video_capture_types.h4
-rw-r--r--chromium/media/capture/video_capturer_source.h14
-rw-r--r--chromium/media/cast/BUILD.gn37
-rw-r--r--chromium/media/cast/OWNERS2
-rw-r--r--chromium/media/cast/README.md28
-rw-r--r--chromium/media/cast/cast_config.h12
-rw-r--r--chromium/media/cast/net/udp_packet_pipe_unittest.cc12
-rw-r--r--chromium/media/cast/net/udp_transport_unittest.cc9
-rw-r--r--chromium/media/cast/sender/external_video_encoder.cc12
-rw-r--r--chromium/media/cast/sender/fake_video_encode_accelerator_factory.cc9
-rw-r--r--chromium/media/cast/sender/fake_video_encode_accelerator_factory.h7
-rw-r--r--chromium/media/cast/sender/performance_metrics_overlay.cc18
-rw-r--r--chromium/media/cast/sender/video_encoder_unittest.cc12
-rw-r--r--chromium/media/cast/sender/video_sender.cc11
-rw-r--r--chromium/media/cast/sender/video_sender_unittest.cc7
-rw-r--r--chromium/media/cast/sender/vp8_encoder.cc2
-rw-r--r--chromium/media/cdm/aes_decryptor.cc4
-rw-r--r--chromium/media/cdm/aes_decryptor.h4
-rw-r--r--chromium/media/cdm/aes_decryptor_unittest.cc42
-rw-r--r--chromium/media/cdm/cdm_adapter.cc23
-rw-r--r--chromium/media/cdm/cdm_adapter.h4
-rw-r--r--chromium/media/cdm/cdm_adapter_unittest.cc25
-rw-r--r--chromium/media/cdm/cdm_paths_unittest.cc6
-rw-r--r--chromium/media/cdm/library_cdm/cdm_paths.gni6
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/BUILD.gn1
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/cdm_video_decoder.cc3
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.cc13
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_persistent_session_cdm.cc12
-rw-r--r--chromium/media/cdm/win/media_foundation_cdm_factory.cc4
-rw-r--r--chromium/media/cdm/win/media_foundation_cdm_session.cc2
-rw-r--r--chromium/media/ffmpeg/ffmpeg_common.cc8
-rw-r--r--chromium/media/ffmpeg/ffmpeg_regression_tests.cc14
-rw-r--r--chromium/media/filters/BUILD.gn2
-rw-r--r--chromium/media/filters/android/media_codec_audio_decoder.cc4
-rw-r--r--chromium/media/filters/android/media_codec_audio_decoder.h1
-rw-r--r--chromium/media/filters/audio_decoder_stream_unittest.cc3
-rw-r--r--chromium/media/filters/audio_decoder_unittest.cc10
-rw-r--r--chromium/media/filters/audio_file_reader.cc22
-rw-r--r--chromium/media/filters/audio_video_metadata_extractor_unittest.cc23
-rw-r--r--chromium/media/filters/chunk_demuxer.cc130
-rw-r--r--chromium/media/filters/chunk_demuxer.h39
-rw-r--r--chromium/media/filters/chunk_demuxer_unittest.cc4
-rw-r--r--chromium/media/filters/dav1d_video_decoder.cc16
-rw-r--r--chromium/media/filters/dav1d_video_decoder.h4
-rw-r--r--chromium/media/filters/dav1d_video_decoder_unittest.cc2
-rw-r--r--chromium/media/filters/decoder_selector.cc30
-rw-r--r--chromium/media/filters/decoder_selector_unittest.cc67
-rw-r--r--chromium/media/filters/decoder_stream.cc54
-rw-r--r--chromium/media/filters/decoder_stream.h23
-rw-r--r--chromium/media/filters/decoder_stream_traits.cc64
-rw-r--r--chromium/media/filters/decoder_stream_traits.h10
-rw-r--r--chromium/media/filters/decrypting_audio_decoder.cc4
-rw-r--r--chromium/media/filters/decrypting_audio_decoder.h1
-rw-r--r--chromium/media/filters/decrypting_audio_decoder_unittest.cc26
-rw-r--r--chromium/media/filters/decrypting_video_decoder.cc6
-rw-r--r--chromium/media/filters/decrypting_video_decoder.h1
-rw-r--r--chromium/media/filters/decrypting_video_decoder_unittest.cc24
-rw-r--r--chromium/media/filters/demuxer_perftest.cc10
-rw-r--r--chromium/media/filters/fake_video_decoder.cc4
-rw-r--r--chromium/media/filters/fake_video_decoder.h1
-rw-r--r--chromium/media/filters/fake_video_decoder_unittest.cc2
-rw-r--r--chromium/media/filters/ffmpeg_audio_decoder.cc4
-rw-r--r--chromium/media/filters/ffmpeg_audio_decoder.h1
-rw-r--r--chromium/media/filters/ffmpeg_demuxer.cc13
-rw-r--r--chromium/media/filters/ffmpeg_demuxer_unittest.cc8
-rw-r--r--chromium/media/filters/ffmpeg_glue_unittest.cc4
-rw-r--r--chromium/media/filters/ffmpeg_video_decoder.cc37
-rw-r--r--chromium/media/filters/ffmpeg_video_decoder.h3
-rw-r--r--chromium/media/filters/ffmpeg_video_decoder_unittest.cc2
-rw-r--r--chromium/media/filters/fuchsia/fuchsia_video_decoder.cc21
-rw-r--r--chromium/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc10
-rw-r--r--chromium/media/filters/gav1_video_decoder.cc16
-rw-r--r--chromium/media/filters/gav1_video_decoder.h4
-rw-r--r--chromium/media/filters/gav1_video_decoder_unittest.cc2
-rw-r--r--chromium/media/filters/memory_data_source.h5
-rw-r--r--chromium/media/filters/offloading_video_decoder.cc5
-rw-r--r--chromium/media/filters/offloading_video_decoder.h1
-rw-r--r--chromium/media/filters/offloading_video_decoder_unittest.cc5
-rw-r--r--chromium/media/filters/pipeline_controller.cc4
-rw-r--r--chromium/media/filters/pipeline_controller.h1
-rw-r--r--chromium/media/filters/pipeline_controller_unittest.cc4
-rw-r--r--chromium/media/filters/source_buffer_state.cc55
-rw-r--r--chromium/media/filters/source_buffer_state.h5
-rw-r--r--chromium/media/filters/stream_parser_factory.cc52
-rw-r--r--chromium/media/filters/stream_parser_factory.h20
-rw-r--r--chromium/media/filters/video_decoder_stream_unittest.cc75
-rw-r--r--chromium/media/filters/video_renderer_algorithm.cc8
-rw-r--r--chromium/media/filters/video_renderer_algorithm_unittest.cc12
-rw-r--r--chromium/media/filters/vp9_parser.h2
-rw-r--r--chromium/media/filters/vp9_uncompressed_header_parser.cc9
-rw-r--r--chromium/media/filters/vp9_uncompressed_header_parser.h8
-rw-r--r--chromium/media/filters/vp9_uncompressed_header_parser_unittest.cc65
-rw-r--r--chromium/media/filters/vpx_video_decoder.cc27
-rw-r--r--chromium/media/filters/vpx_video_decoder.h4
-rw-r--r--chromium/media/filters/vpx_video_decoder_fuzzertest.cc8
-rw-r--r--chromium/media/filters/vpx_video_decoder_unittest.cc2
-rw-r--r--chromium/media/formats/BUILD.gn2
-rw-r--r--chromium/media/formats/mp4/h264_annex_b_fuzz_corpus/pps_neq_sps_config_idr.bin  bin 0 -> 461 bytes
-rw-r--r--chromium/media/formats/mp4/h264_annex_b_to_avc_bitstream_converter.cc4
-rw-r--r--chromium/media/formats/mp4/h264_annex_b_to_avc_bitstream_converter_unittest.cc9
-rw-r--r--chromium/media/formats/mpeg/mpeg1_audio_stream_parser.cc33
-rw-r--r--chromium/media/formats/webcodecs/webcodecs_encoded_chunk_stream_parser.cc178
-rw-r--r--chromium/media/formats/webcodecs/webcodecs_encoded_chunk_stream_parser.h79
-rw-r--r--chromium/media/fuchsia/audio/fake_audio_consumer.h3
-rw-r--r--chromium/media/fuchsia/audio/fuchsia_audio_renderer.cc37
-rw-r--r--chromium/media/fuchsia/audio/fuchsia_audio_renderer.h19
-rw-r--r--chromium/media/fuchsia/camera/fake_fuchsia_camera.cc6
-rw-r--r--chromium/media/fuchsia/cdm/fuchsia_cdm.cc153
-rw-r--r--chromium/media/fuchsia/cdm/fuchsia_cdm.h9
-rw-r--r--chromium/media/fuchsia/cdm/fuchsia_decryptor.cc8
-rw-r--r--chromium/media/fuchsia/cdm/fuchsia_decryptor.h4
-rw-r--r--chromium/media/fuchsia/cdm/fuchsia_stream_decryptor.cc3
-rw-r--r--chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager.cc6
-rw-r--r--chromium/media/fuchsia/cdm/service/provisioning_fetcher_impl_unittest.cc2
-rw-r--r--chromium/media/fuchsia/common/sysmem_buffer_pool.cc13
-rw-r--r--chromium/media/fuchsia/common/sysmem_buffer_pool.h7
-rw-r--r--chromium/media/gpu/BUILD.gn78
-rw-r--r--chromium/media/gpu/accelerated_video_decoder.h9
-rw-r--r--chromium/media/gpu/android/codec_allocator.cc10
-rw-r--r--chromium/media/gpu/android/codec_allocator.h4
-rw-r--r--chromium/media/gpu/android/codec_allocator_unittest.cc48
-rw-r--r--chromium/media/gpu/android/codec_wrapper.cc5
-rw-r--r--chromium/media/gpu/android/codec_wrapper.h7
-rw-r--r--chromium/media/gpu/android/codec_wrapper_unittest.cc18
-rw-r--r--chromium/media/gpu/android/direct_shared_image_video_provider.cc17
-rw-r--r--chromium/media/gpu/android/frame_info_helper.cc22
-rw-r--r--chromium/media/gpu/android/frame_info_helper_unittest.cc47
-rw-r--r--chromium/media/gpu/android/maybe_render_early_manager.cc10
-rw-r--r--chromium/media/gpu/android/media_codec_video_decoder.cc21
-rw-r--r--chromium/media/gpu/android/media_codec_video_decoder.h1
-rw-r--r--chromium/media/gpu/android/media_codec_video_decoder_unittest.cc37
-rw-r--r--chromium/media/gpu/android/pooled_shared_image_video_provider.cc10
-rw-r--r--chromium/media/gpu/android/video_frame_factory_impl.cc8
-rw-r--r--chromium/media/gpu/args.gni6
-rw-r--r--chromium/media/gpu/av1_decoder.cc496
-rw-r--r--chromium/media/gpu/av1_decoder.h160
-rw-r--r--chromium/media/gpu/av1_decoder_fuzzertest.cc74
-rw-r--r--chromium/media/gpu/av1_decoder_unittest.cc753
-rw-r--r--chromium/media/gpu/av1_picture.cc30
-rw-r--r--chromium/media/gpu/av1_picture.h38
-rw-r--r--chromium/media/gpu/chromeos/chromeos_video_decoder_factory.cc11
-rw-r--r--chromium/media/gpu/chromeos/chromeos_video_decoder_factory.h2
-rw-r--r--chromium/media/gpu/chromeos/fourcc.cc2
-rw-r--r--chromium/media/gpu/chromeos/gpu_memory_buffer_video_frame_mapper.cc7
-rw-r--r--chromium/media/gpu/chromeos/image_processor_test.cc6
-rw-r--r--chromium/media/gpu/chromeos/libyuv_image_processor_backend.cc93
-rw-r--r--chromium/media/gpu/chromeos/mailbox_video_frame_converter.cc4
-rw-r--r--chromium/media/gpu/chromeos/platform_video_frame_pool.cc2
-rw-r--r--chromium/media/gpu/chromeos/platform_video_frame_utils.cc3
-rw-r--r--chromium/media/gpu/chromeos/platform_video_frame_utils.h6
-rw-r--r--chromium/media/gpu/chromeos/platform_video_frame_utils_unittest.cc9
-rw-r--r--chromium/media/gpu/chromeos/vd_video_decode_accelerator.cc3
-rw-r--r--chromium/media/gpu/chromeos/video_decoder_pipeline.cc44
-rw-r--r--chromium/media/gpu/chromeos/video_decoder_pipeline.h9
-rw-r--r--chromium/media/gpu/chromeos/video_decoder_pipeline_unittest.cc16
-rw-r--r--chromium/media/gpu/decode_surface_handler.h39
-rw-r--r--chromium/media/gpu/gpu_video_decode_accelerator_factory.cc10
-rw-r--r--chromium/media/gpu/gpu_video_decode_accelerator_helpers.h2
-rw-r--r--chromium/media/gpu/gpu_video_encode_accelerator_factory.cc16
-rw-r--r--chromium/media/gpu/h264_decoder.cc257
-rw-r--r--chromium/media/gpu/h264_decoder.h63
-rw-r--r--chromium/media/gpu/h264_decoder_unittest.cc251
-rw-r--r--chromium/media/gpu/h265_decoder.cc119
-rw-r--r--chromium/media/gpu/h265_decoder.h5
-rw-r--r--chromium/media/gpu/h265_decoder_unittest.cc54
-rw-r--r--chromium/media/gpu/ipc/service/picture_buffer_manager.cc72
-rw-r--r--chromium/media/gpu/ipc/service/picture_buffer_manager.h4
-rw-r--r--chromium/media/gpu/ipc/service/picture_buffer_manager_unittest.cc27
-rw-r--r--chromium/media/gpu/ipc/service/vda_video_decoder.cc17
-rw-r--r--chromium/media/gpu/ipc/service/vda_video_decoder.h2
-rw-r--r--chromium/media/gpu/ipc/service/vda_video_decoder_unittest.cc22
-rw-r--r--chromium/media/gpu/mac/BUILD.gn8
-rw-r--r--chromium/media/gpu/mac/vt_beta.h19
-rw-r--r--chromium/media/gpu/mac/vt_beta.sig6
-rw-r--r--chromium/media/gpu/mac/vt_beta_stubs_header.fragment5
-rw-r--r--chromium/media/gpu/mac/vt_config_util.h1
-rw-r--r--chromium/media/gpu/mac/vt_config_util.mm91
-rw-r--r--chromium/media/gpu/mac/vt_config_util_unittest.cc40
-rw-r--r--chromium/media/gpu/mac/vt_video_decode_accelerator_mac.cc42
-rw-r--r--chromium/media/gpu/mac/vt_video_decode_accelerator_mac.h2
-rw-r--r--chromium/media/gpu/mac/vt_video_encode_accelerator_mac.cc2
-rw-r--r--chromium/media/gpu/test/BUILD.gn8
-rw-r--r--chromium/media/gpu/v4l2/BUILD.gn6
-rw-r--r--chromium/media/gpu/v4l2/v4l2_device.cc92
-rw-r--r--chromium/media/gpu/v4l2/v4l2_device.h8
-rw-r--r--chromium/media/gpu/v4l2/v4l2_h264_accelerator_chromium.cc (renamed from chromium/media/gpu/v4l2/v4l2_h264_accelerator.cc)24
-rw-r--r--chromium/media/gpu/v4l2/v4l2_h264_accelerator_chromium.h (renamed from chromium/media/gpu/v4l2/v4l2_h264_accelerator.h)15
-rw-r--r--chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc10
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decoder.cc81
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decoder.h15
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.cc2
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.cc11
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc44
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.h4
-rw-r--r--chromium/media/gpu/v4l2/v4l2_vp9_accelerator_legacy.cc13
-rw-r--r--chromium/media/gpu/v4l2/v4l2_vp9_accelerator_legacy.h10
-rw-r--r--chromium/media/gpu/vaapi/BUILD.gn19
-rw-r--r--chromium/media/gpu/vaapi/DEPS8
-rw-r--r--chromium/media/gpu/vaapi/OWNERS14
-rw-r--r--chromium/media/gpu/vaapi/av1_vaapi_video_decoder_delegate.cc839
-rw-r--r--chromium/media/gpu/vaapi/av1_vaapi_video_decoder_delegate.h44
-rw-r--r--chromium/media/gpu/vaapi/h264_vaapi_video_decoder_delegate.cc291
-rw-r--r--chromium/media/gpu/vaapi/h264_vaapi_video_decoder_delegate.h34
-rw-r--r--chromium/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.cc103
-rw-r--r--chromium/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.h23
-rw-r--r--chromium/media/gpu/vaapi/va_prot.sigs12
-rw-r--r--chromium/media/gpu/vaapi/vaapi_common.cc31
-rw-r--r--chromium/media/gpu/vaapi/vaapi_common.h63
-rw-r--r--chromium/media/gpu/vaapi/vaapi_dmabuf_video_frame_mapper.cc1
-rw-r--r--chromium/media/gpu/vaapi/vaapi_image_decoder.cc3
-rw-r--r--chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc2
-rw-r--r--chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.cc1
-rw-r--r--chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_angle.cc3
-rw-r--r--chromium/media/gpu/vaapi/vaapi_picture_tfp.cc3
-rw-r--r--chromium/media/gpu/vaapi/vaapi_unittest.cc124
-rw-r--r--chromium/media/gpu/vaapi/vaapi_utils.cc15
-rw-r--r--chromium/media/gpu/vaapi/vaapi_utils.h3
-rw-r--r--chromium/media/gpu/vaapi/vaapi_utils_unittest.cc1
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc19
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc63
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decoder.cc478
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decoder.h80
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decoder_delegate.cc356
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decoder_delegate.h131
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc23
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_encode_accelerator_unittest.cc9
-rw-r--r--chromium/media/gpu/vaapi/vaapi_wrapper.cc747
-rw-r--r--chromium/media/gpu/vaapi/vaapi_wrapper.h122
-rw-r--r--chromium/media/gpu/vaapi/vaapi_wrapper_unittest.cc149
-rw-r--r--chromium/media/gpu/vaapi/vp8_vaapi_video_decoder_delegate.cc8
-rw-r--r--chromium/media/gpu/vaapi/vp9_vaapi_video_decoder_delegate.cc118
-rw-r--r--chromium/media/gpu/vaapi/vp9_vaapi_video_decoder_delegate.h21
-rw-r--r--chromium/media/gpu/video_decode_accelerator_perf_tests.cc4
-rw-r--r--chromium/media/gpu/video_decode_accelerator_tests.cc163
-rw-r--r--chromium/media/gpu/video_encode_accelerator_perf_tests.cc257
-rw-r--r--chromium/media/gpu/video_encode_accelerator_tests.cc325
-rw-r--r--chromium/media/gpu/video_encode_accelerator_unittest.cc3311
-rw-r--r--chromium/media/gpu/vp8_decoder.cc4
-rw-r--r--chromium/media/gpu/vp8_decoder.h1
-rw-r--r--chromium/media/gpu/vp8_decoder_unittest.cc3
-rw-r--r--chromium/media/gpu/vp9_decoder.cc101
-rw-r--r--chromium/media/gpu/vp9_decoder.h43
-rw-r--r--chromium/media/gpu/windows/d3d11_av1_accelerator.cc820
-rw-r--r--chromium/media/gpu/windows/d3d11_av1_accelerator.h74
-rw-r--r--chromium/media/gpu/windows/d3d11_decoder_configurator.cc34
-rw-r--r--chromium/media/gpu/windows/d3d11_decoder_configurator.h1
-rw-r--r--chromium/media/gpu/windows/d3d11_decoder_configurator_unittest.cc20
-rw-r--r--chromium/media/gpu/windows/d3d11_h264_accelerator.cc57
-rw-r--r--chromium/media/gpu/windows/d3d11_h264_accelerator.h5
-rw-r--r--chromium/media/gpu/windows/d3d11_texture_selector.cc8
-rw-r--r--chromium/media/gpu/windows/d3d11_texture_selector_unittest.cc2
-rw-r--r--chromium/media/gpu/windows/d3d11_texture_wrapper.cc14
-rw-r--r--chromium/media/gpu/windows/d3d11_texture_wrapper_unittest.cc2
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder.cc100
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder.h12
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc51
-rw-r--r--chromium/media/gpu/windows/d3d11_vp9_accelerator.cc83
-rw-r--r--chromium/media/gpu/windows/d3d11_vp9_accelerator.h15
-rw-r--r--chromium/media/gpu/windows/dxva_picture_buffer_win.cc8
-rw-r--r--chromium/media/gpu/windows/dxva_picture_buffer_win.h27
-rw-r--r--chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc206
-rw-r--r--chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h38
-rw-r--r--chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.cc197
-rw-r--r--chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.h7
-rw-r--r--chromium/media/gpu/windows/supported_profile_helpers.cc5
-rw-r--r--chromium/media/gpu/windows/supported_profile_helpers.h3
-rw-r--r--chromium/media/gpu/windows/supported_profile_helpers_unittest.cc19
-rw-r--r--chromium/media/media_options.gni20
-rw-r--r--chromium/media/midi/BUILD.gn2
-rw-r--r--chromium/media/midi/java/src/org/chromium/midi/UsbMidiDeviceFactoryAndroid.java4
-rw-r--r--chromium/media/midi/midi_manager_win.cc6
-rw-r--r--chromium/media/midi/midi_manager_winrt.cc6
-rw-r--r--chromium/media/midi/midi_manager_winrt.h2
-rw-r--r--chromium/media/mojo/DEPS1
-rw-r--r--chromium/media/mojo/clients/BUILD.gn8
-rw-r--r--chromium/media/mojo/clients/mojo_android_overlay.cc9
-rw-r--r--chromium/media/mojo/clients/mojo_android_overlay.h2
-rw-r--r--chromium/media/mojo/clients/mojo_audio_decoder.cc8
-rw-r--r--chromium/media/mojo/clients/mojo_audio_decoder.h8
-rw-r--r--chromium/media/mojo/clients/mojo_cdm.cc19
-rw-r--r--chromium/media/mojo/clients/mojo_cdm.h9
-rw-r--r--chromium/media/mojo/clients/mojo_decoder_factory.h2
-rw-r--r--chromium/media/mojo/clients/mojo_decryptor.cc28
-rw-r--r--chromium/media/mojo/clients/mojo_decryptor.h18
-rw-r--r--chromium/media/mojo/clients/mojo_decryptor_unittest.cc16
-rw-r--r--chromium/media/mojo/clients/mojo_renderer.cc15
-rw-r--r--chromium/media/mojo/clients/mojo_renderer.h4
-rw-r--r--chromium/media/mojo/clients/mojo_renderer_factory.cc19
-rw-r--r--chromium/media/mojo/clients/mojo_renderer_factory.h8
-rw-r--r--chromium/media/mojo/clients/mojo_video_decoder.cc13
-rw-r--r--chromium/media/mojo/clients/mojo_video_decoder.h8
-rw-r--r--chromium/media/mojo/clients/win/media_foundation_renderer_client.cc403
-rw-r--r--chromium/media/mojo/clients/win/media_foundation_renderer_client.h152
-rw-r--r--chromium/media/mojo/clients/win/media_foundation_renderer_client_factory.cc61
-rw-r--r--chromium/media/mojo/clients/win/media_foundation_renderer_client_factory.h45
-rw-r--r--chromium/media/mojo/common/mojo_data_pipe_read_write.cc2
-rw-r--r--chromium/media/mojo/common/mojo_data_pipe_read_write_unittest.cc14
-rw-r--r--chromium/media/mojo/common/mojo_decoder_buffer_converter.cc73
-rw-r--r--chromium/media/mojo/common/mojo_decoder_buffer_converter.h6
-rw-r--r--chromium/media/mojo/common/mojo_decoder_buffer_converter_unittest.cc46
-rw-r--r--chromium/media/mojo/mojom/BUILD.gn76
-rw-r--r--chromium/media/mojo/mojom/android_overlay.mojom15
-rw-r--r--chromium/media/mojo/mojom/audio_decoder.mojom4
-rw-r--r--chromium/media/mojo/mojom/interface_factory.mojom8
-rw-r--r--chromium/media/mojo/mojom/media_metrics_provider.mojom10
-rw-r--r--chromium/media/mojo/mojom/media_player.mojom99
-rw-r--r--chromium/media/mojo/mojom/media_types.mojom47
-rw-r--r--chromium/media/mojo/mojom/pipeline_status_mojom_traits.h45
-rw-r--r--chromium/media/mojo/mojom/renderer.mojom2
-rw-r--r--chromium/media/mojo/mojom/renderer_extensions.mojom25
-rw-r--r--chromium/media/mojo/mojom/speech_recognition_service.mojom10
-rw-r--r--chromium/media/mojo/mojom/supported_video_decoder_config_mojom_traits.h2
-rw-r--r--chromium/media/mojo/mojom/video_decoder.mojom3
-rw-r--r--chromium/media/mojo/mojom/video_encode_accelerator.mojom2
-rw-r--r--chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits.cc11
-rw-r--r--chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits_unittest.cc14
-rw-r--r--chromium/media/mojo/mojom/video_frame_metadata_mojom_traits.cc10
-rw-r--r--chromium/media/mojo/mojom/video_frame_metadata_mojom_traits.h14
-rw-r--r--chromium/media/mojo/mojom/video_frame_metadata_mojom_traits_unittest.cc19
-rw-r--r--chromium/media/mojo/mojom/video_frame_mojom_traits.cc11
-rw-r--r--chromium/media/mojo/mojom/video_frame_mojom_traits.h2
-rw-r--r--chromium/media/mojo/mojom/video_frame_mojom_traits_unittest.cc14
-rw-r--r--chromium/media/mojo/mojom/video_transformation_mojom_traits.cc2
-rw-r--r--chromium/media/mojo/mojom/video_transformation_mojom_traits.h2
-rw-r--r--chromium/media/mojo/mojom/watch_time_recorder.mojom7
-rw-r--r--chromium/media/mojo/services/BUILD.gn14
-rw-r--r--chromium/media/mojo/services/gpu_mojo_media_client.cc24
-rw-r--r--chromium/media/mojo/services/gpu_mojo_media_client.h2
-rw-r--r--chromium/media/mojo/services/interface_factory_impl.cc73
-rw-r--r--chromium/media/mojo/services/interface_factory_impl.h15
-rw-r--r--chromium/media/mojo/services/media_foundation_mojo_media_client.cc44
-rw-r--r--chromium/media/mojo/services/media_foundation_mojo_media_client.h32
-rw-r--r--chromium/media/mojo/services/media_foundation_renderer_wrapper.cc101
-rw-r--r--chromium/media/mojo/services/media_foundation_renderer_wrapper.h70
-rw-r--r--chromium/media/mojo/services/media_metrics_provider.cc30
-rw-r--r--chromium/media/mojo/services/media_metrics_provider.h13
-rw-r--r--chromium/media/mojo/services/media_metrics_provider_unittest.cc73
-rw-r--r--chromium/media/mojo/services/media_resource_shim.cc4
-rw-r--r--chromium/media/mojo/services/media_resource_shim.h4
-rw-r--r--chromium/media/mojo/services/media_service_factory.cc9
-rw-r--r--chromium/media/mojo/services/media_service_unittest.cc2
-rw-r--r--chromium/media/mojo/services/mojo_audio_decoder_service.cc16
-rw-r--r--chromium/media/mojo/services/mojo_decryptor_service.cc9
-rw-r--r--chromium/media/mojo/services/mojo_media_client.h2
-rw-r--r--chromium/media/mojo/services/mojo_renderer_service.cc30
-rw-r--r--chromium/media/mojo/services/mojo_renderer_service.h13
-rw-r--r--chromium/media/mojo/services/mojo_video_decoder_service.cc14
-rw-r--r--chromium/media/mojo/services/watch_time_recorder.cc127
-rw-r--r--chromium/media/mojo/services/watch_time_recorder.h1
-rw-r--r--chromium/media/mojo/services/watch_time_recorder_unittest.cc387
-rw-r--r--chromium/media/muxers/webm_muxer.cc75
-rw-r--r--chromium/media/muxers/webm_muxer.h35
-rw-r--r--chromium/media/muxers/webm_muxer_unittest.cc60
-rw-r--r--chromium/media/remoting/BUILD.gn1
-rw-r--r--chromium/media/remoting/OWNERS2
-rw-r--r--chromium/media/remoting/courier_renderer.cc18
-rw-r--r--chromium/media/remoting/courier_renderer_unittest.cc12
-rw-r--r--chromium/media/remoting/demuxer_stream_adapter.cc7
-rw-r--r--chromium/media/remoting/demuxer_stream_adapter.h7
-rw-r--r--chromium/media/remoting/demuxer_stream_adapter_unittest.cc5
-rw-r--r--chromium/media/remoting/media_remoting_rpc.proto14
-rw-r--r--chromium/media/remoting/metrics.cc20
-rw-r--r--chromium/media/remoting/metrics.h47
-rw-r--r--chromium/media/remoting/metrics_unittest.cc64
-rw-r--r--chromium/media/remoting/proto_utils.cc6
-rw-r--r--chromium/media/remoting/proto_utils_unittest.cc28
-rw-r--r--chromium/media/remoting/receiver.cc4
-rw-r--r--chromium/media/remoting/renderer_controller.cc217
-rw-r--r--chromium/media/remoting/renderer_controller.h27
-rw-r--r--chromium/media/remoting/stream_provider.cc9
-rw-r--r--chromium/media/renderers/BUILD.gn24
-rw-r--r--chromium/media/renderers/audio_renderer_impl.cc32
-rw-r--r--chromium/media/renderers/audio_renderer_impl.h9
-rw-r--r--chromium/media/renderers/audio_renderer_impl_unittest.cc2
-rw-r--r--chromium/media/renderers/decrypting_renderer.cc4
-rw-r--r--chromium/media/renderers/decrypting_renderer.h1
-rw-r--r--chromium/media/renderers/default_decoder_factory.cc67
-rw-r--r--chromium/media/renderers/default_decoder_factory.h2
-rw-r--r--chromium/media/renderers/paint_canvas_video_renderer.cc440
-rw-r--r--chromium/media/renderers/paint_canvas_video_renderer.h27
-rw-r--r--chromium/media/renderers/paint_canvas_video_renderer_unittest.cc361
-rw-r--r--chromium/media/renderers/renderer_impl.cc47
-rw-r--r--chromium/media/renderers/renderer_impl.h10
-rw-r--r--chromium/media/renderers/shared_image_video_frame_test_utils.cc254
-rw-r--r--chromium/media/renderers/shared_image_video_frame_test_utils.h63
-rw-r--r--chromium/media/renderers/video_frame_yuv_converter.cc508
-rw-r--r--chromium/media/renderers/video_frame_yuv_converter.h31
-rw-r--r--chromium/media/renderers/video_renderer_impl.cc47
-rw-r--r--chromium/media/renderers/video_renderer_impl.h3
-rw-r--r--chromium/media/renderers/video_resource_updater.cc80
-rw-r--r--chromium/media/renderers/video_resource_updater.h3
-rw-r--r--chromium/media/renderers/video_resource_updater_unittest.cc230
-rw-r--r--chromium/media/renderers/win/media_foundation_renderer.cc4
-rw-r--r--chromium/media/renderers/win/media_foundation_stream_wrapper.cc9
-rw-r--r--chromium/media/renderers/win/media_foundation_video_stream.cc17
-rw-r--r--chromium/media/test/BUILD.gn2
-rw-r--r--chromium/media/video/BUILD.gn7
-rw-r--r--chromium/media/video/fake_video_encode_accelerator.cc68
-rw-r--r--chromium/media/video/fake_video_encode_accelerator.h41
-rw-r--r--chromium/media/video/gpu_memory_buffer_video_frame_pool.cc39
-rw-r--r--chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc66
-rw-r--r--chromium/media/video/gpu_video_accelerator_factories.cc32
-rw-r--r--chromium/media/video/gpu_video_accelerator_factories.h21
-rw-r--r--chromium/media/video/h264_parser.cc2
-rw-r--r--chromium/media/video/h264_parser.h8
-rw-r--r--chromium/media/video/h265_parser.cc117
-rw-r--r--chromium/media/video/h265_parser.h9
-rw-r--r--chromium/media/video/h265_parser_fuzzertest.cc4
-rw-r--r--chromium/media/video/h265_parser_unittest.cc10
-rw-r--r--chromium/media/video/mock_gpu_video_accelerator_factories.cc8
-rw-r--r--chromium/media/video/mock_gpu_video_accelerator_factories.h1
-rw-r--r--chromium/media/video/openh264_video_encoder.cc84
-rw-r--r--chromium/media/video/openh264_video_encoder.h9
-rw-r--r--chromium/media/video/picture.cc30
-rw-r--r--chromium/media/video/picture.h28
-rw-r--r--chromium/media/video/supported_video_decoder_config_unittest.cc2
-rw-r--r--chromium/media/video/video_decode_accelerator.cc10
-rw-r--r--chromium/media/video/video_decode_accelerator.h15
-rw-r--r--chromium/media/video/video_encode_accelerator.cc17
-rw-r--r--chromium/media/video/video_encode_accelerator.h11
-rw-r--r--chromium/media/video/video_encode_accelerator_adapter.cc390
-rw-r--r--chromium/media/video/video_encode_accelerator_adapter.h35
-rw-r--r--chromium/media/video/video_encode_accelerator_adapter_test.cc403
-rw-r--r--chromium/media/video/video_encoder_info.cc1
-rw-r--r--chromium/media/video/vpx_video_encoder.cc142
-rw-r--r--chromium/media/video/vpx_video_encoder.h5
-rw-r--r--chromium/media/webrtc/OWNERS1
739 files changed, 28345 insertions, 11494 deletions
diff --git a/chromium/media/BUILD.gn b/chromium/media/BUILD.gn
index 2247c8f4808..2bb83553394 100644
--- a/chromium/media/BUILD.gn
+++ b/chromium/media/BUILD.gn
@@ -142,6 +142,7 @@ source_set("test_support") {
"//media/base/android:test_support",
"//media/filters:test_support",
"//media/formats:test_support",
+ "//media/renderers:test_support",
"//media/video:test_support",
]
}
diff --git a/chromium/media/DEPS b/chromium/media/DEPS
index 4ae2a118691..dbe6e06305d 100644
--- a/chromium/media/DEPS
+++ b/chromium/media/DEPS
@@ -2,7 +2,7 @@
include_rules = [
"+cc/base/math_util.h",
"+cc/paint",
- "+chromeos/audio",
+ "+components/crash/core/common/crash_key.h",
"+components/system_media_controls/linux/buildflags",
"+crypto",
"+device/udev_linux",
@@ -15,7 +15,6 @@ include_rules = [
"+skia/ext",
"+third_party/dav1d",
"+third_party/ffmpeg",
- "+third_party/libaom",
"+third_party/libgav1",
"+third_party/libvpx",
"+third_party/libyuv",
diff --git a/chromium/media/OWNERS b/chromium/media/OWNERS
index 69199030a27..79f898dfe01 100644
--- a/chromium/media/OWNERS
+++ b/chromium/media/OWNERS
@@ -8,6 +8,7 @@
# and to load balance. Only use OWNERS in this file for these subdirectories
# when doing refactorings and general cleanups.
+set noparent
chcunningham@chromium.org
dalecurtis@chromium.org
jrummell@chromium.org
diff --git a/chromium/media/audio/BUILD.gn b/chromium/media/audio/BUILD.gn
index d56fc217768..c68099d5baa 100644
--- a/chromium/media/audio/BUILD.gn
+++ b/chromium/media/audio/BUILD.gn
@@ -105,8 +105,6 @@ source_set("audio") {
"audio_output_resampler.h",
"audio_output_stream_sink.cc",
"audio_output_stream_sink.h",
- "audio_pcm_encoder.cc",
- "audio_pcm_encoder.h",
"audio_sink_parameters.cc",
"audio_sink_parameters.h",
"audio_source_diverter.h",
@@ -268,13 +266,13 @@ source_set("audio") {
"cras/cras_unified.h",
]
configs += [ ":libcras" ]
- if (is_ash) {
+ if (is_chromeos_ash) {
sources += [
"cras/audio_manager_chromeos.cc",
"cras/audio_manager_chromeos.h",
]
- deps += [ "//chromeos/audio" ]
- } else if (is_linux) {
+ deps += [ "//ash/components/audio" ]
+ } else if (is_linux || is_chromeos_lacros) {
sources += [
"cras/audio_manager_cras.cc",
"cras/audio_manager_cras.h",
@@ -432,7 +430,7 @@ source_set("unit_tests") {
]
}
- if (is_ash || is_chromecast) {
+ if (is_chromeos_ash || is_chromecast) {
sources += [
"test_data.h",
"wav_audio_handler_unittest.cc",
@@ -440,7 +438,7 @@ source_set("unit_tests") {
if (!is_chromecast) {
deps += [
- "//chromeos/audio",
+ "//ash/components/audio",
"//chromeos/dbus/audio",
]
}
diff --git a/chromium/media/audio/DEPS b/chromium/media/audio/DEPS
new file mode 100644
index 00000000000..e58d970a1d5
--- /dev/null
+++ b/chromium/media/audio/DEPS
@@ -0,0 +1,3 @@
+include_rules = [
+ "+ash/components/audio",
+]
diff --git a/chromium/media/audio/OWNERS b/chromium/media/audio/OWNERS
index e7ee28564ec..2dc6241d229 100644
--- a/chromium/media/audio/OWNERS
+++ b/chromium/media/audio/OWNERS
@@ -5,5 +5,5 @@ olka@chromium.org
henrika@chromium.org
# Mirroring (and related glue) OWNERS.
-miu@chromium.org
+jophba@chromium.org
mfoltz@chromium.org
diff --git a/chromium/media/audio/android/aaudio.sigs b/chromium/media/audio/android/aaudio.sigs
index 18ce6896a45..1d89b56d675 100644
--- a/chromium/media/audio/android/aaudio.sigs
+++ b/chromium/media/audio/android/aaudio.sigs
@@ -26,5 +26,7 @@ aaudio_result_t AAudioStream_close(AAudioStream* stream);
aaudio_result_t AAudioStream_requestStart(AAudioStream* stream);
aaudio_result_t AAudioStream_requestStop(AAudioStream* stream);
aaudio_result_t AAudioStream_getTimestamp(AAudioStream* stream, clockid_t clockid, int64_t *framePosition, int64_t *timeNanoseconds);
+aaudio_result_t AAudioStream_setBufferSizeInFrames(AAudioStream* stream, int32_t numFrames);
+int32_t AAudioStream_getFramesPerBurst(AAudioStream* stream);
int64_t AAudioStream_getFramesWritten(AAudioStream* stream);
aaudio_result_t AAudioStream_waitForStateChange(AAudioStream* stream, aaudio_stream_state_t inputState, aaudio_stream_state_t *nextState, int64_t timeoutNanoseconds);
\ No newline at end of file
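
Note: the two entries added above extend the signature list that Chromium's generate_stubs tooling consumes to emit dlopen/dlsym wrappers, so libaaudio can be resolved at runtime. A rough hand-written equivalent of one such wrapper; the names and error handling here are illustrative, not the generated code:

    #include <dlfcn.h>
    #include <aaudio/AAudio.h>

    using GetFramesPerBurstFn = int32_t (*)(AAudioStream*);

    // Resolve the symbol from an already dlopen()ed libaaudio handle.
    int32_t GetFramesPerBurst(void* libaaudio, AAudioStream* stream) {
      auto fn = reinterpret_cast<GetFramesPerBurstFn>(
          dlsym(libaaudio, "AAudioStream_getFramesPerBurst"));
      return fn ? fn(stream) : 0;  // Treat a missing symbol as a zero burst.
    }
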
diff --git a/chromium/media/audio/android/aaudio_output.cc b/chromium/media/audio/android/aaudio_output.cc
index e9bb8867c71..3d663553707 100644
--- a/chromium/media/audio/android/aaudio_output.cc
+++ b/chromium/media/audio/android/aaudio_output.cc
@@ -42,10 +42,17 @@ AAudioOutputStream::AAudioOutputStream(AudioManagerAndroid* manager,
DCHECK(params.IsValid());
if (AudioManagerAndroid::SupportsPerformanceModeForOutput()) {
- if (params.latency_tag() == AudioLatency::LATENCY_PLAYBACK)
- performance_mode_ = AAUDIO_PERFORMANCE_MODE_POWER_SAVING;
- else if (params.latency_tag() == AudioLatency::LATENCY_RTC)
- performance_mode_ = AAUDIO_PERFORMANCE_MODE_LOW_LATENCY;
+ switch (params.latency_tag()) {
+ case AudioLatency::LATENCY_INTERACTIVE:
+ case AudioLatency::LATENCY_RTC:
+ performance_mode_ = AAUDIO_PERFORMANCE_MODE_LOW_LATENCY;
+ break;
+ case AudioLatency::LATENCY_PLAYBACK:
+ performance_mode_ = AAUDIO_PERFORMANCE_MODE_POWER_SAVING;
+ break;
+ default:
+ performance_mode_ = AAUDIO_PERFORMANCE_MODE_NONE;
+ }
}
}
@@ -70,8 +77,6 @@ bool AAudioOutputStream::Open() {
AAudioStreamBuilder_setFormat(builder, AAUDIO_FORMAT_PCM_FLOAT);
AAudioStreamBuilder_setUsage(builder, usage_);
AAudioStreamBuilder_setPerformanceMode(builder, performance_mode_);
- AAudioStreamBuilder_setBufferCapacityInFrames(builder,
- params_.frames_per_buffer());
AAudioStreamBuilder_setFramesPerDataCallback(builder,
params_.frames_per_buffer());
@@ -87,6 +92,13 @@ bool AAudioOutputStream::Open() {
if (AAUDIO_OK != result)
return false;
+ // After opening the stream, sets the effective buffer size to 3X the burst
+ // size to prevent glitching if the burst is small (e.g. < 128). On some
+ // devices you can get by with 1X or 2X, but 3X is safer.
+ int32_t framesPerBurst = AAudioStream_getFramesPerBurst(aaudio_stream_);
+ int32_t sizeRequested = framesPerBurst * (framesPerBurst < 128 ? 3 : 2);
+ AAudioStream_setBufferSizeInFrames(aaudio_stream_, sizeRequested);
+
audio_bus_ = AudioBus::Create(params_);
return true;
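
Note: the two hunks above first map Chromium's latency tags onto AAudio performance modes, then stop pinning the buffer capacity at open time and instead size the live buffer from the device's burst size. A minimal standalone sketch of that heuristic, assuming a valid already-opened stream; this is not the actual Chromium helper:

    #include <aaudio/AAudio.h>

    // Request 3x the burst for small bursts (< 128 frames), otherwise 2x.
    // AAudio clamps the request to the stream capacity and returns either
    // the size actually applied or a negative aaudio_result_t on failure.
    void TuneBufferSize(AAudioStream* stream) {
      const int32_t burst = AAudioStream_getFramesPerBurst(stream);
      const aaudio_result_t applied = AAudioStream_setBufferSizeInFrames(
          stream, burst * (burst < 128 ? 3 : 2));
      if (applied < 0) {
        // Keep the device default; playback still works, just with more
        // risk of glitching on small bursts.
      }
    }
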
diff --git a/chromium/media/audio/android/audio_android_unittest.cc b/chromium/media/audio/android/audio_android_unittest.cc
index e838cedfcf3..8dfff9017f1 100644
--- a/chromium/media/audio/android/audio_android_unittest.cc
+++ b/chromium/media/audio/android/audio_android_unittest.cc
@@ -203,7 +203,8 @@ class FileAudioSource : public AudioOutputStream::AudioSourceCallback {
// sufficient data remaining in the file to fill up the complete frame.
int frames = max_size / (dest->channels() * kBytesPerSample);
if (max_size) {
- dest->FromInterleaved(file_->data() + pos_, frames, kBytesPerSample);
+ auto* source = reinterpret_cast<const int16_t*>(file_->data() + pos_);
+ dest->FromInterleaved<SignedInt16SampleTypeTraits>(source, frames);
pos_ += max_size;
}
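
Note: the test fix above moves from the byte-count overload of AudioBus::FromInterleaved() to the typed template, which names the source sample format explicitly. A hedged usage sketch; the channel count and helper function are illustrative:

    #include <memory>

    #include "media/base/audio_bus.h"
    #include "media/base/audio_sample_types.h"

    // Deinterleave interleaved int16 PCM into a planar float AudioBus.
    void DeinterleaveStereo(const int16_t* interleaved, int frames) {
      std::unique_ptr<media::AudioBus> bus =
          media::AudioBus::Create(/*channels=*/2, frames);
      bus->FromInterleaved<media::SignedInt16SampleTypeTraits>(interleaved,
                                                               frames);
    }
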
diff --git a/chromium/media/audio/android/audio_manager_android.cc b/chromium/media/audio/android/audio_manager_android.cc
index cd4f188256f..5cbcd509ca0 100644
--- a/chromium/media/audio/android/audio_manager_android.cc
+++ b/chromium/media/audio/android/audio_manager_android.cc
@@ -487,7 +487,8 @@ bool AudioManagerAndroid::UseAAudio() {
if (!base::FeatureList::IsEnabled(features::kUseAAudioDriver))
return false;
- if (!base::android::BuildInfo::GetInstance()->is_at_least_q()) {
+ if (base::android::BuildInfo::GetInstance()->sdk_int() <
+ base::android::SDK_VERSION_Q) {
// We need APIs that weren't added until API Level 28. Also, AAudio crashes
// on Android P, so only consider Q and above.
return false;
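
Note: the replacement above spells out the SDK comparison that is_at_least_q() used to hide. The same gate expressed as a standalone predicate, a hypothetical helper shown only to make the threshold explicit:

    #include "base/android/build_info.h"

    // AAudio is only used on Android Q (API level 29) and newer: the needed
    // APIs landed in P (API 28), but AAudio was crash-prone there.
    bool SdkSupportsAAudio() {
      return base::android::BuildInfo::GetInstance()->sdk_int() >=
             base::android::SDK_VERSION_Q;
    }
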
diff --git a/chromium/media/audio/audio_debug_recording_session_impl.cc b/chromium/media/audio/audio_debug_recording_session_impl.cc
index fd39a79863f..93113a3acf4 100644
--- a/chromium/media/audio/audio_debug_recording_session_impl.cc
+++ b/chromium/media/audio/audio_debug_recording_session_impl.cc
@@ -30,7 +30,7 @@ namespace media {
namespace {
#if defined(OS_WIN)
-#define NumberToStringType base::NumberToString16
+#define NumberToStringType base::NumberToWString
#else
#define NumberToStringType base::NumberToString
#endif
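
Note: the macro swap above tracks the base/ string split: once base::string16 and std::wstring diverge on Windows, NumberToString16 no longer fits code that builds base::FilePath::StringType (std::wstring on Windows, std::string elsewhere). A hedged sketch of the alias in use; the helper is illustrative, not the actual call site:

    // Returns a platform-native numeric suffix for a debug recording file.
    base::FilePath::StringType RecordingSuffix(int stream_id) {
      return NumberToStringType(stream_id);
    }
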
diff --git a/chromium/media/audio/audio_encoders_unittest.cc b/chromium/media/audio/audio_encoders_unittest.cc
index 4cff1e26cf5..50a4278c159 100644
--- a/chromium/media/audio/audio_encoders_unittest.cc
+++ b/chromium/media/audio/audio_encoders_unittest.cc
@@ -3,14 +3,16 @@
// found in the LICENSE file.
#include <cstring>
+#include <limits>
#include <memory>
#include <utility>
#include <vector>
#include "base/bind.h"
+#include "base/test/bind.h"
+#include "base/test/task_environment.h"
#include "base/time/time.h"
#include "media/audio/audio_opus_encoder.h"
-#include "media/audio/audio_pcm_encoder.h"
#include "media/audio/simple_sources.h"
#include "media/base/audio_encoder.h"
#include "media/base/audio_parameters.h"
@@ -24,9 +26,6 @@ namespace {
constexpr int kAudioSampleRate = 48000;
-constexpr base::TimeDelta kBufferDuration =
- base::TimeDelta::FromMilliseconds(10);
-
// This is the preferred opus buffer duration (60 ms), which corresponds to a
// value of 2880 frames per buffer (|kOpusFramesPerBuffer|).
constexpr base::TimeDelta kOpusBufferDuration =
@@ -36,33 +35,23 @@ constexpr int kOpusFramesPerBuffer = kOpusBufferDuration.InMicroseconds() *
base::Time::kMicrosecondsPerSecond;
struct TestAudioParams {
- const media::AudioParameters::Format format;
- const media::ChannelLayout channel_layout;
+ const int channels;
const int sample_rate;
};
constexpr TestAudioParams kTestAudioParams[] = {
- {media::AudioParameters::AUDIO_PCM_LOW_LATENCY,
- media::CHANNEL_LAYOUT_STEREO, kAudioSampleRate},
+ {2, kAudioSampleRate},
// Change to mono:
- {media::AudioParameters::AUDIO_PCM_LOW_LATENCY, media::CHANNEL_LAYOUT_MONO,
- kAudioSampleRate},
+ {1, kAudioSampleRate},
// Different sampling rate as well:
- {media::AudioParameters::AUDIO_PCM_LOW_LATENCY, media::CHANNEL_LAYOUT_MONO,
- 24000},
- {media::AudioParameters::AUDIO_PCM_LOW_LATENCY,
- media::CHANNEL_LAYOUT_STEREO, 8000},
+ {1, 24000},
+ {2, 8000},
// Using a non-default Opus sampling rate (48, 24, 16, 12, or 8 kHz).
- {media::AudioParameters::AUDIO_PCM_LOW_LATENCY, media::CHANNEL_LAYOUT_MONO,
- 22050},
- {media::AudioParameters::AUDIO_PCM_LOW_LATENCY,
- media::CHANNEL_LAYOUT_STEREO, 44100},
- {media::AudioParameters::AUDIO_PCM_LOW_LATENCY,
- media::CHANNEL_LAYOUT_STEREO, 96000},
- {media::AudioParameters::AUDIO_PCM_LOW_LATENCY, media::CHANNEL_LAYOUT_MONO,
- kAudioSampleRate},
- {media::AudioParameters::AUDIO_PCM_LOW_LATENCY,
- media::CHANNEL_LAYOUT_STEREO, kAudioSampleRate},
+ {1, 22050},
+ {2, 44100},
+ {2, 96000},
+ {1, kAudioSampleRate},
+ {2, kAudioSampleRate},
};
} // namespace
@@ -70,142 +59,268 @@ constexpr TestAudioParams kTestAudioParams[] = {
class AudioEncodersTest : public ::testing::TestWithParam<TestAudioParams> {
public:
AudioEncodersTest()
- : input_params_(GetParam().format,
- GetParam().channel_layout,
- GetParam().sample_rate,
- GetParam().sample_rate / 100),
- audio_source_(input_params_.channels(),
+ : audio_source_(GetParam().channels,
/*freq=*/440,
- input_params_.sample_rate()) {}
+ GetParam().sample_rate) {
+ options_.sample_rate = GetParam().sample_rate;
+ options_.channels = GetParam().channels;
+ }
AudioEncodersTest(const AudioEncodersTest&) = delete;
AudioEncodersTest& operator=(const AudioEncodersTest&) = delete;
~AudioEncodersTest() override = default;
- const AudioParameters& input_params() const { return input_params_; }
+ using MaybeDesc = base::Optional<AudioEncoder::CodecDescription>;
+
AudioEncoder* encoder() const { return encoder_.get(); }
- int encode_callback_count() const { return encode_callback_count_; }
- void SetEncoder(std::unique_ptr<AudioEncoder> encoder) {
- encoder_ = std::move(encoder);
- encode_callback_count_ = 0;
+ void SetupEncoder(AudioEncoder::OutputCB output_cb) {
+ encoder_ = std::make_unique<AudioOpusEncoder>();
+
+ bool called_done = false;
+ AudioEncoder::StatusCB done_cb =
+ base::BindLambdaForTesting([&](Status error) {
+ if (!error.is_ok())
+ FAIL() << error.message();
+ called_done = true;
+ });
+
+ encoder_->Initialize(options_, std::move(output_cb), std::move(done_cb));
+
+ RunLoop();
+ EXPECT_TRUE(called_done);
}
- // Produces an audio data that corresponds to a |kBufferDuration| and the
- // sample rate of the current |input_params_|. The produced data is send to
+ // Produces audio data that corresponds to |buffer_duration_| and the
+ // sample rate of the current |options_|. The produced data is sent to
// |encoder_| to be encoded, and the number of frames generated is returned.
- int ProduceAudioAndEncode() {
+ int ProduceAudioAndEncode(
+ base::TimeTicks timestamp = base::TimeTicks::Now()) {
DCHECK(encoder_);
- const int num_frames =
- input_params_.sample_rate() * kBufferDuration.InSecondsF();
- current_audio_bus_ =
- media::AudioBus::Create(input_params_.channels(), num_frames);
- const auto capture_time = base::TimeTicks::Now();
+ const int num_frames = options_.sample_rate * buffer_duration_.InSecondsF();
+ base::TimeTicks capture_time = timestamp + buffer_duration_;
+ auto audio_bus = AudioBus::Create(options_.channels, num_frames);
audio_source_.OnMoreData(base::TimeDelta(), capture_time, 0,
- current_audio_bus_.get());
- encoder_->EncodeAudio(*current_audio_bus_, capture_time);
+ audio_bus.get());
+
+ bool called_done = false;
+ auto done_cb = base::BindLambdaForTesting([&](Status error) {
+ if (!error.is_ok())
+ FAIL() << error.message();
+ called_done = true;
+ });
+
+ encoder_->Encode(std::move(audio_bus), capture_time, std::move(done_cb));
+ RunLoop();
+ EXPECT_TRUE(called_done);
return num_frames;
}
- // Used to verify we get no errors.
- void OnErrorCallback(Status error) { FAIL() << error.message(); }
-
- // Used as the callback of the PCM encoder.
- void VerifyPcmEncoding(EncodedAudioBuffer output) {
- DCHECK(current_audio_bus_);
- ++encode_callback_count_;
- // Verify that PCM doesn't change the input; i.e. it's just a pass through.
- size_t uncompressed_size = current_audio_bus_->frames() *
- current_audio_bus_->channels() * sizeof(float);
- ASSERT_EQ(uncompressed_size, output.encoded_data_size);
- std::unique_ptr<uint8_t[]> uncompressed_audio_data(
- new uint8_t[uncompressed_size]);
- current_audio_bus_->ToInterleaved<Float32SampleTypeTraits>(
- current_audio_bus_->frames(),
- reinterpret_cast<float*>(uncompressed_audio_data.get()));
- EXPECT_EQ(std::memcmp(uncompressed_audio_data.get(),
- output.encoded_data.get(), uncompressed_size),
- 0);
- }
-
- // Used as the callback of the Opus encoder.
- void VerifyOpusEncoding(OpusDecoder* opus_decoder,
- EncodedAudioBuffer output) {
- DCHECK(current_audio_bus_);
- DCHECK(opus_decoder);
+ void RunLoop() { task_environment_.RunUntilIdle(); }
- ++encode_callback_count_;
- // Use the provied |opus_decoder| to decode the |encoded_data| and check we
- // get the expected number of frames per buffer.
- std::vector<float> buffer(kOpusFramesPerBuffer * output.params.channels());
- EXPECT_EQ(kOpusFramesPerBuffer,
- opus_decode_float(opus_decoder, output.encoded_data.get(),
- output.encoded_data_size, buffer.data(),
- kOpusFramesPerBuffer, 0));
- }
+ base::test::TaskEnvironment task_environment_;
- private:
// The encoder options as initialized from the test's parameter.
- const AudioParameters input_params_;
+ AudioEncoder::Options options_;
// The audio source used to fill in the data of the |current_audio_bus_|.
- media::SineWaveAudioSource audio_source_;
+ SineWaveAudioSource audio_source_;
// The encoder the test is verifying.
std::unique_ptr<AudioEncoder> encoder_;
// The audio bus that was most recently generated and sent to the |encoder_|
// by ProduceAudioAndEncode().
- std::unique_ptr<media::AudioBus> current_audio_bus_;
+ std::unique_ptr<AudioBus> current_audio_bus_;
- // The number of encoder callbacks received.
- int encode_callback_count_ = 0;
+ base::TimeDelta buffer_duration_ = base::TimeDelta::FromMilliseconds(10);
};
-TEST_P(AudioEncodersTest, PcmEncoder) {
- SetEncoder(std::make_unique<AudioPcmEncoder>(
- input_params(),
- base::BindRepeating(&AudioEncodersTest::VerifyPcmEncoding,
- base::Unretained(this)),
- base::BindRepeating(&AudioEncodersTest::OnErrorCallback,
- base::Unretained(this))));
+TEST_P(AudioEncodersTest, OpusTimestamps) {
+ constexpr int kCount = 12;
+ for (base::TimeDelta duration :
+ {kOpusBufferDuration * 10, kOpusBufferDuration,
+ kOpusBufferDuration * 2 / 3}) {
+ buffer_duration_ = duration;
+ size_t expected_outputs = (buffer_duration_ * kCount) / kOpusBufferDuration;
+ base::TimeTicks current_timestamp;
+ std::vector<base::TimeTicks> timestamps;
+
+ auto output_cb =
+ base::BindLambdaForTesting([&](EncodedAudioBuffer output, MaybeDesc) {
+ timestamps.push_back(output.timestamp);
+ });
+
+ SetupEncoder(std::move(output_cb));
+
+ for (int i = 0; i < kCount; ++i) {
+ ProduceAudioAndEncode(current_timestamp);
+ current_timestamp += buffer_duration_;
+ }
+
+ bool flush_done = false;
+ auto done_cb = base::BindLambdaForTesting([&](Status error) {
+ if (!error.is_ok())
+ FAIL() << error.message();
+ flush_done = true;
+ });
+ encoder()->Flush(std::move(done_cb));
+ RunLoop();
+ EXPECT_TRUE(flush_done);
+ EXPECT_EQ(expected_outputs, timestamps.size());
+
+ current_timestamp = base::TimeTicks();
+ for (auto& ts : timestamps) {
+ EXPECT_EQ(current_timestamp, ts);
+ current_timestamp += kOpusBufferDuration;
+ }
+ }
+}
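
As a worked example of the arithmetic above: with kCount = 12 and the 40 ms case (kOpusBufferDuration * 2 / 3), the loop feeds 480 ms of audio, so after the flush the test expects 480 / 60 = 8 outputs, timestamped 0 ms, 60 ms, ..., 420 ms.
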
- constexpr int kCount = 6;
- for (int i = 0; i < kCount; ++i)
- ProduceAudioAndEncode();
+TEST_P(AudioEncodersTest, OpusExtraData) {
+ std::vector<uint8_t> extra;
+ auto output_cb = base::BindLambdaForTesting(
+ [&](EncodedAudioBuffer output, MaybeDesc desc) {
+ DCHECK(desc.has_value());
+ extra = desc.value();
+ });
+
+ SetupEncoder(std::move(output_cb));
+ buffer_duration_ = kOpusBufferDuration;
+ ProduceAudioAndEncode();
+ RunLoop();
+
+ ASSERT_GT(extra.size(), 0u);
+ EXPECT_EQ(extra[0], 'O');
+ EXPECT_EQ(extra[1], 'p');
+ EXPECT_EQ(extra[2], 'u');
+ EXPECT_EQ(extra[3], 's');
+
+ uint16_t* sample_rate_ptr = reinterpret_cast<uint16_t*>(extra.data() + 12);
+ if (options_.sample_rate < std::numeric_limits<uint16_t>::max())
+ EXPECT_EQ(*sample_rate_ptr, options_.sample_rate);
+ else
+ EXPECT_EQ(*sample_rate_ptr, 48000);
+
+ uint8_t* channels_ptr = reinterpret_cast<uint8_t*>(extra.data() + 9);
+ EXPECT_EQ(*channels_ptr, options_.channels);
+
+ uint16_t* skip_ptr = reinterpret_cast<uint16_t*>(extra.data() + 10);
+ EXPECT_GT(*skip_ptr, 0);
+}
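
The offsets probed here come from the OpusHead identification header of RFC 7845 (section 5.1), the same 19-byte layout the encoder emits below in audio_opus_encoder.cc. A minimal sketch of a builder for that layout (hypothetical helper; like the test's pointer casts, it assumes a little-endian host):

    #include <cstdint>
    #include <cstring>
    #include <vector>

    std::vector<uint8_t> BuildOpusHead(uint8_t channels, uint16_t pre_skip,
                                       uint32_t input_sample_rate) {
      std::vector<uint8_t> head(19, 0);
      std::memcpy(head.data(), "OpusHead", 8);      // offsets 0-7: magic
      head[8] = 1;                                  // offset 8: version
      head[9] = channels;                           // offset 9: channel count
      std::memcpy(head.data() + 10, &pre_skip, 2);  // offset 10: pre-skip
      std::memcpy(head.data() + 12, &input_sample_rate, 4);  // offset 12: rate
      // Offsets 16-18 (output gain, channel mapping family) stay zero.
      return head;
    }
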
- EXPECT_EQ(kCount, encode_callback_count());
+// Check how the Opus encoder reacts to breaks in the continuity of incoming
+// sound. Capture times are expected to be exactly one buffer duration apart,
+// but the encoder should be ready to handle situations when that's not the
+// case.
+TEST_P(AudioEncodersTest, OpusTimeContinuityBreak) {
+ base::TimeTicks current_timestamp;
+ base::TimeDelta small_gap = base::TimeDelta::FromMicroseconds(500);
+ base::TimeDelta large_gap = base::TimeDelta::FromMicroseconds(1500);
+ std::vector<base::TimeTicks> timestamps;
+
+ auto output_cb =
+ base::BindLambdaForTesting([&](EncodedAudioBuffer output, MaybeDesc) {
+ timestamps.push_back(output.timestamp);
+ });
+
+ SetupEncoder(std::move(output_cb));
+
+ // Encode first normal buffer and immediately get an output for it.
+ buffer_duration_ = kOpusBufferDuration;
+ auto ts0 = current_timestamp;
+ ProduceAudioAndEncode(current_timestamp);
+ current_timestamp += buffer_duration_;
+ EXPECT_EQ(1u, timestamps.size());
+ EXPECT_EQ(ts0, timestamps[0]);
+
+ // Add another buffer which is too small and will be buffered.
+ buffer_duration_ = kOpusBufferDuration / 2;
+ auto ts1 = current_timestamp;
+ ProduceAudioAndEncode(current_timestamp);
+ current_timestamp += buffer_duration_;
+ EXPECT_EQ(1u, timestamps.size());
+
+ // Add another large buffer after a large gap; 2 outputs are expected
+ // because a large gap should trigger a flush.
+ current_timestamp += large_gap;
+ buffer_duration_ = kOpusBufferDuration;
+ auto ts2 = current_timestamp;
+ ProduceAudioAndEncode(current_timestamp);
+ current_timestamp += buffer_duration_;
+ EXPECT_EQ(3u, timestamps.size());
+ EXPECT_EQ(ts1, timestamps[1]);
+ EXPECT_EQ(ts2, timestamps[2]);
+
+ // Add another buffer which is too small and will be buffered.
+ buffer_duration_ = kOpusBufferDuration / 2;
+ auto ts3 = current_timestamp;
+ ProduceAudioAndEncode(current_timestamp);
+ current_timestamp += buffer_duration_;
+ EXPECT_EQ(3u, timestamps.size());
+
+ // Add a small gap and a large buffer; only one output is expected because
+ // a small gap doesn't trigger a flush.
+ // The small gap itself is not counted in the output timestamps.
+ auto ts4 = current_timestamp + kOpusBufferDuration / 2;
+ current_timestamp += small_gap;
+ buffer_duration_ = kOpusBufferDuration;
+ ProduceAudioAndEncode(current_timestamp);
+ EXPECT_EQ(4u, timestamps.size());
+ EXPECT_EQ(ts3, timestamps[3]);
+
+ encoder()->Flush(base::BindOnce([](Status error) {
+ if (!error.is_ok())
+ FAIL() << error.message();
+ }));
+ RunLoop();
+ EXPECT_EQ(5u, timestamps.size());
+ EXPECT_EQ(ts4, timestamps[4]);
}
-TEST_P(AudioEncodersTest, OpusEncoder) {
+TEST_P(AudioEncodersTest, FullCycleEncodeDecode) {
int error;
+ int encode_callback_count = 0;
+ std::vector<float> buffer(kOpusFramesPerBuffer * options_.channels);
OpusDecoder* opus_decoder =
- opus_decoder_create(kAudioSampleRate, input_params().channels(), &error);
+ opus_decoder_create(kAudioSampleRate, options_.channels, &error);
ASSERT_TRUE(error == OPUS_OK && opus_decoder);
+ int total_frames = 0;
+
+ auto verify_opus_encoding = [&](EncodedAudioBuffer output, MaybeDesc) {
+ ++encode_callback_count;
+
+ // Use the libopus decoder to decode the |encoded_data| and check we
+ // get the expected number of frames per buffer.
+ EXPECT_EQ(kOpusFramesPerBuffer,
+ opus_decode_float(opus_decoder, output.encoded_data.get(),
+ output.encoded_data_size, buffer.data(),
+ kOpusFramesPerBuffer, 0));
+ };
- SetEncoder(std::make_unique<AudioOpusEncoder>(
- input_params(),
- base::BindRepeating(&AudioEncodersTest::VerifyOpusEncoding,
- base::Unretained(this), opus_decoder),
- base::BindRepeating(&AudioEncodersTest::OnErrorCallback,
- base::Unretained(this)),
- /*opus_bitrate=*/0));
+ SetupEncoder(base::BindLambdaForTesting(verify_opus_encoding));
// The opus encoder encodes in multiples of 60 ms. Wait for the total number
// of frames that will be generated in 60 ms at the input sampling rate.
const int frames_in_60_ms =
- kOpusBufferDuration.InSecondsF() * input_params().sample_rate();
- int total_frames = 0;
- while (total_frames < frames_in_60_ms)
- total_frames += ProduceAudioAndEncode();
+ kOpusBufferDuration.InSecondsF() * options_.sample_rate;
+
+ base::TimeTicks time;
+ while (total_frames < frames_in_60_ms) {
+ total_frames += ProduceAudioAndEncode(time);
+ time += buffer_duration_;
+ }
- EXPECT_EQ(1, encode_callback_count());
+ EXPECT_EQ(1, encode_callback_count);
// If there are remaining frames in the opus encoder FIFO, we need to flush
// them before we destroy the encoder. Flushing should trigger the encode
// callback and we should be able to decode the resulting encoded frames.
if (total_frames > frames_in_60_ms) {
- encoder()->Flush();
- EXPECT_EQ(2, encode_callback_count());
+ encoder()->Flush(base::BindOnce([](Status error) {
+ if (!error.is_ok())
+ FAIL() << error.message();
+ }));
+ RunLoop();
+ EXPECT_EQ(2, encode_callback_count);
}
opus_decoder_destroy(opus_decoder);
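
For the 48 kHz parameterizations, frames_in_60_ms works out to 0.060 x 48000 = 2880 frames, i.e. kOpusFramesPerBuffer; six 10 ms buffers of 480 frames fill it exactly, so one output is produced and nothing is left to flush. At rates like 22050 Hz the loop overshoots, and the trailing frames are emitted by the Flush() branch above.
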
diff --git a/chromium/media/audio/audio_features.cc b/chromium/media/audio/audio_features.cc
index 1b78a09df80..d231e8f0f29 100644
--- a/chromium/media/audio/audio_features.cc
+++ b/chromium/media/audio/audio_features.cc
@@ -32,14 +32,14 @@ const base::Feature kUseAAudioDriver{"UseAAudioDriver",
base::FEATURE_ENABLED_BY_DEFAULT};
#endif
-#if BUILDFLAG(IS_ASH) || BUILDFLAG(IS_LACROS)
+#if BUILDFLAG(IS_CHROMEOS_ASH) || BUILDFLAG(IS_CHROMEOS_LACROS)
const base::Feature kCrOSSystemAEC{"CrOSSystemAEC",
base::FEATURE_ENABLED_BY_DEFAULT};
const base::Feature kCrOSSystemAECDeactivatedGroups{
"CrOSSystemAECDeactivatedGroups", base::FEATURE_ENABLED_BY_DEFAULT};
#endif
-#if defined(OS_MAC) || BUILDFLAG(IS_ASH)
+#if defined(OS_MAC) || BUILDFLAG(IS_CHROMEOS_ASH)
const base::Feature kForceEnableSystemAec{"ForceEnableSystemAec",
base::FEATURE_DISABLED_BY_DEFAULT};
#endif
diff --git a/chromium/media/audio/audio_features.h b/chromium/media/audio/audio_features.h
index a250694690c..0f3dcde21b0 100644
--- a/chromium/media/audio/audio_features.h
+++ b/chromium/media/audio/audio_features.h
@@ -19,12 +19,12 @@ MEDIA_EXPORT extern const base::Feature kDumpOnAudioServiceHang;
MEDIA_EXPORT extern const base::Feature kUseAAudioDriver;
#endif
-#if BUILDFLAG(IS_ASH) || BUILDFLAG(IS_LACROS)
+#if BUILDFLAG(IS_CHROMEOS_ASH) || BUILDFLAG(IS_CHROMEOS_LACROS)
MEDIA_EXPORT extern const base::Feature kCrOSSystemAEC;
MEDIA_EXPORT extern const base::Feature kCrOSSystemAECDeactivatedGroups;
#endif
-#if defined(OS_MAC) || BUILDFLAG(IS_ASH)
+#if defined(OS_MAC) || BUILDFLAG(IS_CHROMEOS_ASH)
MEDIA_EXPORT extern const base::Feature kForceEnableSystemAec;
#endif
diff --git a/chromium/media/audio/audio_input_stream_data_interceptor.h b/chromium/media/audio/audio_input_stream_data_interceptor.h
index 495f3798033..383a28b329e 100644
--- a/chromium/media/audio/audio_input_stream_data_interceptor.h
+++ b/chromium/media/audio/audio_input_stream_data_interceptor.h
@@ -17,7 +17,7 @@ namespace media {
class AudioDebugRecorder;
-// This class wraps an AudioInputStream to be able to intercerpt the data for
+// This class wraps an AudioInputStream to be able to intercept the data for
// debug recording purposes.
class MEDIA_EXPORT AudioInputStreamDataInterceptor
: public AudioInputStream,
diff --git a/chromium/media/audio/audio_low_latency_input_output_unittest.cc b/chromium/media/audio/audio_low_latency_input_output_unittest.cc
index dba6a55e26d..859181d3e33 100644
--- a/chromium/media/audio/audio_low_latency_input_output_unittest.cc
+++ b/chromium/media/audio/audio_low_latency_input_output_unittest.cc
@@ -210,8 +210,12 @@ class FullDuplexAudioSinkSource
EXPECT_EQ(channels_, dest->channels());
size = std::min(dest->frames() * frame_size_, size);
EXPECT_EQ(static_cast<size_t>(size) % sizeof(*dest->channel(0)), 0U);
- dest->FromInterleaved(source, size / frame_size_,
- frame_size_ / channels_);
+
+ // We should only have 16 bits per sample.
+ DCHECK_EQ(frame_size_ / channels_, 2);
+ dest->FromInterleaved<SignedInt16SampleTypeTraits>(
+ reinterpret_cast<const int16_t*>(source), size / frame_size_);
+
buffer_->Seek(size);
return size / frame_size_;
}
diff --git a/chromium/media/audio/audio_manager.cc b/chromium/media/audio/audio_manager.cc
index 77a0d4326a3..ff94c386043 100644
--- a/chromium/media/audio/audio_manager.cc
+++ b/chromium/media/audio/audio_manager.cc
@@ -48,10 +48,8 @@ class AudioManagerHelper {
}
#endif
-#if defined(OS_LINUX) || defined(OS_CHROMEOS)
void set_app_name(const std::string& app_name) { app_name_ = app_name; }
const std::string& app_name() const { return app_name_; }
-#endif
FakeAudioLogFactory fake_log_factory_;
@@ -59,9 +57,7 @@ class AudioManagerHelper {
std::unique_ptr<base::win::ScopedCOMInitializer> com_initializer_for_testing_;
#endif
-#if defined(OS_LINUX) || defined(OS_CHROMEOS)
std::string app_name_;
-#endif
DISALLOW_COPY_AND_ASSIGN(AudioManagerHelper);
};
@@ -92,7 +88,7 @@ AudioManager::AudioManager(std::unique_ptr<AudioThread> audio_thread)
LOG(WARNING) << "Multiple instances of AudioManager detected";
}
// We always override |g_last_created| irrespective of whether it is already
- // set or not becuase it represents the last created instance.
+ // set or not because it represents the last created instance.
g_last_created = this;
}
@@ -128,7 +124,6 @@ std::unique_ptr<AudioManager> AudioManager::CreateForTesting(
return Create(std::move(audio_thread), GetHelper()->fake_log_factory());
}
-#if defined(OS_LINUX) || defined(OS_CHROMEOS)
// static
void AudioManager::SetGlobalAppName(const std::string& app_name) {
GetHelper()->set_app_name(app_name);
@@ -138,7 +133,6 @@ void AudioManager::SetGlobalAppName(const std::string& app_name) {
const std::string& AudioManager::GetGlobalAppName() {
return GetHelper()->app_name();
}
-#endif
// static
AudioManager* AudioManager::Get() {
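
Dropping the guards in the audio_manager.cc change above makes the app-name plumbing available on every platform, not just Linux/ChromeOS; a hypothetical embedder call site:

    #include "media/audio/audio_manager.h"

    void LabelAudioStreams() {
      // "My Embedder" is a placeholder; backends that support source naming
      // (historically PulseAudio) surface it to external apps.
      media::AudioManager::SetGlobalAppName("My Embedder");
    }
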
diff --git a/chromium/media/audio/audio_manager.h b/chromium/media/audio/audio_manager.h
index 18b47b5d7f6..508775f23f9 100644
--- a/chromium/media/audio/audio_manager.h
+++ b/chromium/media/audio/audio_manager.h
@@ -60,14 +60,11 @@ class MEDIA_EXPORT AudioManager {
static std::unique_ptr<AudioManager> CreateForTesting(
std::unique_ptr<AudioThread> audio_thread);
-#if defined(OS_LINUX) || defined(OS_CHROMEOS)
- // Sets the name of the audio source as seen by external apps. Only actually
- // used with PulseAudio as of this writing.
+ // Sets the name of the audio source as seen by external apps.
static void SetGlobalAppName(const std::string& app_name);
// Returns the app name or an empty string if it is not set.
static const std::string& GetGlobalAppName();
-#endif
// Returns the pointer to the last created instance, or NULL if not yet
// created. This is a utility method for the code outside of media directory,
diff --git a/chromium/media/audio/audio_manager_base.cc b/chromium/media/audio/audio_manager_base.cc
index ae5cec1e71f..2f97c51cbd4 100644
--- a/chromium/media/audio/audio_manager_base.cc
+++ b/chromium/media/audio/audio_manager_base.cc
@@ -343,7 +343,7 @@ AudioOutputStream* AudioManagerBase::MakeAudioOutputStreamProxy(
std::string output_device_id =
AudioDeviceDescription::IsDefaultDevice(device_id)
?
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH) || BUILDFLAG(IS_CHROMEOS_LACROS)
// On ChromeOS, it is expected that, if the default device is given,
// no specific device ID should be used since the actual output device
// should change dynamically if the system default device changes.
diff --git a/chromium/media/audio/audio_manager_unittest.cc b/chromium/media/audio/audio_manager_unittest.cc
index f3dad5c1534..85aba43cd33 100644
--- a/chromium/media/audio/audio_manager_unittest.cc
+++ b/chromium/media/audio/audio_manager_unittest.cc
@@ -55,19 +55,22 @@
#include "media/audio/pulse/pulse_util.h"
#endif // defined(USE_PULSEAUDIO)
-#if defined(USE_CRAS) && BUILDFLAG(IS_ASH)
-#include "chromeos/audio/audio_devices_pref_handler_stub.h"
-#include "chromeos/audio/cras_audio_handler.h"
+#if defined(USE_CRAS) && BUILDFLAG(IS_CHROMEOS_ASH)
+#include "ash/components/audio/audio_devices_pref_handler_stub.h"
+#include "ash/components/audio/cras_audio_handler.h"
#include "chromeos/dbus/audio/fake_cras_audio_client.h"
#include "media/audio/cras/audio_manager_chromeos.h"
-#elif defined(USE_CRAS) && defined(OS_LINUX)
+#elif defined(USE_CRAS) && (defined(OS_LINUX) || BUILDFLAG(IS_CHROMEOS_LACROS))
#include "media/audio/cras/audio_manager_cras.h"
#endif
namespace media {
-
namespace {
+#if defined(USE_CRAS) && BUILDFLAG(IS_CHROMEOS_ASH)
+using ::ash::CrasAudioHandler;
+#endif
+
template <typename T>
struct TestAudioManagerFactory {
static std::unique_ptr<AudioManager> Create(
@@ -101,7 +104,7 @@ struct TestAudioManagerFactory<std::nullptr_t> {
}
};
-#if defined(USE_CRAS) && BUILDFLAG(IS_ASH)
+#if defined(USE_CRAS) && BUILDFLAG(IS_CHROMEOS_ASH)
using chromeos::AudioNode;
using chromeos::AudioNodeList;
@@ -297,9 +300,9 @@ class AudioManagerTest : public ::testing::Test {
device_info_accessor_->GetAssociatedOutputDeviceID(input_device_id);
}
-#if defined(USE_CRAS) && BUILDFLAG(IS_ASH)
+#if defined(USE_CRAS) && BUILDFLAG(IS_CHROMEOS_ASH)
void TearDown() override {
- chromeos::CrasAudioHandler::Shutdown();
+ CrasAudioHandler::Shutdown();
audio_pref_handler_ = nullptr;
chromeos::CrasAudioClient::Shutdown();
}
@@ -307,17 +310,17 @@ class AudioManagerTest : public ::testing::Test {
void SetUpCrasAudioHandlerWithTestingNodes(const AudioNodeList& audio_nodes) {
chromeos::CrasAudioClient::InitializeFake();
chromeos::FakeCrasAudioClient::Get()->SetAudioNodesForTesting(audio_nodes);
- audio_pref_handler_ = new chromeos::AudioDevicesPrefHandlerStub();
- chromeos::CrasAudioHandler::Initialize(
+ audio_pref_handler_ = new ash::AudioDevicesPrefHandlerStub();
+ CrasAudioHandler::Initialize(
/*media_controller_manager*/ mojo::NullRemote(), audio_pref_handler_);
- cras_audio_handler_ = chromeos::CrasAudioHandler::Get();
+ cras_audio_handler_ = CrasAudioHandler::Get();
base::RunLoop().RunUntilIdle();
}
void SetActiveOutputNode(uint64_t node_id) {
cras_audio_handler_->SwitchToDevice(
*cras_audio_handler_->GetDeviceFromId(node_id), true /* notify */,
- chromeos::CrasAudioHandler::ACTIVATE_BY_USER /* activate_by */);
+ CrasAudioHandler::ACTIVATE_BY_USER /* activate_by */);
}
AudioParameters GetPreferredOutputStreamParameters(
@@ -334,7 +337,7 @@ class AudioManagerTest : public ::testing::Test {
AudioParameters::HardwareCapabilities(limits::kMinAudioBufferSize,
limits::kMaxAudioBufferSize));
}
-#endif // defined(USE_CRAS) && BUILDFLAG(IS_ASH)
+#endif // defined(USE_CRAS) && BUILDFLAG(IS_CHROMEOS_ASH)
protected:
AudioManagerTest() {
@@ -378,7 +381,7 @@ class AudioManagerTest : public ::testing::Test {
}
}
-#if defined(USE_CRAS) && BUILDFLAG(IS_ASH)
+#if defined(USE_CRAS) && BUILDFLAG(IS_CHROMEOS_ASH)
// Helper method for (USE_CRAS) which verifies that the device list starts
// with a valid default record followed by physical device names.
static void CheckDeviceDescriptionsCras(
@@ -434,7 +437,7 @@ class AudioManagerTest : public ::testing::Test {
EXPECT_NE(it, device_descriptions.end());
return it->group_id;
}
-#endif // defined(USE_CRAS) && BUILDFLAG(IS_ASH)
+#endif // defined(USE_CRAS) && BUILDFLAG(IS_CHROMEOS_ASH)
bool InputDevicesAvailable() {
#if defined(OS_MAC) && defined(ARCH_CPU_ARM64)
@@ -475,13 +478,13 @@ class AudioManagerTest : public ::testing::Test {
std::unique_ptr<AudioManager> audio_manager_;
std::unique_ptr<AudioDeviceInfoAccessorForTests> device_info_accessor_;
-#if defined(USE_CRAS) && BUILDFLAG(IS_ASH)
- chromeos::CrasAudioHandler* cras_audio_handler_ = nullptr; // Not owned.
- scoped_refptr<chromeos::AudioDevicesPrefHandlerStub> audio_pref_handler_;
-#endif // defined(USE_CRAS) && BUILDFLAG(IS_ASH)
+#if defined(USE_CRAS) && BUILDFLAG(IS_CHROMEOS_ASH)
+ CrasAudioHandler* cras_audio_handler_ = nullptr; // Not owned.
+ scoped_refptr<ash::AudioDevicesPrefHandlerStub> audio_pref_handler_;
+#endif // defined(USE_CRAS) && BUILDFLAG(IS_CHROMEOS_ASH)
};
-#if defined(USE_CRAS) && BUILDFLAG(IS_ASH)
+#if defined(USE_CRAS) && BUILDFLAG(IS_CHROMEOS_ASH)
TEST_F(AudioManagerTest, EnumerateInputDevicesCras) {
// Setup the devices without internal mic, so that it doesn't exist
// beamforming capable mic.
@@ -646,9 +649,9 @@ TEST_F(AudioManagerTest, LookupDefaultInputDeviceWithProperGroupId) {
CheckDeviceDescriptions(device_descriptions);
// Set internal microphone as active.
- chromeos::AudioDevice internal_microphone(kInternalMic);
- cras_audio_handler_->SwitchToDevice(
- internal_microphone, true, chromeos::CrasAudioHandler::ACTIVATE_BY_USER);
+ ash::AudioDevice internal_microphone(kInternalMic);
+ cras_audio_handler_->SwitchToDevice(internal_microphone, true,
+ CrasAudioHandler::ACTIVATE_BY_USER);
auto new_default_device_id = device_info_accessor_->GetDefaultInputDeviceID();
EXPECT_NE(previous_default_device_id, new_default_device_id);
@@ -691,9 +694,9 @@ TEST_F(AudioManagerTest, LookupDefaultOutputDeviceWithProperGroupId) {
CheckDeviceDescriptions(device_descriptions);
// Set internal speaker as active.
- chromeos::AudioDevice internal_speaker(kInternalSpeaker);
- cras_audio_handler_->SwitchToDevice(
- internal_speaker, true, chromeos::CrasAudioHandler::ACTIVATE_BY_USER);
+ ash::AudioDevice internal_speaker(kInternalSpeaker);
+ cras_audio_handler_->SwitchToDevice(internal_speaker, true,
+ CrasAudioHandler::ACTIVATE_BY_USER);
auto new_default_device_id =
device_info_accessor_->GetDefaultOutputDeviceID();
EXPECT_NE(previous_default_device_id, new_default_device_id);
@@ -706,7 +709,7 @@ TEST_F(AudioManagerTest, LookupDefaultOutputDeviceWithProperGroupId) {
EXPECT_EQ(default_device_group_id, speaker_group_id);
EXPECT_EQ(base::NumberToString(kInternalSpeaker.id), new_default_device_id);
}
-#else // !(defined(USE_CRAS) && BUILDFLAG(IS_ASH))
+#else // !(defined(USE_CRAS) && BUILDFLAG(IS_CHROMEOS_ASH))
TEST_F(AudioManagerTest, HandleDefaultDeviceIDs) {
// Use a fake manager so we can makeup device ids, this will still use the
@@ -850,7 +853,7 @@ TEST_F(AudioManagerTest, GetAssociatedOutputDeviceID) {
EXPECT_TRUE(found_an_associated_device);
#endif // defined(OS_WIN) || defined(OS_MAC)
}
-#endif // defined(USE_CRAS) && BUILDFLAG(IS_ASH)
+#endif // defined(USE_CRAS) && BUILDFLAG(IS_CHROMEOS_ASH)
class TestAudioManager : public FakeAudioManager {
// For testing the default implementation of GetGroupId(Input|Output)
@@ -1013,7 +1016,7 @@ TEST_F(AudioManagerTest, CheckMinMaxAudioBufferSizeCallbacks) {
#if defined(OS_MAC)
CreateAudioManagerForTesting<AudioManagerMac>();
-#elif defined(USE_CRAS) && BUILDFLAG(IS_ASH)
+#elif defined(USE_CRAS) && BUILDFLAG(IS_CHROMEOS_ASH)
CreateAudioManagerForTesting<AudioManagerChromeOS>();
#endif
diff --git a/chromium/media/audio/audio_opus_encoder.cc b/chromium/media/audio/audio_opus_encoder.cc
index fbc2ed52411..e46bc3b9b76 100644
--- a/chromium/media/audio/audio_opus_encoder.cc
+++ b/chromium/media/audio/audio_opus_encoder.cc
@@ -8,8 +8,11 @@
#include "base/bind.h"
#include "base/logging.h"
+#include "base/numerics/checked_math.h"
#include "base/stl_util.h"
#include "base/strings/stringprintf.h"
+#include "media/base/audio_timestamp_helper.h"
+#include "media/base/bind_to_current_loop.h"
#include "media/base/status.h"
#include "media/base/status_codes.h"
@@ -31,36 +34,31 @@ constexpr int kOpusPreferredSamplingRate = 48000;
// reasons.
constexpr int kOpusPreferredBufferDurationMs = 60;
-// The amount of Frames in a 60 ms buffer @ 48000 samples/second.
-constexpr int kOpusPreferredFramesPerBuffer =
- kOpusPreferredSamplingRate * kOpusPreferredBufferDurationMs /
- base::Time::kMillisecondsPerSecond;
-
// Deletes the libopus encoder instance pointed to by |encoder_ptr|.
inline void OpusEncoderDeleter(OpusEncoder* encoder_ptr) {
opus_encoder_destroy(encoder_ptr);
}
-// Adjusts the given input |params| to have a frames-per-buffer value that
-// matches that of the FIFO which buffers the input audio before sending it to
-// the converter.
-AudioParameters AdjustInputParamsForOpus(const AudioParameters& params) {
- auto adjusted_params = params;
- adjusted_params.set_frames_per_buffer(params.sample_rate() *
- kOpusPreferredBufferDurationMs /
- base::Time::kMillisecondsPerSecond);
- return adjusted_params;
+AudioParameters CreateInputParams(const AudioEncoder::Options& options) {
+ const int frames_per_buffer = options.sample_rate *
+ kOpusPreferredBufferDurationMs /
+ base::Time::kMillisecondsPerSecond;
+ AudioParameters result(media::AudioParameters::AUDIO_PCM_LINEAR,
+ media::CHANNEL_LAYOUT_DISCRETE, options.sample_rate,
+ frames_per_buffer);
+ result.set_channels_for_discrete(options.channels);
+ return result;
}
// Creates the audio parameters of the converted audio format that Opus prefers,
// which will be used as the input to the libopus encoder.
-AudioParameters CreateOpusInputParams(const AudioParameters& input_params) {
+AudioParameters CreateOpusCompatibleParams(const AudioParameters& params) {
// third_party/libopus supports up to 2 channels (see implementation of
// opus_encoder_create()): force |converted_params| to at most those.
// Also, the libopus encoder can accept sample rates of 8, 12, 16, 24, and the
// default preferred 48 kHz. If the input sample rate is anything else, we'll
// use 48 kHz.
- const int input_rate = input_params.sample_rate();
+ const int input_rate = params.sample_rate();
const int used_rate = (input_rate == 8000 || input_rate == 12000 ||
input_rate == 16000 || input_rate == 24000)
? input_rate
@@ -68,87 +66,10 @@ AudioParameters CreateOpusInputParams(const AudioParameters& input_params) {
const int frames_per_buffer = used_rate * kOpusPreferredBufferDurationMs /
base::Time::kMillisecondsPerSecond;
- const auto converted_params =
- AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
- GuessChannelLayout(std::min(input_params.channels(), 2)),
- used_rate, frames_per_buffer);
- DVLOG(1) << "|input_params|:" << input_params.AsHumanReadableString()
- << " -->|converted_params|:"
- << converted_params.AsHumanReadableString();
- DCHECK(converted_params.IsValid());
-
- return converted_params;
-}
-
-// Creates and returns the libopus encoder instance. Returns nullptr if the
-// encoder creation fails.
-OwnedOpusEncoder CreateOpusEncoder(
- const AudioEncoder::StatusCB& status_callback,
- const AudioParameters& params,
- int32_t bitrate) {
- int opus_result;
- OwnedOpusEncoder encoder(
- opus_encoder_create(params.sample_rate(), params.channels(),
- OPUS_APPLICATION_AUDIO, &opus_result),
- OpusEncoderDeleter);
-
- if (opus_result < 0) {
- status_callback.Run(Status(
- StatusCode::kEncoderInitializationError,
- base::StringPrintf(
- "Couldn't init Opus encoder: %s, sample rate: %d, channels: %d",
- opus_strerror(opus_result), params.sample_rate(),
- params.channels())));
- }
-
- if (encoder &&
- opus_encoder_ctl(encoder.get(), OPUS_SET_BITRATE(bitrate)) != OPUS_OK) {
- status_callback.Run(
- Status(StatusCode::kEncoderInitializationError,
- base::StringPrintf("Failed to set Opus bitrate: %d", bitrate)));
- encoder.reset();
- }
-
- return encoder;
-}
-
-// Tries to encode |in_data|'s |num_samples| into |out_data|. |out_data| is
-// always resized to |kOpusMaxDataBytes| but only filled with |*out_size| actual
-// encoded data if encoding was successful. Returns true if encoding is
-// successful, in which case |*out_size| is guaranteed to be > 1. Returns false
-// if an error occurs or the packet does not need to be transmitted.
-// |status_callback| will be used to report any errors.
-bool DoEncode(const AudioEncoder::StatusCB& status_callback,
- OpusEncoder* opus_encoder,
- float* in_data,
- int num_samples,
- std::unique_ptr<uint8_t[]>* out_data,
- size_t* out_size) {
- DCHECK(opus_encoder);
- DCHECK(in_data);
- DCHECK(out_data);
- DCHECK(out_size);
- DCHECK_LE(num_samples, kOpusPreferredFramesPerBuffer);
-
- out_data->reset(new uint8_t[kOpusMaxDataBytes]);
- const opus_int32 result = opus_encode_float(
- opus_encoder, in_data, num_samples, out_data->get(), kOpusMaxDataBytes);
-
- if (result > 1) {
- // TODO(ajose): Investigate improving this. http://crbug.com/547918
- *out_size = result;
- return true;
- }
-
- // If |result| in {0,1}, do nothing; the documentation says that a return
- // value of zero or one means the packet does not need to be transmitted.
- // Otherwise, we have an error.
- if (result < 0) {
- status_callback.Run(
- Status(StatusCode::kEncoderFailedEncode, opus_strerror(result)));
- }
-
- return false;
+ AudioParameters result(AudioParameters::AUDIO_PCM_LOW_LATENCY,
+ GuessChannelLayout(std::min(params.channels(), 2)),
+ used_rate, frames_per_buffer);
+ return result;
}
// During this object's lifetime, it will use its |audio_bus_| to provide input
@@ -180,83 +101,262 @@ class ScopedConverterInputProvider : public AudioConverter::InputCallback {
} // namespace
-AudioOpusEncoder::AudioOpusEncoder(const AudioParameters& input_params,
- EncodeCB encode_callback,
- StatusCB status_callback,
- int32_t opus_bitrate)
- : AudioEncoder(AdjustInputParamsForOpus(input_params),
- std::move(encode_callback),
- std::move(status_callback)),
- bits_per_second_(opus_bitrate > 0 ? opus_bitrate : OPUS_AUTO),
- converted_params_(CreateOpusInputParams(audio_input_params())),
- converter_(audio_input_params(),
- converted_params_,
- /*disable_fifo=*/false),
- fifo_(base::BindRepeating(&AudioOpusEncoder::OnFifoOutput,
- base::Unretained(this))),
- converted_audio_bus_(
- AudioBus::Create(converted_params_.channels(),
- converted_params_.frames_per_buffer())),
- buffer_(converted_params_.channels() *
- converted_params_.frames_per_buffer()),
- opus_encoder_(CreateOpusEncoder(this->status_callback(),
- converted_params_,
- bits_per_second_)) {
- converter_.PrimeWithSilence();
- fifo_.Reset(converter_.GetMaxInputFramesRequested(
+AudioOpusEncoder::AudioOpusEncoder()
+ : opus_encoder_(nullptr, OpusEncoderDeleter) {}
+
+void AudioOpusEncoder::Initialize(const Options& options,
+ OutputCB output_callback,
+ StatusCB done_cb) {
+ DCHECK(!output_callback.is_null());
+ DCHECK(!done_cb.is_null());
+
+ done_cb = BindToCurrentLoop(std::move(done_cb));
+ if (opus_encoder_) {
+ std::move(done_cb).Run(StatusCode::kEncoderInitializeTwice);
+ return;
+ }
+
+ options_ = options;
+ input_params_ = CreateInputParams(options);
+ if (!input_params_.IsValid()) {
+ std::move(done_cb).Run(StatusCode::kEncoderInitializationError);
+ return;
+ }
+
+ converted_params_ = CreateOpusCompatibleParams(input_params_);
+ if (!converted_params_.IsValid()) {
+ std::move(done_cb).Run(StatusCode::kEncoderInitializationError);
+ return;
+ }
+
+ converter_ =
+ std::make_unique<AudioConverter>(input_params_, converted_params_,
+ /*disable_fifo=*/false);
+ fifo_ = std::make_unique<AudioPushFifo>(base::BindRepeating(
+ &AudioOpusEncoder::OnFifoOutput, base::Unretained(this)));
+ converted_audio_bus_ = AudioBus::Create(
+ converted_params_.channels(), converted_params_.frames_per_buffer());
+ buffer_.resize(converted_params_.channels() *
+ converted_params_.frames_per_buffer());
+ auto status_or_encoder = CreateOpusEncoder();
+ if (status_or_encoder.has_error()) {
+ std::move(done_cb).Run(std::move(status_or_encoder).error());
+ return;
+ }
+
+ opus_encoder_ = std::move(status_or_encoder).value();
+ converter_->PrimeWithSilence();
+ fifo_->Reset(converter_->GetMaxInputFramesRequested(
converted_params_.frames_per_buffer()));
+
+ output_cb_ = BindToCurrentLoop(std::move(output_callback));
+ std::move(done_cb).Run(OkStatus());
}
-AudioOpusEncoder::~AudioOpusEncoder() {
- DCHECK_EQ(fifo_.queued_frames(), 0)
- << "Must flush the encoder before destroying to avoid dropping frames.";
+AudioOpusEncoder::~AudioOpusEncoder() = default;
+
+AudioOpusEncoder::CodecDescription AudioOpusEncoder::PrepareExtraData() {
+ CodecDescription extra_data;
+ // RFC #7845 Ogg Encapsulation for the Opus Audio Codec
+ // https://tools.ietf.org/html/rfc7845
+ static const uint8_t kExtraDataTemplate[19] = {
+ 'O', 'p', 'u', 's', 'H', 'e', 'a', 'd',
+ 1, // offset 8, version, always 1
+ 0, // offset 9, channel count
+ 0, 0, // offset 10, pre-skip
+ 0, 0, 0, 0, // offset 12, original input sample rate in Hz
+ 0, 0, 0};
+
+ extra_data.assign(kExtraDataTemplate,
+ kExtraDataTemplate + sizeof(kExtraDataTemplate));
+
+ // Save number of channels
+ base::CheckedNumeric<uint8_t> channels(converted_params_.channels());
+ if (channels.IsValid())
+ extra_data.data()[9] = channels.ValueOrDie();
+
+ // Number of samples to skip from the start of the decoder's output.
+ // Real data begins this many samples late. These samples need to be skipped
+ // only at the very beginning of the audio stream, NOT at the beginning of
+ // each decoded output.
+ if (opus_encoder_) {
+ int32_t samples_to_skip = 0;
+
+ opus_encoder_ctl(opus_encoder_.get(), OPUS_GET_LOOKAHEAD(&samples_to_skip));
+ base::CheckedNumeric<uint16_t> samples_to_skip_safe = samples_to_skip;
+ if (samples_to_skip_safe.IsValid())
+ *reinterpret_cast<uint16_t*>(extra_data.data() + 10) =
+ samples_to_skip_safe.ValueOrDie();
+ }
+
+ // Save original sample rate
+ base::CheckedNumeric<uint16_t> sample_rate = input_params_.sample_rate();
+ uint16_t* sample_rate_ptr =
+ reinterpret_cast<uint16_t*>(extra_data.data() + 12);
+ if (sample_rate.IsValid())
+ *sample_rate_ptr = sample_rate.ValueOrDie();
+ else
+ *sample_rate_ptr = uint16_t{kOpusPreferredSamplingRate};
+ return extra_data;
}
-void AudioOpusEncoder::EncodeAudioImpl(const AudioBus& audio_bus,
- base::TimeTicks capture_time) {
- // Initializing the opus encoder may have failed.
- if (!opus_encoder_)
+void AudioOpusEncoder::Encode(std::unique_ptr<AudioBus> audio_bus,
+ base::TimeTicks capture_time,
+ StatusCB done_cb) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK_EQ(audio_bus->channels(), input_params_.channels());
+ DCHECK(!capture_time.is_null());
+ DCHECK(!done_cb.is_null());
+
+ current_done_cb_ = BindToCurrentLoop(std::move(done_cb));
+ if (!opus_encoder_) {
+ std::move(current_done_cb_)
+ .Run(StatusCode::kEncoderInitializeNeverCompleted);
return;
+ }
+
+ DLOG_IF(ERROR, !last_capture_time_.is_null() &&
+ ((capture_time - last_capture_time_).InSecondsF() >
+ 1.5f * audio_bus->frames() / input_params_.sample_rate()))
+ << "Possibly frames were skipped, which may result in inaccurate "
+ "timestamp calculation.";
+
+ last_capture_time_ = capture_time;
+
+ // If |capture_time| minus |audio_bus|'s duration doesn't match the expected
+ // timestamp of the next audio sample, there is a gap/overlap; if it's big
+ // enough we flush and start anew, otherwise we ignore it.
+ auto capture_ts = ComputeTimestamp(audio_bus->frames(), capture_time);
+ auto existing_buffer_duration = AudioTimestampHelper::FramesToTime(
+ fifo_->queued_frames(), input_params_.sample_rate());
+ auto end_of_existing_buffer_ts = next_timestamp_ + existing_buffer_duration;
+ base::TimeDelta gap = (capture_ts - end_of_existing_buffer_ts).magnitude();
+ constexpr base::TimeDelta max_gap = base::TimeDelta::FromMilliseconds(1);
+ if (gap > max_gap) {
+ DLOG(ERROR) << "Large gap in sound. Forced flush."
+ << " Gap/overlap duration: " << gap
+ << " capture_ts: " << capture_ts
+ << " next_timestamp_: " << next_timestamp_
+ << " existing_buffer_duration: " << existing_buffer_duration
+ << " end_of_existing_buffer_ts: " << end_of_existing_buffer_ts;
+ FlushInternal();
+ next_timestamp_ = capture_ts;
+ }
// The |fifo_| won't trigger OnFifoOutput() until we have enough frames
// suitable for the converter.
- fifo_.Push(audio_bus);
+ fifo_->Push(*audio_bus);
+ if (!current_done_cb_.is_null()) {
+ // If |current_done_cb_| is null, it means OnFifoOutput() has already
+ // reported an error.
+ std::move(current_done_cb_).Run(OkStatus());
+ }
}
-void AudioOpusEncoder::FlushImpl() {
- // Initializing the opus encoder may have failed.
- if (!opus_encoder_)
+void AudioOpusEncoder::Flush(StatusCB done_cb) {
+ DCHECK(!done_cb.is_null());
+
+ done_cb = BindToCurrentLoop(std::move(done_cb));
+ if (!opus_encoder_) {
+ std::move(done_cb).Run(StatusCode::kEncoderInitializeNeverCompleted);
return;
+ }
+ current_done_cb_ = BindToCurrentLoop(std::move(done_cb));
+ FlushInternal();
+ if (!current_done_cb_.is_null()) {
+ // If |current_done_cb_| is null, it means OnFifoOutput() has already
+ // reported an error.
+ std::move(current_done_cb_).Run(OkStatus());
+ }
+}
+
+void AudioOpusEncoder::FlushInternal() {
// This is needed to correctly compute the timestamp, since the number of
// frames of |output_bus| provided to OnFifoOutput() will always be equal to
// the full frames_per_buffer(), as the fifo's Flush() will pad the remaining
// empty frames with zeros.
- number_of_flushed_frames_ = fifo_.queued_frames();
- fifo_.Flush();
- number_of_flushed_frames_ = base::nullopt;
+ number_of_flushed_frames_ = fifo_->queued_frames();
+ fifo_->Flush();
+ number_of_flushed_frames_.reset();
}
void AudioOpusEncoder::OnFifoOutput(const AudioBus& output_bus,
int frame_delay) {
// Provides input to the converter from |output_bus| within this scope only.
- ScopedConverterInputProvider provider(&converter_, &output_bus);
- converter_.Convert(converted_audio_bus_.get());
+ ScopedConverterInputProvider provider(converter_.get(), &output_bus);
+ converter_->Convert(converted_audio_bus_.get());
converted_audio_bus_->ToInterleaved<Float32SampleTypeTraits>(
converted_audio_bus_->frames(), buffer_.data());
- std::unique_ptr<uint8_t[]> encoded_data;
- size_t encoded_data_size;
- if (DoEncode(status_callback(), opus_encoder_.get(), buffer_.data(),
- converted_params_.frames_per_buffer(), &encoded_data,
- &encoded_data_size)) {
- DCHECK_GT(encoded_data_size, 1u);
- encode_callback().Run(EncodedAudioBuffer(
- converted_params_, std::move(encoded_data), encoded_data_size,
- ComputeTimestamp(
- number_of_flushed_frames_.value_or(output_bus.frames()),
- last_capture_time())));
+ std::unique_ptr<uint8_t[]> encoded_data(new uint8_t[kOpusMaxDataBytes]);
+ auto result = opus_encode_float(opus_encoder_.get(), buffer_.data(),
+ converted_params_.frames_per_buffer(),
+ encoded_data.get(), kOpusMaxDataBytes);
+
+ if (result < 0 && !current_done_cb_.is_null()) {
+ std::move(current_done_cb_)
+ .Run(Status(StatusCode::kEncoderFailedEncode, opus_strerror(result)));
+ return;
+ }
+
+ auto encoded_duration = AudioTimestampHelper::FramesToTime(
+ number_of_flushed_frames_.value_or(output_bus.frames()),
+ input_params_.sample_rate());
+
+ size_t encoded_data_size = result;
+ // If |result| in {0,1}, do nothing; the documentation says that a return
+ // value of zero or one means the packet does not need to be transmitted.
+ if (encoded_data_size > 1) {
+ base::Optional<CodecDescription> desc;
+ if (need_to_emit_extra_data_) {
+ desc = PrepareExtraData();
+ need_to_emit_extra_data_ = false;
+ }
+ output_cb_.Run(
+ EncodedAudioBuffer(converted_params_, std::move(encoded_data),
+ encoded_data_size, next_timestamp_),
+ desc);
+ }
+ next_timestamp_ += encoded_duration;
+}
+
+base::TimeTicks AudioOpusEncoder::ComputeTimestamp(
+ int num_frames,
+ base::TimeTicks capture_time) const {
+ return capture_time - AudioTimestampHelper::FramesToTime(
+ num_frames, input_params_.sample_rate());
+}
+
+// Creates and returns the libopus encoder instance. Returns an error status
+// if the encoder creation fails.
+StatusOr<OwnedOpusEncoder> AudioOpusEncoder::CreateOpusEncoder() {
+ int opus_result;
+ OwnedOpusEncoder encoder(
+ opus_encoder_create(converted_params_.sample_rate(),
+ converted_params_.channels(), OPUS_APPLICATION_AUDIO,
+ &opus_result),
+ OpusEncoderDeleter);
+
+ if (opus_result < 0) {
+ return Status(
+ StatusCode::kEncoderInitializationError,
+ base::StringPrintf(
+ "Couldn't init Opus encoder: %s, sample rate: %d, channels: %d",
+ opus_strerror(opus_result), converted_params_.sample_rate(),
+ converted_params_.channels()));
+ }
+
+ int bitrate =
+ options_.bitrate.has_value() ? options_.bitrate.value() : OPUS_AUTO;
+ if (encoder &&
+ opus_encoder_ctl(encoder.get(), OPUS_SET_BITRATE(bitrate)) != OPUS_OK) {
+ return Status(
+ StatusCode::kEncoderInitializationError,
+ base::StringPrintf("Failed to set Opus bitrate: %d", bitrate));
}
+
+ return encoder;
}
} // namespace media
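
The continuity handling in Encode() above boils down to one rule; a hedged distillation using the same base types (hypothetical helper, not part of the patch):

    #include "base/time/time.h"

    // Force a flush when the gap (or overlap) between the incoming buffer's
    // computed start time and the end of the already-queued audio exceeds
    // the 1 ms |max_gap| used above.
    bool NeedsFlushForGap(base::TimeTicks buffer_start_ts,
                          base::TimeTicks end_of_queued_audio_ts) {
      constexpr base::TimeDelta kMaxGap = base::TimeDelta::FromMilliseconds(1);
      return (buffer_start_ts - end_of_queued_audio_ts).magnitude() > kMaxGap;
    }
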
diff --git a/chromium/media/audio/audio_opus_encoder.h b/chromium/media/audio/audio_opus_encoder.h
index 42983c9862f..4b82020dc1d 100644
--- a/chromium/media/audio/audio_opus_encoder.h
+++ b/chromium/media/audio/audio_opus_encoder.h
@@ -25,31 +25,40 @@ using OwnedOpusEncoder = std::unique_ptr<OpusEncoder, OpusEncoderDeleterType>;
// instance to do the actual encoding.
class MEDIA_EXPORT AudioOpusEncoder : public AudioEncoder {
public:
- AudioOpusEncoder(const AudioParameters& input_params,
- EncodeCB encode_callback,
- StatusCB status_callback,
- int32_t opus_bitrate);
+ AudioOpusEncoder();
AudioOpusEncoder(const AudioOpusEncoder&) = delete;
AudioOpusEncoder& operator=(const AudioOpusEncoder&) = delete;
~AudioOpusEncoder() override;
- protected:
// AudioEncoder:
- void EncodeAudioImpl(const AudioBus& audio_bus,
- base::TimeTicks capture_time) override;
- void FlushImpl() override;
+ void Initialize(const Options& options,
+ OutputCB output_callback,
+ StatusCB done_cb) override;
+
+ void Encode(std::unique_ptr<AudioBus> audio_bus,
+ base::TimeTicks capture_time,
+ StatusCB done_cb) override;
+
+ void Flush(StatusCB done_cb) override;
private:
// Called synchronously by |fifo_| once enough audio frames have been
- // buffered.
+ // buffered. Calls libopus to do actual encoding.
void OnFifoOutput(const AudioBus& output_bus, int frame_delay);
- // Target bitrate for Opus. If 0, Opus-provided automatic bitrate is used.
- // Note: As of 2013-10-31, the encoder in "auto bitrate" mode would use a
- // variable bitrate up to 102 kbps for 2-channel, 48 kHz audio and a 10 ms
- // buffer duration. The Opus library authors may, of course, adjust this in
- // later versions.
- const int32_t bits_per_second_;
+ void FlushInternal();
+
+ CodecDescription PrepareExtraData();
+
+ StatusOr<OwnedOpusEncoder> CreateOpusEncoder();
+
+ // Computes the timestamp of an AudioBus which has |num_frames| and was
+ // captured at |capture_time|. This timestamp is the capture time of the first
+ // sample in that AudioBus.
+ base::TimeTicks ComputeTimestamp(int num_frames,
+ base::TimeTicks capture_time) const;
+
+ AudioParameters input_params_;
// Output parameters after audio conversion. This may differ from the input
// params in the number of channels, sample rate, and the frames per buffer.
@@ -57,11 +66,11 @@ class MEDIA_EXPORT AudioOpusEncoder : public AudioEncoder {
AudioParameters converted_params_;
// Sample rate adapter from the input audio to what OpusEncoder desires.
- AudioConverter converter_;
+ std::unique_ptr<AudioConverter> converter_;
// Buffer for holding the original input audio before it goes to the
// converter.
- AudioPushFifo fifo_;
+ std::unique_ptr<AudioPushFifo> fifo_;
// This is the destination AudioBus into which the |converter_| renders the
// converted audio.
std::unique_ptr<AudioBus> converted_audio_bus_;
@@ -73,10 +82,24 @@ class MEDIA_EXPORT AudioOpusEncoder : public AudioEncoder {
// encoder fails.
OwnedOpusEncoder opus_encoder_;
+ // The capture time of the most recent |audio_bus| delivered to Encode().
+ base::TimeTicks last_capture_time_;
+
// If Flush() was called while |fifo_| had some buffered frames but was not
// yet full, this will be the number of flushed frames, which is used to
// compute the timestamp provided in the output |EncodedAudioBuffer|.
base::Optional<int> number_of_flushed_frames_;
+
+ // Timestamp that should be reported by the next call of |output_cb_|.
+ base::TimeTicks next_timestamp_;
+
+ // Callback for reporting completion and status of the current Flush() or
+ // Encode().
+ StatusCB current_done_cb_;
+
+ // True if the next output needs to have extra_data in it, only happens once.
+ bool need_to_emit_extra_data_ = true;
};
} // namespace media
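
Read together with the updated unit tests above, the new interface is driven roughly like this (a sketch; callback bodies are placeholders):

    #include <memory>

    #include "base/bind.h"
    #include "base/optional.h"
    #include "media/audio/audio_opus_encoder.h"

    void StartEncoding() {
      auto encoder = std::make_unique<media::AudioOpusEncoder>();
      media::AudioEncoder::Options options;
      options.sample_rate = 48000;
      options.channels = 2;
      encoder->Initialize(
          options,
          base::BindRepeating(
              [](media::EncodedAudioBuffer output,
                 base::Optional<media::AudioEncoder::CodecDescription> desc) {
                // The first output carries |desc| (the OpusHead bytes); hand
                // both to a muxer here.
              }),
          base::BindOnce([](media::Status status) {
            // Initialization result; errors arrive here asynchronously.
          }));
      // Encode(std::move(bus), capture_time, done_cb) and Flush(done_cb)
      // calls follow, each with its own StatusCB.
    }
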
diff --git a/chromium/media/audio/audio_output_device.cc b/chromium/media/audio/audio_output_device.cc
index d18408f0002..e7ff354365e 100644
--- a/chromium/media/audio/audio_output_device.cc
+++ b/chromium/media/audio/audio_output_device.cc
@@ -63,7 +63,12 @@ void AudioOutputDevice::InitializeOnIOThread(const AudioParameters& params,
DCHECK(params.IsValid());
DVLOG(1) << __func__ << ": " << params.AsHumanReadableString();
audio_parameters_ = params;
- callback_ = callback;
+
+ base::AutoLock auto_lock(audio_thread_lock_);
+ // If Stop() has already been called, RenderCallback has already been
+ // destroyed. So |callback| would be a dangling pointer.
+ if (!stopping_hack_)
+ callback_ = callback;
}
AudioOutputDevice::~AudioOutputDevice() {
@@ -206,7 +211,13 @@ void AudioOutputDevice::RequestDeviceAuthorizationOnIOThread() {
void AudioOutputDevice::CreateStreamOnIOThread() {
TRACE_EVENT0("audio", "AudioOutputDevice::Create");
DCHECK(io_task_runner_->BelongsToCurrentThread());
- DCHECK(callback_) << "Initialize hasn't been called";
+#if DCHECK_IS_ON()
+ {
+ base::AutoLock auto_lock(audio_thread_lock_);
+ if (!stopping_hack_)
+ DCHECK(callback_) << "Initialize hasn't been called";
+ }
+#endif
DCHECK_NE(state_, STREAM_CREATION_REQUESTED);
if (!ipc_) {
@@ -368,7 +379,7 @@ void AudioOutputDevice::OnStreamCreated(
base::UnsafeSharedMemoryRegion shared_memory_region,
base::SyncSocket::ScopedHandle socket_handle,
bool playing_automatically) {
- TRACE_EVENT0("audio", "AudioOutputDevice::OnStreamCreated")
+ TRACE_EVENT0("audio", "AudioOutputDevice::OnStreamCreated");
DCHECK(io_task_runner_->BelongsToCurrentThread());
DCHECK(shared_memory_region.IsValid());
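
The hunks above close a race between Initialize() on the IO thread and a concurrent Stop(); a minimal sketch of the guarded-callback pattern they apply (assumption: both paths share one lock and one stopping flag):

    #include "base/synchronization/lock.h"
    #include "media/base/audio_renderer_sink.h"

    class GuardedCallbackHolder {
     public:
      // A late Initialize() can never store a pointer that Stop() has
      // already invalidated, because both run under |lock_|.
      void Initialize(media::AudioRendererSink::RenderCallback* callback) {
        base::AutoLock auto_lock(lock_);
        if (!stopping_)
          callback_ = callback;
      }
      void Stop() {
        base::AutoLock auto_lock(lock_);
        stopping_ = true;
        callback_ = nullptr;
      }

     private:
      base::Lock lock_;
      bool stopping_ = false;
      media::AudioRendererSink::RenderCallback* callback_ = nullptr;
    };
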
diff --git a/chromium/media/audio/audio_pcm_encoder.cc b/chromium/media/audio/audio_pcm_encoder.cc
deleted file mode 100644
index a3722ef103b..00000000000
--- a/chromium/media/audio/audio_pcm_encoder.cc
+++ /dev/null
@@ -1,34 +0,0 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/audio/audio_pcm_encoder.h"
-
-#include <utility>
-
-namespace media {
-
-AudioPcmEncoder::AudioPcmEncoder(const AudioParameters& input_params,
- EncodeCB encode_callback,
- StatusCB status_callback)
- : AudioEncoder(input_params,
- std::move(encode_callback),
- std::move(status_callback)) {}
-
-void AudioPcmEncoder::EncodeAudioImpl(const AudioBus& audio_bus,
- base::TimeTicks capture_time) {
- const size_t size = audio_bus.frames() * audio_bus.channels() * sizeof(float);
- std::unique_ptr<uint8_t[]> encoded_data(new uint8_t[size]);
- audio_bus.ToInterleaved<Float32SampleTypeTraits>(
- audio_bus.frames(), reinterpret_cast<float*>(encoded_data.get()));
-
- encode_callback().Run(
- EncodedAudioBuffer(audio_input_params(), std::move(encoded_data), size,
- ComputeTimestamp(audio_bus.frames(), capture_time)));
-}
-
-void AudioPcmEncoder::FlushImpl() {
- // No buffering is done here, so do nothing.
-}
-
-} // namespace media
diff --git a/chromium/media/audio/audio_pcm_encoder.h b/chromium/media/audio/audio_pcm_encoder.h
deleted file mode 100644
index 7bd0da1dc39..00000000000
--- a/chromium/media/audio/audio_pcm_encoder.h
+++ /dev/null
@@ -1,32 +0,0 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_AUDIO_AUDIO_PCM_ENCODER_H_
-#define MEDIA_AUDIO_AUDIO_PCM_ENCODER_H_
-
-#include "media/base/audio_encoder.h"
-
-namespace media {
-
-// Defines a PCM encoder, which just passes back the raw uncompressed signed
-// 16-bit linear audio data.
-class MEDIA_EXPORT AudioPcmEncoder : public AudioEncoder {
- public:
- AudioPcmEncoder(const AudioParameters& input_params,
- EncodeCB encode_callback,
- StatusCB status_callback);
- AudioPcmEncoder(const AudioPcmEncoder&) = delete;
- AudioPcmEncoder& operator=(const AudioPcmEncoder&) = delete;
- ~AudioPcmEncoder() override = default;
-
- protected:
- // AudioEncoder:
- void EncodeAudioImpl(const AudioBus& audio_bus,
- base::TimeTicks capture_time) override;
- void FlushImpl() override;
-};
-
-} // namespace media
-
-#endif // MEDIA_AUDIO_AUDIO_PCM_ENCODER_H_
diff --git a/chromium/media/audio/audio_system_test_util.cc b/chromium/media/audio/audio_system_test_util.cc
index b408239c195..dae244c0253 100644
--- a/chromium/media/audio/audio_system_test_util.cc
+++ b/chromium/media/audio/audio_system_test_util.cc
@@ -145,4 +145,7 @@ void AudioSystemCallbackExpectations::OnDeviceId(
std::move(on_cb_received).Run();
}
+// This suite is instantiated in binaries that use //media:test_support.
+GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(AudioSystemTestTemplate);
+
} // namespace media
diff --git a/chromium/media/audio/audio_thread_impl.cc b/chromium/media/audio/audio_thread_impl.cc
index 957cd092617..db3d4e84b7b 100644
--- a/chromium/media/audio/audio_thread_impl.cc
+++ b/chromium/media/audio/audio_thread_impl.cc
@@ -55,7 +55,7 @@ void AudioThreadImpl::Stop() {
// Note that on MACOSX, we can still have tasks posted on the |task_runner_|,
// since it is the main thread task runner and we do not stop the main thread.
- // But this is fine becuase none of those tasks will actually run.
+ // But this is fine because none of those tasks will actually run.
thread_.Stop();
}
diff --git a/chromium/media/audio/cras/audio_manager_chromeos.cc b/chromium/media/audio/cras/audio_manager_chromeos.cc
index cef4e0a3b61..15d36844128 100644
--- a/chromium/media/audio/cras/audio_manager_chromeos.cc
+++ b/chromium/media/audio/cras/audio_manager_chromeos.cc
@@ -10,6 +10,8 @@
#include <map>
#include <utility>
+#include "ash/components/audio/audio_device.h"
+#include "ash/components/audio/cras_audio_handler.h"
#include "base/bind.h"
#include "base/check_op.h"
#include "base/command_line.h"
@@ -21,8 +23,6 @@
#include "base/synchronization/waitable_event.h"
#include "base/system/sys_info.h"
#include "base/threading/thread_task_runner_handle.h"
-#include "chromeos/audio/audio_device.h"
-#include "chromeos/audio/cras_audio_handler.h"
#include "media/audio/audio_device_description.h"
#include "media/audio/audio_features.h"
#include "media/audio/cras/cras_input.h"
@@ -34,6 +34,10 @@
namespace media {
namespace {
+using ::ash::AudioDevice;
+using ::ash::AudioDeviceList;
+using ::ash::CrasAudioHandler;
+
// Default sample rate for input and output streams.
const int kDefaultSampleRate = 48000;
@@ -54,18 +58,18 @@ enum CrosBeamformingDeviceState {
BEAMFORMING_STATE_MAX = BEAMFORMING_USER_DISABLED
};
-bool HasKeyboardMic(const chromeos::AudioDeviceList& devices) {
+bool HasKeyboardMic(const AudioDeviceList& devices) {
for (const auto& device : devices) {
- if (device.is_input && device.type == chromeos::AUDIO_TYPE_KEYBOARD_MIC) {
+ if (device.is_input &&
+ device.type == chromeos::AudioDeviceType::kKeyboardMic) {
return true;
}
}
return false;
}
-const chromeos::AudioDevice* GetDeviceFromId(
- const chromeos::AudioDeviceList& devices,
- uint64_t id) {
+const AudioDevice* GetDeviceFromId(const AudioDeviceList& devices,
+ uint64_t id) {
for (const auto& device : devices) {
if (device.id == id) {
return &device;
@@ -77,14 +81,16 @@ const chromeos::AudioDevice* GetDeviceFromId(
// Process a |device_list| in which two devices share the same dev_index by
// creating a virtual device name for them.
void ProcessVirtualDeviceName(AudioDeviceNames* device_names,
- const chromeos::AudioDeviceList& device_list) {
+ const AudioDeviceList& device_list) {
DCHECK_EQ(2U, device_list.size());
- if (device_list[0].type == chromeos::AUDIO_TYPE_LINEOUT ||
- device_list[1].type == chromeos::AUDIO_TYPE_LINEOUT) {
+ if (device_list[0].type == chromeos::AudioDeviceType::kLineout ||
+ device_list[1].type == chromeos::AudioDeviceType::kLineout) {
device_names->emplace_back(kHeadphoneLineOutVirtualDevice,
base::NumberToString(device_list[0].id));
- } else if (device_list[0].type == chromeos::AUDIO_TYPE_INTERNAL_SPEAKER ||
- device_list[1].type == chromeos::AUDIO_TYPE_INTERNAL_SPEAKER) {
+ } else if (device_list[0].type ==
+ chromeos::AudioDeviceType::kInternalSpeaker ||
+ device_list[1].type ==
+ chromeos::AudioDeviceType::kInternalSpeaker) {
device_names->emplace_back(kInternalOutputVirtualDevice,
base::NumberToString(device_list[0].id));
} else {
@@ -101,7 +107,7 @@ bool AudioManagerChromeOS::HasAudioOutputDevices() {
}
bool AudioManagerChromeOS::HasAudioInputDevices() {
- chromeos::AudioDeviceList devices;
+ AudioDeviceList devices;
GetAudioDevices(&devices);
for (size_t i = 0; i < devices.size(); ++i) {
if (devices[i].is_input && devices[i].is_for_simple_usage())
@@ -130,11 +136,11 @@ void AudioManagerChromeOS::GetAudioDeviceNamesImpl(
device_names->push_back(AudioDeviceName::CreateDefault());
- chromeos::AudioDeviceList devices;
+ AudioDeviceList devices;
GetAudioDevices(&devices);
// |dev_idx_map| is a map of dev_index and their audio devices.
- std::map<int, chromeos::AudioDeviceList> dev_idx_map;
+ std::map<int, AudioDeviceList> dev_idx_map;
for (const auto& device : devices) {
if (device.is_input != is_input || !device.is_for_simple_usage())
continue;
@@ -144,7 +150,7 @@ void AudioManagerChromeOS::GetAudioDeviceNamesImpl(
for (const auto& item : dev_idx_map) {
if (1 == item.second.size()) {
- const chromeos::AudioDevice& device = item.second.front();
+ const AudioDevice& device = item.second.front();
device_names->emplace_back(device.display_name,
base::NumberToString(device.id));
} else {
@@ -180,7 +186,7 @@ AudioParameters AudioManagerChromeOS::GetInputStreamParameters(
kDefaultSampleRate, buffer_size,
AudioParameters::HardwareCapabilities(limits::kMinAudioBufferSize,
limits::kMaxAudioBufferSize));
- chromeos::AudioDeviceList devices;
+ AudioDeviceList devices;
GetAudioDevices(&devices);
if (HasKeyboardMic(devices))
params.set_effects(AudioParameters::KEYBOARD_MIC);
@@ -211,7 +217,7 @@ AudioParameters AudioManagerChromeOS::GetInputStreamParameters(
std::string AudioManagerChromeOS::GetAssociatedOutputDeviceID(
const std::string& input_device_id) {
- chromeos::AudioDeviceList devices;
+ AudioDeviceList devices;
GetAudioDevices(&devices);
if (input_device_id == AudioDeviceDescription::kDefaultDeviceId) {
@@ -228,8 +234,7 @@ std::string AudioManagerChromeOS::GetAssociatedOutputDeviceID(
// Now search for an output device with the same device name.
auto output_device_it = std::find_if(
- devices.begin(), devices.end(),
- [device_name](const chromeos::AudioDevice& device) {
+ devices.begin(), devices.end(), [device_name](const AudioDevice& device) {
return !device.is_input && device.device_name == device_name;
});
return output_device_it == devices.end()
@@ -249,7 +254,7 @@ std::string AudioManagerChromeOS::GetDefaultOutputDeviceID() {
std::string AudioManagerChromeOS::GetGroupIDOutput(
const std::string& output_device_id) {
- chromeos::AudioDeviceList devices;
+ AudioDeviceList devices;
GetAudioDevices(&devices);
return GetHardwareDeviceFromDeviceId(devices, false, output_device_id);
@@ -257,7 +262,7 @@ std::string AudioManagerChromeOS::GetGroupIDOutput(
std::string AudioManagerChromeOS::GetGroupIDInput(
const std::string& input_device_id) {
- chromeos::AudioDeviceList devices;
+ AudioDeviceList devices;
GetAudioDevices(&devices);
return GetHardwareDeviceFromDeviceId(devices, true, input_device_id);
@@ -313,7 +318,7 @@ bool AudioManagerChromeOS::GetSystemAecSupportedPerBoard() {
int32_t AudioManagerChromeOS::GetSystemAecGroupIdPerBoard() {
DCHECK(GetTaskRunner()->BelongsToCurrentThread());
- int32_t group_id = chromeos::CrasAudioHandler::kSystemAecGroupIdNotAvailable;
+ int32_t group_id = CrasAudioHandler::kSystemAecGroupIdNotAvailable;
base::WaitableEvent event(base::WaitableEvent::ResetPolicy::MANUAL,
base::WaitableEvent::InitialState::NOT_SIGNALED);
if (main_task_runner_->BelongsToCurrentThread()) {
@@ -366,10 +371,9 @@ AudioParameters AudioManagerChromeOS::GetPreferredOutputStreamParameters(
}
if (preferred_device_id) {
- chromeos::AudioDeviceList devices;
+ AudioDeviceList devices;
GetAudioDevices(&devices);
- const chromeos::AudioDevice* device =
- GetDeviceFromId(devices, preferred_device_id);
+ const AudioDevice* device = GetDeviceFromId(devices, preferred_device_id);
if (device && device->is_input == false) {
channel_layout =
GuessChannelLayout(static_cast<int>(device->max_supported_channels));
@@ -400,7 +404,7 @@ bool AudioManagerChromeOS::IsDefault(const std::string& device_id,
}
std::string AudioManagerChromeOS::GetHardwareDeviceFromDeviceId(
- const chromeos::AudioDeviceList& devices,
+ const AudioDeviceList& devices,
bool is_input,
const std::string& device_id) {
uint64_t u64_device_id = 0;
@@ -412,12 +416,12 @@ std::string AudioManagerChromeOS::GetHardwareDeviceFromDeviceId(
return "";
}
- const chromeos::AudioDevice* device = GetDeviceFromId(devices, u64_device_id);
+ const AudioDevice* device = GetDeviceFromId(devices, u64_device_id);
return device ? device->device_name : "";
}
-void AudioManagerChromeOS::GetAudioDevices(chromeos::AudioDeviceList* devices) {
+void AudioManagerChromeOS::GetAudioDevices(AudioDeviceList* devices) {
DCHECK(GetTaskRunner()->BelongsToCurrentThread());
base::WaitableEvent event(base::WaitableEvent::ResetPolicy::MANUAL,
base::WaitableEvent::InitialState::NOT_SIGNALED);
@@ -434,12 +438,12 @@ void AudioManagerChromeOS::GetAudioDevices(chromeos::AudioDeviceList* devices) {
}
void AudioManagerChromeOS::GetAudioDevicesOnMainThread(
- chromeos::AudioDeviceList* devices,
+ AudioDeviceList* devices,
base::WaitableEvent* event) {
DCHECK(main_task_runner_->BelongsToCurrentThread());
// CrasAudioHandler is shut down before AudioManagerChromeOS.
- if (chromeos::CrasAudioHandler::Get())
- chromeos::CrasAudioHandler::Get()->GetAudioDevices(devices);
+ if (CrasAudioHandler::Get())
+ CrasAudioHandler::Get()->GetAudioDevices(devices);
event->Signal();
}
@@ -485,9 +489,9 @@ void AudioManagerChromeOS::GetPrimaryActiveInputNodeOnMainThread(
uint64_t* active_input_node_id,
base::WaitableEvent* event) {
DCHECK(main_task_runner_->BelongsToCurrentThread());
- if (chromeos::CrasAudioHandler::Get()) {
+ if (CrasAudioHandler::Get()) {
*active_input_node_id =
- chromeos::CrasAudioHandler::Get()->GetPrimaryActiveInputNode();
+ CrasAudioHandler::Get()->GetPrimaryActiveInputNode();
}
event->Signal();
}
@@ -496,9 +500,9 @@ void AudioManagerChromeOS::GetPrimaryActiveOutputNodeOnMainThread(
uint64_t* active_output_node_id,
base::WaitableEvent* event) {
DCHECK(main_task_runner_->BelongsToCurrentThread());
- if (chromeos::CrasAudioHandler::Get()) {
+ if (CrasAudioHandler::Get()) {
*active_output_node_id =
- chromeos::CrasAudioHandler::Get()->GetPrimaryActiveOutputNode();
+ CrasAudioHandler::Get()->GetPrimaryActiveOutputNode();
}
event->Signal();
}
@@ -507,8 +511,8 @@ void AudioManagerChromeOS::GetDefaultOutputBufferSizeOnMainThread(
int32_t* buffer_size,
base::WaitableEvent* event) {
DCHECK(main_task_runner_->BelongsToCurrentThread());
- if (chromeos::CrasAudioHandler::Get())
- chromeos::CrasAudioHandler::Get()->GetDefaultOutputBufferSize(buffer_size);
+ if (CrasAudioHandler::Get())
+ CrasAudioHandler::Get()->GetDefaultOutputBufferSize(buffer_size);
event->Signal();
}
@@ -516,9 +520,8 @@ void AudioManagerChromeOS::GetSystemAecSupportedOnMainThread(
bool* system_aec_supported,
base::WaitableEvent* event) {
DCHECK(main_task_runner_->BelongsToCurrentThread());
- if (chromeos::CrasAudioHandler::Get()) {
- *system_aec_supported =
- chromeos::CrasAudioHandler::Get()->system_aec_supported();
+ if (CrasAudioHandler::Get()) {
+ *system_aec_supported = CrasAudioHandler::Get()->system_aec_supported();
}
event->Signal();
}
@@ -527,8 +530,8 @@ void AudioManagerChromeOS::GetSystemAecGroupIdOnMainThread(
int32_t* group_id,
base::WaitableEvent* event) {
DCHECK(main_task_runner_->BelongsToCurrentThread());
- if (chromeos::CrasAudioHandler::Get())
- *group_id = chromeos::CrasAudioHandler::Get()->system_aec_group_id();
+ if (CrasAudioHandler::Get())
+ *group_id = CrasAudioHandler::Get()->system_aec_group_id();
event->Signal();
}
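
All of the ...OnMainThread() helpers above share one post-and-wait shape: the audio thread posts a task that fills an out-parameter and signals an event, then blocks until the signal arrives. A generic sketch of that pattern, assuming the calling thread is allowed to block (QueryOnMainThread and the constant 42 are hypothetical):

    #include "base/bind.h"
    #include "base/single_thread_task_runner.h"
    #include "base/synchronization/waitable_event.h"

    int QueryOnMainThread(
        scoped_refptr<base::SingleThreadTaskRunner> main_task_runner) {
      int result = 0;
      base::WaitableEvent event(base::WaitableEvent::ResetPolicy::MANUAL,
                                base::WaitableEvent::InitialState::NOT_SIGNALED);
      main_task_runner->PostTask(
          FROM_HERE,
          base::BindOnce(
              [](int* result, base::WaitableEvent* event) {
                *result = 42;  // Read state that only the main thread owns.
                event->Signal();
              },
              &result, &event));
      event.Wait();  // Blocks; only safe off the main thread.
      return result;
    }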
diff --git a/chromium/media/audio/cras/audio_manager_chromeos.h b/chromium/media/audio/cras/audio_manager_chromeos.h
index 72fd689951c..40da0ad6f82 100644
--- a/chromium/media/audio/cras/audio_manager_chromeos.h
+++ b/chromium/media/audio/cras/audio_manager_chromeos.h
@@ -11,10 +11,10 @@
#include <string>
#include <vector>
+#include "ash/components/audio/audio_device.h"
#include "base/compiler_specific.h"
#include "base/macros.h"
#include "base/memory/ref_counted.h"
-#include "chromeos/audio/audio_device.h"
#include "media/audio/audio_manager_base.h"
#include "media/audio/cras/audio_manager_cras_base.h"
@@ -62,13 +62,12 @@ class MEDIA_EXPORT AudioManagerChromeOS : public AudioManagerCrasBase {
void GetAudioDeviceNamesImpl(bool is_input, AudioDeviceNames* device_names);
- std::string GetHardwareDeviceFromDeviceId(
- const chromeos::AudioDeviceList& devices,
- bool is_input,
- const std::string& device_id);
+ std::string GetHardwareDeviceFromDeviceId(const ash::AudioDeviceList& devices,
+ bool is_input,
+ const std::string& device_id);
- void GetAudioDevices(chromeos::AudioDeviceList* devices);
- void GetAudioDevicesOnMainThread(chromeos::AudioDeviceList* devices,
+ void GetAudioDevices(ash::AudioDeviceList* devices);
+ void GetAudioDevicesOnMainThread(ash::AudioDeviceList* devices,
base::WaitableEvent* event);
uint64_t GetPrimaryActiveInputNode();
uint64_t GetPrimaryActiveOutputNode();
diff --git a/chromium/media/audio/cras/audio_manager_cras.cc b/chromium/media/audio/cras/audio_manager_cras.cc
index a78ad96743e..1f1b15e6e97 100644
--- a/chromium/media/audio/cras/audio_manager_cras.cc
+++ b/chromium/media/audio/cras/audio_manager_cras.cc
@@ -92,6 +92,9 @@ AudioParameters AudioManagerCras::GetInputStreamParameters(
AudioParameters::HardwareCapabilities(limits::kMinAudioBufferSize,
limits::kMaxAudioBufferSize));
+ if (CrasHasKeyboardMic())
+ params.set_effects(AudioParameters::KEYBOARD_MIC);
+
// Allow experimentation with system echo cancellation with all devices,
// but enable it by default on devices that actually support it.
params.set_effects(params.effects() |
@@ -126,6 +129,47 @@ std::string AudioManagerCras::GetDefaultOutputDeviceID() {
return base::NumberToString(GetPrimaryActiveOutputNode());
}
+std::string AudioManagerCras::GetGroupIDInput(const std::string& device_id) {
+ for (const auto& device : CrasGetAudioDevices(DeviceType::kInput)) {
+ if (base::NumberToString(device.id) == device_id ||
+ (AudioDeviceDescription::IsDefaultDevice(device_id) && device.active)) {
+ return device.dev_name;
+ }
+ }
+ return "";
+}
+
+std::string AudioManagerCras::GetGroupIDOutput(const std::string& device_id) {
+ for (const auto& device : CrasGetAudioDevices(DeviceType::kOutput)) {
+ if (base::NumberToString(device.id) == device_id ||
+ (AudioDeviceDescription::IsDefaultDevice(device_id) && device.active)) {
+ return device.dev_name;
+ }
+ }
+ return "";
+}
+
+std::string AudioManagerCras::GetAssociatedOutputDeviceID(
+ const std::string& input_device_id) {
+ if (AudioDeviceDescription::IsDefaultDevice(input_device_id)) {
+ // Note: the default input should not be associated with any output, as this
+ // may lead to accidental uses of a pinned stream.
+ return "";
+ }
+
+ std::string device_name = GetGroupIDInput(input_device_id);
+
+ if (device_name.empty())
+ return "";
+
+ // Now search for an output device with the same device name.
+ for (const auto& device : CrasGetAudioDevices(DeviceType::kOutput)) {
+ if (device.dev_name == device_name)
+ return base::NumberToString(device.id);
+ }
+ return "";
+}
+
AudioParameters AudioManagerCras::GetPreferredOutputStreamParameters(
const std::string& output_device_id,
const AudioParameters& input_params) {
@@ -144,6 +188,9 @@ AudioParameters AudioManagerCras::GetPreferredOutputStreamParameters(
}
if (!buffer_size) // Not user-provided.
+ buffer_size = CrasGetDefaultOutputBufferSize();
+
+ if (buffer_size <= 0)
buffer_size = kDefaultOutputBufferSize;
return AudioParameters(
@@ -154,17 +201,23 @@ AudioParameters AudioManagerCras::GetPreferredOutputStreamParameters(
}
uint64_t AudioManagerCras::GetPrimaryActiveInputNode() {
- DCHECK(GetTaskRunner()->BelongsToCurrentThread());
+ for (const auto& device : CrasGetAudioDevices(DeviceType::kInput)) {
+ if (device.active)
+ return device.id;
+ }
return 0;
}
uint64_t AudioManagerCras::GetPrimaryActiveOutputNode() {
- DCHECK(GetTaskRunner()->BelongsToCurrentThread());
+ for (const auto& device : CrasGetAudioDevices(DeviceType::kOutput)) {
+ if (device.active)
+ return device.id;
+ }
return 0;
}
bool AudioManagerCras::IsDefault(const std::string& device_id, bool is_input) {
- return device_id == AudioDeviceDescription::kDefaultDeviceId;
+ return AudioDeviceDescription::IsDefaultDevice(device_id);
}
enum CRAS_CLIENT_TYPE AudioManagerCras::GetClientType() {
diff --git a/chromium/media/audio/cras/audio_manager_cras.h b/chromium/media/audio/cras/audio_manager_cras.h
index 3afb730c530..448f0a25312 100644
--- a/chromium/media/audio/cras/audio_manager_cras.h
+++ b/chromium/media/audio/cras/audio_manager_cras.h
@@ -33,6 +33,10 @@ class MEDIA_EXPORT AudioManagerCras : public AudioManagerCrasBase {
const std::string& device_id) override;
std::string GetDefaultInputDeviceID() override;
std::string GetDefaultOutputDeviceID() override;
+ std::string GetGroupIDInput(const std::string& device_id) override;
+ std::string GetGroupIDOutput(const std::string& device_id) override;
+ std::string GetAssociatedOutputDeviceID(
+ const std::string& input_device_id) override;
// AudioManagerCrasBase implementation.
bool IsDefault(const std::string& device_id, bool is_input) override;
diff --git a/chromium/media/audio/cras/cras_input_unittest.cc b/chromium/media/audio/cras/cras_input_unittest.cc
index 9aedd4966fc..04d4deafe09 100644
--- a/chromium/media/audio/cras/cras_input_unittest.cc
+++ b/chromium/media/audio/cras/cras_input_unittest.cc
@@ -7,6 +7,7 @@
#include <memory>
#include <string>
+#include "ash/components/audio/cras_audio_handler.h"
#include "base/macros.h"
#include "base/run_loop.h"
#include "base/synchronization/waitable_event.h"
@@ -14,7 +15,6 @@
#include "base/test/test_timeouts.h"
#include "base/threading/thread_task_runner_handle.h"
#include "base/time/time.h"
-#include "chromeos/audio/cras_audio_handler.h"
#include "chromeos/dbus/audio/fake_cras_audio_client.h"
#include "media/audio/audio_device_description.h"
#include "media/audio/cras/audio_manager_chromeos.h"
@@ -66,14 +66,14 @@ class CrasInputStreamTest : public testing::Test {
protected:
CrasInputStreamTest() {
chromeos::CrasAudioClient::InitializeFake();
- chromeos::CrasAudioHandler::InitializeForTesting();
+ ash::CrasAudioHandler::InitializeForTesting();
mock_manager_.reset(new StrictMock<MockAudioManagerCrasInput>());
base::RunLoop().RunUntilIdle();
}
~CrasInputStreamTest() override {
mock_manager_->Shutdown();
- chromeos::CrasAudioHandler::Shutdown();
+ ash::CrasAudioHandler::Shutdown();
chromeos::CrasAudioClient::Shutdown();
}
diff --git a/chromium/media/audio/cras/cras_unified_unittest.cc b/chromium/media/audio/cras/cras_unified_unittest.cc
index d6a8ca702bd..cd0c3669fb2 100644
--- a/chromium/media/audio/cras/cras_unified_unittest.cc
+++ b/chromium/media/audio/cras/cras_unified_unittest.cc
@@ -7,6 +7,7 @@
#include <memory>
#include <string>
+#include "ash/components/audio/cras_audio_handler.h"
#include "base/macros.h"
#include "base/run_loop.h"
#include "base/synchronization/waitable_event.h"
@@ -14,7 +15,6 @@
#include "base/test/test_timeouts.h"
#include "base/threading/thread_task_runner_handle.h"
#include "base/time/time.h"
-#include "chromeos/audio/cras_audio_handler.h"
#include "chromeos/dbus/audio/cras_audio_client.h"
#include "media/audio/audio_device_description.h"
#include "media/audio/cras/audio_manager_chromeos.h"
@@ -62,14 +62,14 @@ class CrasUnifiedStreamTest : public testing::Test {
protected:
CrasUnifiedStreamTest() {
chromeos::CrasAudioClient::InitializeFake();
- chromeos::CrasAudioHandler::InitializeForTesting();
+ ash::CrasAudioHandler::InitializeForTesting();
mock_manager_.reset(new StrictMock<MockAudioManagerCras>());
base::RunLoop().RunUntilIdle();
}
~CrasUnifiedStreamTest() override {
mock_manager_->Shutdown();
- chromeos::CrasAudioHandler::Shutdown();
+ ash::CrasAudioHandler::Shutdown();
chromeos::CrasAudioClient::Shutdown();
}
diff --git a/chromium/media/audio/cras/cras_util.cc b/chromium/media/audio/cras/cras_util.cc
index f17766d5e38..a6a9fa31d70 100644
--- a/chromium/media/audio/cras/cras_util.cc
+++ b/chromium/media/audio/cras/cras_util.cc
@@ -15,6 +15,11 @@ namespace media {
namespace {
+const char kInternalInputVirtualDevice[] = "Built-in mic";
+const char kInternalOutputVirtualDevice[] = "Built-in speaker";
+const char kHeadphoneLineOutVirtualDevice[] = "Headphone/Line Out";
+const char kKeyBoardMic[] = "KEYBOARD_MIC";
+
// Returns whether an input or output audio device is for simple usage like
// playback or recording for the user. In contrast, audio devices such as
// loopback, always-on keyword recognition (HOTWORD), and keyboard mic are not for simple
@@ -63,10 +68,43 @@ CrasDevice::CrasDevice(const cras_ionode_info* node,
DeviceType type)
: type(type) {
id = cras_make_node_id(node->iodev_idx, node->ionode_idx);
+ active = node->active;
name = std::string(node->name);
// If the name of the node is not meaningful, use the device name instead.
if (name.empty() || name == "(default)")
name = dev->name;
+ dev_name = dev->name;
+}
+
+// Creates a CrasDevice based on the node list.
+// If there is only one node attached to this device, create it directly.
+// If there are two nodes, create a virtual device instead.
+CrasDevice::CrasDevice(const std::vector<cras_ionode_info>& nodes,
+ const cras_iodev_info* dev,
+ DeviceType type)
+ : CrasDevice(&nodes[0], dev, type) {
+ if (nodes.size() == 1)
+ return;
+
+ if (nodes.size() > 2) {
+ LOG(WARNING) << dev->name << " has more than 2 nodes";
+ return;
+ }
+
+ if (nodes[0].type_enum == CRAS_NODE_TYPE_LINEOUT ||
+ nodes[1].type_enum == CRAS_NODE_TYPE_LINEOUT) {
+ name = kHeadphoneLineOutVirtualDevice;
+ } else if (nodes[0].type_enum == CRAS_NODE_TYPE_INTERNAL_SPEAKER ||
+ nodes[1].type_enum == CRAS_NODE_TYPE_INTERNAL_SPEAKER) {
+ name = kInternalOutputVirtualDevice;
+ } else if (nodes[0].type_enum == CRAS_NODE_TYPE_MIC ||
+ nodes[1].type_enum == CRAS_NODE_TYPE_MIC) {
+ name = kInternalInputVirtualDevice;
+ } else {
+ LOG(WARNING) << "Failed to create virtual device for " << dev->name;
+ }
+
+ active = nodes[0].active || nodes[1].active;
}
std::vector<CrasDevice> CrasGetAudioDevices(DeviceType type) {
@@ -90,24 +128,55 @@ std::vector<CrasDevice> CrasGetAudioDevices(DeviceType type) {
}
if (rc < 0) {
LOG(ERROR) << "Failed to get devices: " << std::strerror(rc);
+ CrasDisconnect(&client);
return devices;
}
- for (size_t i = 0; i < num_nodes; i++) {
- if (!nodes[i].plugged || !IsForSimpleUsage(nodes[i].type_enum))
- continue;
- for (size_t j = 0; j < num_devs; j++) {
- if (nodes[i].iodev_idx == devs[j].idx) {
- devices.emplace_back(&nodes[i], &devs[j], type);
- break;
- }
+ for (size_t i = 0; i < num_devs; i++) {
+ std::vector<cras_ionode_info> dev_nodes;
+ for (size_t j = 0; j < num_nodes; j++) {
+ if (!nodes[j].plugged || !IsForSimpleUsage(nodes[j].type_enum))
+ continue;
+ if (devs[i].idx == nodes[j].iodev_idx)
+ dev_nodes.emplace_back(nodes[j]);
}
+ if (dev_nodes.empty())
+ continue;
+ devices.emplace_back(dev_nodes, &devs[i], type);
}
CrasDisconnect(&client);
return devices;
}
+bool CrasHasKeyboardMic() {
+ cras_client* client = CrasConnect();
+ if (!client)
+ return false;
+
+ struct cras_iodev_info devs[CRAS_MAX_IODEVS];
+ struct cras_ionode_info nodes[CRAS_MAX_IONODES];
+ size_t num_devs = CRAS_MAX_IODEVS, num_nodes = CRAS_MAX_IONODES;
+
+ int rc =
+ cras_client_get_input_devices(client, devs, nodes, &num_devs, &num_nodes);
+ if (rc < 0) {
+ LOG(ERROR) << "Failed to get devices: " << std::strerror(rc);
+ CrasDisconnect(&client);
+ return false;
+ }
+
+ for (size_t i = 0; i < num_nodes; i++) {
+ if (std::string(nodes[i].type) == kKeyBoardMic) {
+ CrasDisconnect(&client);
+ return true;
+ }
+ }
+
+ CrasDisconnect(&client);
+ return false;
+}
+
int CrasGetAecSupported() {
cras_client* client = CrasConnect();
if (!client)
@@ -129,4 +198,15 @@ int CrasGetAecGroupId() {
return rc;
}
+int CrasGetDefaultOutputBufferSize() {
+ cras_client* client = CrasConnect();
+ if (!client)
+ return -1;
+
+ int rc = cras_client_get_default_output_buffer_size(client);
+ CrasDisconnect(&client);
+
+ return rc;
+}
+
} // namespace media
diff --git a/chromium/media/audio/cras/cras_util.h b/chromium/media/audio/cras/cras_util.h
index 99c791bc4d1..60add4e4486 100644
--- a/chromium/media/audio/cras/cras_util.h
+++ b/chromium/media/audio/cras/cras_util.h
@@ -20,14 +20,22 @@ struct CrasDevice {
explicit CrasDevice(const cras_ionode_info* node,
const cras_iodev_info* dev,
DeviceType type);
+ explicit CrasDevice(const std::vector<cras_ionode_info>& nodes,
+ const cras_iodev_info* dev,
+ DeviceType type);
DeviceType type;
uint64_t id;
+ int32_t active;
std::string name;
+ std::string dev_name;
};
// Enumerates all devices of |type|.
std::vector<CrasDevice> CrasGetAudioDevices(DeviceType type);
+// Returns whether there is a keyboard mic in CRAS.
+bool CrasHasKeyboardMic();
+
// Returns whether system AEC is supported in CRAS.
int CrasGetAecSupported();
@@ -35,6 +43,9 @@ int CrasGetAecSupported();
// returned.
int CrasGetAecGroupId();
+// Returns the default output buffer size.
+int CrasGetDefaultOutputBufferSize();
+
} // namespace media
#endif // MEDIA_AUDIO_CRAS_CRAS_UTIL_H_
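
Taken together, the new helpers let AudioManagerCras query CRAS directly instead of going through CrasAudioHandler. A hedged usage sketch against the declarations above (DumpCrasState is hypothetical):

    #include "base/logging.h"
    #include "media/audio/cras/cras_util.h"

    void DumpCrasState() {
      // Enumerates plugged, simple-usage output devices.
      for (const media::CrasDevice& device :
           media::CrasGetAudioDevices(media::DeviceType::kOutput)) {
        LOG(INFO) << device.name << " (" << device.dev_name << ")"
                  << (device.active ? " [active]" : "");
      }

      if (media::CrasHasKeyboardMic())
        LOG(INFO) << "Keyboard mic present.";

      // Each helper opens and closes its own CRAS connection; a value <= 0
      // means the query failed and the caller should fall back to a default.
      int buffer_size = media::CrasGetDefaultOutputBufferSize();
      if (buffer_size > 0)
        LOG(INFO) << "Default output buffer size: " << buffer_size;
    }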
diff --git a/chromium/media/audio/linux/audio_manager_linux.cc b/chromium/media/audio/linux/audio_manager_linux.cc
index 84fc4317017..def8da7dc2a 100644
--- a/chromium/media/audio/linux/audio_manager_linux.cc
+++ b/chromium/media/audio/linux/audio_manager_linux.cc
@@ -15,7 +15,7 @@
#include "media/audio/alsa/audio_manager_alsa.h"
#endif
-#if defined(USE_CRAS) && BUILDFLAG(IS_ASH)
+#if defined(USE_CRAS) && BUILDFLAG(IS_CHROMEOS_ASH)
#include "media/audio/cras/audio_manager_chromeos.h"
#elif defined(USE_CRAS)
#include "media/audio/cras/audio_manager_cras.h"
@@ -48,7 +48,7 @@ std::unique_ptr<media::AudioManager> CreateAudioManager(
#if defined(USE_CRAS)
if (base::CommandLine::ForCurrentProcess()->HasSwitch(switches::kUseCras)) {
UMA_HISTOGRAM_ENUMERATION("Media.LinuxAudioIO", kCras, kAudioIOMax + 1);
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
return std::make_unique<AudioManagerChromeOS>(std::move(audio_thread),
audio_log_factory);
#else
diff --git a/chromium/media/audio/mac/audio_auhal_mac.cc b/chromium/media/audio/mac/audio_auhal_mac.cc
index 3663cab37c8..1e8efd71539 100644
--- a/chromium/media/audio/mac/audio_auhal_mac.cc
+++ b/chromium/media/audio/mac/audio_auhal_mac.cc
@@ -182,6 +182,7 @@ AUHALStream::~AUHALStream() {
DCHECK(thread_checker_.CalledOnValidThread());
CHECK(!audio_unit_);
+ base::AutoLock al(lock_);
ReportAndResetStats();
}
@@ -208,6 +209,20 @@ bool AUHALStream::Open() {
void AUHALStream::Close() {
DCHECK(thread_checker_.CalledOnValidThread());
+
+ if (audio_unit_) {
+ Stop();
+
+ // Clear the render callback to try to prevent any callbacks from coming
+ // in after we've called stop. https://crbug.com/737527.
+ AURenderCallbackStruct callback = {0};
+ auto result = AudioUnitSetProperty(
+ audio_unit_->audio_unit(), kAudioUnitProperty_SetRenderCallback,
+ kAudioUnitScope_Input, AUElement::OUTPUT, &callback, sizeof(callback));
+ OSSTATUS_DLOG_IF(ERROR, result != noErr, result)
+ << "Failed to clear input callback.";
+ }
+
audio_unit_.reset();
// Inform the audio manager that we have been closed. This will cause our
// destruction. Also include the device ID as a signal to the audio manager
@@ -225,6 +240,7 @@ void AUHALStream::Start(AudioSourceCallback* callback) {
}
if (!stopped_) {
+ base::AutoLock al(lock_);
CHECK_EQ(source_, callback);
return;
}
@@ -243,8 +259,12 @@ void AUHALStream::Start(AudioSourceCallback* callback) {
}
stopped_ = false;
- audio_fifo_.reset();
- source_ = callback;
+
+ {
+ base::AutoLock al(lock_);
+ audio_fifo_.reset();
+ source_ = callback;
+ }
OSStatus result = AudioOutputUnitStart(audio_unit_->audio_unit());
if (result == noErr)
@@ -268,10 +288,16 @@ void AUHALStream::Stop() {
OSStatus result = AudioOutputUnitStop(audio_unit_->audio_unit());
OSSTATUS_DLOG_IF(ERROR, result != noErr, result)
<< "AudioOutputUnitStop() failed.";
- if (result != noErr)
- source_->OnError(AudioSourceCallback::ErrorType::kUnknown);
- ReportAndResetStats();
- source_ = nullptr;
+
+ {
+ base::AutoLock al(lock_);
+ if (result != noErr)
+ source_->OnError(AudioSourceCallback::ErrorType::kUnknown);
+
+ ReportAndResetStats();
+ source_ = nullptr;
+ }
+
stopped_ = true;
}
@@ -295,6 +321,13 @@ OSStatus AUHALStream::Render(AudioUnitRenderActionFlags* flags,
TRACE_EVENT2("audio", "AUHALStream::Render", "input buffer size",
number_of_frames_, "output buffer size", number_of_frames);
+ base::AutoLock al(lock_);
+
+ // There's no documentation on what we should return here, but if we're here
+ // something is wrong, so just return an AudioUnit error that looks reasonable.
+ if (!source_)
+ return kAudioUnitErr_Uninitialized;
+
UpdatePlayoutTimestamp(output_time_stamp);
// If the stream parameters change for any reason, we need to insert a FIFO
@@ -331,6 +364,7 @@ OSStatus AUHALStream::Render(AudioUnitRenderActionFlags* flags,
}
void AUHALStream::ProvideInput(int frame_delay, AudioBus* dest) {
+ lock_.AssertAcquired();
DCHECK(source_);
const base::TimeTicks playout_time =
@@ -377,6 +411,8 @@ base::TimeTicks AUHALStream::GetPlayoutTime(
}
void AUHALStream::UpdatePlayoutTimestamp(const AudioTimeStamp* timestamp) {
+ lock_.AssertAcquired();
+
if ((timestamp->mFlags & kAudioTimeStampSampleTimeValid) == 0)
return;
@@ -402,6 +438,8 @@ void AUHALStream::UpdatePlayoutTimestamp(const AudioTimeStamp* timestamp) {
}
void AUHALStream::ReportAndResetStats() {
+ lock_.AssertAcquired();
+
if (!last_sample_time_)
return; // No stats gathered to report.
diff --git a/chromium/media/audio/mac/audio_auhal_mac.h b/chromium/media/audio/mac/audio_auhal_mac.h
index faa5ef28916..c71400cce40 100644
--- a/chromium/media/audio/mac/audio_auhal_mac.h
+++ b/chromium/media/audio/mac/audio_auhal_mac.h
@@ -28,6 +28,7 @@
#include "base/cancelable_callback.h"
#include "base/compiler_specific.h"
#include "base/macros.h"
+#include "base/synchronization/lock.h"
#include "base/threading/thread_checker.h"
#include "base/time/time.h"
#include "media/audio/audio_io.h"
@@ -136,6 +137,9 @@ class AUHALStream : public AudioOutputStream {
const AudioParameters params_;
+ // We may get some callbacks after AudioUnitStop() has been called.
+ base::Lock lock_;
+
// Size of audio buffer requested at construction. The actual buffer size
// is given by |actual_io_buffer_frame_size_| and it can differ from the
// requested size.
@@ -144,10 +148,10 @@ class AUHALStream : public AudioOutputStream {
// Stores the number of frames that we actually get callbacks for.
// This may be different from what we ask for, so we use this for stats in
// order to understand how often this happens and what the typical values are.
- size_t number_of_frames_requested_;
+ size_t number_of_frames_requested_ GUARDED_BY(lock_);
// Pointer to the object that will provide the audio samples.
- AudioSourceCallback* source_;
+ AudioSourceCallback* source_ GUARDED_BY(lock_);
// Holds the stream format details such as bitrate.
AudioStreamBasicDescription output_format_;
@@ -173,7 +177,7 @@ class AUHALStream : public AudioOutputStream {
// Dynamically allocated FIFO used when CoreAudio asks for unexpected frame
// sizes.
- std::unique_ptr<AudioPullFifo> audio_fifo_;
+ std::unique_ptr<AudioPullFifo> audio_fifo_ GUARDED_BY(lock_);
// Current playout time. Set by Render().
base::TimeTicks current_playout_time_;
@@ -191,11 +195,11 @@ class AUHALStream : public AudioOutputStream {
// These variables are only touched on the callback thread and then read
// in the dtor (when no longer receiving callbacks).
// NOTE: Float64 and UInt32 types are used for native API compatibility.
- Float64 last_sample_time_;
- UInt32 last_number_of_frames_;
- UInt32 total_lost_frames_;
- UInt32 largest_glitch_frames_;
- int glitches_detected_;
+ Float64 last_sample_time_ GUARDED_BY(lock_);
+ UInt32 last_number_of_frames_ GUARDED_BY(lock_);
+ UInt32 total_lost_frames_ GUARDED_BY(lock_);
+ UInt32 largest_glitch_frames_ GUARDED_BY(lock_);
+ int glitches_detected_ GUARDED_BY(lock_);
// Used to defer Start() to work around http://crbug.com/160920.
base::CancelableOnceClosure deferred_start_cb_;
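
The GUARDED_BY annotations above let Clang's thread-safety analysis verify the new locking discipline at compile time: any access to an annotated member without |lock_| held is a build error. A minimal sketch of the idiom, independent of AUHALStream (GlitchStats is hypothetical):

    #include "base/synchronization/lock.h"
    #include "base/thread_annotations.h"

    class GlitchStats {
     public:
      // Called from the real-time render callback.
      void OnRender(int frames) {
        base::AutoLock al(lock_);
        last_number_of_frames_ = frames;
      }

      // Called from the owning thread during Stop()/teardown.
      int TakeLastFrameCount() {
        base::AutoLock al(lock_);
        int frames = last_number_of_frames_;
        last_number_of_frames_ = 0;
        return frames;
      }

     private:
      base::Lock lock_;
      // Fails to compile if accessed without |lock_| held.
      int last_number_of_frames_ GUARDED_BY(lock_) = 0;
    };

Taking a lock on a real-time audio callback is normally avoided; presumably it is acceptable here because the critical sections are short and contended only around Start()/Stop().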
diff --git a/chromium/media/audio/pulse/audio_manager_pulse.cc b/chromium/media/audio/pulse/audio_manager_pulse.cc
index cc02c2c9655..6705e04cb95 100644
--- a/chromium/media/audio/pulse/audio_manager_pulse.cc
+++ b/chromium/media/audio/pulse/audio_manager_pulse.cc
@@ -190,7 +190,7 @@ std::string AudioManagerPulse::GetDefaultOutputDeviceID() {
std::string AudioManagerPulse::GetAssociatedOutputDeviceID(
const std::string& input_device_id) {
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
return AudioManagerBase::GetAssociatedOutputDeviceID(input_device_id);
#else
DCHECK(AudioManager::Get()->GetTaskRunner()->BelongsToCurrentThread());
diff --git a/chromium/media/audio/win/audio_low_latency_input_win.cc b/chromium/media/audio/win/audio_low_latency_input_win.cc
index 8aaa71d6a7d..a5e5daf3ce7 100644
--- a/chromium/media/audio/win/audio_low_latency_input_win.cc
+++ b/chromium/media/audio/win/audio_low_latency_input_win.cc
@@ -918,7 +918,7 @@ HRESULT WASAPIAudioInputStream::SetCaptureDevice() {
hr =
enumerator->GetDefaultAudioEndpoint(data_flow, role, &endpoint_device_);
} else {
- hr = enumerator->GetDevice(base::UTF8ToUTF16(device_id_).c_str(),
+ hr = enumerator->GetDevice(base::UTF8ToWide(device_id_).c_str(),
&endpoint_device_);
}
if (FAILED(hr)) {
diff --git a/chromium/media/audio/win/core_audio_util_win.cc b/chromium/media/audio/win/core_audio_util_win.cc
index 47597841dc0..0281167b4be 100644
--- a/chromium/media/audio/win/core_audio_util_win.cc
+++ b/chromium/media/audio/win/core_audio_util_win.cc
@@ -477,7 +477,7 @@ ComPtr<IMMDevice> CreateDeviceInternal(const std::string& device_id,
// interface for the *rendering* endpoint device.
hr = device_enum->GetDefaultAudioEndpoint(eRender, role, &endpoint_device);
} else {
- hr = device_enum->GetDevice(base::UTF8ToUTF16(device_id).c_str(),
+ hr = device_enum->GetDevice(base::UTF8ToWide(device_id).c_str(),
&endpoint_device);
}
DVLOG_IF(1, FAILED(hr)) << "Create Device failed: " << std::hex << hr;
@@ -685,7 +685,7 @@ std::string CoreAudioUtil::ErrorToString(HRESULT hresult) {
// method returns the HRESULT minus 0x80040200; otherwise, it returns zero.
return base::StringPrintf("HRESULT: 0x%08lX, WCode: %u, message: \"%s\"",
error.Error(), error.WCode(),
- base::UTF16ToUTF8(error.ErrorMessage()).c_str());
+ base::WideToUTF8(error.ErrorMessage()).c_str());
}
std::string CoreAudioUtil::WaveFormatToString(const WaveFormatWrapper format) {
diff --git a/chromium/media/audio/win/waveout_output_win.cc b/chromium/media/audio/win/waveout_output_win.cc
index d36d8571af0..c2c96fcba62 100644
--- a/chromium/media/audio/win/waveout_output_win.cc
+++ b/chromium/media/audio/win/waveout_output_win.cc
@@ -344,8 +344,10 @@ void PCMWaveOutAudioOutputStream::QueueNextPacket(WAVEHDR *buffer) {
// Note: If this ever changes to output raw float, the data must be clipped
// and sanitized since it may come from an untrusted source such as NaCl.
audio_bus_->Scale(volume_);
- audio_bus_->ToInterleaved(
- frames_filled, format_.Format.wBitsPerSample / 8, buffer->lpData);
+
+ DCHECK_EQ(format_.Format.wBitsPerSample, 16);
+ audio_bus_->ToInterleaved<SignedInt16SampleTypeTraits>(
+ frames_filled, reinterpret_cast<int16_t*>(buffer->lpData));
buffer->dwBufferLength = used * format_.Format.nChannels / channels_;
} else {
diff --git a/chromium/media/base/BUILD.gn b/chromium/media/base/BUILD.gn
index f3ec9d91fd9..bcdea7612cb 100644
--- a/chromium/media/base/BUILD.gn
+++ b/chromium/media/base/BUILD.gn
@@ -156,8 +156,6 @@ source_set("base") {
"encryption_scheme.h",
"fake_audio_worker.cc",
"fake_audio_worker.h",
- "fallback_video_decoder.cc",
- "fallback_video_decoder.h",
"feedback_signal_accumulator.h",
"flinging_controller.h",
"format_utils.cc",
@@ -230,6 +228,8 @@ source_set("base") {
"multi_channel_resampler.h",
"null_video_sink.cc",
"null_video_sink.h",
+ "offloading_audio_encoder.cc",
+ "offloading_audio_encoder.h",
"offloading_video_encoder.cc",
"offloading_video_encoder.h",
"output_device_info.cc",
@@ -265,6 +265,8 @@ source_set("base") {
"seekable_buffer.h",
"serial_runner.cc",
"serial_runner.h",
+ "shared_memory_pool.cc",
+ "shared_memory_pool.h",
"silent_sink_suspender.cc",
"silent_sink_suspender.h",
"simple_sync_token_client.cc",
@@ -285,6 +287,9 @@ source_set("base") {
"subsample_entry.h",
"supported_types.cc",
"supported_types.h",
+ "supported_types.h",
+ "supported_video_decoder_config.cc",
+ "supported_video_decoder_config.h",
"text_cue.cc",
"text_cue.h",
"text_ranges.cc",
@@ -361,8 +366,10 @@ source_set("base") {
"//build:chromeos_buildflags",
"//components/system_media_controls/linux/buildflags",
"//gpu/command_buffer/client:interface_base",
+ "//gpu/command_buffer/client:raster_interface",
"//gpu/command_buffer/common",
"//gpu/ipc/common:common",
+ "//skia",
"//third_party/libyuv",
"//third_party/widevine/cdm:headers",
"//ui/display:display",
@@ -480,6 +487,7 @@ static_library("test_support") {
"//media/base/android:test_support",
"//media/filters:test_support",
"//media/formats:test_support",
+ "//media/renderers:test_support",
"//media/video:test_support",
]
testonly = true
@@ -553,7 +561,6 @@ source_set("unit_tests") {
"audio_sample_types_unittest.cc",
"audio_shifter_unittest.cc",
"audio_timestamp_helper_unittest.cc",
- "bind_to_current_loop_unittest.cc",
"bit_reader_unittest.cc",
"callback_holder_unittest.cc",
"callback_registry_unittest.cc",
@@ -567,7 +574,6 @@ source_set("unit_tests") {
"djb2_unittest.cc",
"fake_audio_worker_unittest.cc",
"fake_demuxer_stream_unittest.cc",
- "fallback_video_decoder_unittest.cc",
"feedback_signal_accumulator_unittest.cc",
"frame_rate_estimator_unittest.cc",
"key_systems_unittest.cc",
@@ -578,6 +584,7 @@ source_set("unit_tests") {
"moving_average_unittest.cc",
"multi_channel_resampler_unittest.cc",
"null_video_sink_unittest.cc",
+ "offloading_audio_encoder_unittest.cc",
"offloading_video_encoder_unittest.cc",
"pipeline_impl_unittest.cc",
"ranges_unittest.cc",
@@ -585,12 +592,14 @@ source_set("unit_tests") {
"renderer_factory_selector_unittest.cc",
"seekable_buffer_unittest.cc",
"serial_runner_unittest.cc",
+ "shared_memory_pool_unittest.cc",
"silent_sink_suspender_unittest.cc",
"sinc_resampler_unittest.cc",
"status_unittest.cc",
"stream_parser_unittest.cc",
"subsample_entry_unittest.cc",
"supported_types_unittest.cc",
+ "supported_video_decoder_config_unittest.cc",
"text_ranges_unittest.cc",
"text_renderer_unittest.cc",
"time_delta_interpolator_unittest.cc",
@@ -634,7 +643,10 @@ source_set("unit_tests") {
if (is_win) {
sources += [ "win/dxgi_device_scope_handle_unittest.cc" ]
- deps += [ "//media/base/win:media_foundation_util" ]
+ deps += [
+ "//media/base/win:media_foundation_util",
+ "//ui/events:test_support",
+ ]
libs = [
"d3d11.lib",
"mfplat.lib",
diff --git a/chromium/media/base/OWNERS b/chromium/media/base/OWNERS
index f4ecf46c4ae..7f62fe2fb3e 100644
--- a/chromium/media/base/OWNERS
+++ b/chromium/media/base/OWNERS
@@ -1,4 +1,3 @@
per-file *audio*=file://media/audio/OWNERS
-per-file media_switches.*=beccahughes@chromium.org
per-file media_switches.*=mlamouri@chromium.org
diff --git a/chromium/media/base/android/BUILD.gn b/chromium/media/base/android/BUILD.gn
index 39766b7c387..886dbbf132e 100644
--- a/chromium/media/base/android/BUILD.gn
+++ b/chromium/media/base/android/BUILD.gn
@@ -163,7 +163,7 @@ if (is_android) {
":media_java_resources",
"//base:base_java",
"//base:jni_java",
- "//third_party/android_deps:androidx_annotation_annotation_java",
+ "//third_party/androidx:androidx_annotation_annotation_java",
]
annotation_processor_deps = [ "//base/android/jni_generator:jni_processor" ]
srcjar_deps = [
@@ -198,7 +198,7 @@ if (is_android) {
android_library("display_java") {
sources = [ "java/src/org/chromium/media/DisplayCompat.java" ]
- deps = [ "//third_party/android_deps:androidx_annotation_annotation_java" ]
+ deps = [ "//third_party/androidx:androidx_annotation_annotation_java" ]
}
junit_binary("media_base_junit_tests") {
@@ -211,7 +211,7 @@ if (is_android) {
":media_java",
"//base:base_java",
"//base:base_java_test_support",
- "//third_party/android_deps:androidx_test_runner_java",
+ "//third_party/androidx:androidx_test_runner_java",
"//third_party/junit",
]
}
diff --git a/chromium/media/base/android/media_drm_bridge.cc b/chromium/media/base/android/media_drm_bridge.cc
index e5d7978a28f..182dc13ad28 100644
--- a/chromium/media/base/android/media_drm_bridge.cc
+++ b/chromium/media/base/android/media_drm_bridge.cc
@@ -914,7 +914,7 @@ MediaDrmBridge::~MediaDrmBridge() {
}
// Rejects all pending promises.
- cdm_promise_adapter_.Clear();
+ cdm_promise_adapter_.Clear(CdmPromiseAdapter::ClearReason::kDestruction);
}
MediaDrmBridge::SecurityLevel MediaDrmBridge::GetSecurityLevel() {
diff --git a/chromium/media/base/android/media_player_bridge.cc b/chromium/media/base/android/media_player_bridge.cc
index eeab111c274..41d0ab6356d 100644
--- a/chromium/media/base/android/media_player_bridge.cc
+++ b/chromium/media/base/android/media_player_bridge.cc
@@ -80,6 +80,8 @@ MediaPlayerBridge::MediaPlayerBridge(const GURL& url,
url_(url),
site_for_cookies_(site_for_cookies),
top_frame_origin_(top_frame_origin),
+ pending_retrieve_cookies_(false),
+ should_prepare_on_retrieved_cookies_(false),
user_agent_(user_agent),
hide_url_log_(hide_url_log),
width_(0),
@@ -127,6 +129,7 @@ void MediaPlayerBridge::Initialize() {
media::MediaResourceGetter* resource_getter =
client_->GetMediaResourceGetter();
+ pending_retrieve_cookies_ = true;
resource_getter->GetCookies(
url_, site_for_cookies_, top_frame_origin_,
base::BindOnce(&MediaPlayerBridge::OnCookiesRetrieved,
@@ -164,6 +167,17 @@ void MediaPlayerBridge::SetVideoSurface(gl::ScopedJavaSurface surface) {
surface_.j_surface());
}
+void MediaPlayerBridge::SetPlaybackRate(double playback_rate) {
+ if (j_media_player_bridge_.is_null())
+ return;
+
+ JNIEnv* env = base::android::AttachCurrentThread();
+ CHECK(env);
+
+ Java_MediaPlayerBridge_setPlaybackRate(env, j_media_player_bridge_,
+ playback_rate);
+}
+
void MediaPlayerBridge::Prepare() {
DCHECK(j_media_player_bridge_.is_null());
@@ -200,31 +214,52 @@ void MediaPlayerBridge::SetDataSource(const std::string& url) {
OnMediaError(MEDIA_ERROR_FORMAT);
return;
}
- } else {
- // Create a Java String for the URL.
- ScopedJavaLocalRef<jstring> j_url_string =
- ConvertUTF8ToJavaString(env, url);
-
- const std::string data_uri_prefix("data:");
- if (base::StartsWith(url, data_uri_prefix, base::CompareCase::SENSITIVE)) {
- if (!Java_MediaPlayerBridge_setDataUriDataSource(
- env, j_media_player_bridge_, j_url_string)) {
- OnMediaError(MEDIA_ERROR_FORMAT);
- }
- return;
- }
- ScopedJavaLocalRef<jstring> j_cookies = ConvertUTF8ToJavaString(
- env, cookies_);
- ScopedJavaLocalRef<jstring> j_user_agent = ConvertUTF8ToJavaString(
- env, user_agent_);
+ if (!Java_MediaPlayerBridge_prepareAsync(env, j_media_player_bridge_))
+ OnMediaError(MEDIA_ERROR_FORMAT);
+
+ return;
+ }
- if (!Java_MediaPlayerBridge_setDataSource(env, j_media_player_bridge_,
- j_url_string, j_cookies,
- j_user_agent, hide_url_log_)) {
+ // Create a Java String for the URL.
+ ScopedJavaLocalRef<jstring> j_url_string = ConvertUTF8ToJavaString(env, url);
+
+ const std::string data_uri_prefix("data:");
+ if (base::StartsWith(url, data_uri_prefix, base::CompareCase::SENSITIVE)) {
+ if (!Java_MediaPlayerBridge_setDataUriDataSource(
+ env, j_media_player_bridge_, j_url_string)) {
OnMediaError(MEDIA_ERROR_FORMAT);
- return;
}
+ return;
+ }
+
+ // Cookies may not have been retrieved yet; delay prepare until they are
+ // retrieved.
+ if (pending_retrieve_cookies_) {
+ should_prepare_on_retrieved_cookies_ = true;
+ return;
+ }
+ SetDataSourceInternal();
+}
+
+void MediaPlayerBridge::SetDataSourceInternal() {
+ DCHECK(!pending_retrieve_cookies_);
+
+ JNIEnv* env = base::android::AttachCurrentThread();
+ CHECK(env);
+
+ ScopedJavaLocalRef<jstring> j_cookies =
+ ConvertUTF8ToJavaString(env, cookies_);
+ ScopedJavaLocalRef<jstring> j_user_agent =
+ ConvertUTF8ToJavaString(env, user_agent_);
+ ScopedJavaLocalRef<jstring> j_url_string =
+ ConvertUTF8ToJavaString(env, url_.spec());
+
+ if (!Java_MediaPlayerBridge_setDataSource(env, j_media_player_bridge_,
+ j_url_string, j_cookies,
+ j_user_agent, hide_url_log_)) {
+ OnMediaError(MEDIA_ERROR_FORMAT);
+ return;
}
if (!Java_MediaPlayerBridge_prepareAsync(env, j_media_player_bridge_))
@@ -267,9 +302,15 @@ void MediaPlayerBridge::OnDidSetDataUriDataSource(
void MediaPlayerBridge::OnCookiesRetrieved(const std::string& cookies) {
cookies_ = cookies;
+ pending_retrieve_cookies_ = false;
client_->GetMediaResourceGetter()->GetAuthCredentials(
url_, base::BindOnce(&MediaPlayerBridge::OnAuthCredentialsRetrieved,
weak_factory_.GetWeakPtr()));
+
+ if (should_prepare_on_retrieved_cookies_) {
+ SetDataSourceInternal();
+ should_prepare_on_retrieved_cookies_ = false;
+ }
}
void MediaPlayerBridge::OnAuthCredentialsRetrieved(
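
The cookie plumbing above is a small defer-until-ready handshake: Prepare() may race cookie retrieval, so a boolean records the deferred request and OnCookiesRetrieved() replays it. A stripped-down sketch of the same pattern (all names hypothetical):

    class DeferredPreparer {
     public:
      void StartFetch() {
        pending_fetch_ = true;
        // Kick off the async fetch; its completion callback is OnFetched().
      }

      void Prepare() {
        if (pending_fetch_) {
          // Too early: remember the request and replay it from OnFetched().
          should_prepare_on_fetched_ = true;
          return;
        }
        PrepareInternal();
      }

      void OnFetched() {
        pending_fetch_ = false;
        if (should_prepare_on_fetched_) {
          should_prepare_on_fetched_ = false;
          PrepareInternal();
        }
      }

     private:
      void PrepareInternal() { /* Uses the fetched data. */ }

      bool pending_fetch_ = false;
      bool should_prepare_on_fetched_ = false;
    };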
diff --git a/chromium/media/base/android/media_player_bridge.h b/chromium/media/base/android/media_player_bridge.h
index 8757e09b24a..52648721aa1 100644
--- a/chromium/media/base/android/media_player_bridge.h
+++ b/chromium/media/base/android/media_player_bridge.h
@@ -92,6 +92,7 @@ class MEDIA_EXPORT MediaPlayerBridge {
// Methods to partially expose the underlying MediaPlayer.
void SetVideoSurface(gl::ScopedJavaSurface surface);
+ void SetPlaybackRate(double playback_rate);
void Pause();
void SeekTo(base::TimeDelta timestamp);
base::TimeDelta GetCurrentTime();
@@ -153,6 +154,7 @@ class MEDIA_EXPORT MediaPlayerBridge {
// Set the data source for the media player.
void SetDataSource(const std::string& url);
+ void SetDataSourceInternal();
// Functions that implements media player control.
void StartInternal();
@@ -220,6 +222,12 @@ class MEDIA_EXPORT MediaPlayerBridge {
// Used to check for cookie content settings.
url::Origin top_frame_origin_;
+ // Whether we are still waiting to retrieve cookies for |url_|.
+ bool pending_retrieve_cookies_;
+
+ // Whether to prepare once cookies have been retrieved.
+ bool should_prepare_on_retrieved_cookies_;
+
// User agent string to be used for media player.
const std::string user_agent_;
diff --git a/chromium/media/base/async_destroy_video_decoder.h b/chromium/media/base/async_destroy_video_decoder.h
index 921109b08f1..948fa8c5a7c 100644
--- a/chromium/media/base/async_destroy_video_decoder.h
+++ b/chromium/media/base/async_destroy_video_decoder.h
@@ -35,6 +35,11 @@ class AsyncDestroyVideoDecoder final : public VideoDecoder {
T::DestroyAsync(std::move(wrapped_decoder_));
}
+ VideoDecoderType GetDecoderType() const override {
+ DCHECK(wrapped_decoder_);
+ return wrapped_decoder_->GetDecoderType();
+ }
+
std::string GetDisplayName() const override {
DCHECK(wrapped_decoder_);
return wrapped_decoder_->GetDisplayName();
diff --git a/chromium/media/base/audio_block_fifo.cc b/chromium/media/base/audio_block_fifo.cc
index 85fa6d27b93..ad49cff0637 100644
--- a/chromium/media/base/audio_block_fifo.cc
+++ b/chromium/media/base/audio_block_fifo.cc
@@ -116,13 +116,33 @@ void AudioBlockFifo::PushInternal(const void* source,
std::min(block_frames_ - write_pos_, frames_to_push);
if (source) {
- // Deinterleave the content to the FIFO and update the |write_pos_|.
- current_block->FromInterleavedPartial(source_ptr, write_pos_, push_frames,
- bytes_per_sample);
+ // Deinterleave the content to the FIFO.
+ switch (bytes_per_sample) {
+ case 1:
+ current_block->FromInterleavedPartial<UnsignedInt8SampleTypeTraits>(
+ source_ptr, write_pos_, push_frames);
+ break;
+ case 2:
+ current_block->FromInterleavedPartial<SignedInt16SampleTypeTraits>(
+ reinterpret_cast<const int16_t*>(source_ptr), write_pos_,
+ push_frames);
+ break;
+ case 4:
+ current_block->FromInterleavedPartial<SignedInt32SampleTypeTraits>(
+ reinterpret_cast<const int32_t*>(source_ptr), write_pos_,
+ push_frames);
+ break;
+ default:
+ NOTREACHED() << "Unsupported bytes per sample encountered: "
+ << bytes_per_sample;
+ current_block->ZeroFramesPartial(write_pos_, push_frames);
+ }
} else {
current_block->ZeroFramesPartial(write_pos_, push_frames);
}
+
write_pos_ = (write_pos_ + push_frames) % block_frames_;
+
if (!write_pos_) {
// The current block is completely filled, increment |write_block_| and
// |available_blocks_|.
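
The switch above migrates the FIFO off the deprecated |bytes_per_sample| overload and onto the traits-templated AudioBus API (the deprecated overloads are deleted later in this patch). A small sketch of the call-site migration, assuming interleaved 16-bit input (PushInt16 is hypothetical):

    #include "media/base/audio_bus.h"
    #include "media/base/audio_sample_types.h"

    void PushInt16(media::AudioBus* bus, const int16_t* interleaved,
                   int start_frame, int frames) {
      // Before: bus->FromInterleavedPartial(interleaved, start_frame,
      //                                     frames, 2 /* bytes_per_sample */);
      // After: the sample format is carried by the traits type, so the
      // conversion is resolved at compile time instead of a runtime switch.
      bus->FromInterleavedPartial<media::SignedInt16SampleTypeTraits>(
          interleaved, start_frame, frames);
    }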
diff --git a/chromium/media/base/audio_buffer.cc b/chromium/media/base/audio_buffer.cc
index ad6bd58c9c7..99cbfb6af4c 100644
--- a/chromium/media/base/audio_buffer.cc
+++ b/chromium/media/base/audio_buffer.cc
@@ -14,6 +14,29 @@
namespace media {
+namespace {
+
+// TODO(https://crbug.com/619628): Use vector instructions to speed this up.
+template <class SourceSampleTypeTraits>
+void CopyConvertFromInterleaved(
+ const typename SourceSampleTypeTraits::ValueType* source_buffer,
+ int num_frames_to_write,
+ const std::vector<float*> dest) {
+ const int channels = dest.size();
+ for (int ch = 0; ch < channels; ++ch) {
+ float* dest_data = dest[ch];
+ for (int target_frame_index = 0, read_pos_in_source = ch;
+ target_frame_index < num_frames_to_write;
+ ++target_frame_index, read_pos_in_source += channels) {
+ auto source_value = source_buffer[read_pos_in_source];
+ dest_data[target_frame_index] =
+ SourceSampleTypeTraits::ToFloat(source_value);
+ }
+ }
+}
+
+} // namespace
+
static base::TimeDelta CalculateDuration(int frames, double sample_rate) {
DCHECK_GT(sample_rate, 0);
return base::TimeDelta::FromMicroseconds(
@@ -332,6 +355,72 @@ void AudioBuffer::ReadFrames(int frames_to_copy,
}
}
+void AudioBuffer::ReadAllFrames(const std::vector<float*>& dest) const {
+ // Deinterleave each channel (if necessary) and convert to 32-bit
+ // floating-point with nominal range -1.0 -> +1.0 (if necessary).
+
+ // |dest| must have the same number of channels, and the number of frames
+ // specified must be in range.
+ DCHECK(!end_of_stream());
+ CHECK_EQ(dest.size(), static_cast<size_t>(channel_count_));
+ DCHECK(!IsBitstreamFormat());
+
+ if (!data_) {
+ // Special case for an empty buffer.
+ for (int i = 0; i < channel_count_; ++i)
+ memset(dest[i], 0, adjusted_frame_count_ * sizeof(float));
+ return;
+ }
+
+ // Note: The conversion steps below will clip values to [-1.0f, 1.0f].
+
+ if (sample_format_ == kSampleFormatPlanarF32) {
+ for (int ch = 0; ch < channel_count_; ++ch) {
+ float* dest_data = dest[ch];
+ const float* source_data =
+ reinterpret_cast<const float*>(channel_data_[ch]);
+ for (int i = 0; i < adjusted_frame_count_; ++i)
+ dest_data[i] = Float32SampleTypeTraits::FromFloat(source_data[i]);
+ }
+ return;
+ }
+
+ if (sample_format_ == kSampleFormatPlanarS16) {
+ // Format is planar signed16. Convert each value into float and insert into
+ // output channel data.
+ for (int ch = 0; ch < channel_count_; ++ch) {
+ const int16_t* source_data =
+ reinterpret_cast<const int16_t*>(channel_data_[ch]);
+ float* dest_data = dest[ch];
+ for (int i = 0; i < adjusted_frame_count_; ++i)
+ dest_data[i] = SignedInt16SampleTypeTraits::ToFloat(source_data[i]);
+ }
+ return;
+ }
+
+ const uint8_t* source_data = data_.get();
+
+ if (sample_format_ == kSampleFormatF32) {
+ CopyConvertFromInterleaved<Float32SampleTypeTraits>(
+ reinterpret_cast<const float*>(source_data), adjusted_frame_count_,
+ dest);
+ } else if (sample_format_ == kSampleFormatU8) {
+ CopyConvertFromInterleaved<UnsignedInt8SampleTypeTraits>(
+ source_data, adjusted_frame_count_, dest);
+ } else if (sample_format_ == kSampleFormatS16) {
+ CopyConvertFromInterleaved<SignedInt16SampleTypeTraits>(
+ reinterpret_cast<const int16_t*>(source_data), adjusted_frame_count_,
+ dest);
+ } else if (sample_format_ == kSampleFormatS24 ||
+ sample_format_ == kSampleFormatS32) {
+ CopyConvertFromInterleaved<SignedInt32SampleTypeTraits>(
+ reinterpret_cast<const int32_t*>(source_data), adjusted_frame_count_,
+ dest);
+ } else {
+ NOTREACHED() << "Unsupported audio sample type: " << sample_format_;
+ }
+}
+
void AudioBuffer::TrimStart(int frames_to_trim) {
CHECK_GE(frames_to_trim, 0);
CHECK_LE(frames_to_trim, adjusted_frame_count_);
diff --git a/chromium/media/base/audio_buffer.h b/chromium/media/base/audio_buffer.h
index 55adc82837f..9a90ef1d88d 100644
--- a/chromium/media/base/audio_buffer.h
+++ b/chromium/media/base/audio_buffer.h
@@ -131,6 +131,14 @@ class MEDIA_EXPORT AudioBuffer
int dest_frame_offset,
AudioBus* dest) const;
+ // Copy all |adjusted_frame_count_| frames into |dest|. Each of |dest|'s
+ // elements corresponds to a different channel. It's the caller's
+ // responsibility to make sure enough memory per channel was allocated.
+ // The frames are converted and clipped from their source format into planar
+ // float32 data.
+ // Note: Bitstream formats are not supported.
+ void ReadAllFrames(const std::vector<float*>& dest) const;
+
// Trim an AudioBuffer by removing |frames_to_trim| frames from the start.
// Timestamp and duration are adjusted to reflect the fewer frames.
// Note that repeated calls to TrimStart() may result in timestamp() and
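
A usage sketch of the new accessor, mirroring how the unit tests below wrap an AudioBus's channel pointers (DecodeToPlanarFloat is hypothetical; the bus must match the buffer's channel count and hold at least frame_count() frames):

    #include <vector>

    #include "media/base/audio_buffer.h"
    #include "media/base/audio_bus.h"

    void DecodeToPlanarFloat(const media::AudioBuffer& buffer,
                             media::AudioBus* bus) {
      std::vector<float*> dest(bus->channels());
      for (int ch = 0; ch < bus->channels(); ++ch)
        dest[ch] = bus->channel(ch);
      // Converts and clips every frame into planar float32.
      buffer.ReadAllFrames(dest);
    }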
diff --git a/chromium/media/base/audio_buffer_unittest.cc b/chromium/media/base/audio_buffer_unittest.cc
index c769bd5adc2..3a30db21d2d 100644
--- a/chromium/media/base/audio_buffer_unittest.cc
+++ b/chromium/media/base/audio_buffer_unittest.cc
@@ -7,6 +7,7 @@
#include <limits>
#include <memory>
+#include "base/test/gtest_util.h"
#include "media/base/audio_buffer.h"
#include "media/base/audio_bus.h"
#include "media/base/test_helpers.h"
@@ -36,6 +37,14 @@ static void VerifyBusWithOffset(AudioBus* bus,
}
}
+static std::vector<float*> WrapChannelsAsVector(AudioBus* bus) {
+ std::vector<float*> channels(bus->channels());
+ for (size_t ch = 0; ch < channels.size(); ++ch)
+ channels[ch] = bus->channel(ch);
+
+ return channels;
+}
+
static void VerifyBus(AudioBus* bus,
int frames,
float start,
@@ -253,6 +262,15 @@ TEST(AudioBufferTest, ReadBitstream) {
EXPECT_EQ(frames, bus->GetBitstreamFrames());
EXPECT_EQ(data_size, bus->GetBitstreamDataSize());
VerifyBitstreamAudioBus(bus.get(), data_size, 1, 1);
+
+#if GTEST_HAS_DEATH_TEST
+ auto vector_backing = AudioBus::Create(channels, frames);
+ std::vector<float*> wrapped_channels =
+ WrapChannelsAsVector(vector_backing.get());
+
+ // ReadAllFrames() does not support bitstream formats.
+ EXPECT_DCHECK_DEATH(buffer->ReadAllFrames(wrapped_channels));
+#endif // GTEST_HAS_DEATH_TEST
}
TEST(AudioBufferTest, ReadU8) {
@@ -272,6 +290,12 @@ TEST(AudioBufferTest, ReadU8) {
for (int i = 0; i < frames; ++i)
buffer->ReadFrames(1, i, i, bus.get());
VerifyBus(bus.get(), frames, 0, 1.0f / 127.0f);
+
+ // Verify ReadAllFrames() works for U8.
+ bus->Zero();
+ std::vector<float*> wrapped_channels = WrapChannelsAsVector(bus.get());
+ buffer->ReadAllFrames(wrapped_channels);
+ VerifyBus(bus.get(), frames, 0, 1.0f / 127.0f);
}
TEST(AudioBufferTest, ReadS16) {
@@ -293,6 +317,13 @@ TEST(AudioBufferTest, ReadS16) {
buffer->ReadFrames(1, i, i, bus.get());
VerifyBus(bus.get(), frames, 1.0f / std::numeric_limits<int16_t>::max(),
1.0f / std::numeric_limits<int16_t>::max());
+
+ // Verify ReadAllFrames() works for S16.
+ bus->Zero();
+ std::vector<float*> wrapped_channels = WrapChannelsAsVector(bus.get());
+ buffer->ReadAllFrames(wrapped_channels);
+ VerifyBus(bus.get(), frames, 1.0f / std::numeric_limits<int16_t>::max(),
+ 1.0f / std::numeric_limits<int16_t>::max());
}
TEST(AudioBufferTest, ReadS32) {
@@ -313,6 +344,13 @@ TEST(AudioBufferTest, ReadS32) {
buffer->ReadFrames(10, 10, 0, bus.get());
VerifyBus(bus.get(), 10, 11.0f / std::numeric_limits<int32_t>::max(),
1.0f / std::numeric_limits<int32_t>::max());
+
+ // Verify ReadAllFrames() works for S32.
+ bus->Zero();
+ std::vector<float*> wrapped_channels = WrapChannelsAsVector(bus.get());
+ buffer->ReadAllFrames(wrapped_channels);
+ VerifyBus(bus.get(), frames, 1.0f / std::numeric_limits<int32_t>::max(),
+ 1.0f / std::numeric_limits<int32_t>::max());
}
TEST(AudioBufferTest, ReadF32) {
@@ -336,6 +374,12 @@ TEST(AudioBufferTest, ReadF32) {
bus->Zero();
buffer->ReadFrames(10, 10, 0, bus.get());
VerifyBus(bus.get(), 10, 11, 1, ValueType::kFloat);
+
+ // Verify ReadAllFrames() works for F32.
+ bus->Zero();
+ std::vector<float*> wrapped_channels = WrapChannelsAsVector(bus.get());
+ buffer->ReadAllFrames(wrapped_channels);
+ VerifyBus(bus.get(), frames, 1, 1, ValueType::kFloat);
}
TEST(AudioBufferTest, ReadS16Planar) {
@@ -369,6 +413,13 @@ TEST(AudioBufferTest, ReadS16Planar) {
buffer->ReadFrames(0, 10, 0, bus.get());
VerifyBus(bus.get(), frames, 1.0f / std::numeric_limits<int16_t>::max(),
1.0f / std::numeric_limits<int16_t>::max());
+
+ // Verify ReadAllFrames() works for S16Planar.
+ bus->Zero();
+ std::vector<float*> wrapped_channels = WrapChannelsAsVector(bus.get());
+ buffer->ReadAllFrames(wrapped_channels);
+ VerifyBus(bus.get(), frames, 1.0f / std::numeric_limits<int16_t>::max(),
+ 1.0f / std::numeric_limits<int16_t>::max());
}
TEST(AudioBufferTest, ReadF32Planar) {
@@ -397,6 +448,12 @@ TEST(AudioBufferTest, ReadF32Planar) {
bus->Zero();
buffer->ReadFrames(20, 50, 0, bus.get());
VerifyBus(bus.get(), 20, 51, 1, ValueType::kFloat);
+
+ // Verify ReadAllFrames() works for F32Planar.
+ bus->Zero();
+ std::vector<float*> wrapped_channels = WrapChannelsAsVector(bus.get());
+ buffer->ReadAllFrames(wrapped_channels);
+ VerifyBus(bus.get(), frames, 1, 1, ValueType::kFloat);
}
TEST(AudioBufferTest, EmptyBuffer) {
@@ -411,10 +468,19 @@ TEST(AudioBufferTest, EmptyBuffer) {
EXPECT_EQ(base::TimeDelta::FromMilliseconds(10), buffer->duration());
EXPECT_FALSE(buffer->end_of_stream());
- // Read all 100 frames from the buffer. All data should be 0.
+ // Read all frames from the buffer. All data should be 0.
std::unique_ptr<AudioBus> bus = AudioBus::Create(channels, frames);
buffer->ReadFrames(frames, 0, 0, bus.get());
VerifyBus(bus.get(), frames, 0, 0);
+
+ // Set some data to confirm the overwrite.
+ std::vector<float*> wrapped_channels = WrapChannelsAsVector(bus.get());
+ for (float* wrapped_channel : wrapped_channels)
+ memset(wrapped_channel, 123, frames * sizeof(float));
+
+ // Verify ReadAllFrames() overwrites empty buffers.
+ buffer->ReadAllFrames(wrapped_channels);
+ VerifyBus(bus.get(), frames, 0, 0);
}
TEST(AudioBufferTest, TrimEmptyBuffer) {
diff --git a/chromium/media/base/audio_bus.cc b/chromium/media/base/audio_bus.cc
index 694b53e8c24..76ee5c24218 100644
--- a/chromium/media/base/audio_bus.cc
+++ b/chromium/media/base/audio_bus.cc
@@ -273,81 +273,6 @@ void AudioBus::BuildChannelData(int channels, int aligned_frames, float* data) {
channel_data_.push_back(data + i * aligned_frames);
}
-// Forwards to non-deprecated version.
-void AudioBus::FromInterleaved(const void* source,
- int frames,
- int bytes_per_sample) {
- DCHECK(!is_bitstream_format_);
- switch (bytes_per_sample) {
- case 1:
- FromInterleaved<UnsignedInt8SampleTypeTraits>(
- reinterpret_cast<const uint8_t*>(source), frames);
- break;
- case 2:
- FromInterleaved<SignedInt16SampleTypeTraits>(
- reinterpret_cast<const int16_t*>(source), frames);
- break;
- case 4:
- FromInterleaved<SignedInt32SampleTypeTraits>(
- reinterpret_cast<const int32_t*>(source), frames);
- break;
- default:
- NOTREACHED() << "Unsupported bytes per sample encountered: "
- << bytes_per_sample;
- ZeroFrames(frames);
- }
-}
-
-// Forwards to non-deprecated version.
-void AudioBus::FromInterleavedPartial(const void* source,
- int start_frame,
- int frames,
- int bytes_per_sample) {
- DCHECK(!is_bitstream_format_);
- switch (bytes_per_sample) {
- case 1:
- FromInterleavedPartial<UnsignedInt8SampleTypeTraits>(
- reinterpret_cast<const uint8_t*>(source), start_frame, frames);
- break;
- case 2:
- FromInterleavedPartial<SignedInt16SampleTypeTraits>(
- reinterpret_cast<const int16_t*>(source), start_frame, frames);
- break;
- case 4:
- FromInterleavedPartial<SignedInt32SampleTypeTraits>(
- reinterpret_cast<const int32_t*>(source), start_frame, frames);
- break;
- default:
- NOTREACHED() << "Unsupported bytes per sample encountered: "
- << bytes_per_sample;
- ZeroFramesPartial(start_frame, frames);
- }
-}
-
-// Forwards to non-deprecated version.
-void AudioBus::ToInterleaved(int frames,
- int bytes_per_sample,
- void* dest) const {
- DCHECK(!is_bitstream_format_);
- switch (bytes_per_sample) {
- case 1:
- ToInterleaved<UnsignedInt8SampleTypeTraits>(
- frames, reinterpret_cast<uint8_t*>(dest));
- break;
- case 2:
- ToInterleaved<SignedInt16SampleTypeTraits>(
- frames, reinterpret_cast<int16_t*>(dest));
- break;
- case 4:
- ToInterleaved<SignedInt32SampleTypeTraits>(
- frames, reinterpret_cast<int32_t*>(dest));
- break;
- default:
- NOTREACHED() << "Unsupported bytes per sample encountered: "
- << bytes_per_sample;
- }
-}
-
void AudioBus::CopyTo(AudioBus* dest) const {
dest->set_is_bitstream_format(is_bitstream_format());
if (is_bitstream_format()) {
diff --git a/chromium/media/base/audio_bus.h b/chromium/media/base/audio_bus.h
index 024fee8968f..1c520279146 100644
--- a/chromium/media/base/audio_bus.h
+++ b/chromium/media/base/audio_bus.h
@@ -104,11 +104,6 @@ class MEDIA_SHMEM_EXPORT AudioBus {
const typename SourceSampleTypeTraits::ValueType* source_buffer,
int num_frames_to_write);
- // DEPRECATED (https://crbug.com/580391)
- // Please use the version templated with SourceSampleTypeTraits instead.
- // TODO(chfremer): Remove (https://crbug.com/619623)
- void FromInterleaved(const void* source, int frames, int bytes_per_sample);
-
// Similar to FromInterleaved...(), but overwrites the frames starting at a
// given offset |write_offset_in_frames| and does not zero out frames that are
// not overwritten.
@@ -118,12 +113,6 @@ class MEDIA_SHMEM_EXPORT AudioBus {
int write_offset_in_frames,
int num_frames_to_write);
- // DEPRECATED (https://crbug.com/580391)
- // Please use the version templated with SourceSampleTypeTraits instead.
- // TODO(chfremer): Remove (https://crbug.com/619623)
- void FromInterleavedPartial(const void* source, int start_frame, int frames,
- int bytes_per_sample);
-
// Reads the sample values stored in this AudioBus instance and places them
// into the given |dest_buffer| in interleaved format using the sample format
// specified by TargetSampleTypeTraits. For a list of ready-to-use
@@ -134,11 +123,6 @@ class MEDIA_SHMEM_EXPORT AudioBus {
int num_frames_to_read,
typename TargetSampleTypeTraits::ValueType* dest_buffer) const;
- // DEPRECATED (https://crbug.com/580391)
- // Please use the version templated with TargetSampleTypeTraits instead.
- // TODO(chfremer): Remove (https://crbug.com/619623)
- void ToInterleaved(int frames, int bytes_per_sample, void* dest) const;
-
// Similar to ToInterleaved(), but reads the frames starting at a given
// offset |read_offset_in_frames|.
template <class TargetSampleTypeTraits>
diff --git a/chromium/media/base/audio_bus_unittest.cc b/chromium/media/base/audio_bus_unittest.cc
index 79bb101b2eb..38d5cbd09da 100644
--- a/chromium/media/base/audio_bus_unittest.cc
+++ b/chromium/media/base/audio_bus_unittest.cc
@@ -335,40 +335,6 @@ TEST_F(AudioBusTest, FromInterleaved) {
kTestVectorFrameCount * sizeof(*expected->channel(ch)));
}
- // Test deprecated version that takes |bytes_per_sample| as an input.
- {
- SCOPED_TRACE("uint8_t");
- bus->Zero();
- bus->FromInterleaved(kTestVectorUint8, kTestVectorFrameCount,
- sizeof(*kTestVectorUint8));
-
- // Biased uint8_t calculations have poor precision, so the epsilon here is
- // slightly more permissive than int16_t and int32_t calculations.
- VerifyAreEqualWithEpsilon(bus.get(), expected.get(),
- 1.0f / (std::numeric_limits<uint8_t>::max() - 1));
- }
- {
- SCOPED_TRACE("int16_t");
- bus->Zero();
- bus->FromInterleaved(kTestVectorInt16, kTestVectorFrameCount,
- sizeof(*kTestVectorInt16));
- VerifyAreEqualWithEpsilon(
- bus.get(), expected.get(),
- 1.0f / (std::numeric_limits<uint16_t>::max() + 1.0f));
- }
- {
- SCOPED_TRACE("int32_t");
- bus->Zero();
- bus->FromInterleaved(kTestVectorInt32, kTestVectorFrameCount,
- sizeof(*kTestVectorInt32));
-
- VerifyAreEqualWithEpsilon(
- bus.get(), expected.get(),
- 1.0f / (std::numeric_limits<uint32_t>::max() + 1.0f));
- }
-
- // Test non-deprecated version that takes SampleTypeTraits as a template
- // parameter.
{
SCOPED_TRACE("UnsignedInt8SampleTypeTraits");
bus->Zero();
@@ -424,18 +390,6 @@ TEST_F(AudioBusTest, FromInterleavedPartial) {
kPartialFrames * sizeof(*expected->channel(ch)));
}
- // Test deprecated version that takes |bytes_per_sample| as an input.
- {
- SCOPED_TRACE("int32_t");
- bus->Zero();
- bus->FromInterleavedPartial(
- kTestVectorInt32 + kPartialStart * bus->channels(), kPartialStart,
- kPartialFrames, sizeof(*kTestVectorInt32));
- VerifyAreEqual(bus.get(), expected.get());
- }
-
- // Test non-deprecated version that takes SampleTypeTraits as a template
- // parameter.
{
SCOPED_TRACE("SignedInt32SampleTypeTraits");
bus->Zero();
@@ -456,43 +410,6 @@ TEST_F(AudioBusTest, ToInterleaved) {
kTestVectorFrameCount * sizeof(*bus->channel(ch)));
}
- // Test deprecated version that takes |bytes_per_sample| as an input.
- {
- SCOPED_TRACE("uint8_t");
- uint8_t test_array[base::size(kTestVectorUint8)];
- bus->ToInterleaved(bus->frames(), sizeof(*kTestVectorUint8), test_array);
- ASSERT_EQ(0,
- memcmp(test_array, kTestVectorUint8, sizeof(kTestVectorUint8)));
- }
- {
- SCOPED_TRACE("int16_t");
- int16_t test_array[base::size(kTestVectorInt16)];
- bus->ToInterleaved(bus->frames(), sizeof(*kTestVectorInt16), test_array);
- ASSERT_EQ(0,
- memcmp(test_array, kTestVectorInt16, sizeof(kTestVectorInt16)));
- }
- {
- SCOPED_TRACE("int32_t");
- int32_t test_array[base::size(kTestVectorInt32)];
- bus->ToInterleaved(bus->frames(), sizeof(*kTestVectorInt32), test_array);
-
- // Some compilers get better precision than others on the half-max test, so
- // let the test pass with an off by one check on the half-max.
- int32_t alternative_acceptable_result[base::size(kTestVectorInt32)];
- memcpy(alternative_acceptable_result, kTestVectorInt32,
- sizeof(kTestVectorInt32));
- ASSERT_EQ(alternative_acceptable_result[4],
- std::numeric_limits<int32_t>::max() / 2);
- alternative_acceptable_result[4]++;
-
- ASSERT_TRUE(
- memcmp(test_array, kTestVectorInt32, sizeof(kTestVectorInt32)) == 0 ||
- memcmp(test_array, alternative_acceptable_result,
- sizeof(alternative_acceptable_result)) == 0);
- }
-
- // Test non-deprecated version that takes SampleTypeTraits as a template
- // parameter.
{
SCOPED_TRACE("UnsignedInt8SampleTypeTraits");
uint8_t test_array[base::size(kTestVectorUint8)];
diff --git a/chromium/media/base/audio_decoder.h b/chromium/media/base/audio_decoder.h
index 7361e251036..245ac2979a0 100644
--- a/chromium/media/base/audio_decoder.h
+++ b/chromium/media/base/audio_decoder.h
@@ -87,6 +87,9 @@ class MEDIA_EXPORT AudioDecoder : public Decoder {
// Returns true if the decoder needs bitstream conversion before decoding.
virtual bool NeedsBitstreamConversion() const;
+ // Returns the type of the decoder for statistics recording purposes.
+ virtual AudioDecoderType GetDecoderType() const = 0;
+
private:
DISALLOW_COPY_AND_ASSIGN(AudioDecoder);
};
diff --git a/chromium/media/base/audio_encoder.cc b/chromium/media/base/audio_encoder.cc
index 64b5cab9f0a..8e0b81cf3c5 100644
--- a/chromium/media/base/audio_encoder.cc
+++ b/chromium/media/base/audio_encoder.cc
@@ -5,13 +5,15 @@
#include "media/base/audio_encoder.h"
#include "base/logging.h"
+#include "base/no_destructor.h"
#include "base/time/time.h"
#include "media/base/audio_timestamp_helper.h"
namespace media {
-// -----------------------------------------------------------------------------
-// EncodedAudioBuffer:
+AudioEncoder::Options::Options() = default;
+AudioEncoder::Options::Options(const Options&) = default;
+AudioEncoder::Options::~Options() = default;
EncodedAudioBuffer::EncodedAudioBuffer(const AudioParameters& params,
std::unique_ptr<uint8_t[]> data,
@@ -26,18 +28,7 @@ EncodedAudioBuffer::EncodedAudioBuffer(EncodedAudioBuffer&&) = default;
EncodedAudioBuffer::~EncodedAudioBuffer() = default;
-// -----------------------------------------------------------------------------
-// AudioEncoder:
-
-AudioEncoder::AudioEncoder(const AudioParameters& input_params,
- EncodeCB encode_callback,
- StatusCB status_callback)
- : audio_input_params_(input_params),
- encode_callback_(std::move(encode_callback)),
- status_callback_(std::move(status_callback)) {
- DCHECK(audio_input_params_.IsValid());
- DCHECK(!encode_callback_.is_null());
- DCHECK(!status_callback_.is_null());
+AudioEncoder::AudioEncoder() {
DETACH_FROM_SEQUENCE(sequence_checker_);
}
@@ -45,35 +36,4 @@ AudioEncoder::~AudioEncoder() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
}
-void AudioEncoder::EncodeAudio(const AudioBus& audio_bus,
- base::TimeTicks capture_time) {
- DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- DCHECK_EQ(audio_bus.channels(), audio_input_params_.channels());
- DCHECK(!capture_time.is_null());
-
- DLOG_IF(ERROR,
- !last_capture_time_.is_null() &&
- ((capture_time - last_capture_time_).InSecondsF() >
- 1.5f * audio_bus.frames() / audio_input_params().sample_rate()))
- << "Possibly frames were skipped, which may result in inaccuarate "
- "timestamp calculation.";
-
- last_capture_time_ = capture_time;
-
- EncodeAudioImpl(audio_bus, capture_time);
-}
-
-void AudioEncoder::Flush() {
- DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
-
- FlushImpl();
-}
-
-base::TimeTicks AudioEncoder::ComputeTimestamp(
- int num_frames,
- base::TimeTicks capture_time) const {
- return capture_time - AudioTimestampHelper::FramesToTime(
- num_frames, audio_input_params_.sample_rate());
-}
-
} // namespace media
diff --git a/chromium/media/base/audio_encoder.h b/chromium/media/base/audio_encoder.h
index c25218c9ee8..2d3362bebca 100644
--- a/chromium/media/base/audio_encoder.h
+++ b/chromium/media/base/audio_encoder.h
@@ -6,6 +6,7 @@
#define MEDIA_BASE_AUDIO_ENCODER_H_
#include <memory>
+#include <vector>
#include "base/callback.h"
#include "base/sequence_checker.h"
@@ -46,71 +47,72 @@ struct MEDIA_EXPORT EncodedAudioBuffer {
const base::TimeTicks timestamp;
};
-// Defines an interface for audio encoders. Concrete encoders must implement the
-// EncodeAudioImpl() function.
+// Defines an interface for audio encoders.
class MEDIA_EXPORT AudioEncoder {
public:
+ struct MEDIA_EXPORT Options {
+ Options();
+ Options(const Options&);
+ ~Options();
+
+ base::Optional<int> bitrate;
+
+ int channels;
+
+ int sample_rate;
+ };
+
+ // A sequence of codec specific bytes, commonly known as extradata.
+ using CodecDescription = std::vector<uint8_t>;
+
// Signature of the callback invoked to provide the encoded audio data. It is
- // invoked on the same sequence on which EncodeAudio() is called. The utility
- // media::BindToCurrentLoop() can be used to create a callback that will be
- // invoked on the same sequence it is constructed on.
- using EncodeCB = base::RepeatingCallback<void(EncodedAudioBuffer output)>;
+ // invoked on the same sequence on which Encode() is called.
+ using OutputCB =
+ base::RepeatingCallback<void(EncodedAudioBuffer output,
+ base::Optional<CodecDescription>)>;
// Signature of the callback to report errors.
- using StatusCB = base::RepeatingCallback<void(Status error)>;
-
- // Constructs the encoder given the audio parameters of the input to this
- // encoder, and a callback to trigger to provide the encoded audio data.
- // |input_params| must be valid, and |encode_callback| and |status_callback|
- // must not be null callbacks. All calls to EncodeAudio() must happen on the
- // same sequence (usually an encoder blocking pool sequence), but the encoder
- // itself can be constructed on any sequence.
- AudioEncoder(const AudioParameters& input_params,
- EncodeCB encode_callback,
- StatusCB status_callback);
+ using StatusCB = base::OnceCallback<void(Status error)>;
+
+ AudioEncoder();
AudioEncoder(const AudioEncoder&) = delete;
AudioEncoder& operator=(const AudioEncoder&) = delete;
virtual ~AudioEncoder();
- const AudioParameters& audio_input_params() const {
- return audio_input_params_;
- }
-
- // Performs various checks before calling EncodeAudioImpl() which does the
- // actual encoding.
- void EncodeAudio(const AudioBus& audio_bus, base::TimeTicks capture_time);
+ // Initializes an AudioEncoder with the given input options, executing
+ // the |done_cb| upon completion. |output_cb| is called for each encoded audio
+ // chunk.
+ //
+ // No AudioEncoder calls should be made before |done_cb| is executed.
+ virtual void Initialize(const Options& options,
+ OutputCB output_cb,
+ StatusCB done_cb) = 0;
+
+ // Requests contents of |audio_bus| to be encoded.
+ // |capture_time| is the media time at the end of the audio piece in the
+ // |audio_bus|.
+ //
+ // |done_cb| is called upon encode completion and can possibly convey an
+ // encoding error. It doesn't depend on future calls to the encoder's methods.
+ // |done_cb| will not be called from within this method.
+ //
+ // After the input, or several inputs, have been encoded, the encoder calls
+ // |output_cb|. |output_cb| may be called before or after |done_cb|,
+ // including before Encode() returns.
+ virtual void Encode(std::unique_ptr<AudioBus> audio_bus,
+ base::TimeTicks capture_time,
+ StatusCB done_cb) = 0;
// Some encoders may choose to buffer audio frames before they encode them.
- // This function provides a mechanism to drain and encode any buffered frames
- // (if any). Must be called on the encoder sequence.
- void Flush();
+ // Requests all outputs for frames that have already been encoded to be
+ // produced via |output_cb|, and calls |done_cb| after that.
+ virtual void Flush(StatusCB done_cb) = 0;
protected:
- const EncodeCB& encode_callback() const { return encode_callback_; }
- const StatusCB& status_callback() const { return status_callback_; }
- base::TimeTicks last_capture_time() const { return last_capture_time_; }
-
- virtual void EncodeAudioImpl(const AudioBus& audio_bus,
- base::TimeTicks capture_time) = 0;
-
- virtual void FlushImpl() = 0;
-
- // Computes the timestamp of an AudioBus which has |num_frames| and was
- // captured at |capture_time|. This timestamp is the capture time of the first
- // sample in that AudioBus.
- base::TimeTicks ComputeTimestamp(int num_frames,
- base::TimeTicks capture_time) const;
-
- private:
- const AudioParameters audio_input_params_;
-
- const EncodeCB encode_callback_;
-
- const StatusCB status_callback_;
+ Options options_;
- // The capture time of the most recent |audio_bus| delivered to
- // EncodeAudio().
- base::TimeTicks last_capture_time_;
+ OutputCB output_cb_;
SEQUENCE_CHECKER(sequence_checker_);
};
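
A minimal sketch of how a caller might drive the new Initialize()/Encode()/Flush() interface; SomeAudioEncoder and the callback bodies are placeholders, not part of this change:

    // Sketch only: error handling elided; all calls on one sequence.
    AudioEncoder::Options options;
    options.channels = 2;
    options.sample_rate = 48000;
    options.bitrate = 128000;  // base::Optional<int>; may be left unset.

    auto encoder = std::make_unique<SomeAudioEncoder>();
    encoder->Initialize(
        options,
        base::BindRepeating(
            [](EncodedAudioBuffer output,
               base::Optional<AudioEncoder::CodecDescription> desc) {
              // Consume |output|; |desc| carries extradata when present.
            }),
        base::BindOnce([](Status status) { DCHECK(status.is_ok()); }));

    // Only after the Initialize() done callback has run:
    encoder->Encode(std::move(audio_bus), base::TimeTicks::Now(),
                    base::BindOnce([](Status status) {}));
    encoder->Flush(base::BindOnce([](Status status) {}));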
diff --git a/chromium/media/base/audio_fifo.cc b/chromium/media/base/audio_fifo.cc
index 75408b335fc..e1307316b98 100644
--- a/chromium/media/base/audio_fifo.cc
+++ b/chromium/media/base/audio_fifo.cc
@@ -7,6 +7,7 @@
#include <cstring>
#include "base/check_op.h"
+#include "base/trace_event/trace_event.h"
namespace media {
@@ -62,6 +63,8 @@ void AudioFifo::Push(const AudioBus* source) {
const int source_size = source->frames();
CHECK_LE(source_size + frames(), max_frames_);
+ TRACE_EVENT2(TRACE_DISABLED_BY_DEFAULT("audio"), "AudioFifo::Push", "this",
+ static_cast<void*>(this), "frames", source_size);
// Figure out if wrapping is needed and if so what segment sizes we need
// when adding the new audio bus content to the FIFO.
int append_size = 0;
@@ -99,6 +102,9 @@ void AudioFifo::Consume(AudioBus* destination,
// allocated memory in |destination| is sufficient.
CHECK_LE(frames_to_consume + start_frame, destination->frames());
+ TRACE_EVENT2(TRACE_DISABLED_BY_DEFAULT("audio"), "AudioFifo::Consume", "this",
+ static_cast<void*>(this), "frames", frames_to_consume);
+
// Figure out if wrapping is needed and if so what segment sizes we need
// when removing audio bus content from the FIFO.
int consume_size = 0;
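
The new trace events sit in a disabled-by-default category, so they only appear when a trace config opts in. A sketch, assuming the standard TRACE_DISABLED_BY_DEFAULT() expansion to "disabled-by-default-audio":

    // Sketch only: enable the category when starting tracing.
    base::trace_event::TraceConfig config(
        "disabled-by-default-audio", "record-until-full");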
diff --git a/chromium/media/base/audio_latency.cc b/chromium/media/base/audio_latency.cc
index b55a2da37e1..f15e8f95ba5 100644
--- a/chromium/media/base/audio_latency.cc
+++ b/chromium/media/base/audio_latency.cc
@@ -25,6 +25,7 @@
namespace media {
namespace {
+
#if !defined(OS_WIN)
// Taken from "Bit Twiddling Hacks"
// http://graphics.stanford.edu/~seander/bithacks.html#RoundUpPowerOf2
@@ -39,11 +40,30 @@ uint32_t RoundUpToPowerOfTwo(uint32_t v) {
return v;
}
#endif
+
+#if defined(OS_ANDROID)
+// WebAudio renderer's quantum size (frames per callback) that is used for
+// calculating the "interactive" buffer size.
+// TODO(crbug.com/988121): This number needs to be passed down from Blink when
+// user-selectable render quantum size is implemented.
+const int kWebAudioRenderQuantumSize = 128;
+
+// From media/renderers/paint_canvas_video_renderer.cc. Used to compute the
+// optimal buffer size for Pixel 3/4/5 devices, which have a HW buffer size of
+// 96 frames.
+int GCD(int a, int b) {
+ return a == 0 ? b : GCD(b % a, a);
+}
+
+int LCM(int a, int b) {
+ return a / GCD(a, b) * b;
+}
+#endif
+
} // namespace
// static
bool AudioLatency::IsResamplingPassthroughSupported(LatencyType type) {
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
return true;
#elif defined(OS_ANDROID)
// Only N MR1+ has support for OpenSLES performance modes which allow for
@@ -139,13 +159,28 @@ int AudioLatency::GetRtcBufferSize(int sample_rate, int hardware_buffer_size) {
// static
int AudioLatency::GetInteractiveBufferSize(int hardware_buffer_size) {
+ CHECK_GT(hardware_buffer_size, 0);
+
#if defined(OS_ANDROID)
// Always log this because it's relatively hard to get this
// information out.
LOG(INFO) << "audioHardwareBufferSize = " << hardware_buffer_size;
-#endif
+ if (hardware_buffer_size >= kWebAudioRenderQuantumSize)
+ return hardware_buffer_size;
+
+ // The HW buffer size is smaller than Web Audio's render quantum size, so
+ // compute the LCM to avoid glitches and regulate the workload per callback
+ // (e.g. 96 vs 128 -> 384). Also cap the buffer size to 4 render quanta
+ // (512 frames ~= 10ms at 48K) if the LCM goes beyond the interactive latency
+ // range.
+ int sensible_buffer_size = std::min(
+ LCM(hardware_buffer_size, kWebAudioRenderQuantumSize),
+ kWebAudioRenderQuantumSize * 4);
+
+ return sensible_buffer_size;
+#else
return hardware_buffer_size;
+#endif // defined(OS_ANDROID)
}
int AudioLatency::GetExactBufferSize(base::TimeDelta duration,
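
Worked examples for the new Android path, matching the test table below: a 96-frame HW buffer gives GCD(96, 128) = 32 and LCM = 96 * 128 / 32 = 384, which is under the 512-frame cap, so 384 is returned; a 100-frame buffer gives GCD(100, 128) = 4 and LCM = 3200, so the result is capped at min(3200, 128 * 4) = 512; buffers of 128 frames or more pass through unchanged.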
diff --git a/chromium/media/base/audio_latency_unittest.cc b/chromium/media/base/audio_latency_unittest.cc
index 1f627afe191..aa9ac668785 100644
--- a/chromium/media/base/audio_latency_unittest.cc
+++ b/chromium/media/base/audio_latency_unittest.cc
@@ -145,8 +145,31 @@ TEST(AudioLatency, HighLatencyBufferSizes) {
}
TEST(AudioLatency, InteractiveBufferSizes) {
- for (int i = 6400; i <= 204800; i *= 2)
- EXPECT_EQ(i / 100, AudioLatency::GetInteractiveBufferSize(i / 100));
+ // |first| is the requested buffer size and |second| is the computed
+ // "interactive" buffer size from the method.
+ std::vector<std::pair<int, int>> buffer_size_pairs = {
+#if defined(OS_ANDROID)
+ {64, 128},
+ {96, 384}, // Pixel 3, 4, 5. (See crbug.com/1090441)
+ {240, 240}, // Nexus 7
+ {144, 144}, // Galaxy Nexus
+ // Irregular device buffer sizes.
+ {100, 512},
+ {127, 512},
+#else
+ {64, 64},
+#endif // defined(OS_ANDROID)
+ {128, 128},
+ {256, 256},
+ {512, 512},
+ {1024, 1024},
+ {2048, 2048}
+ };
+
+ for (const auto& buffer_size_pair : buffer_size_pairs) {
+ EXPECT_EQ(buffer_size_pair.second,
+ AudioLatency::GetInteractiveBufferSize(buffer_size_pair.first));
+ }
}
TEST(AudioLatency, RtcBufferSizes) {
diff --git a/chromium/media/base/audio_renderer.h b/chromium/media/base/audio_renderer.h
index 533c46150d7..15701b34294 100644
--- a/chromium/media/base/audio_renderer.h
+++ b/chromium/media/base/audio_renderer.h
@@ -69,6 +69,9 @@ class MEDIA_EXPORT AudioRenderer {
// preservation when playing back at speeds other than 1.0.
virtual void SetPreservesPitch(bool preserves_pitch) = 0;
+ // Sets a flag indicating whether the audio stream was initiated by autoplay.
+ virtual void SetAutoplayInitiated(bool autoplay_initiated) = 0;
+
private:
DISALLOW_COPY_AND_ASSIGN(AudioRenderer);
};
diff --git a/chromium/media/base/bind_to_current_loop.h b/chromium/media/base/bind_to_current_loop.h
index ec1352a6786..4ddf4fc31c1 100644
--- a/chromium/media/base/bind_to_current_loop.h
+++ b/chromium/media/base/bind_to_current_loop.h
@@ -5,121 +5,30 @@
#ifndef MEDIA_BASE_BIND_TO_CURRENT_LOOP_H_
#define MEDIA_BASE_BIND_TO_CURRENT_LOOP_H_
-#include <memory>
-
-#include "base/bind.h"
+#include "base/bind_post_task.h"
+#include "base/callback.h"
#include "base/location.h"
-#include "base/sequenced_task_runner.h"
-#include "base/single_thread_task_runner.h"
#include "base/threading/sequenced_task_runner_handle.h"
-// This is a helper utility for binding a OnceCallback or RepeatingCallback to a
-// given TaskRunner. The typical use is when |a| (of class |A|) wants to hand a
-// callback such as base::BindOnce(&A::AMethod, a) to |b|, but needs to ensure
-// that when |b| executes the callback, it does so on |a|'s task_runner's
-// MessageLoop.
-//
-// Typical usage: request to be called back on the current thread:
-// other->StartAsyncProcessAndCallMeBack(
-// media::BindToLoop(task_runner, base::BindOnce(&MyClass::MyMethod, this)));
-//
-// media::BindToLoop returns the same type of callback to the given
-// callback. I.e. it returns a RepeatingCallback for a given RepeatingCallback,
-// and returns OnceCallback for a given OnceCallback.
-//
-// The function BindToCurrentLoop is shorthand to bind to the calling function's
-// current MessageLoop.
+// Helpers for using base::BindPostTask() with the TaskRunner for the current
+// sequence, i.e. base::SequencedTaskRunnerHandle::Get().
namespace media {
-namespace internal {
-
-template <typename Signature, typename... Args>
-base::OnceClosure MakeClosure(base::RepeatingCallback<Signature>* callback,
- Args&&... args) {
- return base::BindOnce(*callback, std::forward<Args>(args)...);
-}
-
-template <typename Signature, typename... Args>
-base::OnceClosure MakeClosure(base::OnceCallback<Signature>* callback,
- Args&&... args) {
- return base::BindOnce(std::move(*callback), std::forward<Args>(args)...);
-}
-
-template <typename CallbackType>
-class TrampolineHelper {
- public:
- TrampolineHelper(const base::Location& posted_from,
- scoped_refptr<base::SequencedTaskRunner> task_runner,
- CallbackType callback)
- : posted_from_(posted_from),
- task_runner_(std::move(task_runner)),
- callback_(std::move(callback)) {
- DCHECK(task_runner_);
- DCHECK(callback_);
- }
-
- template <typename... Args>
- void Run(Args... args) {
- // MakeClosure consumes |callback_| if it's OnceCallback.
- task_runner_->PostTask(
- posted_from_, MakeClosure(&callback_, std::forward<Args>(args)...));
- }
-
- ~TrampolineHelper() {
- if (callback_) {
- task_runner_->PostTask(
- posted_from_,
- base::BindOnce(&TrampolineHelper::ClearCallbackOnTargetTaskRunner,
- std::move(callback_)));
- }
- }
-
- private:
- static void ClearCallbackOnTargetTaskRunner(CallbackType) {}
-
- base::Location posted_from_;
- scoped_refptr<base::SequencedTaskRunner> task_runner_;
- CallbackType callback_;
-};
-
-} // namespace internal
-
-template <typename... Args>
-inline base::RepeatingCallback<void(Args...)> BindToLoop(
- scoped_refptr<base::SequencedTaskRunner> task_runner,
- base::RepeatingCallback<void(Args...)> cb) {
- using CallbackType = base::RepeatingCallback<void(Args...)>;
- using Helper = internal::TrampolineHelper<CallbackType>;
- using RunnerType = void (Helper::*)(Args...);
- RunnerType run = &Helper::Run;
- // TODO(tzik): Propagate FROM_HERE from the caller.
- return base::BindRepeating(
- run, std::make_unique<Helper>(FROM_HERE, task_runner, std::move(cb)));
-}
-
-template <typename... Args>
-inline base::OnceCallback<void(Args...)> BindToLoop(
- scoped_refptr<base::SequencedTaskRunner> task_runner,
- base::OnceCallback<void(Args...)> cb) {
- using CallbackType = base::OnceCallback<void(Args...)>;
- using Helper = internal::TrampolineHelper<CallbackType>;
- using RunnerType = void (Helper::*)(Args...);
- RunnerType run = &Helper::Run;
- // TODO(tzik): Propagate FROM_HERE from the caller.
- return base::BindOnce(
- run, std::make_unique<Helper>(FROM_HERE, task_runner, std::move(cb)));
-}
template <typename... Args>
inline base::RepeatingCallback<void(Args...)> BindToCurrentLoop(
- base::RepeatingCallback<void(Args...)> cb) {
- return BindToLoop(base::SequencedTaskRunnerHandle::Get(), std::move(cb));
+ base::RepeatingCallback<void(Args...)> cb,
+ const base::Location& location = FROM_HERE) {
+ return base::BindPostTask(base::SequencedTaskRunnerHandle::Get(),
+ std::move(cb), location);
}
template <typename... Args>
inline base::OnceCallback<void(Args...)> BindToCurrentLoop(
- base::OnceCallback<void(Args...)> cb) {
- return BindToLoop(base::SequencedTaskRunnerHandle::Get(), std::move(cb));
+ base::OnceCallback<void(Args...)> cb,
+ const base::Location& location = FROM_HERE) {
+ return base::BindPostTask(base::SequencedTaskRunnerHandle::Get(),
+ std::move(cb), location);
}
} // namespace media
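
Call sites keep the same shape; the work is now done by base::BindPostTask(), with FROM_HERE (or a caller-supplied location) forwarded for task attribution. A minimal sketch with placeholder names (MyClass, Decode, weak_this_):

    // Sketch only: OnDecoded() runs on the sequence that called
    // BindToCurrentLoop(), no matter which sequence runs |cb|.
    auto cb = BindToCurrentLoop(
        base::BindOnce(&MyClass::OnDecoded, weak_this_));
    other_task_runner->PostTask(FROM_HERE,
                                base::BindOnce(&Decode, std::move(cb)));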
diff --git a/chromium/media/base/bind_to_current_loop_unittest.cc b/chromium/media/base/bind_to_current_loop_unittest.cc
deleted file mode 100644
index faddb41d379..00000000000
--- a/chromium/media/base/bind_to_current_loop_unittest.cc
+++ /dev/null
@@ -1,376 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/base/bind_to_current_loop.h"
-
-#include <memory>
-#include <utility>
-
-#include "base/bind.h"
-#include "base/memory/free_deleter.h"
-#include "base/run_loop.h"
-#include "base/synchronization/waitable_event.h"
-#include "base/test/task_environment.h"
-#include "base/threading/thread.h"
-#include "base/threading/thread_checker_impl.h"
-#include "testing/gtest/include/gtest/gtest.h"
-
-namespace media {
-
-void BoundBoolSet(bool* var, bool val) {
- *var = val;
-}
-
-void BoundBoolSetFromUniquePtr(bool* var, std::unique_ptr<bool> val) {
- *var = *val;
-}
-
-void BoundBoolSetFromUniquePtrFreeDeleter(
- bool* var,
- std::unique_ptr<bool, base::FreeDeleter> val) {
- *var = *val;
-}
-
-void BoundBoolSetFromUniquePtrArray(bool* var, std::unique_ptr<bool[]> val) {
- *var = val[0];
-}
-
-void BoundBoolSetFromConstRef(bool* var, const bool& val) {
- *var = val;
-}
-
-void BoundIntegersSet(int* a_var, int* b_var, int a_val, int b_val) {
- *a_var = a_val;
- *b_var = b_val;
-}
-
-struct ThreadRestrictionChecker {
- void Run() { EXPECT_TRUE(thread_checker_.CalledOnValidThread()); }
-
- ~ThreadRestrictionChecker() {
- EXPECT_TRUE(thread_checker_.CalledOnValidThread());
- }
-
- base::ThreadCheckerImpl thread_checker_;
-};
-
-void ClearReference(base::OnceClosure cb) {}
-
-// Various tests that check that the bound function is only actually executed
-// on the message loop, not during the original Run.
-class BindToCurrentLoopTest : public ::testing::Test {
- protected:
- base::test::SingleThreadTaskEnvironment task_environment_;
-};
-
-TEST_F(BindToCurrentLoopTest, RepeatingClosure) {
- // Test the closure is run inside the loop, not outside it.
- base::WaitableEvent waiter(base::WaitableEvent::ResetPolicy::AUTOMATIC,
- base::WaitableEvent::InitialState::NOT_SIGNALED);
- base::RepeatingClosure cb = BindToCurrentLoop(base::BindRepeating(
- &base::WaitableEvent::Signal, base::Unretained(&waiter)));
- cb.Run();
- EXPECT_FALSE(waiter.IsSignaled());
- base::RunLoop().RunUntilIdle();
- EXPECT_TRUE(waiter.IsSignaled());
-}
-
-TEST_F(BindToCurrentLoopTest, OnceClosure) {
- // Test the closure is run inside the loop, not outside it.
- base::WaitableEvent waiter(base::WaitableEvent::ResetPolicy::AUTOMATIC,
- base::WaitableEvent::InitialState::NOT_SIGNALED);
- base::OnceClosure cb = BindToCurrentLoop(
- base::BindOnce(&base::WaitableEvent::Signal, base::Unretained(&waiter)));
- std::move(cb).Run();
- EXPECT_FALSE(waiter.IsSignaled());
- base::RunLoop().RunUntilIdle();
- EXPECT_TRUE(waiter.IsSignaled());
-}
-
-TEST_F(BindToCurrentLoopTest, BoolRepeating) {
- bool bool_var = false;
- base::RepeatingCallback<void(bool)> cb =
- BindToCurrentLoop(base::BindRepeating(&BoundBoolSet, &bool_var));
- cb.Run(true);
- EXPECT_FALSE(bool_var);
- base::RunLoop().RunUntilIdle();
- EXPECT_TRUE(bool_var);
-
- cb.Run(false);
- EXPECT_TRUE(bool_var);
- base::RunLoop().RunUntilIdle();
- EXPECT_FALSE(bool_var);
-}
-
-TEST_F(BindToCurrentLoopTest, BoolOnce) {
- bool bool_var = false;
- base::OnceCallback<void(bool)> cb =
- BindToCurrentLoop(base::BindOnce(&BoundBoolSet, &bool_var));
- std::move(cb).Run(true);
- EXPECT_FALSE(bool_var);
- base::RunLoop().RunUntilIdle();
- EXPECT_TRUE(bool_var);
-}
-
-TEST_F(BindToCurrentLoopTest, BoundUniquePtrBoolRepeating) {
- bool bool_val = false;
- std::unique_ptr<bool> unique_ptr_bool(new bool(true));
- base::RepeatingClosure cb = BindToCurrentLoop(base::BindRepeating(
- &BoundBoolSetFromUniquePtr, &bool_val, base::Passed(&unique_ptr_bool)));
- cb.Run();
- EXPECT_FALSE(bool_val);
- base::RunLoop().RunUntilIdle();
- EXPECT_TRUE(bool_val);
-}
-
-TEST_F(BindToCurrentLoopTest, BoundUniquePtrBoolOnce) {
- bool bool_val = false;
- std::unique_ptr<bool> unique_ptr_bool(new bool(true));
- base::OnceClosure cb = BindToCurrentLoop(base::BindOnce(
- &BoundBoolSetFromUniquePtr, &bool_val, std::move(unique_ptr_bool)));
- std::move(cb).Run();
- EXPECT_FALSE(bool_val);
- base::RunLoop().RunUntilIdle();
- EXPECT_TRUE(bool_val);
-}
-
-TEST_F(BindToCurrentLoopTest, PassedUniquePtrBoolRepeating) {
- bool bool_val = false;
- base::RepeatingCallback<void(std::unique_ptr<bool>)> cb = BindToCurrentLoop(
- base::BindRepeating(&BoundBoolSetFromUniquePtr, &bool_val));
- cb.Run(std::make_unique<bool>(true));
- EXPECT_FALSE(bool_val);
- base::RunLoop().RunUntilIdle();
- EXPECT_TRUE(bool_val);
-
- cb.Run(std::make_unique<bool>(false));
- EXPECT_TRUE(bool_val);
- base::RunLoop().RunUntilIdle();
- EXPECT_FALSE(bool_val);
-}
-
-TEST_F(BindToCurrentLoopTest, PassedUniquePtrBoolOnce) {
- bool bool_val = false;
- base::OnceCallback<void(std::unique_ptr<bool>)> cb =
- BindToCurrentLoop(base::BindOnce(&BoundBoolSetFromUniquePtr, &bool_val));
- std::move(cb).Run(std::make_unique<bool>(true));
- EXPECT_FALSE(bool_val);
- base::RunLoop().RunUntilIdle();
- EXPECT_TRUE(bool_val);
-}
-
-TEST_F(BindToCurrentLoopTest, BoundUniquePtrArrayBoolRepeating) {
- bool bool_val = false;
- std::unique_ptr<bool[]> unique_ptr_array_bool(new bool[1]);
- unique_ptr_array_bool[0] = true;
- base::RepeatingClosure cb = BindToCurrentLoop(
- base::BindRepeating(&BoundBoolSetFromUniquePtrArray, &bool_val,
- base::Passed(&unique_ptr_array_bool)));
- cb.Run();
- EXPECT_FALSE(bool_val);
- base::RunLoop().RunUntilIdle();
- EXPECT_TRUE(bool_val);
-}
-
-TEST_F(BindToCurrentLoopTest, BoundUniquePtrArrayBoolOnce) {
- bool bool_val = false;
- std::unique_ptr<bool[]> unique_ptr_array_bool(new bool[1]);
- unique_ptr_array_bool[0] = true;
- base::OnceClosure cb = BindToCurrentLoop(
- base::BindOnce(&BoundBoolSetFromUniquePtrArray, &bool_val,
- std::move(unique_ptr_array_bool)));
- std::move(cb).Run();
- EXPECT_FALSE(bool_val);
- base::RunLoop().RunUntilIdle();
- EXPECT_TRUE(bool_val);
-}
-
-TEST_F(BindToCurrentLoopTest, PassedUniquePtrArrayBoolRepeating) {
- bool bool_val = false;
- base::RepeatingCallback<void(std::unique_ptr<bool[]>)> cb = BindToCurrentLoop(
- base::BindRepeating(&BoundBoolSetFromUniquePtrArray, &bool_val));
-
- std::unique_ptr<bool[]> unique_ptr_array_bool(new bool[1]);
- unique_ptr_array_bool[0] = true;
- cb.Run(std::move(unique_ptr_array_bool));
- EXPECT_FALSE(bool_val);
- base::RunLoop().RunUntilIdle();
- EXPECT_TRUE(bool_val);
-
- unique_ptr_array_bool.reset(new bool[1]);
- unique_ptr_array_bool[0] = false;
- cb.Run(std::move(unique_ptr_array_bool));
- EXPECT_TRUE(bool_val);
- base::RunLoop().RunUntilIdle();
- EXPECT_FALSE(bool_val);
-}
-
-TEST_F(BindToCurrentLoopTest, PassedUniquePtrArrayBoolOnce) {
- bool bool_val = false;
- base::OnceCallback<void(std::unique_ptr<bool[]>)> cb = BindToCurrentLoop(
- base::BindOnce(&BoundBoolSetFromUniquePtrArray, &bool_val));
-
- std::unique_ptr<bool[]> unique_ptr_array_bool(new bool[1]);
- unique_ptr_array_bool[0] = true;
- std::move(cb).Run(std::move(unique_ptr_array_bool));
- EXPECT_FALSE(bool_val);
- base::RunLoop().RunUntilIdle();
- EXPECT_TRUE(bool_val);
-}
-
-TEST_F(BindToCurrentLoopTest, BoundUniquePtrFreeDeleterBoolRepeating) {
- bool bool_val = false;
- std::unique_ptr<bool, base::FreeDeleter> unique_ptr_free_deleter_bool(
- static_cast<bool*>(malloc(sizeof(bool))));
- *unique_ptr_free_deleter_bool = true;
- base::RepeatingClosure cb = BindToCurrentLoop(
- base::BindRepeating(&BoundBoolSetFromUniquePtrFreeDeleter, &bool_val,
- base::Passed(&unique_ptr_free_deleter_bool)));
- cb.Run();
- EXPECT_FALSE(bool_val);
- base::RunLoop().RunUntilIdle();
- EXPECT_TRUE(bool_val);
-}
-
-TEST_F(BindToCurrentLoopTest, BoundUniquePtrFreeDeleterBoolOnce) {
- bool bool_val = false;
- std::unique_ptr<bool, base::FreeDeleter> unique_ptr_free_deleter_bool(
- static_cast<bool*>(malloc(sizeof(bool))));
- *unique_ptr_free_deleter_bool = true;
- base::OnceClosure cb = BindToCurrentLoop(
- base::BindOnce(&BoundBoolSetFromUniquePtrFreeDeleter, &bool_val,
- std::move(unique_ptr_free_deleter_bool)));
- std::move(cb).Run();
- EXPECT_FALSE(bool_val);
- base::RunLoop().RunUntilIdle();
- EXPECT_TRUE(bool_val);
-}
-
-TEST_F(BindToCurrentLoopTest, PassedUniquePtrFreeDeleterBoolRepeating) {
- bool bool_val = false;
- base::RepeatingCallback<void(std::unique_ptr<bool, base::FreeDeleter>)> cb =
- BindToCurrentLoop(base::BindRepeating(
- &BoundBoolSetFromUniquePtrFreeDeleter, &bool_val));
-
- std::unique_ptr<bool, base::FreeDeleter> unique_ptr_free_deleter_bool(
- static_cast<bool*>(malloc(sizeof(bool))));
- *unique_ptr_free_deleter_bool = true;
- cb.Run(std::move(unique_ptr_free_deleter_bool));
- EXPECT_FALSE(bool_val);
- base::RunLoop().RunUntilIdle();
- EXPECT_TRUE(bool_val);
-
- unique_ptr_free_deleter_bool.reset(static_cast<bool*>(malloc(sizeof(bool))));
- *unique_ptr_free_deleter_bool = false;
- cb.Run(std::move(unique_ptr_free_deleter_bool));
- EXPECT_TRUE(bool_val);
- base::RunLoop().RunUntilIdle();
- EXPECT_FALSE(bool_val);
-}
-
-TEST_F(BindToCurrentLoopTest, PassedUniquePtrFreeDeleterBoolOnce) {
- bool bool_val = false;
- base::OnceCallback<void(std::unique_ptr<bool, base::FreeDeleter>)> cb =
- BindToCurrentLoop(
- base::BindOnce(&BoundBoolSetFromUniquePtrFreeDeleter, &bool_val));
-
- std::unique_ptr<bool, base::FreeDeleter> unique_ptr_free_deleter_bool(
- static_cast<bool*>(malloc(sizeof(bool))));
- *unique_ptr_free_deleter_bool = true;
- std::move(cb).Run(std::move(unique_ptr_free_deleter_bool));
- EXPECT_FALSE(bool_val);
- base::RunLoop().RunUntilIdle();
- EXPECT_TRUE(bool_val);
-}
-
-TEST_F(BindToCurrentLoopTest, IntegersRepeating) {
- int a = 0;
- int b = 0;
- base::RepeatingCallback<void(int, int)> cb =
- BindToCurrentLoop(base::BindRepeating(&BoundIntegersSet, &a, &b));
- cb.Run(1, -1);
- EXPECT_EQ(a, 0);
- EXPECT_EQ(b, 0);
- base::RunLoop().RunUntilIdle();
- EXPECT_EQ(a, 1);
- EXPECT_EQ(b, -1);
-
- cb.Run(2, -2);
- EXPECT_EQ(a, 1);
- EXPECT_EQ(b, -1);
- base::RunLoop().RunUntilIdle();
- EXPECT_EQ(a, 2);
- EXPECT_EQ(b, -2);
-}
-
-TEST_F(BindToCurrentLoopTest, IntegersOnce) {
- int a = 0;
- int b = 0;
- base::OnceCallback<void(int, int)> cb =
- BindToCurrentLoop(base::BindOnce(&BoundIntegersSet, &a, &b));
- std::move(cb).Run(1, -1);
- EXPECT_EQ(a, 0);
- EXPECT_EQ(b, 0);
- base::RunLoop().RunUntilIdle();
- EXPECT_EQ(a, 1);
- EXPECT_EQ(b, -1);
-}
-
-TEST_F(BindToCurrentLoopTest, DestroyedOnBoundLoopRepeating) {
- base::Thread target_thread("testing");
- ASSERT_TRUE(target_thread.Start());
-
- // Ensure that the bound object is also destroyed on the correct thread even
- // if the last reference to the callback is dropped on the other thread.
- base::RepeatingClosure cb = BindToCurrentLoop(
- base::BindRepeating(&ThreadRestrictionChecker::Run,
- std::make_unique<ThreadRestrictionChecker>()));
- target_thread.task_runner()->PostTask(FROM_HERE, std::move(cb));
- ASSERT_FALSE(cb);
- target_thread.FlushForTesting();
- base::RunLoop().RunUntilIdle();
-
- // Ensure that the bound object is destroyed on the target thread even if
- // the callback is destroyed without invocation.
- cb = BindToCurrentLoop(
- base::BindRepeating(&ThreadRestrictionChecker::Run,
- std::make_unique<ThreadRestrictionChecker>()));
- target_thread.task_runner()->PostTask(
- FROM_HERE, base::BindOnce(&ClearReference, std::move(cb)));
- target_thread.FlushForTesting();
- ASSERT_FALSE(cb);
- base::RunLoop().RunUntilIdle();
-
- target_thread.Stop();
-}
-
-TEST_F(BindToCurrentLoopTest, DestroyedOnBoundLoopOnce) {
- base::Thread target_thread("testing");
- ASSERT_TRUE(target_thread.Start());
-
- // Ensure that the bound object is also destroyed on the correct thread even
- // if the last reference to the callback is dropped on the other thread.
- base::OnceClosure cb = BindToCurrentLoop(
- base::BindOnce(&ThreadRestrictionChecker::Run,
- std::make_unique<ThreadRestrictionChecker>()));
- target_thread.task_runner()->PostTask(FROM_HERE, std::move(cb));
- ASSERT_FALSE(cb);
- target_thread.FlushForTesting();
- base::RunLoop().RunUntilIdle();
-
- // Ensure that the bound object is destroyed on the target thread even if
- // the callback is destroyed without invocation.
- cb = BindToCurrentLoop(
- base::BindOnce(&ThreadRestrictionChecker::Run,
- std::make_unique<ThreadRestrictionChecker>()));
- target_thread.task_runner()->PostTask(
- FROM_HERE, base::BindOnce(&ClearReference, std::move(cb)));
- target_thread.FlushForTesting();
- ASSERT_FALSE(cb);
- base::RunLoop().RunUntilIdle();
-
- target_thread.Stop();
-}
-
-} // namespace media
diff --git a/chromium/media/base/cdm_context.cc b/chromium/media/base/cdm_context.cc
index 8f8cd843f64..9b30303dd4b 100644
--- a/chromium/media/base/cdm_context.cc
+++ b/chromium/media/base/cdm_context.cc
@@ -30,11 +30,11 @@ std::string CdmContext::CdmIdToString(const base::UnguessableToken* cdm_id) {
return cdm_id ? cdm_id->ToString() : "null";
}
+#if defined(OS_WIN)
bool CdmContext::RequiresMediaFoundationRenderer() {
return false;
}
-#if defined(OS_WIN)
bool CdmContext::GetMediaFoundationCdmProxy(
GetMediaFoundationCdmProxyCB get_mf_cdm_proxy_cb) {
return false;
@@ -53,7 +53,7 @@ FuchsiaCdmContext* CdmContext::GetFuchsiaCdmContext() {
}
#endif
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
chromeos::ChromeOsCdmContext* CdmContext::GetChromeOsCdmContext() {
return nullptr;
}
diff --git a/chromium/media/base/cdm_context.h b/chromium/media/base/cdm_context.h
index d555e21e360..aacea9ec764 100644
--- a/chromium/media/base/cdm_context.h
+++ b/chromium/media/base/cdm_context.h
@@ -19,7 +19,7 @@
struct IMFCdmProxy;
#endif
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
namespace chromeos {
class ChromeOsCdmContext;
}
@@ -86,10 +86,6 @@ class MEDIA_EXPORT CdmContext {
// occurs implicitly along with decoding).
virtual Decryptor* GetDecryptor();
- // Returns whether the CDM requires Media Foundation-based media Renderer.
- // Should only return true on Windows.
- virtual bool RequiresMediaFoundationRenderer();
-
// Returns an ID that can be used to find a remote CDM, in which case this CDM
// serves as a proxy to the remote one. Returns base::nullopt when remote CDM
// is not supported (e.g. this CDM is a local CDM).
@@ -98,6 +94,11 @@ class MEDIA_EXPORT CdmContext {
static std::string CdmIdToString(const base::UnguessableToken* cdm_id);
#if defined(OS_WIN)
+ // Returns whether the CDM requires a Media Foundation-based media Renderer.
+ // This is separate from GetMediaFoundationCdmProxy() since it needs to be
+ // a sync call made in the render process to set up the media pipeline.
+ virtual bool RequiresMediaFoundationRenderer();
+
using GetMediaFoundationCdmProxyCB =
base::OnceCallback<void(Microsoft::WRL::ComPtr<IMFCdmProxy>)>;
// This allows a CdmContext to expose an IMFTrustedInput instance for use in
@@ -122,7 +123,7 @@ class MEDIA_EXPORT CdmContext {
virtual FuchsiaCdmContext* GetFuchsiaCdmContext();
#endif
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
// Returns a ChromeOsCdmContext interface when the context is backed by the
// ChromeOS CdmFactoryDaemon. Otherwise return nullptr.
virtual chromeos::ChromeOsCdmContext* GetChromeOsCdmContext();
diff --git a/chromium/media/base/cdm_promise_adapter.cc b/chromium/media/base/cdm_promise_adapter.cc
index af1b604963d..da0ae36e712 100644
--- a/chromium/media/base/cdm_promise_adapter.cc
+++ b/chromium/media/base/cdm_promise_adapter.cc
@@ -8,13 +8,26 @@
namespace media {
+namespace {
+
+CdmPromise::SystemCode ToSystemCode(CdmPromiseAdapter::ClearReason reason) {
+ switch (reason) {
+ case CdmPromiseAdapter::ClearReason::kDestruction:
+ return CdmPromise::SystemCode::kAborted;
+ case CdmPromiseAdapter::ClearReason::kConnectionError:
+ return CdmPromise::SystemCode::kConnectionError;
+ }
+}
+
+} // namespace
+
CdmPromiseAdapter::CdmPromiseAdapter()
: next_promise_id_(kInvalidPromiseId + 1) {}
CdmPromiseAdapter::~CdmPromiseAdapter() {
DCHECK(thread_checker_.CalledOnValidThread());
DLOG_IF(WARNING, !promises_.empty()) << "There are unfulfilled promises";
- Clear();
+ Clear(ClearReason::kDestruction);
}
uint32_t CdmPromiseAdapter::SavePromise(std::unique_ptr<CdmPromise> promise) {
@@ -62,13 +75,12 @@ void CdmPromiseAdapter::RejectPromise(uint32_t promise_id,
promise->reject(exception_code, system_code, error_message);
}
-void CdmPromiseAdapter::Clear() {
+void CdmPromiseAdapter::Clear(ClearReason reason) {
// Reject all outstanding promises.
DCHECK(thread_checker_.CalledOnValidThread());
for (auto& promise : promises_) {
promise.second->reject(CdmPromise::Exception::INVALID_STATE_ERROR,
- CdmPromise::SystemCode::kAborted,
- "Operation aborted.");
+ ToSystemCode(reason), "Operation aborted.");
}
promises_.clear();
}
diff --git a/chromium/media/base/cdm_promise_adapter.h b/chromium/media/base/cdm_promise_adapter.h
index d6a088617eb..f678df4b340 100644
--- a/chromium/media/base/cdm_promise_adapter.h
+++ b/chromium/media/base/cdm_promise_adapter.h
@@ -42,8 +42,13 @@ class MEDIA_EXPORT CdmPromiseAdapter {
uint32_t system_code,
const std::string& error_message);
+ enum class ClearReason {
+ kDestruction,
+ kConnectionError,
+ };
+
// Rejects and clears all |promises_|.
- void Clear();
+ void Clear(ClearReason reason);
private:
// A map between promise IDs and CdmPromises.
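
A minimal sketch of the intended use of the new ClearReason; the mojo disconnect handler is a hypothetical call site:

    // Sketch only: a dropped connection now rejects pending promises with
    // kConnectionError rather than the generic kAborted.
    void MyCdm::OnMojoDisconnect() {
      cdm_promise_adapter_.Clear(
          CdmPromiseAdapter::ClearReason::kConnectionError);
    }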
diff --git a/chromium/media/base/decoder.cc b/chromium/media/base/decoder.cc
index 50c2b8d52fa..8e06f1e2514 100644
--- a/chromium/media/base/decoder.cc
+++ b/chromium/media/base/decoder.cc
@@ -18,4 +18,56 @@ bool Decoder::SupportsDecryption() const {
return false;
}
+std::string GetDecoderName(VideoDecoderType type) {
+ switch (type) {
+ case VideoDecoderType::kUnknown:
+ return "Unknown Video Decoder";
+ case VideoDecoderType::kFFmpeg:
+ return "FFmpegVideoDecoder";
+ case VideoDecoderType::kVpx:
+ return "VpxVideoDecoder";
+ case VideoDecoderType::kAom:
+ return "AomVideoDecoder";
+ case VideoDecoderType::kMojo:
+ return "MojoVideoDecoder";
+ case VideoDecoderType::kDecrypting:
+ return "DecryptingVideoDecoder";
+ case VideoDecoderType::kDav1d:
+ return "Dav1dVideoDecoder";
+ case VideoDecoderType::kFuchsia:
+ return "FuchsiaVideoDecoder";
+ case VideoDecoderType::kMediaCodec:
+ return "MediaCodecVideoDecoder";
+ case VideoDecoderType::kGav1:
+ return "Gav1VideoDecoder";
+ case VideoDecoderType::kD3D11:
+ return "D3D11VideoDecoder";
+ case VideoDecoderType::kVaapi:
+ return "VaapiVideoDecodeAccelerator";
+ case VideoDecoderType::kBroker:
+ return "VideoDecoderBroker";
+ case VideoDecoderType::kChromeOs:
+ return "VideoDecoderPipeline (ChromeOs)";
+ case VideoDecoderType::kVda:
+ return "VideoDecodeAccelerator";
+ }
+}
+
+std::string GetDecoderName(AudioDecoderType type) {
+ switch (type) {
+ case AudioDecoderType::kUnknown:
+ return "Unknown Audio Decoder";
+ case AudioDecoderType::kFFmpeg:
+ return "FFmpegAudioDecoder";
+ case AudioDecoderType::kMojo:
+ return "MojoAudioDecoder";
+ case AudioDecoderType::kDecrypting:
+ return "DecryptingAudioDecoder";
+ case AudioDecoderType::kMediaCodec:
+ return "MediaCodecAudioDecoder";
+ case AudioDecoderType::kBroker:
+ return "AudioDecoderBroker";
+ }
+}
+
} // namespace media
diff --git a/chromium/media/base/decoder.h b/chromium/media/base/decoder.h
index 8ebec529466..fb43f9ca369 100644
--- a/chromium/media/base/decoder.h
+++ b/chromium/media/base/decoder.h
@@ -14,6 +14,47 @@
namespace media {
+// List of known AudioDecoder implementations; recorded to UKM, always add new
+// values to the end and do not reorder or delete values from this list.
+enum class AudioDecoderType : int {
+ kUnknown = 0, // Decoder name string is not recognized or n/a.
+ kFFmpeg = 1, // FFmpegAudioDecoder
+ kMojo = 2, // MojoAudioDecoder
+ kDecrypting = 3, // DecryptingAudioDecoder
+ kMediaCodec = 4, // MediaCodecAudioDecoder (Android)
+ kBroker = 5, // AudioDecoderBroker
+
+ kMaxValue = kBroker // Keep this at the end and equal to the last entry.
+};
+
+// List of known VideoDecoder implementations; recorded to UKM, always add new
+// values to the end and do not reorder or delete values from this list.
+enum class VideoDecoderType : int {
+ kUnknown = 0, // Decoder name string is not recognized or n/a.
+ // kGpu = 1, // GpuVideoDecoder (DEPRECATED)
+ kFFmpeg = 2, // FFmpegVideoDecoder
+ kVpx = 3, // VpxVideoDecoder
+ kAom = 4, // AomVideoDecoder
+ kMojo = 5, // MojoVideoDecoder
+ kDecrypting = 6, // DecryptingVideoDecoder
+ kDav1d = 7, // Dav1dVideoDecoder
+ kFuchsia = 8, // FuchsiaVideoDecoder
+ kMediaCodec = 9, // MediaCodecVideoDecoder (Android)
+ kGav1 = 10, // Gav1VideoDecoder
+ kD3D11 = 11, // D3D11VideoDecoder
+ kVaapi = 12, // VaapiVideoDecodeAccelerator
+ kBroker = 13, // VideoDecoderBroker (Webcodecs)
+ kVda = 14, // VDAVideoDecoder
+
+ // ChromeOS uses VideoDecoderPipeline. This could potentially become more
+ // granular in the future.
+ kChromeOs = 15,
+ kMaxValue = kChromeOs // Keep this at the end and equal to the last entry.
+};
+
+MEDIA_EXPORT std::string GetDecoderName(AudioDecoderType type);
+MEDIA_EXPORT std::string GetDecoderName(VideoDecoderType type);
+
class MEDIA_EXPORT Decoder {
public:
virtual ~Decoder();
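
These enums are recorded to UKM, and GetDecoderName() supplies the matching display string. A minimal sketch (the |decoder| pointer is a placeholder):

    // Sketch only: pairs with AudioDecoder::GetDecoderType() added above.
    AudioDecoderType type = decoder->GetDecoderType();  // e.g. kFFmpeg.
    std::string name = GetDecoderName(type);            // "FFmpegAudioDecoder".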
diff --git a/chromium/media/base/decoder_factory.cc b/chromium/media/base/decoder_factory.cc
index 52028b6901c..79209054199 100644
--- a/chromium/media/base/decoder_factory.cc
+++ b/chromium/media/base/decoder_factory.cc
@@ -17,6 +17,11 @@ void DecoderFactory::CreateAudioDecoders(
MediaLog* media_log,
std::vector<std::unique_ptr<AudioDecoder>>* audio_decoders) {}
+SupportedVideoDecoderConfigs
+DecoderFactory::GetSupportedVideoDecoderConfigsForWebRTC() {
+ return {};
+}
+
void DecoderFactory::CreateVideoDecoders(
scoped_refptr<base::SequencedTaskRunner> task_runner,
GpuVideoAcceleratorFactories* gpu_factories,
diff --git a/chromium/media/base/decoder_factory.h b/chromium/media/base/decoder_factory.h
index 2af1af58f9d..fee2f69ff28 100644
--- a/chromium/media/base/decoder_factory.h
+++ b/chromium/media/base/decoder_factory.h
@@ -12,6 +12,7 @@
#include "base/memory/ref_counted.h"
#include "media/base/media_export.h"
#include "media/base/overlay_info.h"
+#include "media/base/supported_video_decoder_config.h"
namespace base {
class SequencedTaskRunner;
@@ -41,6 +42,13 @@ class MEDIA_EXPORT DecoderFactory {
MediaLog* media_log,
std::vector<std::unique_ptr<AudioDecoder>>* audio_decoders);
+ // Returns the union of all decoder configs supported by the decoders created
+ // when CreateVideoDecoders is called.
+ // TODO(crbug.com/1173503): Rename to GetSupportedVideoDecoderConfigs after
+ // being properly implemented for all factories.
+ virtual SupportedVideoDecoderConfigs
+ GetSupportedVideoDecoderConfigsForWebRTC();
+
// Creates video decoders and append them to the end of |video_decoders|.
// Decoders are single-threaded, each decoder should run on |task_runner|.
virtual void CreateVideoDecoders(
diff --git a/chromium/media/base/decryptor.h b/chromium/media/base/decryptor.h
index dbc91c79805..d88aac96da2 100644
--- a/chromium/media/base/decryptor.h
+++ b/chromium/media/base/decryptor.h
@@ -109,10 +109,9 @@ class MEDIA_EXPORT Decryptor {
// - Set to kError if unexpected error has occurred. In this case the
// returned frame(s) must be NULL/empty.
// Second parameter: The decoded video frame or audio buffers.
- typedef base::RepeatingCallback<void(Status, const AudioFrames&)>
- AudioDecodeCB;
- typedef base::RepeatingCallback<void(Status, scoped_refptr<VideoFrame>)>
- VideoDecodeCB;
+ using AudioDecodeCB = base::OnceCallback<void(Status, const AudioFrames&)>;
+ using VideoDecodeCB =
+ base::OnceCallback<void(Status, scoped_refptr<VideoFrame>)>;
// Decrypts and decodes the |encrypted| buffer. The status and the decrypted
// buffer are returned via the provided callback.
@@ -125,9 +124,9 @@ class MEDIA_EXPORT Decryptor {
// AudioDecodeCB has completed. Thus, only one AudioDecodeCB may be pending at
// any time. Same for DecryptAndDecodeVideo();
virtual void DecryptAndDecodeAudio(scoped_refptr<DecoderBuffer> encrypted,
- const AudioDecodeCB& audio_decode_cb) = 0;
+ AudioDecodeCB audio_decode_cb) = 0;
virtual void DecryptAndDecodeVideo(scoped_refptr<DecoderBuffer> encrypted,
- const VideoDecodeCB& video_decode_cb) = 0;
+ VideoDecodeCB video_decode_cb) = 0;
// Resets the decoder to an initialized clean state, cancels any scheduled
// decrypt-and-decode operations, and fires any pending
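
With AudioDecodeCB/VideoDecodeCB now OnceCallbacks, call sites pass them by move; a minimal sketch with placeholder names (MyStream, weak_this_):

    // Sketch only: the callback is move-only and runs at most once per call.
    decryptor->DecryptAndDecodeAudio(
        std::move(encrypted_buffer),
        base::BindOnce(&MyStream::OnAudioDecoded, weak_this_));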
diff --git a/chromium/media/base/eme_constants.h b/chromium/media/base/eme_constants.h
index 2664065f051..11e7c1ed3ff 100644
--- a/chromium/media/base/eme_constants.h
+++ b/chromium/media/base/eme_constants.h
@@ -31,7 +31,7 @@ enum EmeCodec : uint32_t {
EME_CODEC_AAC = 1 << 4,
EME_CODEC_AVC1 = 1 << 5,
EME_CODEC_VP9_PROFILE2 = 1 << 6, // VP9 profiles 2
- EME_CODEC_HEVC = 1 << 7,
+ EME_CODEC_HEVC_PROFILE_MAIN = 1 << 7,
EME_CODEC_DOLBY_VISION_AVC = 1 << 8,
EME_CODEC_DOLBY_VISION_HEVC = 1 << 9,
EME_CODEC_AC3 = 1 << 10,
@@ -39,6 +39,7 @@ enum EmeCodec : uint32_t {
EME_CODEC_MPEG_H_AUDIO = 1 << 12,
EME_CODEC_FLAC = 1 << 13,
EME_CODEC_AV1 = 1 << 14,
+ EME_CODEC_HEVC_PROFILE_MAIN10 = 1 << 15,
};
// *_ALL values should only be used for masking, do not use them to specify
@@ -70,7 +71,8 @@ constexpr SupportedCodecs GetMp4VideoCodecs() {
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
codecs |= EME_CODEC_AVC1;
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
- codecs |= EME_CODEC_HEVC;
+ codecs |= EME_CODEC_HEVC_PROFILE_MAIN;
+ codecs |= EME_CODEC_HEVC_PROFILE_MAIN10;
#endif // BUILDFLAG(ENABLE_PLATFORM_HEVC)
#if BUILDFLAG(ENABLE_PLATFORM_DOLBY_VISION)
codecs |= EME_CODEC_DOLBY_VISION_AVC;
@@ -186,14 +188,18 @@ enum class EmeConfigRule {
// The configuration option prevents use of hardware-secure codecs.
// This rule only has meaning on platforms that distinguish hardware-secure
- // codecs (i.e. Android and Windows).
+ // codecs (i.e. Android, Windows and ChromeOS).
HW_SECURE_CODECS_NOT_ALLOWED,
// The configuration option is supported if hardware-secure codecs are used.
// This rule only has meaning on platforms that distinguish hardware-secure
- // codecs (i.e. Android and Windows).
+ // codecs (i.e. Android, Windows and ChromeOS).
HW_SECURE_CODECS_REQUIRED,
+ // The configuration option is supported on platforms where hardware-secure
+ // codecs are used and an identifier is also required (i.e. ChromeOS).
+ IDENTIFIER_AND_HW_SECURE_CODECS_REQUIRED,
+
// The configuration option is supported without conditions.
SUPPORTED,
};
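
Callers that previously tested the single EME_CODEC_HEVC bit now test the per-profile bits split above; a minimal sketch (|supported| is a placeholder SupportedCodecs mask):

    // Sketch only: Main and Main10 support can now diverge.
    bool hevc_main_ok = (supported & EME_CODEC_HEVC_PROFILE_MAIN) != 0;
    bool hevc_main10_ok = (supported & EME_CODEC_HEVC_PROFILE_MAIN10) != 0;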
diff --git a/chromium/media/base/fake_audio_worker.cc b/chromium/media/base/fake_audio_worker.cc
index 2d0bbefd537..8d57a0908f1 100644
--- a/chromium/media/base/fake_audio_worker.cc
+++ b/chromium/media/base/fake_audio_worker.cc
@@ -57,7 +57,7 @@ class FakeAudioWorker::Worker
int64_t frames_elapsed_;
// Used to cancel any delayed tasks still inside the worker loop's queue.
- base::CancelableClosure worker_task_cb_;
+ base::CancelableRepeatingClosure worker_task_cb_;
THREAD_CHECKER(thread_checker_);
diff --git a/chromium/media/base/fallback_video_decoder.cc b/chromium/media/base/fallback_video_decoder.cc
deleted file mode 100644
index a10ac08bec8..00000000000
--- a/chromium/media/base/fallback_video_decoder.cc
+++ /dev/null
@@ -1,107 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <utility>
-
-#include "base/bind.h"
-#include "base/callback_helpers.h"
-#include "base/threading/sequenced_task_runner_handle.h"
-#include "media/base/decoder_buffer.h"
-#include "media/base/fallback_video_decoder.h"
-#include "media/base/video_decoder_config.h"
-
-namespace media {
-
-FallbackVideoDecoder::FallbackVideoDecoder(
- std::unique_ptr<VideoDecoder> preferred,
- std::unique_ptr<VideoDecoder> fallback)
- : preferred_decoder_(std::move(preferred)),
- fallback_decoder_(std::move(fallback)) {}
-
-void FallbackVideoDecoder::Initialize(const VideoDecoderConfig& config,
- bool low_delay,
- CdmContext* cdm_context,
- InitCB init_cb,
- const OutputCB& output_cb,
- const WaitingCB& waiting_cb) {
- // If we've already fallen back, just reinitialize the selected decoder.
- if (selected_decoder_ && did_fallback_) {
- selected_decoder_->Initialize(config, low_delay, cdm_context,
- std::move(init_cb), output_cb, waiting_cb);
- return;
- }
-
- InitCB fallback_initialize_cb =
- base::BindOnce(&FallbackVideoDecoder::FallbackInitialize,
- weak_factory_.GetWeakPtr(), config, low_delay, cdm_context,
- std::move(init_cb), output_cb, waiting_cb);
-
- preferred_decoder_->Initialize(config, low_delay, cdm_context,
- std::move(fallback_initialize_cb), output_cb,
- waiting_cb);
-}
-
-void FallbackVideoDecoder::FallbackInitialize(const VideoDecoderConfig& config,
- bool low_delay,
- CdmContext* cdm_context,
- InitCB init_cb,
- const OutputCB& output_cb,
- const WaitingCB& waiting_cb,
- Status status) {
- // The preferred decoder was successfully initialized.
- if (status.is_ok()) {
- selected_decoder_ = preferred_decoder_.get();
- std::move(init_cb).Run(OkStatus());
- return;
- }
-
- did_fallback_ = true;
- // Post destruction of |preferred_decoder_| so that we don't destroy the
- // object during the callback. DeleteSoon doesn't handle custom deleters, so
- // we post a do-nothing task instead.
- base::SequencedTaskRunnerHandle::Get()->PostTask(
- FROM_HERE,
- base::BindOnce(base::DoNothing::Once<std::unique_ptr<VideoDecoder>>(),
- std::move(preferred_decoder_)));
- selected_decoder_ = fallback_decoder_.get();
- fallback_decoder_->Initialize(config, low_delay, cdm_context,
- std::move(init_cb), output_cb, waiting_cb);
-}
-
-void FallbackVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
- DecodeCB decode_cb) {
- DCHECK(selected_decoder_);
- selected_decoder_->Decode(std::move(buffer), std::move(decode_cb));
-}
-
-void FallbackVideoDecoder::Reset(base::OnceClosure reset_cb) {
- DCHECK(selected_decoder_);
- selected_decoder_->Reset(std::move(reset_cb));
-}
-
-bool FallbackVideoDecoder::NeedsBitstreamConversion() const {
- DCHECK(selected_decoder_);
- return selected_decoder_->NeedsBitstreamConversion();
-}
-
-bool FallbackVideoDecoder::CanReadWithoutStalling() const {
- DCHECK(selected_decoder_);
- return selected_decoder_->CanReadWithoutStalling();
-}
-
-int FallbackVideoDecoder::GetMaxDecodeRequests() const {
- DCHECK(selected_decoder_);
- return selected_decoder_->GetMaxDecodeRequests();
-}
-
-std::string FallbackVideoDecoder::GetDisplayName() const {
- // MojoVideoDecoder always identifies itself as such, and never asks for the
- // name of the underlying decoder.
- NOTREACHED();
- return "FallbackVideoDecoder";
-}
-
-FallbackVideoDecoder::~FallbackVideoDecoder() = default;
-
-} // namespace media
diff --git a/chromium/media/base/fallback_video_decoder.h b/chromium/media/base/fallback_video_decoder.h
deleted file mode 100644
index 98c06c82949..00000000000
--- a/chromium/media/base/fallback_video_decoder.h
+++ /dev/null
@@ -1,60 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_BASE_FALLBACK_VIDEO_DECODER_H_
-#define MEDIA_BASE_FALLBACK_VIDEO_DECODER_H_
-
-#include <memory>
-#include <string>
-
-#include "base/memory/weak_ptr.h"
-#include "media/base/video_decoder.h"
-
-namespace media {
-
-// A Wrapper VideoDecoder which supports a fallback and a preferred decoder.
-class MEDIA_EXPORT FallbackVideoDecoder : public VideoDecoder {
- public:
- FallbackVideoDecoder(std::unique_ptr<VideoDecoder> preferred,
- std::unique_ptr<VideoDecoder> fallback);
-
- // media::VideoDecoder implementation.
- std::string GetDisplayName() const override;
- void Initialize(const VideoDecoderConfig& config,
- bool low_delay,
- CdmContext* cdm_context,
- InitCB init_cb,
- const OutputCB& output_cb,
- const WaitingCB& waiting_cb) override;
- void Decode(scoped_refptr<DecoderBuffer> buffer, DecodeCB decode_cb) override;
- void Reset(base::OnceClosure reset_cb) override;
- bool NeedsBitstreamConversion() const override;
- bool CanReadWithoutStalling() const override;
- int GetMaxDecodeRequests() const override;
-
- protected:
- ~FallbackVideoDecoder() override;
-
- private:
- void FallbackInitialize(const VideoDecoderConfig& config,
- bool low_delay,
- CdmContext* cdm_context,
- InitCB init_cb,
- const OutputCB& output_cb,
- const WaitingCB& waiting_cb,
- Status status);
-
- std::unique_ptr<media::VideoDecoder> preferred_decoder_;
- std::unique_ptr<media::VideoDecoder> fallback_decoder_;
- media::VideoDecoder* selected_decoder_ = nullptr;
- bool did_fallback_ = false;
-
- base::WeakPtrFactory<FallbackVideoDecoder> weak_factory_{this};
-
- DISALLOW_COPY_AND_ASSIGN(FallbackVideoDecoder);
-};
-
-} // namespace media
-
-#endif // MEDIA_BASE_FALLBACK_VIDEO_DECODER_H_
diff --git a/chromium/media/base/fallback_video_decoder_unittest.cc b/chromium/media/base/fallback_video_decoder_unittest.cc
deleted file mode 100644
index e996eb5b50c..00000000000
--- a/chromium/media/base/fallback_video_decoder_unittest.cc
+++ /dev/null
@@ -1,166 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <tuple>
-
-#include "base/bind.h"
-#include "base/callback_helpers.h"
-#include "base/run_loop.h"
-#include "base/test/gmock_callback_support.h"
-#include "base/test/task_environment.h"
-#include "media/base/decoder_buffer.h"
-#include "media/base/fallback_video_decoder.h"
-#include "media/base/mock_filters.h"
-#include "media/base/test_helpers.h"
-#include "media/base/video_decoder.h"
-#include "media/base/video_decoder_config.h"
-#include "testing/gmock/include/gmock/gmock.h"
-#include "testing/gtest/include/gtest/gtest-param-test.h"
-#include "testing/gtest/include/gtest/gtest.h"
-
-using ::base::test::RunOnceCallback;
-using ::testing::_;
-using ::testing::StrictMock;
-
-namespace media {
-
-class FallbackVideoDecoderUnittest : public ::testing::TestWithParam<bool> {
- public:
- FallbackVideoDecoderUnittest()
- : backup_decoder_(nullptr),
- preferred_decoder_(nullptr),
- fallback_decoder_(nullptr) {}
-
- ~FallbackVideoDecoderUnittest() override { Destroy(); }
-
- std::unique_ptr<VideoDecoder> MakeMockDecoderWithExpectations(
- bool is_fallback,
- bool preferred_should_succeed) {
- std::string n = is_fallback ? "Fallback" : "Preferred";
- StrictMock<MockVideoDecoder>* result = new StrictMock<MockVideoDecoder>(n);
-
- if (is_fallback && !preferred_should_succeed) {
- EXPECT_CALL(*result, Initialize_(_, _, _, _, _, _))
- .WillOnce(RunOnceCallback<3>(OkStatus()));
- }
-
- if (!is_fallback) {
- preferred_decoder_ = result;
- EXPECT_CALL(*result, Initialize_(_, _, _, _, _, _))
- .WillOnce(RunOnceCallback<3>(preferred_should_succeed
- ? OkStatus()
- : StatusCode::kCodeOnlyForTesting));
- } else {
- backup_decoder_ = result;
- }
-
- return std::unique_ptr<VideoDecoder>(result);
- }
-
- void Initialize(bool preferred_should_succeed) {
- fallback_decoder_ = new FallbackVideoDecoder(
- MakeMockDecoderWithExpectations(false, preferred_should_succeed),
- MakeMockDecoderWithExpectations(true, preferred_should_succeed));
-
- fallback_decoder_->Initialize(
- video_decoder_config_, false, nullptr,
- base::BindOnce([](Status status) { EXPECT_TRUE(status.is_ok()); }),
- base::DoNothing(), base::DoNothing());
- }
-
- protected:
- void Destroy() { std::default_delete<VideoDecoder>()(fallback_decoder_); }
-
- bool PreferredShouldSucceed() { return GetParam(); }
-
- base::test::TaskEnvironment task_environment_;
-
- StrictMock<MockVideoDecoder>* backup_decoder_;
- StrictMock<MockVideoDecoder>* preferred_decoder_;
- VideoDecoder* fallback_decoder_;
- VideoDecoderConfig video_decoder_config_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(FallbackVideoDecoderUnittest);
-};
-
-INSTANTIATE_TEST_SUITE_P(DoesPreferredInitFail,
- FallbackVideoDecoderUnittest,
- testing::ValuesIn({true, false}));
-
-#define EXPECT_ON_CORRECT_DECODER(method) \
- if (PreferredShouldSucceed()) \
- EXPECT_CALL(*preferred_decoder_, method); \
- else \
- EXPECT_CALL(*backup_decoder_, method) // Intentionally leave off semicolon.
-
-// Do not test the name lookup; it is NOTREACHED.
-TEST_P(FallbackVideoDecoderUnittest, MethodsRedirectedAsExpected) {
- Initialize(PreferredShouldSucceed());
-
- EXPECT_ON_CORRECT_DECODER(Decode_(_, _));
- fallback_decoder_->Decode(nullptr, base::DoNothing());
-
- EXPECT_ON_CORRECT_DECODER(Reset_(_));
- fallback_decoder_->Reset(base::DoNothing());
-
- EXPECT_ON_CORRECT_DECODER(NeedsBitstreamConversion());
- fallback_decoder_->NeedsBitstreamConversion();
-
- EXPECT_ON_CORRECT_DECODER(CanReadWithoutStalling());
- fallback_decoder_->CanReadWithoutStalling();
-
- EXPECT_ON_CORRECT_DECODER(GetMaxDecodeRequests());
- fallback_decoder_->GetMaxDecodeRequests();
-}
-
-// │ first initialization │ second initialization │
-// preferred │ preferred │ backup │ preferred │ backup │
-// will succeed │ init called │ init called │ init called │ init called │
-//───────────────┼─────────────┼─────────────┼─────────────┼─────────────┤
-// false │ ✓ │ ✓ │ x │ ✓ │
-// true │ ✓ │ x │ ✓ │ ✓ │
-TEST_P(FallbackVideoDecoderUnittest, ReinitializeWithPreferredFailing) {
- Initialize(PreferredShouldSucceed());
-
-  // If we succeeded the first time, it should still be alive.
- if (PreferredShouldSucceed()) { // fail initialization
- EXPECT_CALL(*preferred_decoder_, Initialize_(_, _, _, _, _, _))
- .WillOnce(RunOnceCallback<3>(StatusCode::kCodeOnlyForTesting));
- }
- EXPECT_CALL(*backup_decoder_, Initialize_(_, _, _, _, _, _))
- .WillOnce(RunOnceCallback<3>(OkStatus()));
-
- fallback_decoder_->Initialize(
- video_decoder_config_, false, nullptr,
- base::BindOnce([](Status status) { EXPECT_TRUE(status.is_ok()); }),
- base::DoNothing(), base::DoNothing());
-}
-
-// │ first initialization │ second initialization │
-// preferred │ preferred │ backup │ preferred │ backup │
-// will succeed │ init called │ init called │ init called │ init called │
-//───────────────┼─────────────┼─────────────┼─────────────┼─────────────┤
-// false │ ✓ │ ✓ │ x │ ✓ │
-// true │ ✓ │ x │ ✓ │ x │
-TEST_P(FallbackVideoDecoderUnittest, ReinitializeWithPreferredSuccessful) {
- Initialize(PreferredShouldSucceed());
-
-  // If we succeeded the first time, it should still be alive.
- if (PreferredShouldSucceed()) {
- EXPECT_CALL(*preferred_decoder_, Initialize_(_, _, _, _, _, _))
- .WillOnce(RunOnceCallback<3>(OkStatus())); // pass initialization
- } else {
-    // Otherwise, preferred was deleted, and only the backup still exists.
- EXPECT_CALL(*backup_decoder_, Initialize_(_, _, _, _, _, _))
- .WillOnce(RunOnceCallback<3>(OkStatus()));
- }
-
- fallback_decoder_->Initialize(
- video_decoder_config_, false, nullptr,
- base::BindOnce([](Status status) { EXPECT_TRUE(status.is_ok()); }),
- base::DoNothing(), base::DoNothing());
-}
-
-} // namespace media
diff --git a/chromium/media/base/ipc/DEPS b/chromium/media/base/ipc/DEPS
index 5d8ba2b3c2e..27ed9d90740 100644
--- a/chromium/media/base/ipc/DEPS
+++ b/chromium/media/base/ipc/DEPS
@@ -2,3 +2,9 @@ include_rules = [
"+ipc",
"-media/base/media_export.h",
]
+
+specific_include_rules = {
+ "media_param_traits_macros.h": [
+ "+third_party/blink/public/platform/web_fullscreen_video_status.h",
+ ]
+}
diff --git a/chromium/media/base/ipc/media_param_traits_macros.h b/chromium/media/base/ipc/media_param_traits_macros.h
index ecebc25c85b..6bedf46d006 100644
--- a/chromium/media/base/ipc/media_param_traits_macros.h
+++ b/chromium/media/base/ipc/media_param_traits_macros.h
@@ -17,11 +17,13 @@
#include "media/base/container_names.h"
#include "media/base/content_decryption_module.h"
#include "media/base/decode_status.h"
+#include "media/base/decoder.h"
#include "media/base/decrypt_config.h"
#include "media/base/decryptor.h"
#include "media/base/demuxer_stream.h"
#include "media/base/eme_constants.h"
#include "media/base/encryption_scheme.h"
+#include "media/base/media_content_type.h"
#include "media/base/media_log_record.h"
#include "media/base/media_status.h"
#include "media/base/output_device_info.h"
@@ -30,6 +32,7 @@
#include "media/base/sample_format.h"
#include "media/base/status_codes.h"
#include "media/base/subsample_entry.h"
+#include "media/base/supported_video_decoder_config.h"
#include "media/base/video_codecs.h"
#include "media/base/video_color_space.h"
#include "media/base/video_transformation.h"
@@ -37,7 +40,7 @@
#include "media/base/waiting.h"
#include "media/base/watch_time_keys.h"
#include "media/media_buildflags.h"
-#include "media/video/supported_video_decoder_config.h"
+#include "third_party/blink/public/platform/web_fullscreen_video_status.h"
#include "ui/gfx/hdr_metadata.h"
#include "ui/gfx/ipc/color/gfx_param_traits_macros.h"
@@ -47,6 +50,9 @@
// Enum traits.
+IPC_ENUM_TRAITS_MAX_VALUE(blink::WebFullscreenVideoStatus,
+ blink::WebFullscreenVideoStatus::kMaxValue)
+
IPC_ENUM_TRAITS_MAX_VALUE(media::AudioCodec, media::AudioCodec::kAudioCodecMax)
IPC_ENUM_TRAITS_MAX_VALUE(media::AudioCodecProfile,
media::AudioCodecProfile::kMaxValue)
@@ -95,6 +101,8 @@ IPC_ENUM_TRAITS_MAX_VALUE(media::EncryptionScheme,
IPC_ENUM_TRAITS_MAX_VALUE(media::HdcpVersion,
media::HdcpVersion::kHdcpVersionMax)
+IPC_ENUM_TRAITS_MAX_VALUE(media::MediaContentType, media::MediaContentType::Max)
+
IPC_ENUM_TRAITS_MAX_VALUE(media::MediaLogRecord::Type,
media::MediaLogRecord::Type::kMaxValue)
@@ -123,6 +131,12 @@ IPC_ENUM_TRAITS_MIN_MAX_VALUE(media::VideoCodecProfile,
IPC_ENUM_TRAITS_MAX_VALUE(media::VideoDecoderImplementation,
media::VideoDecoderImplementation::kMaxValue)
+IPC_ENUM_TRAITS_MAX_VALUE(media::VideoDecoderType,
+ media::VideoDecoderType::kMaxValue)
+
+IPC_ENUM_TRAITS_MAX_VALUE(media::AudioDecoderType,
+ media::AudioDecoderType::kMaxValue)
+
IPC_ENUM_TRAITS_MAX_VALUE(media::VideoPixelFormat, media::PIXEL_FORMAT_MAX)
IPC_ENUM_TRAITS_MAX_VALUE(media::VideoRotation, media::VIDEO_ROTATION_MAX)
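
The IPC_ENUM_TRAITS_MAX_VALUE registrations above depend on each enum exposing its largest legal value, so deserialization can reject out-of-range wire data. A sketch of the convention with a hypothetical enum (illustrative only; FrobnicationMode is not a real type in this tree):

// Hypothetical enum following the kMaxValue convention used above.
enum class FrobnicationMode {
  kOff = 0,
  kSlow = 1,
  kFast = 2,
  kMaxValue = kFast,  // Must always track the highest valid entry.
};

// Registered in a *_param_traits_macros.h header; any received value
// greater than kMaxValue then fails IPC validation.
IPC_ENUM_TRAITS_MAX_VALUE(FrobnicationMode, FrobnicationMode::kMaxValue)
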
diff --git a/chromium/media/base/key_systems.cc b/chromium/media/base/key_systems.cc
index d40a7c017e6..d81b872ff33 100644
--- a/chromium/media/base/key_systems.cc
+++ b/chromium/media/base/key_systems.cc
@@ -103,7 +103,12 @@ EmeCodec ToVideoEmeCodec(VideoCodec codec, VideoCodecProfile profile) {
return EME_CODEC_NONE;
}
case kCodecHEVC:
- return EME_CODEC_HEVC;
+ // Only handle Main and Main10 profiles for HEVC.
+ if (profile == HEVCPROFILE_MAIN)
+ return EME_CODEC_HEVC_PROFILE_MAIN;
+ if (profile == HEVCPROFILE_MAIN10)
+ return EME_CODEC_HEVC_PROFILE_MAIN10;
+ return EME_CODEC_NONE;
case kCodecDolbyVision:
// Only profiles 0, 4, 5, 7, 8, 9 are valid. Profile 0 and 9 are encoded
// based on AVC while profile 4, 5, 7 and 8 are based on HEVC.
@@ -674,7 +679,8 @@ EmeConfigRule KeySystemsImpl::GetContentTypeConfigRule(
// SupportedCodecs | SupportedSecureCodecs | Result
// yes | yes | SUPPORTED
// yes | no | HW_SECURE_CODECS_NOT_ALLOWED
- // no | any | NOT_SUPPORTED
+ // no | yes | HW_SECURE_CODECS_REQUIRED
+ // no | no | NOT_SUPPORTED
EmeConfigRule support = EmeConfigRule::SUPPORTED;
for (size_t i = 0; i < codecs.size(); i++) {
EmeCodec codec =
@@ -688,22 +694,29 @@ EmeConfigRule KeySystemsImpl::GetContentTypeConfigRule(
// codecs with multiple bits set, e.g. to cover multiple profiles, we check
// (codec & mask) == codec instead of (codec & mask) != 0 to make sure all
// bits are set. Same below.
- if ((codec & key_system_codec_mask & mime_type_codec_mask) != codec) {
+ if ((codec & key_system_codec_mask & mime_type_codec_mask) != codec &&
+ (codec & key_system_hw_secure_codec_mask & mime_type_codec_mask) !=
+ codec) {
DVLOG(2) << "Container/codec pair (" << container_mime_type << " / "
<< codecs[i] << ") not supported by " << key_system;
return EmeConfigRule::NOT_SUPPORTED;
}
- // Check whether the codec supports a hardware-secure mode (any level). The
- // goal is to prevent mixing of non-hardware-secure codecs with
- // hardware-secure codecs, since the mode is fixed at CDM creation.
- //
- // Because the check for regular codec support is early-exit, we don't have
- // to consider codecs that are only supported in hardware-secure mode. We
- // could do so, and make use of HW_SECURE_CODECS_REQUIRED, if it turns out
- // that hardware-secure-only codecs actually exist and are useful.
- if ((codec & key_system_hw_secure_codec_mask) != codec)
+ // Check whether the codec supports a hardware-secure mode (any level).
+ if ((codec & key_system_hw_secure_codec_mask) != codec) {
+ DCHECK_EQ(codec & key_system_codec_mask, codec);
+ if (support == EmeConfigRule::HW_SECURE_CODECS_REQUIRED)
+ return EmeConfigRule::NOT_SUPPORTED;
support = EmeConfigRule::HW_SECURE_CODECS_NOT_ALLOWED;
+ }
+
+ // Check whether the codec requires a hardware-secure mode (any level).
+ if ((codec & key_system_codec_mask) != codec) {
+ DCHECK_EQ(codec & key_system_hw_secure_codec_mask, codec);
+ if (support == EmeConfigRule::HW_SECURE_CODECS_NOT_ALLOWED)
+ return EmeConfigRule::NOT_SUPPORTED;
+ support = EmeConfigRule::HW_SECURE_CODECS_REQUIRED;
+ }
}
return support;
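
The loop above folds per-codec support into a single EmeConfigRule; a codec that forbids hardware-secure mode cannot be combined with one that requires it, since the CDM's mode is fixed at creation. The same reduction, restated as a hypothetical standalone helper matching the truth table in the comment above:

// |sw_ok|: codec is in SupportedCodecs; |hw_ok|: codec is in
// SupportedSecureCodecs. |so_far| is the rule accumulated for earlier codecs.
EmeConfigRule CombineCodecRule(EmeConfigRule so_far, bool sw_ok, bool hw_ok) {
  if (!sw_ok && !hw_ok)
    return EmeConfigRule::NOT_SUPPORTED;
  if (sw_ok && !hw_ok) {
    if (so_far == EmeConfigRule::HW_SECURE_CODECS_REQUIRED)
      return EmeConfigRule::NOT_SUPPORTED;  // Irreconcilable with REQUIRED.
    return EmeConfigRule::HW_SECURE_CODECS_NOT_ALLOWED;
  }
  if (!sw_ok && hw_ok) {
    if (so_far == EmeConfigRule::HW_SECURE_CODECS_NOT_ALLOWED)
      return EmeConfigRule::NOT_SUPPORTED;  // Irreconcilable with NOT_ALLOWED.
    return EmeConfigRule::HW_SECURE_CODECS_REQUIRED;
  }
  return so_far;  // Supported both ways; keep the accumulated rule.
}
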
diff --git a/chromium/media/base/key_systems_unittest.cc b/chromium/media/base/key_systems_unittest.cc
index 64a7a4b4336..78685a7b8a2 100644
--- a/chromium/media/base/key_systems_unittest.cc
+++ b/chromium/media/base/key_systems_unittest.cc
@@ -830,10 +830,7 @@ TEST_F(KeySystemsTest, HardwareSecureCodecs) {
EmeConfigRule::SUPPORTED,
GetVideoContentTypeConfigRule(kVideoFoo, foovideo_codec(), kExternal));
- // Codec that is supported by hardware secure codec but not otherwise is
- // treated as NOT_SUPPORTED instead of HW_SECURE_CODECS_REQUIRED. See
- // KeySystemsImpl::GetContentTypeConfigRule() for details.
- EXPECT_EQ(EmeConfigRule::NOT_SUPPORTED,
+ EXPECT_EQ(EmeConfigRule::HW_SECURE_CODECS_REQUIRED,
GetVideoContentTypeConfigRule(kVideoFoo, securefoovideo_codec(),
kExternal));
}
diff --git a/chromium/media/base/limits.h b/chromium/media/base/limits.h
index 4d1709424ad..edc8fc6a39e 100644
--- a/chromium/media/base/limits.h
+++ b/chromium/media/base/limits.h
@@ -31,8 +31,8 @@ enum {
// - Vorbis used to be limited to 96 kHz, but no longer has that
// restriction.
// - Most PC audio hardware is limited to 192 kHz, some specialized DAC
- // devices will use 384 kHz though.
- kMaxSampleRate = 384000,
+ // devices will use 768 kHz though.
+ kMaxSampleRate = 768000,
kMinSampleRate = 3000,
kMaxChannels = 32,
kMaxBytesPerSample = 4,
diff --git a/chromium/media/base/logging_override_if_enabled.h b/chromium/media/base/logging_override_if_enabled.h
index ee274e79f9f..eeae26ab1c6 100644
--- a/chromium/media/base/logging_override_if_enabled.h
+++ b/chromium/media/base/logging_override_if_enabled.h
@@ -9,6 +9,7 @@
// Warning: Do NOT include this file in .h files to avoid unexpected override.
// TODO(xhwang): Provide a way to choose which |verboselevel| to override.
+#include "build/build_config.h"
#include "media/media_buildflags.h"
#if BUILDFLAG(ENABLE_LOGGING_OVERRIDE)
@@ -16,9 +17,20 @@
#error This file must be included after base/logging.h.
#endif
+#if defined(OS_FUCHSIA)
+
+#define __DVLOG_0 VLOG(0)
+#define __DVLOG_1 VLOG(1)
+#define __DVLOG_2 VLOG(2)
+
+#else
+
#define __DVLOG_0 LOG(INFO)
#define __DVLOG_1 LOG(INFO)
#define __DVLOG_2 LOG(INFO)
+
+#endif // defined(OS_FUCHSIA)
+
#define __DVLOG_3 EAT_STREAM_PARAMETERS
#define __DVLOG_4 EAT_STREAM_PARAMETERS
#define __DVLOG_5 EAT_STREAM_PARAMETERS
diff --git a/chromium/media/base/mac/color_space_util_mac.h b/chromium/media/base/mac/color_space_util_mac.h
index 938b7fd46ab..ab2a4459aad 100644
--- a/chromium/media/base/mac/color_space_util_mac.h
+++ b/chromium/media/base/mac/color_space_util_mac.h
@@ -5,11 +5,13 @@
#ifndef MEDIA_BASE_MAC_COLOR_SPACE_UTIL_MAC_H_
#define MEDIA_BASE_MAC_COLOR_SPACE_UTIL_MAC_H_
+#include <CoreFoundation/CoreFoundation.h>
#include <CoreMedia/CoreMedia.h>
#include <CoreVideo/CoreVideo.h>
#include "media/base/media_export.h"
#include "ui/gfx/color_space.h"
+#include "ui/gfx/hdr_metadata.h"
namespace media {
@@ -17,7 +19,13 @@ MEDIA_EXPORT gfx::ColorSpace GetImageBufferColorSpace(
CVImageBufferRef image_buffer);
MEDIA_EXPORT gfx::ColorSpace GetFormatDescriptionColorSpace(
- CMFormatDescriptionRef format_description) API_AVAILABLE(macos(10.11));
+ CMFormatDescriptionRef format_description);
+
+MEDIA_EXPORT CFDataRef
+GenerateContentLightLevelInfo(const gfx::HDRMetadata& hdr_metadata);
+
+MEDIA_EXPORT CFDataRef
+GenerateMasteringDisplayColorVolume(const gfx::HDRMetadata& hdr_metadata);
} // namespace media
diff --git a/chromium/media/base/mac/color_space_util_mac.mm b/chromium/media/base/mac/color_space_util_mac.mm
index 8e76a7ef56d..799cfe29ad3 100644
--- a/chromium/media/base/mac/color_space_util_mac.mm
+++ b/chromium/media/base/mac/color_space_util_mac.mm
@@ -4,6 +4,7 @@
#include "media/base/mac/color_space_util_mac.h"
+#include <simd/simd.h>
#include <vector>
#include "base/mac/foundation_util.h"
@@ -56,14 +57,11 @@ gfx::ColorSpace::PrimaryID GetCoreVideoPrimary(CFTypeRef primaries_untyped) {
supported_primaries.push_back(
{kCVImageBufferColorPrimaries_SMPTE_C,
kCMFormatDescriptionColorPrimaries_SMPTE_C,
-
gfx::ColorSpace::PrimaryID::SMPTE240M});
- if (@available(macos 10.11, *)) {
- supported_primaries.push_back(
- {kCVImageBufferColorPrimaries_ITU_R_2020,
- kCMFormatDescriptionColorPrimaries_ITU_R_2020,
- gfx::ColorSpace::PrimaryID::BT2020});
- }
+ supported_primaries.push_back(
+ {kCVImageBufferColorPrimaries_ITU_R_2020,
+ kCMFormatDescriptionColorPrimaries_ITU_R_2020,
+ gfx::ColorSpace::PrimaryID::BT2020});
return supported_primaries;
}());
@@ -87,10 +85,6 @@ gfx::ColorSpace::TransferID GetCoreVideoTransferFn(CFTypeRef transfer_untyped,
static const base::NoDestructor<std::vector<CVImageTransferFn>>
kSupportedTransferFuncs([] {
std::vector<CVImageTransferFn> supported_transfer_funcs;
- // The constants kCMFormatDescriptionTransferFunction_ITU_R_709_2,
- // SMPTE_240M_1995, and UseGamma will compile against macOS 10.10
- // because they are #defined to their kCVImageBufferTransferFunction
- // equivalents. They are technically not present until macOS 10.11.
supported_transfer_funcs.push_back(
{kCVImageBufferTransferFunction_ITU_R_709_2,
kCMFormatDescriptionTransferFunction_ITU_R_709_2,
@@ -103,12 +97,10 @@ gfx::ColorSpace::TransferID GetCoreVideoTransferFn(CFTypeRef transfer_untyped,
{kCVImageBufferTransferFunction_UseGamma,
kCMFormatDescriptionTransferFunction_UseGamma,
gfx::ColorSpace::TransferID::CUSTOM});
- if (@available(macos 10.11, *)) {
- supported_transfer_funcs.push_back(
- {kCVImageBufferTransferFunction_ITU_R_2020,
- kCMFormatDescriptionTransferFunction_ITU_R_2020,
- gfx::ColorSpace::TransferID::BT2020_10});
- }
+ supported_transfer_funcs.push_back(
+ {kCVImageBufferTransferFunction_ITU_R_2020,
+ kCMFormatDescriptionTransferFunction_ITU_R_2020,
+ gfx::ColorSpace::TransferID::BT2020_10});
if (@available(macos 10.12, *)) {
supported_transfer_funcs.push_back(
{kCVImageBufferTransferFunction_SMPTE_ST_428_1,
@@ -199,12 +191,10 @@ gfx::ColorSpace::MatrixID GetCoreVideoMatrix(CFTypeRef matrix_untyped) {
{kCVImageBufferYCbCrMatrix_SMPTE_240M_1995,
kCMFormatDescriptionYCbCrMatrix_SMPTE_240M_1995,
gfx::ColorSpace::MatrixID::SMPTE240M});
- if (@available(macos 10.11, *)) {
- supported_matrices.push_back(
- {kCVImageBufferYCbCrMatrix_ITU_R_2020,
- kCMFormatDescriptionYCbCrMatrix_ITU_R_2020,
- gfx::ColorSpace::MatrixID::BT2020_NCL});
- }
+ supported_matrices.push_back(
+ {kCVImageBufferYCbCrMatrix_ITU_R_2020,
+ kCMFormatDescriptionYCbCrMatrix_ITU_R_2020,
+ gfx::ColorSpace::MatrixID::BT2020_NCL});
return supported_matrices;
}());
@@ -282,4 +272,65 @@ gfx::ColorSpace GetFormatDescriptionColorSpace(
format_description, kCMFormatDescriptionExtension_YCbCrMatrix));
}
+CFDataRef GenerateContentLightLevelInfo(const gfx::HDRMetadata& hdr_metadata) {
+ // This is a SMPTEST2086 Content Light Level Information box.
+ struct ContentLightLevelInfoSEI {
+ uint16_t max_content_light_level;
+ uint16_t max_frame_average_light_level;
+ } __attribute__((packed, aligned(2)));
+ static_assert(sizeof(ContentLightLevelInfoSEI) == 4, "Must be 4 bytes");
+
+ // Values are stored in big-endian...
+ ContentLightLevelInfoSEI sei;
+ sei.max_content_light_level =
+ __builtin_bswap16(hdr_metadata.max_content_light_level);
+ sei.max_frame_average_light_level =
+ __builtin_bswap16(hdr_metadata.max_frame_average_light_level);
+
+ NSData* nsdata_sei = [NSData dataWithBytes:&sei length:4];
+ return base::mac::NSToCFCast(nsdata_sei);
+}
+
+CFDataRef GenerateMasteringDisplayColorVolume(
+ const gfx::HDRMetadata& hdr_metadata) {
+ // This is a SMPTEST2086 Mastering Display Color Volume box.
+ struct MasteringDisplayColorVolumeSEI {
+ vector_ushort2 primaries[3]; // GBR
+ vector_ushort2 white_point;
+ uint32_t luminance_max;
+ uint32_t luminance_min;
+ } __attribute__((packed, aligned(4)));
+ static_assert(sizeof(MasteringDisplayColorVolumeSEI) == 24,
+ "Must be 24 bytes");
+
+ // Make a copy which we can manipulate.
+ auto md = hdr_metadata.mastering_metadata;
+
+ constexpr float kColorCoordinateUpperBound = 50000.0f;
+ md.primary_r.Scale(kColorCoordinateUpperBound);
+ md.primary_g.Scale(kColorCoordinateUpperBound);
+ md.primary_b.Scale(kColorCoordinateUpperBound);
+ md.white_point.Scale(kColorCoordinateUpperBound);
+
+ constexpr float kUnitOfMasteringLuminance = 10000.0f;
+ md.luminance_max *= kUnitOfMasteringLuminance;
+ md.luminance_min *= kUnitOfMasteringLuminance;
+
+ // Values are stored in big-endian...
+ MasteringDisplayColorVolumeSEI sei;
+ sei.primaries[0].x = __builtin_bswap16(md.primary_g.x() + 0.5f);
+ sei.primaries[0].y = __builtin_bswap16(md.primary_g.y() + 0.5f);
+ sei.primaries[1].x = __builtin_bswap16(md.primary_b.x() + 0.5f);
+ sei.primaries[1].y = __builtin_bswap16(md.primary_b.y() + 0.5f);
+ sei.primaries[2].x = __builtin_bswap16(md.primary_r.x() + 0.5f);
+ sei.primaries[2].y = __builtin_bswap16(md.primary_r.y() + 0.5f);
+ sei.white_point.x = __builtin_bswap16(md.white_point.x() + 0.5f);
+ sei.white_point.y = __builtin_bswap16(md.white_point.y() + 0.5f);
+ sei.luminance_max = __builtin_bswap32(md.luminance_max + 0.5f);
+ sei.luminance_min = __builtin_bswap32(md.luminance_min + 0.5f);
+
+ NSData* nsdata_sei = [NSData dataWithBytes:&sei length:24];
+ return base::mac::NSToCFCast(nsdata_sei);
+}
+
} // namespace media
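
The two generators above produce the CFData payloads that CoreVideo expects as HDR buffer attachments. A hedged usage sketch with a hypothetical AttachHdrMetadata helper, assuming the standard CoreVideo attachment keys (kCVImageBufferContentLightLevelInfoKey and kCVImageBufferMasteringDisplayColorVolumeKey) are available on the deployment target; the returned CFDataRefs are backed by autoreleased NSData, and CVBufferSetAttachment retains them:

#include <CoreVideo/CoreVideo.h>

#include "media/base/mac/color_space_util_mac.h"
#include "ui/gfx/hdr_metadata.h"

void AttachHdrMetadata(CVPixelBufferRef pixel_buffer,
                       const gfx::HDRMetadata& hdr_metadata) {
  CFDataRef clli = media::GenerateContentLightLevelInfo(hdr_metadata);
  CFDataRef mdcv = media::GenerateMasteringDisplayColorVolume(hdr_metadata);
  CVBufferSetAttachment(pixel_buffer, kCVImageBufferContentLightLevelInfoKey,
                        clli, kCVAttachmentMode_ShouldPropagate);
  CVBufferSetAttachment(pixel_buffer,
                        kCVImageBufferMasteringDisplayColorVolumeKey, mdcv,
                        kCVAttachmentMode_ShouldPropagate);
}
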
diff --git a/chromium/media/base/media_log_properties.cc b/chromium/media/base/media_log_properties.cc
index cdfbd8a0f56..a21cf880d0a 100644
--- a/chromium/media/base/media_log_properties.cc
+++ b/chromium/media/base/media_log_properties.cc
@@ -28,6 +28,8 @@ std::string MediaLogPropertyKeyToString(MediaLogProperty property) {
STRINGIFY(kIsRangeHeaderSupported);
STRINGIFY(kIsVideoDecryptingDemuxerStream);
STRINGIFY(kIsAudioDecryptingDemuxerStream);
+ STRINGIFY(kVideoEncoderName);
+ STRINGIFY(kIsPlatformVideoEncoder);
STRINGIFY(kAudioDecoderName);
STRINGIFY(kIsPlatformAudioDecoder);
STRINGIFY(kAudioTracks);
diff --git a/chromium/media/base/media_log_properties.h b/chromium/media/base/media_log_properties.h
index 5f6a1084443..0f8a01817e8 100644
--- a/chromium/media/base/media_log_properties.h
+++ b/chromium/media/base/media_log_properties.h
@@ -59,8 +59,8 @@ enum class MediaLogProperty {
kIsRangeHeaderSupported,
// The name of the decoder implementation currently being used to play the
- // media stream. All audio/video decoders have names, such as
- // FFMpegVideoDecoder or D3D11VideoDecoder.
+  // media stream. All audio/video decoders have ID numbers defined in
+ // decoder.h.
kVideoDecoderName,
kAudioDecoderName,
@@ -68,6 +68,10 @@ enum class MediaLogProperty {
kIsPlatformVideoDecoder,
kIsPlatformAudioDecoder,
+  // WebCodecs supports encoding video streams.
+ kVideoEncoderName,
+ kIsPlatformVideoEncoder,
+
// Whether this media player is using a decrypting demuxer for the given
// audio or video stream.
kIsVideoDecryptingDemuxerStream,
@@ -101,12 +105,14 @@ MEDIA_LOG_PROPERTY_SUPPORTS_TYPE(kIsStreaming, bool);
MEDIA_LOG_PROPERTY_SUPPORTS_TYPE(kFrameUrl, std::string);
MEDIA_LOG_PROPERTY_SUPPORTS_TYPE(kFrameTitle, std::string);
MEDIA_LOG_PROPERTY_SUPPORTS_TYPE(kIsSingleOrigin, bool);
-MEDIA_LOG_PROPERTY_SUPPORTS_TYPE(kVideoDecoderName, std::string);
+MEDIA_LOG_PROPERTY_SUPPORTS_TYPE(kVideoDecoderName, VideoDecoderType);
MEDIA_LOG_PROPERTY_SUPPORTS_TYPE(kIsPlatformVideoDecoder, bool);
MEDIA_LOG_PROPERTY_SUPPORTS_TYPE(kIsRangeHeaderSupported, bool);
MEDIA_LOG_PROPERTY_SUPPORTS_TYPE(kIsVideoDecryptingDemuxerStream, bool);
-MEDIA_LOG_PROPERTY_SUPPORTS_TYPE(kAudioDecoderName, std::string);
+MEDIA_LOG_PROPERTY_SUPPORTS_TYPE(kAudioDecoderName, AudioDecoderType);
MEDIA_LOG_PROPERTY_SUPPORTS_TYPE(kIsPlatformAudioDecoder, bool);
+MEDIA_LOG_PROPERTY_SUPPORTS_TYPE(kVideoEncoderName, std::string);
+MEDIA_LOG_PROPERTY_SUPPORTS_TYPE(kIsPlatformVideoEncoder, bool);
MEDIA_LOG_PROPERTY_SUPPORTS_TYPE(kIsAudioDecryptingDemuxerStream, bool);
MEDIA_LOG_PROPERTY_SUPPORTS_TYPE(kAudioTracks, std::vector<AudioDecoderConfig>);
MEDIA_LOG_PROPERTY_SUPPORTS_TYPE(kTextTracks, std::vector<TextTrackConfig>);
diff --git a/chromium/media/base/media_serializers.h b/chromium/media/base/media_serializers.h
index 42f89c07122..5dbff1ba7fb 100644
--- a/chromium/media/base/media_serializers.h
+++ b/chromium/media/base/media_serializers.h
@@ -12,6 +12,7 @@
#include "base/strings/stringprintf.h"
#include "media/base/audio_decoder_config.h"
#include "media/base/buffering_state.h"
+#include "media/base/decoder.h"
#include "media/base/media_serializers_base.h"
#include "media/base/status.h"
#include "media/base/status_codes.h"
@@ -135,6 +136,22 @@ struct MediaSerializer<base::TimeDelta> {
// Enum (simple)
template <>
+struct MediaSerializer<VideoDecoderType> {
+ static inline base::Value Serialize(VideoDecoderType value) {
+ return base::Value(GetDecoderName(value));
+ }
+};
+
+// Enum (simple)
+template <>
+struct MediaSerializer<AudioDecoderType> {
+ static inline base::Value Serialize(AudioDecoderType value) {
+ return base::Value(GetDecoderName(value));
+ }
+};
+
+// Enum (simple)
+template <>
struct MediaSerializer<AudioCodec> {
static inline base::Value Serialize(AudioCodec value) {
return base::Value(GetCodecName(value));
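
The MediaSerializer specializations above are the extension point for logging new enums: one template specialization per type, returning a base::Value. Adding another follows the same shape; FooCodec and GetFooCodecName below are hypothetical placeholders:

// Hypothetical: serialize FooCodec values by name for media-log output.
template <>
struct MediaSerializer<FooCodec> {
  static inline base::Value Serialize(FooCodec value) {
    return base::Value(GetFooCodecName(value));  // Assumed name helper.
  }
};
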
diff --git a/chromium/media/base/media_switches.cc b/chromium/media/base/media_switches.cc
index 07419d183fc..7215555d66c 100644
--- a/chromium/media/base/media_switches.cc
+++ b/chromium/media/base/media_switches.cc
@@ -192,6 +192,11 @@ const char kOverrideHardwareSecureCodecsForTesting[] =
const char kEnableLiveCaptionPrefForTesting[] =
"enable-live-caption-pref-for-testing";
+#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
+// Enables playback of clear (unencrypted) HEVC content for testing purposes.
+const char kEnableClearHevcForTesting[] = "enable-clear-hevc-for-testing";
+#endif
+
namespace autoplay {
// Autoplay policy that requires a document user activation.
@@ -218,6 +223,10 @@ const base::Feature kFFmpegDecodeOpaqueVP8{"FFmpegDecodeOpaqueVP8",
const base::Feature kOverlayFullscreenVideo{"overlay-fullscreen-video",
base::FEATURE_ENABLED_BY_DEFAULT};
+// TODO(crbug.com/1146594): Flip this to disabled in M92.
+const base::Feature kEnableMediaInternals{"enable-media-internals",
+ base::FEATURE_ENABLED_BY_DEFAULT};
+
// Enable Picture-in-Picture.
const base::Feature kPictureInPicture {
"PictureInPicture",
@@ -262,6 +271,20 @@ const base::Feature kMediaCastOverlayButton{"MediaCastOverlayButton",
const base::Feature kUseAndroidOverlayAggressively{
"UseAndroidOverlayAggressively", base::FEATURE_ENABLED_BY_DEFAULT};
+// If enabled, RTCVideoDecoderAdapter will wrap a DecoderStream as a video
+// decoder, rather than using MojoVideoDecoder. This causes the RTC external
+// decoder to have all the decoder selection / fallback/forward logic of the
+// non-RTC pipeline.
+// TODO(liberato): This also causes the external decoder to use software
+// decoding sometimes, which changes the interpretation of "ExternalDecoder".
+const base::Feature kUseDecoderStreamForWebRTC{
+ "UseDecoderStreamForWebRTC", base::FEATURE_DISABLED_BY_DEFAULT};
+
+// If enabled, when RTCVideoDecoderAdapter is used then SW decoders will be
+// exposed directly to WebRTC.
+const base::Feature kExposeSwDecodersToWebRTC{
+ "ExposeSwDecodersToWebRTC", base::FEATURE_DISABLED_BY_DEFAULT};
+
// Let video without audio be paused when it is playing in the background.
const base::Feature kBackgroundVideoPauseOptimization{
"BackgroundVideoPauseOptimization", base::FEATURE_ENABLED_BY_DEFAULT};
@@ -319,6 +342,10 @@ const base::Feature kD3D11VideoDecoderIgnoreWorkarounds{
const base::Feature kD3D11VideoDecoderVP9Profile2{
"D3D11VideoDecoderEnableVP9Profile2", base::FEATURE_DISABLED_BY_DEFAULT};
+// Enable D3D11VideoDecoder to decode AV1 video.
+const base::Feature kD3D11VideoDecoderAV1{"D3D11VideoDecoderEnableAV1",
+ base::FEATURE_DISABLED_BY_DEFAULT};
+
// Tell D3D11VideoDecoder not to switch the D3D11 device to multi-threaded mode.
// This is to help us track down IGD crashes.
const base::Feature kD3D11VideoDecoderSkipMultithreaded{
@@ -349,7 +376,7 @@ const base::Feature kGav1VideoDecoder{"Gav1VideoDecoder",
const base::Feature kGlobalMediaControls {
"GlobalMediaControls",
#if defined(OS_WIN) || defined(OS_MAC) || defined(OS_LINUX) || \
- BUILDFLAG(IS_LACROS)
+ BUILDFLAG(IS_CHROMEOS_LACROS)
base::FEATURE_ENABLED_BY_DEFAULT
#else
base::FEATURE_DISABLED_BY_DEFAULT
@@ -363,11 +390,11 @@ const base::Feature kGlobalMediaControlsAutoDismiss{
// Show Cast sessions in Global Media Controls. It is no-op if
// kGlobalMediaControls is not enabled.
const base::Feature kGlobalMediaControlsForCast{
- "GlobalMediaControlsForCast", base::FEATURE_DISABLED_BY_DEFAULT};
+ "GlobalMediaControlsForCast", base::FEATURE_ENABLED_BY_DEFAULT};
// Allow Global Media Controls in system tray of CrOS.
const base::Feature kGlobalMediaControlsForChromeOS{
- "GlobalMediaControlsForChromeOS", base::FEATURE_DISABLED_BY_DEFAULT};
+ "GlobalMediaControlsForChromeOS", base::FEATURE_ENABLED_BY_DEFAULT};
constexpr base::FeatureParam<kCrosGlobalMediaControlsPinOptions>::Option
kCrosGlobalMediaControlsParamOptions[] = {
@@ -391,7 +418,7 @@ const base::Feature kGlobalMediaControlsOverlayControls{
const base::Feature kGlobalMediaControlsPictureInPicture {
"GlobalMediaControlsPictureInPicture",
#if defined(OS_WIN) || defined(OS_MAC) || defined(OS_LINUX) || \
- BUILDFLAG(IS_LACROS)
+ BUILDFLAG(IS_CHROMEOS_LACROS)
base::FEATURE_ENABLED_BY_DEFAULT
#else
base::FEATURE_DISABLED_BY_DEFAULT
@@ -410,6 +437,10 @@ const base::Feature kGlobalMediaControlsModernUI{
const base::Feature kSpecCompliantCanPlayThrough{
"SpecCompliantCanPlayThrough", base::FEATURE_ENABLED_BY_DEFAULT};
+// Controls usage of SurfaceLayer for MediaStreams.
+const base::Feature kSurfaceLayerForMediaStreams{
+ "SurfaceLayerForMediaStreams", base::FEATURE_ENABLED_BY_DEFAULT};
+
// Disables the real audio output stream after silent audio has been delivered
// for too long. Should save quite a bit of power in the muted video case.
const base::Feature kSuspendMutedAudio{"SuspendMutedAudio",
@@ -428,6 +459,18 @@ const base::Feature kUseR16Texture{"use-r16-texture",
const base::Feature kUnifiedAutoplay{"UnifiedAutoplay",
base::FEATURE_ENABLED_BY_DEFAULT};
+// TODO(crbug.com/1052397): Revisit once build flag switch of lacros-chrome is
+// complete.
+#if defined(OS_LINUX) || BUILDFLAG(IS_CHROMEOS_LACROS)
+// Enable VA-API video decoding on Linux. This is already enabled by default
+// on ChromeOS, but needs an experiment on Linux.
+const base::Feature kVaapiVideoDecodeLinux{"VaapiVideoDecoder",
+ base::FEATURE_DISABLED_BY_DEFAULT};
+
+const base::Feature kVaapiVideoEncodeLinux{"VaapiVideoEncoder",
+ base::FEATURE_DISABLED_BY_DEFAULT};
+#endif // defined(OS_LINUX) || BUILDFLAG(IS_CHROMEOS_LACROS)
+
// Enable VA-API hardware decode acceleration for AV1.
const base::Feature kVaapiAV1Decoder{"VaapiAV1Decoder",
base::FEATURE_DISABLED_BY_DEFAULT};
@@ -436,6 +479,12 @@ const base::Feature kVaapiAV1Decoder{"VaapiAV1Decoder",
const base::Feature kVaapiLowPowerEncoderGen9x{
"VaapiLowPowerEncoderGen9x", base::FEATURE_DISABLED_BY_DEFAULT};
+// Deny specific (likely small) resolutions for VA-API hardware decode and
+// encode acceleration.
+// TODO(b/171041334): Enable by default once the ARC++ hw codecs issue is fixed.
+const base::Feature kVaapiEnforceVideoMinMaxResolution{
+ "VaapiEnforceVideoMinMaxResolution", base::FEATURE_DISABLED_BY_DEFAULT};
+
// Enable VA-API hardware encode acceleration for VP8.
const base::Feature kVaapiVP8Encoder{"VaapiVP8Encoder",
base::FEATURE_ENABLED_BY_DEFAULT};
@@ -444,11 +493,11 @@ const base::Feature kVaapiVP8Encoder{"VaapiVP8Encoder",
const base::Feature kVaapiVP9Encoder{"VaapiVP9Encoder",
base::FEATURE_ENABLED_BY_DEFAULT};
-#if defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_ASH)
+#if defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_CHROMEOS_ASH)
// Enable VP9 k-SVC decoding with HW decoder for webrtc use case on ChromeOS.
const base::Feature kVp9kSVCHWDecoding{"Vp9kSVCHWDecoding",
base::FEATURE_ENABLED_BY_DEFAULT};
-#endif // defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_ASH)
+#endif // defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_CHROMEOS_ASH)
// Inform video blitter of video color space.
const base::Feature kVideoBlitColorAccuracy{"video-blit-color-accuracy",
@@ -469,6 +518,10 @@ const base::Feature kLiveCaption{"LiveCaption",
const base::Feature kUseSodaForLiveCaption{"UseSodaForLiveCaption",
base::FEATURE_DISABLED_BY_DEFAULT};
+// Live Caption runs system-wide on ChromeOS, as opposed to just in the browser.
+const base::Feature kLiveCaptionSystemWideOnChromeOS{
+ "LiveCaptionSystemWideOnChromeOS", base::FEATURE_DISABLED_BY_DEFAULT};
+
// Prevents UrlProvisionFetcher from making a provisioning request. If
// specified, any provisioning request made will not be sent to the provisioning
// server, and the response will indicate a failure to communicate with the
@@ -502,7 +555,7 @@ const base::Feature kWidevineAv1ForceSupportForTesting{
// Enables handling of hardware media keys for controlling media.
const base::Feature kHardwareMediaKeyHandling {
"HardwareMediaKeyHandling",
-#if BUILDFLAG(IS_ASH) || defined(OS_WIN) || defined(OS_MAC) || \
+#if BUILDFLAG(IS_CHROMEOS_ASH) || defined(OS_WIN) || defined(OS_MAC) || \
BUILDFLAG(USE_MPRIS)
base::FEATURE_ENABLED_BY_DEFAULT
#else
@@ -540,10 +593,6 @@ const base::Feature kAutoplayIgnoreWebAudio{"AutoplayIgnoreWebAudio",
const base::Feature kAutoplayDisableSettings{"AutoplayDisableSettings",
base::FEATURE_DISABLED_BY_DEFAULT};
-// Whether we should allow autoplay whitelisting via sounds settings.
-const base::Feature kAutoplayWhitelistSettings{
- "AutoplayWhitelistSettings", base::FEATURE_ENABLED_BY_DEFAULT};
-
#if defined(OS_ANDROID)
// Should we allow video playback to use an overlay if it's not needed for
// security? Normally, we'd always want to allow this, except as part of the
@@ -610,7 +659,7 @@ const base::Feature kUsePooledSharedImageVideoProvider{
"UsePooledSharedImageVideoProvider", base::FEATURE_ENABLED_BY_DEFAULT};
#endif // defined(OS_ANDROID)
-#if BUILDFLAG(IS_ASH) && BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
+#if BUILDFLAG(IS_CHROMEOS_ASH) && BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
// Enable the hardware-accelerated direct video decoder instead of the one
// needing the VdaVideoDecoder adapter. This flag is used mainly as a
// chrome:flag for developers debugging issues. TODO(b/159825227): remove when
@@ -625,7 +674,8 @@ const base::Feature kUseChromeOSDirectVideoDecoder{
const base::Feature kUseAlternateVideoDecoderImplementation{
"UseAlternateVideoDecoderImplementation",
base::FEATURE_DISABLED_BY_DEFAULT};
-#endif // BUILDFLAG(IS_ASH) && BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
+#endif // BUILDFLAG(IS_CHROMEOS_ASH) &&
+ // BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
#if defined(OS_WIN)
// Does NV12->NV12 video copy on the main thread right before the texture's
@@ -639,6 +689,11 @@ const base::Feature kDelayCopyNV12Textures{"DelayCopyNV12Textures",
const base::Feature kDirectShowGetPhotoState{"DirectShowGetPhotoState",
base::FEATURE_ENABLED_BY_DEFAULT};
+// Includes Infrared cameras in the list returned for EnumerateDevices() on
+// Windows.
+const base::Feature kIncludeIRCamerasInDeviceEnumeration{
+ "IncludeIRCamerasInDeviceEnumeration", base::FEATURE_DISABLED_BY_DEFAULT};
+
// Enables asynchronous H264 HW encode acceleration using Media Foundation for
// Windows.
const base::Feature kMediaFoundationAsyncH264Encoding{
@@ -652,6 +707,10 @@ const base::Feature MEDIA_EXPORT kMediaFoundationAV1Decoding{
const base::Feature kMediaFoundationVideoCapture{
"MediaFoundationVideoCapture", base::FEATURE_ENABLED_BY_DEFAULT};
+// Enables MediaFoundation-based video capture with D3D11.
+const base::Feature kMediaFoundationD3D11VideoCapture{
+ "MediaFoundationD3D11VideoCapture", base::FEATURE_DISABLED_BY_DEFAULT};
+
// Enables VP8 decode acceleration for Windows.
const base::Feature MEDIA_EXPORT kMediaFoundationVP8Decoding{
"MediaFoundationVP8Decoding", base::FEATURE_DISABLED_BY_DEFAULT};
@@ -671,17 +730,19 @@ const base::Feature MEDIA_EXPORT kWasapiRawAudioCapture{
// Controls whether the next version mac capturer, including power improvements,
// zero copy operation, and other improvements, is active.
const base::Feature MEDIA_EXPORT kAVFoundationCaptureV2{
- "AVFoundationCaptureV2", base::FEATURE_ENABLED_BY_DEFAULT};
+ "AVFoundationCaptureV2", base::FEATURE_DISABLED_BY_DEFAULT};
// Controls whether or not the V2 capturer exports IOSurfaces for zero-copy.
// This feature only has any effect if kAVFoundationCaptureV2 is also enabled.
const base::Feature MEDIA_EXPORT kAVFoundationCaptureV2ZeroCopy{
"AVFoundationCaptureV2ZeroCopy", base::FEATURE_ENABLED_BY_DEFAULT};
-
-const base::Feature MEDIA_EXPORT kVideoToolboxVp9Decoding{
- "VideoToolboxVp9Decoding", base::FEATURE_DISABLED_BY_DEFAULT};
#endif // defined(OS_MAC)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+const base::Feature MEDIA_EXPORT kDeprecateLowUsageCodecs{
+ "DeprecateLowUsageCodecs", base::FEATURE_ENABLED_BY_DEFAULT};
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+
std::string GetEffectiveAutoplayPolicy(const base::CommandLine& command_line) {
// Return the autoplay policy set in the command line, if any.
if (command_line.HasSwitch(switches::kAutoplayPolicy))
@@ -731,15 +792,16 @@ const base::Feature kMediaEngagementHTTPSOnly{
// Enables Media Feeds to allow sites to provide specific recommendations for
// users.
-const base::Feature kMediaFeeds{"MediaFeeds", base::FEATURE_ENABLED_BY_DEFAULT};
+const base::Feature kMediaFeeds{"MediaFeeds",
+ base::FEATURE_DISABLED_BY_DEFAULT};
// Enables fetching Media Feeds periodically in the background.
const base::Feature kMediaFeedsBackgroundFetching{
- "MediaFeedsBackgroundFetching", base::FEATURE_ENABLED_BY_DEFAULT};
+ "MediaFeedsBackgroundFetching", base::FEATURE_DISABLED_BY_DEFAULT};
// Enables checking Media Feeds against safe search to prevent adult content.
const base::Feature kMediaFeedsSafeSearch{"MediaFeedsSafeSearch",
- base::FEATURE_ENABLED_BY_DEFAULT};
+ base::FEATURE_DISABLED_BY_DEFAULT};
// Enables experimental local learning for media. Used in the context of media
// capabilities only. Adds reporting only; does not change media behavior.
@@ -769,7 +831,7 @@ const base::Feature kMediaPowerExperiment{"MediaPowerExperiment",
// has audio focus enabled.
const base::Feature kAudioFocusDuckFlash {
"AudioFocusDuckFlash",
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
base::FEATURE_ENABLED_BY_DEFAULT
#else
base::FEATURE_DISABLED_BY_DEFAULT
@@ -797,6 +859,9 @@ const base::Feature kInternalMediaSession {
const base::Feature kKaleidoscope{"Kaleidoscope",
base::FEATURE_ENABLED_BY_DEFAULT};
+const base::Feature kKaleidoscopeInMenu{"KaleidoscopeInMenu",
+ base::FEATURE_DISABLED_BY_DEFAULT};
+
const base::Feature kKaleidoscopeForceShowFirstRunExperience{
"KaleidoscopeForceShowFirstRunExperience",
base::FEATURE_DISABLED_BY_DEFAULT};
@@ -824,17 +889,10 @@ bool IsVideoCaptureAcceleratedJpegDecodingEnabled() {
switches::kUseFakeMjpegDecodeAccelerator)) {
return true;
}
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
return true;
#endif
return false;
}
-// When enabled, causes the H264Decoder to treat each DecoderBuffer sent to it
-// as a complete frame, rather than waiting for a following indicator for frame
-// completeness. Temporary flag to allow verifying if this change breaks
-// anything.
-const base::Feature kH264DecoderBufferIsCompleteFrame{
- "H264DecoderBufferIsCompleteFrame", base::FEATURE_ENABLED_BY_DEFAULT};
-
} // namespace media
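
Each base::Feature defined above is queried at runtime through base::FeatureList, which honors --enable-features/--disable-features and field-trial state. An illustrative call site for the new kEnableMediaInternals kill switch (a sketch, not part of this change; the function name is hypothetical):

#include "base/feature_list.h"
#include "media/base/media_switches.h"

void MaybeStartMediaInternals() {
  if (!base::FeatureList::IsEnabled(media::kEnableMediaInternals))
    return;  // Feature disabled via Finch or the command line.
  // ... set up the media-internals logging backend ...
}
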
diff --git a/chromium/media/base/media_switches.h b/chromium/media/base/media_switches.h
index e08ba0324d0..62396246cf6 100644
--- a/chromium/media/base/media_switches.h
+++ b/chromium/media/base/media_switches.h
@@ -86,6 +86,10 @@ MEDIA_EXPORT extern const char kOverrideEnabledCdmInterfaceVersion[];
MEDIA_EXPORT extern const char kOverrideHardwareSecureCodecsForTesting[];
MEDIA_EXPORT extern const char kEnableLiveCaptionPrefForTesting[];
+#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
+MEDIA_EXPORT extern const char kEnableClearHevcForTesting[];
+#endif
+
namespace autoplay {
MEDIA_EXPORT extern const char kDocumentUserActivationRequiredPolicy[];
@@ -105,7 +109,6 @@ MEDIA_EXPORT extern const base::Feature kAudioFocusDuckFlash;
MEDIA_EXPORT extern const base::Feature kAudioFocusLossSuspendMediaSession;
MEDIA_EXPORT extern const base::Feature kAutoplayIgnoreWebAudio;
MEDIA_EXPORT extern const base::Feature kAutoplayDisableSettings;
-MEDIA_EXPORT extern const base::Feature kAutoplayWhitelistSettings;
MEDIA_EXPORT extern const base::Feature kBackgroundVideoPauseOptimization;
MEDIA_EXPORT extern const base::Feature kBresenhamCadence;
MEDIA_EXPORT extern const base::Feature kCdmHostVerification;
@@ -114,9 +117,12 @@ MEDIA_EXPORT extern const base::Feature kD3D11PrintCodecOnCrash;
MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoder;
MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoderIgnoreWorkarounds;
MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoderVP9Profile2;
+MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoderAV1;
MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoderSkipMultithreaded;
MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoderAlwaysCopy;
MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoderAllowOverlay;
+MEDIA_EXPORT extern const base::Feature kEnableMediaInternals;
+MEDIA_EXPORT extern const base::Feature kExposeSwDecodersToWebRTC;
MEDIA_EXPORT extern const base::Feature kExternalClearKeyForTesting;
MEDIA_EXPORT extern const base::Feature kFFmpegDecodeOpaqueVP8;
MEDIA_EXPORT extern const base::Feature kFailUrlProvisionFetcherForTesting;
@@ -130,16 +136,17 @@ MEDIA_EXPORT extern const base::Feature kGlobalMediaControlsOverlayControls;
MEDIA_EXPORT extern const base::Feature kGlobalMediaControlsPictureInPicture;
MEDIA_EXPORT extern const base::Feature kGlobalMediaControlsSeamlessTransfer;
MEDIA_EXPORT extern const base::Feature kGlobalMediaControlsModernUI;
-MEDIA_EXPORT extern const base::Feature kH264DecoderBufferIsCompleteFrame;
MEDIA_EXPORT extern const base::Feature kHardwareMediaKeyHandling;
MEDIA_EXPORT extern const base::Feature kHardwareSecureDecryption;
MEDIA_EXPORT extern const base::Feature kInternalMediaSession;
MEDIA_EXPORT extern const base::Feature kKaleidoscope;
+MEDIA_EXPORT extern const base::Feature kKaleidoscopeInMenu;
MEDIA_EXPORT extern const base::Feature
kKaleidoscopeForceShowFirstRunExperience;
MEDIA_EXPORT extern const base::Feature kKaleidoscopeModule;
MEDIA_EXPORT extern const base::Feature kKaleidoscopeModuleCacheOnly;
MEDIA_EXPORT extern const base::Feature kLiveCaption;
+MEDIA_EXPORT extern const base::Feature kLiveCaptionSystemWideOnChromeOS;
MEDIA_EXPORT extern const base::Feature kLowDelayVideoRenderingOnLiveStream;
MEDIA_EXPORT extern const base::Feature kMediaCapabilitiesQueryGpuFactories;
MEDIA_EXPORT extern const base::Feature kMediaCapabilitiesWithParameters;
@@ -164,16 +171,25 @@ MEDIA_EXPORT extern const base::Feature kRecordMediaEngagementScores;
MEDIA_EXPORT extern const base::Feature kRecordWebAudioEngagement;
MEDIA_EXPORT extern const base::Feature kResumeBackgroundVideo;
MEDIA_EXPORT extern const base::Feature kRevokeMediaSourceObjectURLOnAttach;
+MEDIA_EXPORT extern const base::Feature kSurfaceLayerForMediaStreams;
MEDIA_EXPORT extern const base::Feature kSuspendMutedAudio;
MEDIA_EXPORT extern const base::Feature kSpecCompliantCanPlayThrough;
MEDIA_EXPORT extern const base::Feature kUnifiedAutoplay;
MEDIA_EXPORT extern const base::Feature kUseAndroidOverlayAggressively;
+MEDIA_EXPORT extern const base::Feature kUseDecoderStreamForWebRTC;
MEDIA_EXPORT extern const base::Feature kUseFakeDeviceForMediaStream;
MEDIA_EXPORT extern const base::Feature kUseMediaHistoryStore;
MEDIA_EXPORT extern const base::Feature kUseR16Texture;
MEDIA_EXPORT extern const base::Feature kUseSodaForLiveCaption;
+// TODO(crbug.com/1052397): Revisit once build flag switch of lacros-chrome is
+// complete.
+#if (defined(OS_LINUX) || BUILDFLAG(IS_CHROMEOS_LACROS))
+MEDIA_EXPORT extern const base::Feature kVaapiVideoDecodeLinux;
+MEDIA_EXPORT extern const base::Feature kVaapiVideoEncodeLinux;
+#endif  // defined(OS_LINUX) || BUILDFLAG(IS_CHROMEOS_LACROS)
MEDIA_EXPORT extern const base::Feature kVaapiAV1Decoder;
MEDIA_EXPORT extern const base::Feature kVaapiLowPowerEncoderGen9x;
+MEDIA_EXPORT extern const base::Feature kVaapiEnforceVideoMinMaxResolution;
MEDIA_EXPORT extern const base::Feature kVaapiVP8Encoder;
MEDIA_EXPORT extern const base::Feature kVaapiVP9Encoder;
MEDIA_EXPORT extern const base::Feature kVideoBlitColorAccuracy;
@@ -184,9 +200,9 @@ MEDIA_EXPORT extern const base::Feature kResolutionBasedDecoderPriority;
MEDIA_EXPORT extern const base::Feature kForceHardwareVideoDecoders;
MEDIA_EXPORT extern const base::Feature kForceHardwareAudioDecoders;
-#if defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_ASH)
+#if defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_CHROMEOS_ASH)
MEDIA_EXPORT extern const base::Feature kVp9kSVCHWDecoding;
-#endif // defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_ASH)
+#endif // defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_CHROMEOS_ASH)
#if defined(OS_ANDROID)
MEDIA_EXPORT extern const base::Feature kAllowNonSecureOverlays;
@@ -203,26 +219,32 @@ MEDIA_EXPORT extern const base::Feature kUseAudioLatencyFromHAL;
MEDIA_EXPORT extern const base::Feature kUsePooledSharedImageVideoProvider;
#endif // defined(OS_ANDROID)
-#if BUILDFLAG(IS_ASH) && BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
+#if BUILDFLAG(IS_CHROMEOS_ASH) && BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
MEDIA_EXPORT extern const base::Feature kUseChromeOSDirectVideoDecoder;
MEDIA_EXPORT extern const base::Feature kUseAlternateVideoDecoderImplementation;
-#endif // BUILDFLAG(IS_ASH) && BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
+#endif // BUILDFLAG(IS_CHROMEOS_ASH) &&
+ // BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
#if defined(OS_WIN)
MEDIA_EXPORT extern const base::Feature kDelayCopyNV12Textures;
MEDIA_EXPORT extern const base::Feature kDirectShowGetPhotoState;
+MEDIA_EXPORT extern const base::Feature kIncludeIRCamerasInDeviceEnumeration;
MEDIA_EXPORT extern const base::Feature kMediaFoundationAsyncH264Encoding;
MEDIA_EXPORT extern const base::Feature kMediaFoundationAV1Decoding;
MEDIA_EXPORT extern const base::Feature kMediaFoundationVideoCapture;
MEDIA_EXPORT extern const base::Feature kMediaFoundationVP8Decoding;
+MEDIA_EXPORT extern const base::Feature kMediaFoundationD3D11VideoCapture;
MEDIA_EXPORT extern const base::Feature kWasapiRawAudioCapture;
#endif // defined(OS_WIN)
#if defined(OS_MAC)
MEDIA_EXPORT extern const base::Feature kAVFoundationCaptureV2;
MEDIA_EXPORT extern const base::Feature kAVFoundationCaptureV2ZeroCopy;
-MEDIA_EXPORT extern const base::Feature kVideoToolboxVp9Decoding;
+#endif
+
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+MEDIA_EXPORT extern const base::Feature kDeprecateLowUsageCodecs;
#endif
// Based on a |command_line| and the current platform, returns the effective
diff --git a/chromium/media/base/media_track.h b/chromium/media/base/media_track.h
index a4cbb7a0240..c0e020a9caa 100644
--- a/chromium/media/base/media_track.h
+++ b/chromium/media/base/media_track.h
@@ -7,7 +7,7 @@
#include <string>
-#include "base/util/type_safety/strong_alias.h"
+#include "base/types/strong_alias.h"
#include "media/base/media_export.h"
#include "media/base/stream_parser.h"
@@ -16,10 +16,10 @@ namespace media {
class MEDIA_EXPORT MediaTrack {
public:
enum Type { Text, Audio, Video };
- using Id = util::StrongAlias<class IdTag, std::string>;
- using Kind = util::StrongAlias<class KindTag, std::string>;
- using Label = util::StrongAlias<class LabelTag, std::string>;
- using Language = util::StrongAlias<class LanguageTag, std::string>;
+ using Id = base::StrongAlias<class IdTag, std::string>;
+ using Kind = base::StrongAlias<class KindTag, std::string>;
+ using Label = base::StrongAlias<class LabelTag, std::string>;
+ using Language = base::StrongAlias<class LanguageTag, std::string>;
MediaTrack(Type type,
StreamParser::TrackId bytestream_track_id,
const Kind& kind,
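
The switch from util::StrongAlias to base::StrongAlias above is mechanical; the alias still prevents the four string-typed track fields from being mixed up at compile time. A small usage sketch, assuming base::StrongAlias's explicit constructor and value() accessor:

#include <string>

#include "media/base/media_track.h"

std::string StrongAliasExample() {
  media::MediaTrack::Id id("audio-1");
  media::MediaTrack::Label label("Director commentary");
  // id = label;  // Would not compile: Id and Label are distinct types.
  return id.value() + ": " + label.value();  // Explicit unwrap when needed.
}
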
diff --git a/chromium/media/base/mime_util_internal.cc b/chromium/media/base/mime_util_internal.cc
index 39d62476358..6c554eb112e 100644
--- a/chromium/media/base/mime_util_internal.cc
+++ b/chromium/media/base/mime_util_internal.cc
@@ -565,13 +565,12 @@ bool MimeUtil::IsCodecSupportedOnAndroid(
case THEORA:
return false;
- // AV1 is not supported on Android yet.
- case AV1:
- return false;
-
// ----------------------------------------------------------------------
// The remaining codecs may be supported depending on platform abilities.
// ----------------------------------------------------------------------
+ case AV1:
+ return BUILDFLAG(ENABLE_AV1_DECODER);
+
case MPEG2_AAC:
// MPEG2_AAC cannot be used in HLS (mpegurl suffix), but this is enforced
// in the parsing step by excluding MPEG2_AAC from the list of
diff --git a/chromium/media/base/mime_util_unittest.cc b/chromium/media/base/mime_util_unittest.cc
index c539b94b976..ae9451dab30 100644
--- a/chromium/media/base/mime_util_unittest.cc
+++ b/chromium/media/base/mime_util_unittest.cc
@@ -563,7 +563,6 @@ TEST(IsCodecSupportedOnAndroidTest, EncryptedCodecBehavior) {
switch (codec) {
// These codecs are never supported by the Android platform.
case MimeUtil::INVALID_CODEC:
- case MimeUtil::AV1:
case MimeUtil::MPEG_H_AUDIO:
case MimeUtil::THEORA:
EXPECT_FALSE(result);
@@ -611,6 +610,10 @@ TEST(IsCodecSupportedOnAndroidTest, EncryptedCodecBehavior) {
case MimeUtil::EAC3:
EXPECT_EQ(HasEac3Support(), result);
break;
+
+ case MimeUtil::AV1:
+ EXPECT_EQ(BUILDFLAG(ENABLE_AV1_DECODER), result);
+ break;
}
});
}
@@ -630,7 +633,6 @@ TEST(IsCodecSupportedOnAndroidTest, ClearCodecBehavior) {
case MimeUtil::INVALID_CODEC:
case MimeUtil::MPEG_H_AUDIO:
case MimeUtil::THEORA:
- case MimeUtil::AV1:
EXPECT_FALSE(result);
break;
@@ -671,6 +673,10 @@ TEST(IsCodecSupportedOnAndroidTest, ClearCodecBehavior) {
case MimeUtil::EAC3:
EXPECT_EQ(HasEac3Support(), result);
break;
+
+ case MimeUtil::AV1:
+ EXPECT_EQ(BUILDFLAG(ENABLE_AV1_DECODER), result);
+ break;
}
});
}
diff --git a/chromium/media/base/mock_filters.cc b/chromium/media/base/mock_filters.cc
index f7fc5c0cf91..4e8d7c481aa 100644
--- a/chromium/media/base/mock_filters.cc
+++ b/chromium/media/base/mock_filters.cc
@@ -84,6 +84,7 @@ MockVideoDecoder::MockVideoDecoder(bool is_platform_decoder,
supports_decryption_(supports_decryption),
decoder_name_(std::move(decoder_name)) {
ON_CALL(*this, CanReadWithoutStalling()).WillByDefault(Return(true));
+ ON_CALL(*this, IsOptimizedForRTC()).WillByDefault(Return(false));
}
MockVideoDecoder::~MockVideoDecoder() = default;
@@ -100,6 +101,15 @@ std::string MockVideoDecoder::GetDisplayName() const {
return decoder_name_;
}
+VideoDecoderType MockVideoDecoder::GetDecoderType() const {
+ return VideoDecoderType::kUnknown;
+}
+
+MockAudioEncoder::MockAudioEncoder() = default;
+MockAudioEncoder::~MockAudioEncoder() {
+ OnDestruct();
+}
+
MockVideoEncoder::MockVideoEncoder() = default;
MockVideoEncoder::~MockVideoEncoder() {
Dtor();
@@ -131,6 +141,10 @@ std::string MockAudioDecoder::GetDisplayName() const {
return decoder_name_;
}
+AudioDecoderType MockAudioDecoder::GetDecoderType() const {
+ return AudioDecoderType::kUnknown;
+}
+
MockRendererClient::MockRendererClient() = default;
MockRendererClient::~MockRendererClient() = default;
diff --git a/chromium/media/base/mock_filters.h b/chromium/media/base/mock_filters.h
index 832c7b4650d..f9fae0efb25 100644
--- a/chromium/media/base/mock_filters.h
+++ b/chromium/media/base/mock_filters.h
@@ -16,6 +16,7 @@
#include "build/build_config.h"
#include "media/base/audio_decoder.h"
#include "media/base/audio_decoder_config.h"
+#include "media/base/audio_encoder.h"
#include "media/base/audio_parameters.h"
#include "media/base/audio_renderer.h"
#include "media/base/callback_registry.h"
@@ -70,8 +71,8 @@ class MockPipelineClient : public Pipeline::Client {
MOCK_METHOD1(OnVideoOpacityChange, void(bool));
MOCK_METHOD1(OnVideoFrameRateChange, void(base::Optional<int>));
MOCK_METHOD0(OnVideoAverageKeyframeDistanceUpdate, void());
- MOCK_METHOD1(OnAudioDecoderChange, void(const PipelineDecoderInfo&));
- MOCK_METHOD1(OnVideoDecoderChange, void(const PipelineDecoderInfo&));
+ MOCK_METHOD1(OnAudioDecoderChange, void(const AudioDecoderInfo&));
+ MOCK_METHOD1(OnVideoDecoderChange, void(const VideoDecoderInfo&));
MOCK_METHOD1(OnRemotePlayStateChange, void(MediaStatus::State state));
};
@@ -117,6 +118,7 @@ class MockPipeline : public Pipeline {
MOCK_METHOD1(SetVolume, void(float));
MOCK_METHOD1(SetLatencyHint, void(base::Optional<base::TimeDelta>));
MOCK_METHOD1(SetPreservesPitch, void(bool));
+ MOCK_METHOD1(SetAutoplayInitiated, void(bool));
// TODO(sandersd): These should probably have setters too.
MOCK_CONST_METHOD0(GetMediaTime, base::TimeDelta());
@@ -228,6 +230,7 @@ class MockVideoDecoder : public VideoDecoder {
bool IsPlatformDecoder() const override;
bool SupportsDecryption() const override;
std::string GetDisplayName() const override;
+ VideoDecoderType GetDecoderType() const override;
// VideoDecoder implementation.
void Initialize(const VideoDecoderConfig& config,
@@ -254,6 +257,7 @@ class MockVideoDecoder : public VideoDecoder {
MOCK_CONST_METHOD0(GetMaxDecodeRequests, int());
MOCK_CONST_METHOD0(CanReadWithoutStalling, bool());
MOCK_CONST_METHOD0(NeedsBitstreamConversion, bool());
+ MOCK_CONST_METHOD0(IsOptimizedForRTC, bool());
private:
const bool is_platform_decoder_;
@@ -262,6 +266,35 @@ class MockVideoDecoder : public VideoDecoder {
DISALLOW_COPY_AND_ASSIGN(MockVideoDecoder);
};
+class MockAudioEncoder : public AudioEncoder {
+ public:
+ MockAudioEncoder();
+ ~MockAudioEncoder() override;
+
+ // AudioEncoder implementation.
+ MOCK_METHOD(void,
+ Initialize,
+ (const AudioEncoder::Options& options,
+ AudioEncoder::OutputCB output_cb,
+ AudioEncoder::StatusCB done_cb),
+ (override));
+
+ MOCK_METHOD(void,
+ Encode,
+ (std::unique_ptr<AudioBus> audio_bus,
+ base::TimeTicks capture_time,
+ AudioEncoder::StatusCB done_cb),
+ (override));
+
+ MOCK_METHOD(void, Flush, (AudioEncoder::StatusCB done_cb), (override));
+
+ // A function for mocking destructor calls
+ MOCK_METHOD(void, OnDestruct, ());
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(MockAudioEncoder);
+};
+
class MockVideoEncoder : public VideoEncoder {
public:
MockVideoEncoder();
@@ -312,6 +345,7 @@ class MockAudioDecoder : public AudioDecoder {
bool IsPlatformDecoder() const override;
bool SupportsDecryption() const override;
std::string GetDisplayName() const override;
+ AudioDecoderType GetDecoderType() const override;
// AudioDecoder implementation.
void Initialize(const AudioDecoderConfig& config,
@@ -414,6 +448,7 @@ class MockAudioRenderer : public AudioRenderer {
MOCK_METHOD1(SetLatencyHint,
void(base::Optional<base::TimeDelta> latency_hint));
MOCK_METHOD1(SetPreservesPitch, void(bool));
+ MOCK_METHOD1(SetAutoplayInitiated, void(bool));
private:
DISALLOW_COPY_AND_ASSIGN(MockAudioRenderer);
@@ -436,6 +471,7 @@ class MockRenderer : public Renderer {
PipelineStatusCallback& init_cb));
MOCK_METHOD1(SetLatencyHint, void(base::Optional<base::TimeDelta>));
MOCK_METHOD1(SetPreservesPitch, void(bool));
+ MOCK_METHOD1(SetAutoplayInitiated, void(bool));
void Flush(base::OnceClosure flush_cb) override { OnFlush(flush_cb); }
MOCK_METHOD1(OnFlush, void(base::OnceClosure& flush_cb));
MOCK_METHOD1(StartPlayingFrom, void(base::TimeDelta timestamp));
@@ -562,10 +598,10 @@ class MockDecryptor : public Decryptor {
void(const VideoDecoderConfig& config, DecoderInitCB init_cb));
MOCK_METHOD2(DecryptAndDecodeAudio,
void(scoped_refptr<DecoderBuffer> encrypted,
- const AudioDecodeCB& audio_decode_cb));
+ AudioDecodeCB audio_decode_cb));
MOCK_METHOD2(DecryptAndDecodeVideo,
void(scoped_refptr<DecoderBuffer> encrypted,
- const VideoDecodeCB& video_decode_cb));
+ VideoDecodeCB video_decode_cb));
MOCK_METHOD1(ResetDecoder, void(StreamType stream_type));
MOCK_METHOD1(DeinitializeDecoder, void(StreamType stream_type));
MOCK_METHOD0(CanAlwaysDecrypt, bool());
@@ -582,9 +618,9 @@ class MockCdmContext : public CdmContext {
MOCK_METHOD1(RegisterEventCB,
std::unique_ptr<CallbackRegistration>(EventCB event_cb));
MOCK_METHOD0(GetDecryptor, Decryptor*());
- MOCK_METHOD0(RequiresMediaFoundationRenderer, bool());
#if defined(OS_WIN)
+ MOCK_METHOD0(RequiresMediaFoundationRenderer, bool());
MOCK_METHOD1(GetMediaFoundationCdmProxy,
bool(GetMediaFoundationCdmProxyCB get_mf_cdm_proxy_cb));
#endif
diff --git a/chromium/media/base/null_video_sink.h b/chromium/media/base/null_video_sink.h
index 3b972a9a156..988799c4881 100644
--- a/chromium/media/base/null_video_sink.h
+++ b/chromium/media/base/null_video_sink.h
@@ -66,7 +66,7 @@ class MEDIA_EXPORT NullVideoSink : public VideoRendererSink {
RenderCallback* callback_;
// Manages cancellation of periodic Render() callback task.
- base::CancelableClosure cancelable_worker_;
+ base::CancelableRepeatingClosure cancelable_worker_;
// Used to determine when a new frame is received.
scoped_refptr<VideoFrame> last_frame_;
diff --git a/chromium/media/base/offloading_audio_encoder.cc b/chromium/media/base/offloading_audio_encoder.cc
new file mode 100644
index 00000000000..a44788396d4
--- /dev/null
+++ b/chromium/media/base/offloading_audio_encoder.cc
@@ -0,0 +1,76 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/offloading_audio_encoder.h"
+
+#include "base/bind_post_task.h"
+#include "base/sequenced_task_runner.h"
+#include "base/task/task_traits.h"
+#include "base/task/thread_pool.h"
+#include "base/threading/sequenced_task_runner_handle.h"
+
+namespace media {
+
+OffloadingAudioEncoder::OffloadingAudioEncoder(
+ std::unique_ptr<AudioEncoder> wrapped_encoder,
+ const scoped_refptr<base::SequencedTaskRunner> work_runner,
+ const scoped_refptr<base::SequencedTaskRunner> callback_runner)
+ : wrapped_encoder_(std::move(wrapped_encoder)),
+ work_runner_(std::move(work_runner)),
+ callback_runner_(std::move(callback_runner)) {
+ DCHECK(wrapped_encoder_);
+ DCHECK(work_runner_);
+ DCHECK(callback_runner_);
+ DCHECK_NE(callback_runner_, work_runner_);
+}
+
+OffloadingAudioEncoder::OffloadingAudioEncoder(
+ std::unique_ptr<AudioEncoder> wrapped_encoder)
+ : OffloadingAudioEncoder(std::move(wrapped_encoder),
+ base::ThreadPool::CreateSequencedTaskRunner(
+ {base::TaskPriority::USER_BLOCKING}),
+ base::SequencedTaskRunnerHandle::Get()) {}
+
+void OffloadingAudioEncoder::Initialize(const Options& options,
+ OutputCB output_cb,
+ StatusCB done_cb) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ work_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&AudioEncoder::Initialize,
+ base::Unretained(wrapped_encoder_.get()),
+ options, WrapCallback(std::move(output_cb)),
+ WrapCallback(std::move(done_cb))));
+}
+
+void OffloadingAudioEncoder::Encode(std::unique_ptr<AudioBus> audio_bus,
+ base::TimeTicks capture_time,
+ StatusCB done_cb) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ work_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&AudioEncoder::Encode,
+ base::Unretained(wrapped_encoder_.get()),
+ std::move(audio_bus), capture_time,
+ WrapCallback(std::move(done_cb))));
+}
+
+void OffloadingAudioEncoder::Flush(StatusCB done_cb) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ work_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&AudioEncoder::Flush,
+ base::Unretained(wrapped_encoder_.get()),
+ WrapCallback(std::move(done_cb))));
+}
+
+OffloadingAudioEncoder::~OffloadingAudioEncoder() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ work_runner_->DeleteSoon(FROM_HERE, std::move(wrapped_encoder_));
+}
+
+template <class T>
+T OffloadingAudioEncoder::WrapCallback(T cb) {
+ DCHECK(callback_runner_);
+ return base::BindPostTask(callback_runner_, std::move(cb));
+}
+
+} // namespace media
\ No newline at end of file
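The WrapCallback() helper above relies on base::BindPostTask() to trampoline replies back to the callback sequence. A minimal sketch of that pattern in isolation, assuming Chromium's //base callback API (OnEncodeDone is a placeholder, not part of this patch):

#include "base/bind.h"
#include "base/bind_post_task.h"
#include "base/threading/sequenced_task_runner_handle.h"
#include "media/base/status.h"

// Placeholder consumer; will always run on the sequence captured below.
void OnEncodeDone(media::Status status) {}

void Example() {
  // Capture the current (callback) sequence.
  auto callback_runner = base::SequencedTaskRunnerHandle::Get();
  // The returned callback may be run from any sequence; BindPostTask()
  // re-posts the bound callback and its arguments to |callback_runner|
  // instead of running it in place.
  auto cb = base::BindPostTask(callback_runner, base::BindOnce(&OnEncodeDone));
  std::move(cb).Run(media::OkStatus());  // OnEncodeDone runs asynchronously.
}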
diff --git a/chromium/media/base/offloading_audio_encoder.h b/chromium/media/base/offloading_audio_encoder.h
new file mode 100644
index 00000000000..39f421821a4
--- /dev/null
+++ b/chromium/media/base/offloading_audio_encoder.h
@@ -0,0 +1,62 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_OFFLOADING_AUDIO_ENCODER_H_
+#define MEDIA_BASE_OFFLOADING_AUDIO_ENCODER_H_
+
+#include <memory>
+#include <type_traits>
+
+#include "base/sequence_checker.h"
+#include "media/base/audio_encoder.h"
+
+namespace base {
+class SequencedTaskRunner;
+}
+
+namespace media {
+
+// A wrapper around an audio encoder that offloads all calls to a dedicated
+// task runner. It's used to move synchronous software encoding work off the
+// current (main) thread.
+class MEDIA_EXPORT OffloadingAudioEncoder final : public AudioEncoder {
+ public:
+ // |work_runner| - the task runner on which all encoding work is done.
+ // |callback_runner| - the task runner on which all of the encoder's
+ // callbacks are executed.
+ OffloadingAudioEncoder(
+ std::unique_ptr<AudioEncoder> wrapped_encoder,
+ const scoped_refptr<base::SequencedTaskRunner> work_runner,
+ const scoped_refptr<base::SequencedTaskRunner> callback_runner);
+
+ // Uses the current task runner for callbacks and asks the thread pool for a
+ // new task runner to do the actual encoding work.
+ explicit OffloadingAudioEncoder(
+ std::unique_ptr<AudioEncoder> wrapped_encoder);
+
+ ~OffloadingAudioEncoder() override;
+
+ void Initialize(const Options& options,
+ OutputCB output_cb,
+ StatusCB done_cb) override;
+
+ void Encode(std::unique_ptr<AudioBus> audio_bus,
+ base::TimeTicks capture_time,
+ StatusCB done_cb) override;
+
+ void Flush(StatusCB done_cb) override;
+
+ private:
+ template <class T>
+ T WrapCallback(T cb);
+
+ std::unique_ptr<AudioEncoder> wrapped_encoder_;
+ const scoped_refptr<base::SequencedTaskRunner> work_runner_;
+ const scoped_refptr<base::SequencedTaskRunner> callback_runner_;
+ SEQUENCE_CHECKER(sequence_checker_);
+};
+
+} // namespace media
+
+#endif // MEDIA_BASE_OFFLOADING_AUDIO_ENCODER_H_
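A hedged usage sketch for the header above: wrapping a blocking software encoder so its work leaves the main thread. CreateSoftwareAudioEncoder() is an assumed factory, not part of this patch:

#include <memory>
#include <utility>

#include "media/base/audio_encoder.h"
#include "media/base/offloading_audio_encoder.h"

// Assumed helper that builds a synchronous software encoder.
std::unique_ptr<media::AudioEncoder> CreateSoftwareAudioEncoder();

std::unique_ptr<media::AudioEncoder> MakeOffloadedEncoder() {
  // Initialize()/Encode()/Flush() now hop to a pooled sequence, while all
  // callbacks are posted back to the sequence this function runs on.
  return std::make_unique<media::OffloadingAudioEncoder>(
      CreateSoftwareAudioEncoder());
}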
diff --git a/chromium/media/base/offloading_audio_encoder_unittest.cc b/chromium/media/base/offloading_audio_encoder_unittest.cc
new file mode 100644
index 00000000000..aba0fd86ab7
--- /dev/null
+++ b/chromium/media/base/offloading_audio_encoder_unittest.cc
@@ -0,0 +1,127 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <memory>
+#include <vector>
+
+#include "base/bind.h"
+#include "base/callback_helpers.h"
+#include "base/run_loop.h"
+#include "base/sequenced_task_runner.h"
+#include "base/test/bind.h"
+#include "base/test/gmock_callback_support.h"
+#include "base/test/task_environment.h"
+#include "media/base/media_util.h"
+#include "media/base/mock_filters.h"
+#include "media/base/offloading_audio_encoder.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ::base::test::RunCallback;
+using ::base::test::RunOnceCallback;
+using ::testing::_;
+using ::testing::DoAll;
+using ::testing::Invoke;
+using ::testing::Return;
+
+namespace media {
+
+class OffloadingAudioEncoderTest : public testing::Test {
+ protected:
+ void SetUp() override {
+ auto mock_audio_encoder = std::make_unique<MockAudioEncoder>();
+ mock_audio_encoder_ = mock_audio_encoder.get();
+ work_runner_ = base::ThreadPool::CreateSequencedTaskRunner({});
+ callback_runner_ = base::SequencedTaskRunnerHandle::Get();
+ offloading_encoder_ = std::make_unique<OffloadingAudioEncoder>(
+ std::move(mock_audio_encoder), work_runner_, callback_runner_);
+ EXPECT_CALL(*mock_audio_encoder_, OnDestruct()).WillOnce(Invoke([this]() {
+ EXPECT_TRUE(work_runner_->RunsTasksInCurrentSequence());
+ }));
+ }
+
+ void RunLoop() { task_environment_.RunUntilIdle(); }
+
+ base::test::TaskEnvironment task_environment_;
+ scoped_refptr<base::SequencedTaskRunner> work_runner_;
+ scoped_refptr<base::SequencedTaskRunner> callback_runner_;
+ MockAudioEncoder* mock_audio_encoder_;
+ std::unique_ptr<OffloadingAudioEncoder> offloading_encoder_;
+};
+
+TEST_F(OffloadingAudioEncoderTest, Initialize) {
+ bool called_done = false;
+ bool called_output = false;
+ AudioEncoder::Options options;
+ AudioEncoder::OutputCB output_cb = base::BindLambdaForTesting(
+ [&](EncodedAudioBuffer, base::Optional<AudioEncoder::CodecDescription>) {
+ EXPECT_TRUE(callback_runner_->RunsTasksInCurrentSequence());
+ called_output = true;
+ });
+ AudioEncoder::StatusCB done_cb = base::BindLambdaForTesting([&](Status s) {
+ EXPECT_TRUE(callback_runner_->RunsTasksInCurrentSequence());
+ called_done = true;
+ });
+
+ EXPECT_CALL(*mock_audio_encoder_, Initialize(_, _, _))
+ .WillOnce(Invoke([this](const AudioEncoder::Options& options,
+ AudioEncoder::OutputCB output_cb,
+ AudioEncoder::StatusCB done_cb) {
+ EXPECT_TRUE(work_runner_->RunsTasksInCurrentSequence());
+ AudioParameters params;
+ EncodedAudioBuffer buf(params, nullptr, 0, base::TimeTicks());
+ std::move(done_cb).Run(Status());
+
+ // Usually |output_cb| is not called by Initialize(), but for this
+ // test it doesn't matter. We only care about the task runner used
+ // for running |output_cb|, not about what triggers the callback.
+ std::move(output_cb).Run(std::move(buf), {});
+ }));
+
+ offloading_encoder_->Initialize(options, std::move(output_cb),
+ std::move(done_cb));
+ RunLoop();
+ EXPECT_TRUE(called_done);
+ EXPECT_TRUE(called_output);
+}
+
+TEST_F(OffloadingAudioEncoderTest, Encode) {
+ bool called_done = false;
+ AudioEncoder::StatusCB done_cb = base::BindLambdaForTesting([&](Status s) {
+ EXPECT_TRUE(callback_runner_->RunsTasksInCurrentSequence());
+ called_done = true;
+ });
+
+ EXPECT_CALL(*mock_audio_encoder_, Encode(_, _, _))
+ .WillOnce(Invoke([this](std::unique_ptr<AudioBus> audio_bus,
+ base::TimeTicks capture_time,
+ AudioEncoder::StatusCB done_cb) {
+ EXPECT_TRUE(work_runner_->RunsTasksInCurrentSequence());
+ std::move(done_cb).Run(Status());
+ }));
+
+ base::TimeTicks ts;
+ offloading_encoder_->Encode(nullptr, ts, std::move(done_cb));
+ RunLoop();
+ EXPECT_TRUE(called_done);
+}
+
+TEST_F(OffloadingAudioEncoderTest, Flush) {
+ bool called_done = false;
+ AudioEncoder::StatusCB done_cb = base::BindLambdaForTesting([&](Status s) {
+ EXPECT_TRUE(callback_runner_->RunsTasksInCurrentSequence());
+ called_done = true;
+ });
+
+ EXPECT_CALL(*mock_audio_encoder_, Flush(_))
+ .WillOnce(Invoke([this](AudioEncoder::StatusCB done_cb) {
+ EXPECT_TRUE(work_runner_->RunsTasksInCurrentSequence());
+ std::move(done_cb).Run(Status());
+ }));
+
+ offloading_encoder_->Flush(std::move(done_cb));
+ RunLoop();
+ EXPECT_TRUE(called_done);
+}
+
+} // namespace media
diff --git a/chromium/media/base/offloading_video_encoder.cc b/chromium/media/base/offloading_video_encoder.cc
index 02acf7135fe..473edb9e727 100644
--- a/chromium/media/base/offloading_video_encoder.cc
+++ b/chromium/media/base/offloading_video_encoder.cc
@@ -4,10 +4,11 @@
#include "media/base/offloading_video_encoder.h"
+#include "base/bind_post_task.h"
#include "base/sequenced_task_runner.h"
#include "base/task/task_traits.h"
#include "base/task/thread_pool.h"
-#include "media/base/bind_to_current_loop.h"
+#include "base/threading/sequenced_task_runner_handle.h"
#include "media/base/video_frame.h"
namespace media {
@@ -83,7 +84,7 @@ OffloadingVideoEncoder::~OffloadingVideoEncoder() {
template <class T>
T OffloadingVideoEncoder::WrapCallback(T cb) {
DCHECK(callback_runner_);
- return media::BindToLoop(callback_runner_.get(), std::move(cb));
+ return base::BindPostTask(callback_runner_, std::move(cb));
}
-} // namespace media
\ No newline at end of file
+} // namespace media
diff --git a/chromium/media/base/pipeline.h b/chromium/media/base/pipeline.h
index 72a22a031c9..3e1180fb019 100644
--- a/chromium/media/base/pipeline.h
+++ b/chromium/media/base/pipeline.h
@@ -79,8 +79,8 @@ class MEDIA_EXPORT Pipeline {
// Executed whenever the underlying AudioDecoder or VideoDecoder changes
// during playback.
- virtual void OnAudioDecoderChange(const PipelineDecoderInfo& info) = 0;
- virtual void OnVideoDecoderChange(const PipelineDecoderInfo& info) = 0;
+ virtual void OnAudioDecoderChange(const AudioDecoderInfo& info) = 0;
+ virtual void OnVideoDecoderChange(const VideoDecoderInfo& info) = 0;
// Executed whenever the video frame rate changes. |fps| will be unset if
// the frame rate is unstable. The duration used for the frame rate is
@@ -229,6 +229,9 @@ class MEDIA_EXPORT Pipeline {
// different than 1.0.
virtual void SetPreservesPitch(bool preserves_pitch) = 0;
+ // Sets a flag indicating whether the audio stream was initiated by autoplay.
+ virtual void SetAutoplayInitiated(bool autoplay_initiated) = 0;
+
// Returns the current media playback time, which progresses from 0 until
// GetMediaDuration().
virtual base::TimeDelta GetMediaTime() const = 0;
diff --git a/chromium/media/base/pipeline_impl.cc b/chromium/media/base/pipeline_impl.cc
index 67f988b9dde..2d4cdcc0274 100644
--- a/chromium/media/base/pipeline_impl.cc
+++ b/chromium/media/base/pipeline_impl.cc
@@ -18,8 +18,10 @@
#include "base/synchronization/lock.h"
#include "base/synchronization/waitable_event.h"
#include "base/threading/thread_task_runner_handle.h"
+#include "build/build_config.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/cdm_context.h"
+#include "media/base/decoder.h"
#include "media/base/demuxer.h"
#include "media/base/media_log.h"
#include "media/base/media_switches.h"
@@ -70,6 +72,7 @@ class PipelineImpl::RendererWrapper final : public DemuxerHost,
void SetVolume(float volume);
void SetLatencyHint(base::Optional<base::TimeDelta> latency_hint);
void SetPreservesPitch(bool preserves_pitch);
+ void SetAutoplayInitiated(bool autoplay_initiated);
base::TimeDelta GetMediaTime() const;
Ranges<base::TimeDelta> GetBufferedTimeRanges() const;
bool DidLoadingProgress();
@@ -196,6 +199,8 @@ class PipelineImpl::RendererWrapper final : public DemuxerHost,
// By default, apply pitch adjustments.
bool preserves_pitch_ = true;
+ bool autoplay_initiated_ = false;
+
// Lock used to serialize |shared_state_|.
// TODO(crbug.com/893739): Add GUARDED_BY annotations.
mutable base::Lock shared_state_lock_;
@@ -496,6 +501,18 @@ void PipelineImpl::RendererWrapper::SetPreservesPitch(bool preserves_pitch) {
shared_state_.renderer->SetPreservesPitch(preserves_pitch_);
}
+void PipelineImpl::RendererWrapper::SetAutoplayInitiated(
+ bool autoplay_initiated) {
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ if (autoplay_initiated_ == autoplay_initiated)
+ return;
+
+ autoplay_initiated_ = autoplay_initiated;
+ if (shared_state_.renderer)
+ shared_state_.renderer->SetAutoplayInitiated(autoplay_initiated_);
+}
+
base::TimeDelta PipelineImpl::RendererWrapper::GetMediaTime() const {
DCHECK(main_task_runner_->BelongsToCurrentThread());
@@ -563,8 +580,11 @@ void PipelineImpl::RendererWrapper::CreateRendererInternal(
<< "CDM should be available now if has encrypted stream";
base::Optional<RendererFactoryType> factory_type;
+
+#if defined(OS_WIN)
if (cdm_context_ && cdm_context_->RequiresMediaFoundationRenderer())
factory_type = RendererFactoryType::kMediaFoundation;
+#endif // defined(OS_WIN)
// TODO(xhwang): During Resume(), the |default_renderer_| might already match
// the |factory_type|, in which case we shouldn't need to create a new one.
@@ -761,7 +781,7 @@ void PipelineImpl::RendererWrapper::OnStatisticsUpdate(
shared_state_.statistics.audio_memory_usage += stats.audio_memory_usage;
shared_state_.statistics.video_memory_usage += stats.video_memory_usage;
- if (!stats.audio_decoder_info.decoder_name.empty() &&
+ if (stats.audio_decoder_info.decoder_type != AudioDecoderType::kUnknown &&
stats.audio_decoder_info != shared_state_.statistics.audio_decoder_info) {
shared_state_.statistics.audio_decoder_info = stats.audio_decoder_info;
main_task_runner_->PostTask(
@@ -769,7 +789,7 @@ void PipelineImpl::RendererWrapper::OnStatisticsUpdate(
weak_pipeline_, stats.audio_decoder_info));
}
- if (!stats.video_decoder_info.decoder_name.empty() &&
+ if (stats.video_decoder_info.decoder_type != VideoDecoderType::kUnknown &&
stats.video_decoder_info != shared_state_.statistics.video_decoder_info) {
shared_state_.statistics.video_decoder_info = stats.video_decoder_info;
main_task_runner_->PostTask(
@@ -1385,6 +1405,15 @@ void PipelineImpl::SetPreservesPitch(bool preserves_pitch) {
preserves_pitch));
}
+void PipelineImpl::SetAutoplayInitiated(bool autoplay_initiated) {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ media_task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&RendererWrapper::SetAutoplayInitiated,
+ base::Unretained(renderer_wrapper_.get()),
+ autoplay_initiated));
+}
+
base::TimeDelta PipelineImpl::GetMediaTime() const {
DCHECK(thread_checker_.CalledOnValidThread());
@@ -1607,7 +1636,7 @@ void PipelineImpl::OnVideoAverageKeyframeDistanceUpdate() {
client_->OnVideoAverageKeyframeDistanceUpdate();
}
-void PipelineImpl::OnAudioDecoderChange(const PipelineDecoderInfo& info) {
+void PipelineImpl::OnAudioDecoderChange(const AudioDecoderInfo& info) {
DVLOG(2) << __func__ << ": info=" << info;
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(IsRunning());
@@ -1616,7 +1645,7 @@ void PipelineImpl::OnAudioDecoderChange(const PipelineDecoderInfo& info) {
client_->OnAudioDecoderChange(info);
}
-void PipelineImpl::OnVideoDecoderChange(const PipelineDecoderInfo& info) {
+void PipelineImpl::OnVideoDecoderChange(const VideoDecoderInfo& info) {
DVLOG(2) << __func__ << ": info=" << info;
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(IsRunning());
diff --git a/chromium/media/base/pipeline_impl.h b/chromium/media/base/pipeline_impl.h
index d9444600f6f..beb88adb2b6 100644
--- a/chromium/media/base/pipeline_impl.h
+++ b/chromium/media/base/pipeline_impl.h
@@ -105,6 +105,7 @@ class MEDIA_EXPORT PipelineImpl : public Pipeline {
void SetVolume(float volume) override;
void SetLatencyHint(base::Optional<base::TimeDelta> latency_hint) override;
void SetPreservesPitch(bool preserves_pitch) override;
+ void SetAutoplayInitiated(bool autoplay_initiated) override;
base::TimeDelta GetMediaTime() const override;
Ranges<base::TimeDelta> GetBufferedTimeRanges() const override;
base::TimeDelta GetMediaDuration() const override;
@@ -161,8 +162,8 @@ class MEDIA_EXPORT PipelineImpl : public Pipeline {
void OnVideoNaturalSizeChange(const gfx::Size& size);
void OnVideoOpacityChange(bool opaque);
void OnVideoAverageKeyframeDistanceUpdate();
- void OnAudioDecoderChange(const PipelineDecoderInfo& info);
- void OnVideoDecoderChange(const PipelineDecoderInfo& info);
+ void OnAudioDecoderChange(const AudioDecoderInfo& info);
+ void OnVideoDecoderChange(const VideoDecoderInfo& info);
void OnRemotePlayStateChange(MediaStatus::State state);
void OnVideoFrameRateChange(base::Optional<int> fps);
diff --git a/chromium/media/base/pipeline_impl_unittest.cc b/chromium/media/base/pipeline_impl_unittest.cc
index cb31db062f6..c3f411582b8 100644
--- a/chromium/media/base/pipeline_impl_unittest.cc
+++ b/chromium/media/base/pipeline_impl_unittest.cc
@@ -724,14 +724,14 @@ TEST_F(PipelineImplTest, OnStatisticsUpdate) {
StartPipelineAndExpect(PIPELINE_OK);
PipelineStatistics stats;
- stats.audio_decoder_info.decoder_name = "TestAudioDecoderName";
+ stats.audio_decoder_info.decoder_type = AudioDecoderType::kMojo;
stats.audio_decoder_info.is_platform_decoder = false;
EXPECT_CALL(callbacks_, OnAudioDecoderChange(_));
renderer_client_->OnStatisticsUpdate(stats);
base::RunLoop().RunUntilIdle();
// VideoDecoderInfo changed and we expect OnVideoDecoderChange() to be called.
- stats.video_decoder_info.decoder_name = "TestVideoDecoderName";
+ stats.video_decoder_info.decoder_type = VideoDecoderType::kMojo;
stats.video_decoder_info.is_platform_decoder = true;
EXPECT_CALL(callbacks_, OnVideoDecoderChange(_));
renderer_client_->OnStatisticsUpdate(stats);
@@ -749,7 +749,7 @@ TEST_F(PipelineImplTest, OnStatisticsUpdate) {
base::RunLoop().RunUntilIdle();
// Both info changed.
- stats.audio_decoder_info.decoder_name = "NewTestAudioDecoderName";
+ stats.audio_decoder_info.decoder_type = AudioDecoderType::kFFmpeg;
stats.video_decoder_info.has_decrypting_demuxer_stream = true;
EXPECT_CALL(callbacks_, OnAudioDecoderChange(_));
EXPECT_CALL(callbacks_, OnVideoDecoderChange(_));
diff --git a/chromium/media/base/pipeline_status.cc b/chromium/media/base/pipeline_status.cc
index 17e47f1e48e..ab7dfbee853 100644
--- a/chromium/media/base/pipeline_status.cc
+++ b/chromium/media/base/pipeline_status.cc
@@ -8,6 +8,96 @@
namespace media {
+base::Optional<PipelineStatus> StatusCodeToPipelineStatus(StatusCode status) {
+ switch (status) {
+ case StatusCode::kOk:
+ return PIPELINE_OK;
+ case StatusCode::kPipelineErrorNetwork:
+ return PIPELINE_ERROR_NETWORK;
+ case StatusCode::kPipelineErrorDecode:
+ return PIPELINE_ERROR_DECODE;
+ case StatusCode::kPipelineErrorAbort:
+ return PIPELINE_ERROR_ABORT;
+ case StatusCode::kPipelineErrorInitializationFailed:
+ return PIPELINE_ERROR_INITIALIZATION_FAILED;
+ case StatusCode::kPipelineErrorCouldNotRender:
+ return PIPELINE_ERROR_COULD_NOT_RENDER;
+ case StatusCode::kPipelineErrorRead:
+ return PIPELINE_ERROR_READ;
+ case StatusCode::kPipelineErrorInvalidState:
+ return PIPELINE_ERROR_INVALID_STATE;
+ case StatusCode::kPipelineErrorDemuxerErrorCouldNotOpen:
+ return DEMUXER_ERROR_COULD_NOT_OPEN;
+ case StatusCode::kPipelineErrorDemuxerErrorCouldNotParse:
+ return DEMUXER_ERROR_COULD_NOT_PARSE;
+ case StatusCode::kPipelineErrorDemuxerErrorNoSupportedStreams:
+ return DEMUXER_ERROR_NO_SUPPORTED_STREAMS;
+ case StatusCode::kPipelineErrorDecoderErrorNotSupported:
+ return DECODER_ERROR_NOT_SUPPORTED;
+ case StatusCode::kPipelineErrorChuckDemuxerErrorAppendFailed:
+ return CHUNK_DEMUXER_ERROR_APPEND_FAILED;
+ case StatusCode::kPipelineErrorChunkDemuxerErrorEosStatusDecodeError:
+ return CHUNK_DEMUXER_ERROR_EOS_STATUS_DECODE_ERROR;
+ case StatusCode::kPipelineErrorChunkDemuxerErrorEosStatusNetworkError:
+ return CHUNK_DEMUXER_ERROR_EOS_STATUS_NETWORK_ERROR;
+ case StatusCode::kPipelineErrorAudioRendererError:
+ return AUDIO_RENDERER_ERROR;
+ case StatusCode::kPipelineErrorExternalRendererFailed:
+ return PIPELINE_ERROR_EXTERNAL_RENDERER_FAILED;
+ case StatusCode::kPipelineErrorDemuxerErrorDetectedHLS:
+ return DEMUXER_ERROR_DETECTED_HLS;
+ default:
+ NOTREACHED();
+ return base::nullopt;
+ }
+}
+
+StatusCode PipelineStatusToStatusCode(PipelineStatus status) {
+ switch (status) {
+ case PIPELINE_OK:
+ return StatusCode::kOk;
+ case PIPELINE_ERROR_NETWORK:
+ return StatusCode::kPipelineErrorNetwork;
+ case PIPELINE_ERROR_DECODE:
+ return StatusCode::kPipelineErrorDecode;
+ case PIPELINE_ERROR_ABORT:
+ return StatusCode::kPipelineErrorAbort;
+ case PIPELINE_ERROR_INITIALIZATION_FAILED:
+ return StatusCode::kPipelineErrorInitializationFailed;
+ case PIPELINE_ERROR_COULD_NOT_RENDER:
+ return StatusCode::kPipelineErrorCouldNotRender;
+ case PIPELINE_ERROR_READ:
+ return StatusCode::kPipelineErrorRead;
+ case PIPELINE_ERROR_INVALID_STATE:
+ return StatusCode::kPipelineErrorInvalidState;
+ case DEMUXER_ERROR_COULD_NOT_OPEN:
+ return StatusCode::kPipelineErrorDemuxerErrorCouldNotOpen;
+ case DEMUXER_ERROR_COULD_NOT_PARSE:
+ return StatusCode::kPipelineErrorDemuxerErrorCouldNotParse;
+ case DEMUXER_ERROR_NO_SUPPORTED_STREAMS:
+ return StatusCode::kPipelineErrorDemuxerErrorNoSupportedStreams;
+ case DECODER_ERROR_NOT_SUPPORTED:
+ return StatusCode::kPipelineErrorDecoderErrorNotSupported;
+ case CHUNK_DEMUXER_ERROR_APPEND_FAILED:
+ return StatusCode::kPipelineErrorChuckDemuxerErrorAppendFailed;
+ case CHUNK_DEMUXER_ERROR_EOS_STATUS_DECODE_ERROR:
+ return StatusCode::kPipelineErrorChunkDemuxerErrorEosStatusDecodeError;
+ case CHUNK_DEMUXER_ERROR_EOS_STATUS_NETWORK_ERROR:
+ return StatusCode::kPipelineErrorChunkDemuxerErrorEosStatusNetworkError;
+ case AUDIO_RENDERER_ERROR:
+ return StatusCode::kPipelineErrorAudioRendererError;
+ case PIPELINE_ERROR_EXTERNAL_RENDERER_FAILED:
+ return StatusCode::kPipelineErrorExternalRendererFailed;
+ case DEMUXER_ERROR_DETECTED_HLS:
+ return StatusCode::kPipelineErrorDemuxerErrorDetectedHLS;
+ }
+
+ NOTREACHED();
+ // TODO(crbug.com/1153465): Log pipeline status that failed to convert.
+ // Return a generic decode error.
+ return StatusCode::kPipelineErrorDecode;
+}
+
std::string PipelineStatusToString(PipelineStatus status) {
#define STRINGIFY_STATUS_CASE(status) \
case status: \
@@ -72,24 +162,4 @@ bool operator!=(const PipelineStatistics& first,
return !(first == second);
}
-bool operator==(const PipelineDecoderInfo& first,
- const PipelineDecoderInfo& second) {
- return first.decoder_name == second.decoder_name &&
- first.is_platform_decoder == second.is_platform_decoder &&
- first.has_decrypting_demuxer_stream ==
- second.has_decrypting_demuxer_stream;
-}
-
-bool operator!=(const PipelineDecoderInfo& first,
- const PipelineDecoderInfo& second) {
- return !(first == second);
-}
-
-std::ostream& operator<<(std::ostream& out, const PipelineDecoderInfo& info) {
- return out << "{decoder_name:" << info.decoder_name << ","
- << "is_platform_decoder:" << info.is_platform_decoder << ","
- << "has_decrypting_demuxer_stream:"
- << info.has_decrypting_demuxer_stream << "}";
-}
-
} // namespace media
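The two converters above are intentionally asymmetric: every PipelineStatus maps to a StatusCode, but only pipeline-related StatusCodes map back. A small illustrative round trip:

#include "base/check.h"
#include "base/optional.h"
#include "media/base/pipeline_status.h"

void Example() {
  media::PipelineStatus ps = media::PIPELINE_ERROR_DECODE;
  media::StatusCode code = media::PipelineStatusToStatusCode(ps);
  // The reverse mapping is partial, hence the base::Optional return type;
  // non-pipeline codes yield base::nullopt (after a NOTREACHED()).
  base::Optional<media::PipelineStatus> back =
      media::StatusCodeToPipelineStatus(code);
  DCHECK(back.has_value() && *back == ps);
}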
diff --git a/chromium/media/base/pipeline_status.h b/chromium/media/base/pipeline_status.h
index 0fc1a6b78ba..a9e448040de 100644
--- a/chromium/media/base/pipeline_status.h
+++ b/chromium/media/base/pipeline_status.h
@@ -10,8 +10,11 @@
#include <string>
#include "base/callback.h"
+#include "base/optional.h"
#include "base/time/time.h"
+#include "media/base/decoder.h"
#include "media/base/media_export.h"
+#include "media/base/status.h"
#include "media/base/timestamp_constants.h"
namespace media {
@@ -59,6 +62,10 @@ enum PipelineStatus {
PIPELINE_STATUS_MAX = DEMUXER_ERROR_DETECTED_HLS,
};
+MEDIA_EXPORT base::Optional<PipelineStatus> StatusCodeToPipelineStatus(
+ StatusCode status);
+MEDIA_EXPORT StatusCode PipelineStatusToStatusCode(PipelineStatus status);
+
// Returns a string version of the status, unique to each PipelineStatus, and
// not including any ':'. This makes it suitable for usage in
// MediaError.message as the UA-specific-error-code.
@@ -71,18 +78,44 @@ MEDIA_EXPORT std::ostream& operator<<(std::ostream& out, PipelineStatus status);
using PipelineStatusCB = base::RepeatingCallback<void(PipelineStatus)>;
using PipelineStatusCallback = base::OnceCallback<void(PipelineStatus)>;
+template <typename DecoderTypeId>
struct PipelineDecoderInfo {
bool is_platform_decoder = false;
bool has_decrypting_demuxer_stream = false;
- std::string decoder_name;
+ DecoderTypeId decoder_type = DecoderTypeId::kUnknown;
};
-MEDIA_EXPORT bool operator==(const PipelineDecoderInfo& first,
- const PipelineDecoderInfo& second);
-MEDIA_EXPORT bool operator!=(const PipelineDecoderInfo& first,
- const PipelineDecoderInfo& second);
-MEDIA_EXPORT std::ostream& operator<<(std::ostream& out,
- const PipelineDecoderInfo& info);
+using AudioDecoderInfo = PipelineDecoderInfo<AudioDecoderType>;
+using VideoDecoderInfo = PipelineDecoderInfo<VideoDecoderType>;
+
+template <typename DecoderTypeId>
+MEDIA_EXPORT inline bool operator==(
+ const PipelineDecoderInfo<DecoderTypeId>& first,
+ const PipelineDecoderInfo<DecoderTypeId>& second) {
+ return first.decoder_type == second.decoder_type &&
+ first.is_platform_decoder == second.is_platform_decoder &&
+ first.has_decrypting_demuxer_stream ==
+ second.has_decrypting_demuxer_stream;
+}
+
+template <typename DecoderTypeId>
+MEDIA_EXPORT inline bool operator!=(
+ const PipelineDecoderInfo<DecoderTypeId>& first,
+ const PipelineDecoderInfo<DecoderTypeId>& second) {
+ return !(first == second);
+}
+
+template <typename DecoderTypeId>
+MEDIA_EXPORT inline std::ostream& operator<<(
+ std::ostream& out,
+ const PipelineDecoderInfo<DecoderTypeId>& info) {
+ // TODO: Add a converter so the decoder type name is printed instead of the
+ // raw enum value.
+ return out << "{decoder_type:" << static_cast<int64_t>(info.decoder_type)
+ << ","
+ << "is_platform_decoder:" << info.is_platform_decoder << ","
+ << "has_decrypting_demuxer_stream:"
+ << info.has_decrypting_demuxer_stream << "}";
+}
struct MEDIA_EXPORT PipelineStatistics {
PipelineStatistics();
@@ -105,8 +138,8 @@ struct MEDIA_EXPORT PipelineStatistics {
// Note: Keep these fields at the end of the structure, if you move them you
// need to also update the test ProtoUtilsTest::PipelineStatisticsConversion.
- PipelineDecoderInfo audio_decoder_info;
- PipelineDecoderInfo video_decoder_info;
+ AudioDecoderInfo audio_decoder_info;
+ VideoDecoderInfo video_decoder_info;
// NOTE: always update operator== implementation in pipeline_status.cc when
// adding a field to this struct. Leave this comment at the end.
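A short sketch of how the templated struct and its aliases are used after this change; the enum values are the ones referenced elsewhere in this patch:

#include "base/check.h"
#include "media/base/pipeline_status.h"

void Example() {
  media::AudioDecoderInfo audio_info;
  audio_info.decoder_type = media::AudioDecoderType::kFFmpeg;
  audio_info.is_platform_decoder = false;

  media::VideoDecoderInfo video_info;
  video_info.decoder_type = media::VideoDecoderType::kMojo;

  // operator==/!=/<< are instantiated per DecoderTypeId by the templates
  // above; a default-constructed info compares unequal once the type is set.
  DCHECK(video_info != media::VideoDecoderInfo());
}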
diff --git a/chromium/media/base/renderer.cc b/chromium/media/base/renderer.cc
index c1970f95e89..08c4acaa94e 100644
--- a/chromium/media/base/renderer.cc
+++ b/chromium/media/base/renderer.cc
@@ -34,4 +34,8 @@ void Renderer::SetPreservesPitch(bool preserves_pitch) {
// Not supported by most renderers.
}
+void Renderer::SetAutoplayInitiated(bool autoplay_initiated) {
+ // Not supported by most renderers.
+}
+
} // namespace media
diff --git a/chromium/media/base/renderer.h b/chromium/media/base/renderer.h
index b6e1a73373b..9244948c287 100644
--- a/chromium/media/base/renderer.h
+++ b/chromium/media/base/renderer.h
@@ -55,6 +55,9 @@ class MEDIA_EXPORT Renderer {
// different than 1.0.
virtual void SetPreservesPitch(bool preserves_pitch);
+ // Sets a flag indicating whether the audio stream was initiated by autoplay.
+ virtual void SetAutoplayInitiated(bool autoplay_initiated);
+
// The following functions must be called after Initialize().
// Discards any buffered data, executing |flush_cb| when completed.
diff --git a/chromium/media/base/renderer_factory_selector.cc b/chromium/media/base/renderer_factory_selector.cc
index 3695c447d9c..0e912d86a9b 100644
--- a/chromium/media/base/renderer_factory_selector.cc
+++ b/chromium/media/base/renderer_factory_selector.cc
@@ -40,6 +40,7 @@ void RendererFactorySelector::AddFactory(
std::unique_ptr<RendererFactory> factory) {
DCHECK(factory);
DCHECK(!factories_.count(type));
+ DVLOG(2) << __func__ << ": type=" << static_cast<int>(type);
factories_[type] = std::move(factory);
}
diff --git a/chromium/media/base/sample_rates.cc b/chromium/media/base/sample_rates.cc
index 6af08622e9f..9d55ba747e8 100644
--- a/chromium/media/base/sample_rates.cc
+++ b/chromium/media/base/sample_rates.cc
@@ -50,6 +50,9 @@ bool ToAudioSampleRate(int sample_rate, AudioSampleRate* asr) {
case 384000:
*asr = k384000Hz;
return true;
+ case 768000:
+ *asr = k768000Hz;
+ return true;
}
return false;
}
diff --git a/chromium/media/base/sample_rates.h b/chromium/media/base/sample_rates.h
index 165b3911387..edba90ed2a6 100644
--- a/chromium/media/base/sample_rates.h
+++ b/chromium/media/base/sample_rates.h
@@ -25,6 +25,7 @@ enum AudioSampleRate {
k192000Hz = 10,
k24000Hz = 11,
k384000Hz = 12,
+ k768000Hz = 13,
// Must always equal the largest value ever reported:
kAudioSampleRateMax = k384000Hz,
};
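A one-liner showing the ToAudioSampleRate() lookup extended above, including the newly added rate; this is illustrative only:

media::AudioSampleRate asr;
// Returns true for rates present in the switch above, now including 768 kHz.
if (media::ToAudioSampleRate(768000, &asr)) {
  // |asr| == media::k768000Hz; suitable for UMA reporting.
}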
diff --git a/chromium/media/base/shared_memory_pool.cc b/chromium/media/base/shared_memory_pool.cc
new file mode 100644
index 00000000000..acbca840f9d
--- /dev/null
+++ b/chromium/media/base/shared_memory_pool.cc
@@ -0,0 +1,109 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/shared_memory_pool.h"
+
+#include "base/logging.h"
+
+namespace {
+constexpr size_t kMaxStoredBuffers = 32;
+} // namespace
+
+namespace media {
+
+SharedMemoryPool::SharedMemoryPool() = default;
+
+SharedMemoryPool::~SharedMemoryPool() = default;
+
+SharedMemoryPool::SharedMemoryHandle::SharedMemoryHandle(
+ base::UnsafeSharedMemoryRegion region,
+ base::WritableSharedMemoryMapping mapping,
+ scoped_refptr<SharedMemoryPool> pool)
+ : region_(std::move(region)),
+ mapping_(std::move(mapping)),
+ pool_(std::move(pool)) {
+ CHECK(pool_);
+ DCHECK(region_.IsValid());
+ DCHECK(mapping_.IsValid());
+}
+
+SharedMemoryPool::SharedMemoryHandle::~SharedMemoryHandle() {
+ pool_->ReleaseBuffer(std::move(region_), std::move(mapping_));
+}
+
+base::UnsafeSharedMemoryRegion*
+SharedMemoryPool::SharedMemoryHandle::GetRegion() {
+ return &region_;
+}
+
+base::WritableSharedMemoryMapping*
+SharedMemoryPool::SharedMemoryHandle::GetMapping() {
+ return &mapping_;
+}
+
+std::unique_ptr<SharedMemoryPool::SharedMemoryHandle>
+SharedMemoryPool::MaybeAllocateBuffer(size_t region_size) {
+ base::AutoLock lock(lock_);
+
+ DCHECK_GE(region_size, 0u);
+ if (is_shutdown_)
+ return nullptr;
+
+ // Only change the configured size if a bigger region is requested, to avoid
+ // unnecessary reallocations.
+ if (region_size > region_size_) {
+ mappings_.clear();
+ regions_.clear();
+ region_size_ = region_size;
+ }
+ if (!regions_.empty()) {
+ DCHECK_EQ(mappings_.size(), regions_.size());
+ DCHECK_GE(regions_.back().GetSize(), region_size_);
+ auto handle = std::make_unique<SharedMemoryHandle>(
+ std::move(regions_.back()), std::move(mappings_.back()), this);
+ regions_.pop_back();
+ mappings_.pop_back();
+ return handle;
+ }
+
+ auto region = base::UnsafeSharedMemoryRegion::Create(region_size_);
+ if (!region.IsValid())
+ return nullptr;
+
+ base::WritableSharedMemoryMapping mapping = region.Map();
+ if (!mapping.IsValid())
+ return nullptr;
+
+ return std::make_unique<SharedMemoryHandle>(std::move(region),
+ std::move(mapping), this);
+}
+
+void SharedMemoryPool::Shutdown() {
+ base::AutoLock lock(lock_);
+ DCHECK(!is_shutdown_);
+ is_shutdown_ = true;
+ mappings_.clear();
+ regions_.clear();
+}
+
+void SharedMemoryPool::ReleaseBuffer(
+ base::UnsafeSharedMemoryRegion region,
+ base::WritableSharedMemoryMapping mapping) {
+ base::AutoLock lock(lock_);
+ // Only keep regions that are at least as big as the currently configured
+ // size.
+ if (is_shutdown_ || regions_.size() >= kMaxStoredBuffers ||
+ !region.IsValid() || region.GetSize() < region_size_) {
+ DLOG(WARNING) << "Not returning SharedMemoryRegion to the pool:"
+ << " is_shutdown: " << (is_shutdown_ ? "true" : "false")
+ << " stored regions: " << regions_.size()
+ << " configured size: " << region_size_
+ << " this region size: " << region.GetSize()
+ << " valid: " << (region.IsValid() ? "true" : "false");
+ return;
+ }
+ regions_.emplace_back(std::move(region));
+ mappings_.emplace_back(std::move(mapping));
+}
+
+} // namespace media
diff --git a/chromium/media/base/shared_memory_pool.h b/chromium/media/base/shared_memory_pool.h
new file mode 100644
index 00000000000..4de5d710230
--- /dev/null
+++ b/chromium/media/base/shared_memory_pool.h
@@ -0,0 +1,77 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_SHARED_MEMORY_POOL_H_
+#define MEDIA_BASE_SHARED_MEMORY_POOL_H_
+
+#include <memory>
+#include <vector>
+
+#include "base/memory/ref_counted.h"
+#include "base/memory/unsafe_shared_memory_region.h"
+#include "base/synchronization/lock.h"
+#include "media/base/media_export.h"
+
+namespace media {
+
+// SharedMemoryPool manages allocation and pooling of UnsafeSharedMemoryRegions.
+// It is thread-safe.
+// May return bigger regions than requested.
+// If a requested size is increased, all stored regions are purged.
+// Regions are returned to the pool on destruction of |SharedMemoryHandle| if
+// they are of the correct size.
+class MEDIA_EXPORT SharedMemoryPool
+ : public base::RefCountedThreadSafe<SharedMemoryPool> {
+ public:
+ // Used to store the allocation result.
+ // This class returns memory to the pool upon destruction.
+ class MEDIA_EXPORT SharedMemoryHandle {
+ public:
+ SharedMemoryHandle(base::UnsafeSharedMemoryRegion region,
+ base::WritableSharedMemoryMapping mapping,
+ scoped_refptr<SharedMemoryPool> pool);
+ ~SharedMemoryHandle();
+ // Disallow copy and assign.
+ SharedMemoryHandle(const SharedMemoryHandle&) = delete;
+ SharedMemoryHandle& operator=(const SharedMemoryHandle&) = delete;
+
+ base::UnsafeSharedMemoryRegion* GetRegion();
+
+ base::WritableSharedMemoryMapping* GetMapping();
+
+ private:
+ base::UnsafeSharedMemoryRegion region_;
+ base::WritableSharedMemoryMapping mapping_;
+ scoped_refptr<SharedMemoryPool> pool_;
+ };
+
+ SharedMemoryPool();
+ // Disallow copy and assign.
+ SharedMemoryPool(const SharedMemoryPool&) = delete;
+ SharedMemoryPool& operator=(const SharedMemoryPool&) = delete;
+
+ // Allocates a region of the given |size| or reuses a previous allocation if
+ // possible.
+ std::unique_ptr<SharedMemoryHandle> MaybeAllocateBuffer(size_t size);
+
+ // Shuts down the pool, freeing all currently unused allocations and freeing
+ // outstanding ones as they are returned.
+ void Shutdown();
+
+ private:
+ friend class base::RefCountedThreadSafe<SharedMemoryPool>;
+ ~SharedMemoryPool();
+
+ void ReleaseBuffer(base::UnsafeSharedMemoryRegion region,
+ base::WritableSharedMemoryMapping mapping);
+
+ base::Lock lock_;
+ size_t region_size_ GUARDED_BY(lock_) = 0u;
+ std::vector<base::UnsafeSharedMemoryRegion> regions_ GUARDED_BY(lock_);
+ std::vector<base::WritableSharedMemoryMapping> mappings_ GUARDED_BY(lock_);
+ bool is_shutdown_ GUARDED_BY(lock_) = false;
+};
+
+} // namespace media
+
+#endif // MEDIA_BASE_SHARED_MEMORY_POOL_H_
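A hedged usage sketch for the pool API above; sizes and names are illustrative:

#include "media/base/shared_memory_pool.h"

void Example() {
  auto pool = base::MakeRefCounted<media::SharedMemoryPool>();
  // May hand back a recycled region of at least 64 KiB; returns nullptr on
  // allocation failure or after Shutdown().
  std::unique_ptr<media::SharedMemoryPool::SharedMemoryHandle> handle =
      pool->MaybeAllocateBuffer(64 * 1024);
  if (!handle)
    return;
  void* data = handle->GetMapping()->memory();
  // ... fill |data| with captured frames ...
  // Destroying |handle| returns the region to |pool| for reuse.
  handle.reset();
}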
diff --git a/chromium/media/base/shared_memory_pool_unittest.cc b/chromium/media/base/shared_memory_pool_unittest.cc
new file mode 100644
index 00000000000..2fb9f951621
--- /dev/null
+++ b/chromium/media/base/shared_memory_pool_unittest.cc
@@ -0,0 +1,57 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/shared_memory_pool.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+
+TEST(SharedMemoryPoolTest, CreatesRegion) {
+ scoped_refptr<SharedMemoryPool> pool(
+ base::MakeRefCounted<SharedMemoryPool>());
+ auto handle = pool->MaybeAllocateBuffer(1000);
+ ASSERT_TRUE(handle);
+ ASSERT_TRUE(handle->GetRegion());
+ EXPECT_TRUE(handle->GetRegion()->IsValid());
+ ASSERT_TRUE(handle->GetMapping());
+ EXPECT_TRUE(handle->GetMapping()->IsValid());
+}
+
+TEST(SharedMemoryPoolTest, ReusesRegions) {
+ scoped_refptr<SharedMemoryPool> pool(
+ base::MakeRefCounted<SharedMemoryPool>());
+ auto handle = pool->MaybeAllocateBuffer(1000u);
+ ASSERT_TRUE(handle);
+ base::UnsafeSharedMemoryRegion* region = handle->GetRegion();
+ auto id1 = region->GetGUID();
+
+ // Return memory to the pool.
+ handle.reset();
+
+ handle = pool->MaybeAllocateBuffer(1000u);
+ region = handle->GetRegion();
+ // Should reuse the freed region.
+ EXPECT_EQ(id1, region->GetGUID());
+}
+
+TEST(SharedMemoryPoolTest, RespectsSize) {
+ scoped_refptr<SharedMemoryPool> pool(
+ base::MakeRefCounted<SharedMemoryPool>());
+ auto handle = pool->MaybeAllocateBuffer(1000u);
+ ASSERT_TRUE(handle);
+ ASSERT_TRUE(handle->GetRegion());
+ EXPECT_GE(handle->GetRegion()->GetSize(), 1000u);
+
+ handle = pool->MaybeAllocateBuffer(100u);
+ ASSERT_TRUE(handle);
+ ASSERT_TRUE(handle->GetRegion());
+ EXPECT_GE(handle->GetRegion()->GetSize(), 100u);
+
+ handle = pool->MaybeAllocateBuffer(1100u);
+ ASSERT_TRUE(handle);
+ ASSERT_TRUE(handle->GetRegion());
+ EXPECT_GE(handle->GetRegion()->GetSize(), 1100u);
+}
+} // namespace media
diff --git a/chromium/media/base/silent_sink_suspender.h b/chromium/media/base/silent_sink_suspender.h
index 52fffadbfe4..40fc611c402 100644
--- a/chromium/media/base/silent_sink_suspender.h
+++ b/chromium/media/base/silent_sink_suspender.h
@@ -114,7 +114,7 @@ class MEDIA_EXPORT SilentSinkSuspender
// A cancelable task that is posted to switch to or from the |fake_sink_|
// after a period of silence or first non-silent audio respective. We do this
// on Android to save battery consumption.
- base::CancelableCallback<void(bool)> sink_transition_callback_;
+ base::CancelableRepeatingCallback<void(bool)> sink_transition_callback_;
// Audio output delay at the moment when transition to |fake_sink_| starts.
base::TimeDelta latest_output_delay_;
diff --git a/chromium/media/base/sinc_resampler.cc b/chromium/media/base/sinc_resampler.cc
index 79b5e2fbe12..e6ff66411ff 100644
--- a/chromium/media/base/sinc_resampler.cc
+++ b/chromium/media/base/sinc_resampler.cc
@@ -79,6 +79,7 @@
#include "base/check_op.h"
#include "base/numerics/math_constants.h"
+#include "base/trace_event/trace_event.h"
#include "build/build_config.h"
#include "cc/base/math_util.h"
@@ -228,6 +229,8 @@ void SincResampler::SetRatio(double io_sample_rate_ratio) {
}
void SincResampler::Resample(int frames, float* destination) {
+ TRACE_EVENT1(TRACE_DISABLED_BY_DEFAULT("audio"), "SincResampler::Resample",
+ "io sample rate ratio", io_sample_rate_ratio_);
int remaining_frames = frames;
// Step (1) -- Prime the input buffer at the start of the input stream.
diff --git a/chromium/media/base/status.h b/chromium/media/base/status.h
index ef49cc32781..8a7b4e0b9c7 100644
--- a/chromium/media/base/status.h
+++ b/chromium/media/base/status.h
@@ -166,8 +166,8 @@ MEDIA_EXPORT Status OkStatus();
// }
//
// auto result = FactoryFn();
-// if (result.has_error()) return std::move(result.error());
-// my_object_ = std::move(result.value());
+// if (result.has_error()) return std::move(result).error();
+// my_object_ = std::move(result).value();
//
// Can also be combined into a single switch using `code()`:
//
@@ -178,7 +178,7 @@ MEDIA_EXPORT Status OkStatus();
// break;
// // Maybe switch on specific non-kOk codes for special processing.
// default: // Send unknown errors upwards.
-// return std::move(result.error());
+// return std::move(result).error();
// }
//
// Also useful if one would like to get an enum class return value, unless an
@@ -189,8 +189,8 @@ MEDIA_EXPORT Status OkStatus();
// StatusOr<ResultType> Foo() { ... }
//
// auto result = Foo();
-// if (result.has_error()) return std::move(result.error());
-// switch (result.value()) {
+// if (result.has_error()) return std::move(result).error();
+// switch (std::move(result).value()) {
// case ResultType::kNeedMoreInput:
// ...
// }
@@ -199,16 +199,16 @@ class StatusOr {
public:
// All of these may be implicit, so that one may just return Status or
// the value in question.
- StatusOr(Status&& error) : error_(std::move(error)) {
- DCHECK(!this->error().is_ok());
+ /* not explicit */ StatusOr(Status&& error) : error_(std::move(error)) {
+ DCHECK_NE(code(), StatusCode::kOk);
}
- StatusOr(const Status& error) : error_(error) {
- DCHECK(!this->error().is_ok());
+ /* not explicit */ StatusOr(const Status& error) : error_(error) {
+ DCHECK_NE(code(), StatusCode::kOk);
}
StatusOr(StatusCode code,
const base::Location& location = base::Location::Current())
: error_(Status(code, "", location)) {
- DCHECK(!error().is_ok());
+ DCHECK_NE(code, StatusCode::kOk);
}
StatusOr(T&& value) : value_(std::move(value)) {}
@@ -225,26 +225,38 @@ class StatusOr {
// Do we have a value?
bool has_value() const { return value_.has_value(); }
- // Since we often test for errors, provide this too.
- bool has_error() const { return !has_value(); }
+ // Do we have an error?
+ bool has_error() const { return error_.has_value(); }
// Return the error, if we have one. Up to the caller to make sure that we
- // have one via |!has_value()|.
- Status& error() { return *error_; }
-
- const Status& error() const { return *error_; }
+ // have one via |has_error()|.
+ // NOTE: once this is called, the StatusOr is defunct and should not be used.
+ Status error() && {
+ CHECK(error_);
+ auto error = std::move(*error_);
+ error_.reset();
+ return error;
+ }
- // Return a ref to the value. It's up to the caller to verify that we have a
- // value before calling this.
- T& value() { return std::get<0>(*value_); }
+ // Return the value. It's up to the caller to verify that we have a value
+ // before calling this. Also, this only works once, after which we will have
+ // an error. Use like this: std::move(status_or).value();
+ // NOTE: once this is called, the StatusOr is defunct and should not be used.
+ T value() && {
+ CHECK(value_);
+ auto value = std::move(std::get<0>(*value_));
+ value_.reset();
+ return value;
+ }
- // Returns the error code we have, if any, or `kOk` if we have a value. If
- // this returns `kOk`, then it is equivalent to has_value().
+ // Returns the error code we have, if any, or `kOk`.
StatusCode code() const {
- return has_error() ? error().code() : StatusCode::kOk;
+ CHECK(error_ || value_);
+ return error_ ? error_->code() : StatusCode::kOk;
}
private:
+ // Optional error.
base::Optional<Status> error_;
// We wrap |T| in a container so that windows COM wrappers work. They
// override operator& and similar, and won't compile in a base::Optional.
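Putting the new move-only accessors together; Thing and MakeThing() are placeholders, not part of this patch:

media::Status UseThing() {
  media::StatusOr<std::unique_ptr<Thing>> result = MakeThing();  // Placeholder.
  if (result.has_error())
    return std::move(result).error();  // Consumes the error; |result| is dead.
  // value() likewise consumes the StatusOr; call it exactly once.
  std::unique_ptr<Thing> thing = std::move(result).value();
  return media::OkStatus();
}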
diff --git a/chromium/media/base/status_codes.h b/chromium/media/base/status_codes.h
index 5c2b7feb3f1..45eee6460fa 100644
--- a/chromium/media/base/status_codes.h
+++ b/chromium/media/base/status_codes.h
@@ -29,6 +29,7 @@ enum class StatusCode : StatusCodeType {
// General errors: 0x00
kAborted = 0x00000001,
+ kInvalidArgument = 0x00000002,
// Decoder Errors: 0x01
kDecoderInitializeNeverCompleted = 0x00000101,
@@ -77,6 +78,22 @@ enum class StatusCode : StatusCodeType {
kCreateVideoProcessorEnumeratorFailed = 0x00000312,
kCreateVideoProcessorFailed = 0x00000313,
kQueryVideoContextFailed = 0x00000314,
+ kAcceleratorFlushFailed = 0x00000315,
+ kTryAgainNotSupported = 0x00000316,
+ kCryptoConfigFailed = 0x00000317,
+ kDecoderBeginFrameFailed = 0x00000318,
+ kReleaseDecoderBufferFailed = 0x00000319,
+ kGetPicParamBufferFailed = 0x00000320,
+ kReleasePicParamBufferFailed = 0x00000321,
+ kGetBitstreamBufferFailed = 0x00000322,
+ kReleaseBitstreamBufferFailed = 0x00000323,
+ kGetSliceControlBufferFailed = 0x00000324,
+ kReleaseSliceControlBufferFailed = 0x00000325,
+ kDecoderEndFrameFailed = 0x00000326,
+ kSubmitDecoderBuffersFailed = 0x00000327,
+ kGetQuantBufferFailed = 0x00000328,
+ kReleaseQuantBufferFailed = 0x00000329,
+ kBitstreamBufferSliceTooBig = 0x00000330,
// MojoDecoder Errors: 0x04
kMojoDecoderNoWrappedDecoder = 0x00000401,
@@ -122,10 +139,50 @@ enum class StatusCode : StatusCodeType {
kVaapiBadImageSize = 0x0000070C,
kVaapiNoTexture = 0x0000070D,
- // Format errors: 0x08
+ // Format Errors: 0x08
kH264ParsingError = 0x00000801,
kH264BufferTooSmall = 0x00000802,
+ // Pipeline Errors: 0x09
+ // Deprecated: kPipelineErrorUrlNotFound = 0x00000901,
+ kPipelineErrorNetwork = 0x00000902,
+ kPipelineErrorDecode = 0x00000903,
+ // Deprecated: kPipelineErrorDecrypt = 0x00000904,
+ kPipelineErrorAbort = 0x00000905,
+ kPipelineErrorInitializationFailed = 0x00000906,
+ // Unused: 0x00000907
+ kPipelineErrorCouldNotRender = 0x00000908,
+ kPipelineErrorRead = 0x00000909,
+ // Deprecated: kPipelineErrorOperationPending = 0x0000090a,
+ kPipelineErrorInvalidState = 0x0000090b,
+ // Demuxer related errors.
+ kPipelineErrorDemuxerErrorCouldNotOpen = 0x0000090c,
+ kPipelineErrorDemuxerErrorCouldNotParse = 0x0000090d,
+ kPipelineErrorDemuxerErrorNoSupportedStreams = 0x0000090e,
+ // Decoder related errors.
+ kPipelineErrorDecoderErrorNotSupported = 0x0000090f,
+ // ChunkDemuxer related errors.
+ kPipelineErrorChuckDemuxerErrorAppendFailed = 0x00000910,
+ kPipelineErrorChunkDemuxerErrorEosStatusDecodeError = 0x00000911,
+ kPipelineErrorChunkDemuxerErrorEosStatusNetworkError = 0x00000912,
+ // Audio rendering errors.
+ kPipelineErrorAudioRendererError = 0x00000913,
+ // Deprecated: kPipelineErrorAudioRendererErrorSpliceFailed = 0x00000914,
+ kPipelineErrorExternalRendererFailed = 0x00000915,
+ // Android only. Used as a signal to fall back to MediaPlayerRenderer, and
+ // thus not exactly an 'error' per se.
+ kPipelineErrorDemuxerErrorDetectedHLS = 0x00000916,
+
+ // Frame operation errors: 0x0A
+ kUnsupportedFrameFormatError = 0x00000A01,
+
+ // DecoderStream errors: 0x0B
+ kDecoderStreamInErrorState = 0x00000B00,
+ kDecoderStreamReinitFailed = 0x00000B01,
+ // This is a temporary error for use while the demuxer doesn't return a
+ // proper status.
+ kDecoderStreamDemuxerError = 0x00000B02,
+
// DecodeStatus temporary codes. These names were chosen to match the
// DecodeStatus enum, so that un-converted code can DecodeStatus::OK/etc.
// Note that OK must result in Status::is_ok(), since converted code will
diff --git a/chromium/media/base/status_unittest.cc b/chromium/media/base/status_unittest.cc
index 0a2f7cc5e21..2da38b2ca90 100644
--- a/chromium/media/base/status_unittest.cc
+++ b/chromium/media/base/status_unittest.cc
@@ -201,56 +201,51 @@ TEST_F(StatusTest, StatusOrTypicalUsage) {
}
TEST_F(StatusTest, StatusOrWithMoveOnlyType) {
- StatusOr<std::unique_ptr<int>> error_or(std::make_unique<int>(123));
- EXPECT_TRUE(error_or.has_value());
- EXPECT_FALSE(error_or.has_error());
- std::unique_ptr<int> result = std::move(error_or.value());
- EXPECT_EQ(error_or.value(), nullptr);
+ StatusOr<std::unique_ptr<int>> status_or(std::make_unique<int>(123));
+ EXPECT_TRUE(status_or.has_value());
+ EXPECT_FALSE(status_or.has_error());
+ std::unique_ptr<int> result = std::move(status_or).value();
EXPECT_NE(result.get(), nullptr);
EXPECT_EQ(*result, 123);
}
TEST_F(StatusTest, StatusOrWithCopyableType) {
- StatusOr<int> error_or(123);
- EXPECT_TRUE(error_or.has_value());
- EXPECT_FALSE(error_or.has_error());
- int result = std::move(error_or.value());
+ StatusOr<int> status_or(123);
+ EXPECT_TRUE(status_or.has_value());
+ EXPECT_FALSE(status_or.has_error());
+ int result = std::move(status_or).value();
EXPECT_EQ(result, 123);
- // Should be unaffected by the move.
- EXPECT_EQ(error_or.value(), 123);
}
TEST_F(StatusTest, StatusOrMoveConstructionAndAssignment) {
// Make sure that we can move-construct and move-assign a move-only value.
- StatusOr<std::unique_ptr<int>> error_or_0(std::make_unique<int>(123));
+ StatusOr<std::unique_ptr<int>> status_or_0(std::make_unique<int>(123));
- StatusOr<std::unique_ptr<int>> error_or_1(std::move(error_or_0));
- EXPECT_EQ(error_or_0.value(), nullptr);
+ StatusOr<std::unique_ptr<int>> status_or_1(std::move(status_or_0));
- StatusOr<std::unique_ptr<int>> error_or_2 = std::move(error_or_1);
- EXPECT_EQ(error_or_1.value(), nullptr);
+ StatusOr<std::unique_ptr<int>> status_or_2 = std::move(status_or_1);
- // |error_or_2| should have gotten the original.
- std::unique_ptr<int> value = std::move(error_or_2.value());
+ // |status_or_2| should have gotten the original.
+ std::unique_ptr<int> value = std::move(status_or_2).value();
EXPECT_EQ(*value, 123);
}
TEST_F(StatusTest, StatusOrCopyWorks) {
// Make sure that we can move-construct and move-assign a copyable value.
- StatusOr<int> error_or_0(123);
- StatusOr<int> error_or_1(std::move(error_or_0));
- StatusOr<int> error_or_2 = std::move(error_or_1);
- EXPECT_EQ(error_or_2.value(), 123);
+ StatusOr<int> status_or_0(123);
+ StatusOr<int> status_or_1(std::move(status_or_0));
+ StatusOr<int> status_or_2 = std::move(status_or_1);
+ EXPECT_EQ(std::move(status_or_2).value(), 123);
}
TEST_F(StatusTest, StatusOrCodeIsOkWithValue) {
- StatusOr<int> error_or(123);
- EXPECT_EQ(error_or.code(), StatusCode::kOk);
+ StatusOr<int> status_or(123);
+ EXPECT_EQ(status_or.code(), StatusCode::kOk);
}
TEST_F(StatusTest, StatusOrCodeIsNotOkWithoutValue) {
- StatusOr<int> error_or(StatusCode::kCodeOnlyForTesting);
- EXPECT_EQ(error_or.code(), StatusCode::kCodeOnlyForTesting);
+ StatusOr<int> status_or(StatusCode::kCodeOnlyForTesting);
+ EXPECT_EQ(status_or.code(), StatusCode::kCodeOnlyForTesting);
}
} // namespace media
diff --git a/chromium/media/base/stream_parser.cc b/chromium/media/base/stream_parser.cc
index ccf06a63557..87041a8e92c 100644
--- a/chromium/media/base/stream_parser.cc
+++ b/chromium/media/base/stream_parser.cc
@@ -19,6 +19,12 @@ StreamParser::StreamParser() = default;
StreamParser::~StreamParser() = default;
+// The default implementation of ProcessChunks() just reports failure; only
+// parsers that accept pre-demuxed chunks override it.
+bool StreamParser::ProcessChunks(std::unique_ptr<BufferQueue> buffer_queue) {
+ NOTIMPLEMENTED(); // Likely the wrong type of parser is being used.
+ return false;
+}
+
static bool MergeBufferQueuesInternal(
const std::vector<const StreamParser::BufferQueue*>& buffer_queues,
StreamParser::BufferQueue* merged_buffers) {
diff --git a/chromium/media/base/stream_parser.h b/chromium/media/base/stream_parser.h
index 3eaf9798975..43d788e0182 100644
--- a/chromium/media/base/stream_parser.h
+++ b/chromium/media/base/stream_parser.h
@@ -140,7 +140,12 @@ class MEDIA_EXPORT StreamParser {
// Called when there is new data to parse.
//
// Returns true if the parse succeeds.
+ //
+ // Regular "bytestream-formatted" StreamParsers should fully implement
+ // Parse(), but WebCodecsEncodedChunkStreamParsers should instead fully
+ // implement ProcessChunks().
virtual bool Parse(const uint8_t* buf, int size) = 0;
+ virtual bool ProcessChunks(std::unique_ptr<BufferQueue> buffer_queue);
private:
DISALLOW_COPY_AND_ASSIGN(StreamParser);
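A hedged sketch of the split described above: a WebCodecs-style parser overrides ProcessChunks() and rejects byte-stream Parse() calls. Names are illustrative and the other pure-virtual StreamParser members are elided:

class EncodedChunkParser : public media::StreamParser {
 public:
  // Byte-stream parsing is intentionally unsupported for this parser type.
  bool Parse(const uint8_t* buf, int size) override {
    NOTIMPLEMENTED();
    return false;
  }
  // Accepts already-demuxed buffers instead.
  bool ProcessChunks(std::unique_ptr<BufferQueue> buffer_queue) override {
    for (const auto& buffer : *buffer_queue) {
      // ... validate |buffer| and hand it to the new-buffers callback ...
    }
    return true;
  }
  // Remaining pure-virtual StreamParser members elided for brevity.
};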
diff --git a/chromium/media/base/supported_types.cc b/chromium/media/base/supported_types.cc
index 05de5ab4464..a50314fe513 100644
--- a/chromium/media/base/supported_types.cc
+++ b/chromium/media/base/supported_types.cc
@@ -4,6 +4,7 @@
#include "media/base/supported_types.h"
+#include "base/command_line.h"
#include "base/feature_list.h"
#include "base/logging.h"
#include "base/no_destructor.h"
@@ -51,6 +52,25 @@ bool IsSupportedHdrMetadata(const gfx::HdrMetadataType& hdr_metadata_type) {
return false;
}
+#if BUILDFLAG(ENABLE_PLATFORM_HEVC) && BUILDFLAG(USE_CHROMEOS_PROTECTED_MEDIA)
+bool IsHevcProfileSupported(VideoCodecProfile profile) {
+ if (!base::CommandLine::ForCurrentProcess()->HasSwitch(
+ switches::kEnableClearHevcForTesting)) {
+ return false;
+ }
+ switch (profile) {
+ case HEVCPROFILE_MAIN: // fallthrough
+ case HEVCPROFILE_MAIN10:
+ return true;
+ case HEVCPROFILE_MAIN_STILL_PICTURE:
+ return false;
+ default:
+ NOTREACHED();
+ }
+ return false;
+}
+#endif // ENABLE_PLATFORM_HEVC && USE_CHROMEOS_PROTECTED_MEDIA
+
} // namespace
bool IsSupportedAudioType(const AudioType& type) {
@@ -243,7 +263,7 @@ bool IsDefaultSupportedAudioType(const AudioType& type) {
case kCodecAMR_NB:
case kCodecAMR_WB:
case kCodecGSM_MS:
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
return true;
#else
return false;
@@ -300,7 +320,8 @@ bool IsDefaultSupportedVideoType(const VideoType& type) {
return IsColorSpaceSupported(type.color_space);
#else
#if defined(OS_ANDROID)
- if (base::android::BuildInfo::GetInstance()->is_at_least_q() &&
+ if (base::android::BuildInfo::GetInstance()->sdk_int() >=
+ base::android::SDK_VERSION_Q &&
IsColorSpaceSupported(type.color_space)) {
return true;
}
@@ -317,15 +338,21 @@ bool IsDefaultSupportedVideoType(const VideoType& type) {
case kCodecTheora:
return true;
+ case kCodecHEVC:
+#if BUILDFLAG(ENABLE_PLATFORM_HEVC) && BUILDFLAG(USE_CHROMEOS_PROTECTED_MEDIA)
+ return IsColorSpaceSupported(type.color_space) &&
+ IsHevcProfileSupported(type.profile);
+#else
+ return false;
+#endif
case kUnknownVideoCodec:
case kCodecVC1:
case kCodecMPEG2:
- case kCodecHEVC:
case kCodecDolbyVision:
return false;
case kCodecMPEG4:
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
return true;
#else
return false;
diff --git a/chromium/media/base/supported_types_unittest.cc b/chromium/media/base/supported_types_unittest.cc
index f7e04b945c7..eddc5b3ccc1 100644
--- a/chromium/media/base/supported_types_unittest.cc
+++ b/chromium/media/base/supported_types_unittest.cc
@@ -20,7 +20,7 @@ const bool kPropCodecsEnabled = true;
const bool kPropCodecsEnabled = false;
#endif
-#if BUILDFLAG(IS_ASH) && BUILDFLAG(USE_PROPRIETARY_CODECS)
+#if BUILDFLAG(IS_CHROMEOS_ASH) && BUILDFLAG(USE_PROPRIETARY_CODECS)
const bool kMpeg4Supported = true;
#else
const bool kMpeg4Supported = false;
@@ -171,8 +171,8 @@ TEST(SupportedTypesTest, IsSupportedVideoType_VP9Profiles) {
// VP9 Profile2 are supported on x86, ChromeOS on ARM and Mac/Win on ARM64.
// See third_party/libvpx/BUILD.gn.
-#if defined(ARCH_CPU_X86_FAMILY) || \
- (defined(ARCH_CPU_ARM_FAMILY) && BUILDFLAG(IS_ASH)) || \
+#if defined(ARCH_CPU_X86_FAMILY) || \
+ (defined(ARCH_CPU_ARM_FAMILY) && BUILDFLAG(IS_CHROMEOS_ASH)) || \
(defined(ARCH_CPU_ARM64) && (defined(OS_MAC) || defined(OS_WIN)))
EXPECT_TRUE(IsSupportedVideoType(
{kCodecVP9, VP9PROFILE_PROFILE2, kUnspecifiedLevel, kColorSpace}));
diff --git a/chromium/media/video/supported_video_decoder_config.cc b/chromium/media/base/supported_video_decoder_config.cc
index b0fbb37a228..b3966073ee6 100644
--- a/chromium/media/video/supported_video_decoder_config.cc
+++ b/chromium/media/base/supported_video_decoder_config.cc
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/video/supported_video_decoder_config.h"
+#include "media/base/supported_video_decoder_config.h"
namespace media {
diff --git a/chromium/media/video/supported_video_decoder_config.h b/chromium/media/base/supported_video_decoder_config.h
index 0bd9342cad7..b0fcd1ce850 100644
--- a/chromium/media/video/supported_video_decoder_config.h
+++ b/chromium/media/base/supported_video_decoder_config.h
@@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef MEDIA_VIDEO_SUPPORTED_VIDEO_DECODER_CONFIG_H_
-#define MEDIA_VIDEO_SUPPORTED_VIDEO_DECODER_CONFIG_H_
+#ifndef MEDIA_BASE_SUPPORTED_VIDEO_DECODER_CONFIG_H_
+#define MEDIA_BASE_SUPPORTED_VIDEO_DECODER_CONFIG_H_
#include <vector>
@@ -16,6 +16,12 @@
namespace media {
+// The min and max resolutions reported by SW decoders (e.g. dav1d, libgav1,
+// libvpx and ffmpeg) when queried about decoding capabilities. For now these
+// match the supported resolutions of HW decoders.
+constexpr gfx::Size kDefaultSwDecodeSizeMin(8, 8);
+constexpr gfx::Size kDefaultSwDecodeSizeMax(8192, 8192);
+
// Specification of a range of configurations that are supported by a video
// decoder. Also provides the ability to check if a VideoDecoderConfig matches
// the supported range.
@@ -73,4 +79,4 @@ MEDIA_EXPORT bool IsVideoDecoderConfigSupported(
} // namespace media
-#endif // MEDIA_VIDEO_SUPPORTED_VIDEO_DECODER_CONFIG_H_
+#endif // MEDIA_BASE_SUPPORTED_VIDEO_DECODER_CONFIG_H_
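
A short usage sketch for the relocated header (illustrative; the constructor argument order follows the fields exercised by the new unit test below):

    #include "media/base/supported_video_decoder_config.h"

    // Describe an H.264 software decoder that accepts the default SW size
    // range and clear (unencrypted) content only.
    media::SupportedVideoDecoderConfig MakeSwH264Range() {
      return media::SupportedVideoDecoderConfig(
          media::H264PROFILE_MIN, media::H264PROFILE_MAX,
          media::kDefaultSwDecodeSizeMin, media::kDefaultSwDecodeSizeMax,
          /*allow_encrypted=*/false, /*require_encrypted=*/false);
    }
    // MakeSwH264Range().Matches(config) then reports whether a given
    // VideoDecoderConfig falls inside this range.
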
diff --git a/chromium/media/base/supported_video_decoder_config_unittest.cc b/chromium/media/base/supported_video_decoder_config_unittest.cc
new file mode 100644
index 00000000000..dd3dabdbe3d
--- /dev/null
+++ b/chromium/media/base/supported_video_decoder_config_unittest.cc
@@ -0,0 +1,104 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/supported_video_decoder_config.h"
+#include "media/base/test_helpers.h"
+#include "media/base/video_codecs.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+
+class SupportedVideoDecoderConfigTest : public ::testing::Test {
+ public:
+ SupportedVideoDecoderConfigTest()
+ : decoder_config_(
+ TestVideoConfig::NormalCodecProfile(kCodecH264,
+ H264PROFILE_EXTENDED)) {
+ supported_config_.profile_min = H264PROFILE_MIN;
+ supported_config_.profile_max = H264PROFILE_MAX;
+ supported_config_.coded_size_min = gfx::Size(10, 20);
+ supported_config_.coded_size_max = gfx::Size(10000, 20000);
+ supported_config_.allow_encrypted = true;
+ supported_config_.require_encrypted = false;
+ }
+
+ SupportedVideoDecoderConfig supported_config_;
+
+ // Decoder config that matches |supported_config_|.
+ VideoDecoderConfig decoder_config_;
+};
+
+TEST_F(SupportedVideoDecoderConfigTest, ConstructionWithArgs) {
+ SupportedVideoDecoderConfig config2(
+ supported_config_.profile_min, supported_config_.profile_max,
+ supported_config_.coded_size_min, supported_config_.coded_size_max,
+ supported_config_.allow_encrypted, supported_config_.require_encrypted);
+ EXPECT_EQ(supported_config_.profile_min, config2.profile_min);
+ EXPECT_EQ(supported_config_.profile_max, config2.profile_max);
+ EXPECT_EQ(supported_config_.coded_size_min, config2.coded_size_min);
+ EXPECT_EQ(supported_config_.coded_size_max, config2.coded_size_max);
+ EXPECT_EQ(supported_config_.allow_encrypted, config2.allow_encrypted);
+ EXPECT_EQ(supported_config_.require_encrypted, config2.require_encrypted);
+}
+
+TEST_F(SupportedVideoDecoderConfigTest, MatchingConfigMatches) {
+ EXPECT_TRUE(supported_config_.Matches(decoder_config_));
+
+ // Since |supported_config_| allows encrypted, this should also succeed.
+ decoder_config_.SetIsEncrypted(true);
+ EXPECT_TRUE(supported_config_.Matches(decoder_config_));
+}
+
+TEST_F(SupportedVideoDecoderConfigTest, LowerProfileMismatches) {
+ // Raise |profile_min| above |decoder_config_|.
+ supported_config_.profile_min = H264PROFILE_HIGH;
+ EXPECT_FALSE(supported_config_.Matches(decoder_config_));
+}
+
+TEST_F(SupportedVideoDecoderConfigTest, HigherProfileMismatches) {
+ // Lower |profile_max| below |decoder_config_|.
+ supported_config_.profile_max = H264PROFILE_MAIN;
+ EXPECT_FALSE(supported_config_.Matches(decoder_config_));
+}
+
+TEST_F(SupportedVideoDecoderConfigTest, SmallerMinWidthMismatches) {
+ supported_config_.coded_size_min =
+ gfx::Size(decoder_config_.coded_size().width() + 1, 0);
+ EXPECT_FALSE(supported_config_.Matches(decoder_config_));
+}
+
+TEST_F(SupportedVideoDecoderConfigTest, SmallerMinHeightMismatches) {
+ supported_config_.coded_size_min =
+ gfx::Size(0, decoder_config_.coded_size().height() + 1);
+ EXPECT_FALSE(supported_config_.Matches(decoder_config_));
+}
+
+TEST_F(SupportedVideoDecoderConfigTest, LargerMaxWidthMismatches) {
+ supported_config_.coded_size_max =
+ gfx::Size(decoder_config_.coded_size().width() - 1, 10000);
+ EXPECT_FALSE(supported_config_.Matches(decoder_config_));
+}
+
+TEST_F(SupportedVideoDecoderConfigTest, LargerMaxHeightMismatches) {
+ supported_config_.coded_size_max =
+ gfx::Size(10000, decoder_config_.coded_size().height() - 1);
+ EXPECT_FALSE(supported_config_.Matches(decoder_config_));
+}
+
+TEST_F(SupportedVideoDecoderConfigTest, RequiredEncryptionMismatches) {
+ supported_config_.require_encrypted = true;
+ EXPECT_FALSE(supported_config_.Matches(decoder_config_));
+
+ // The encrypted version should succeed.
+ decoder_config_.SetIsEncrypted(true);
+ EXPECT_TRUE(supported_config_.Matches(decoder_config_));
+}
+
+TEST_F(SupportedVideoDecoderConfigTest, AllowedEncryptionMismatches) {
+ supported_config_.allow_encrypted = false;
+ decoder_config_.SetIsEncrypted(true);
+ EXPECT_FALSE(supported_config_.Matches(decoder_config_));
+}
+
+} // namespace media
diff --git a/chromium/media/base/user_input_monitor_unittest.cc b/chromium/media/base/user_input_monitor_unittest.cc
index 5e85052bad6..74238a2beb4 100644
--- a/chromium/media/base/user_input_monitor_unittest.cc
+++ b/chromium/media/base/user_input_monitor_unittest.cc
@@ -19,19 +19,39 @@
#if defined(USE_OZONE)
#include "ui/base/ui_base_features.h" // nogncheck
+#include "ui/ozone/public/ozone_platform.h" // nogncheck
+#endif
+
+#if defined(OS_WIN)
+#include "ui/events/test/keyboard_hook_monitor_utils.h"
#endif
namespace media {
-TEST(UserInputMonitorTest, CreatePlatformSpecific) {
+namespace {
+
+class UserInputMonitorTest : public testing::Test {
+ protected:
+ // testing::Test.
+ void SetUp() override {
#if defined(USE_OZONE)
- // TODO(crbug.com/1109112): enable those tests for Ozone.
- // Here, the only issue why they don't work is that the Ozone platform is not
- // initialised.
- if (features::IsUsingOzonePlatform())
- return;
+ if (features::IsUsingOzonePlatform()) {
+ if (ui::OzonePlatform::GetPlatformNameForTest() == "drm") {
+ // OzonePlatformDrm::InitializeUI hangs in tests on the DRM platform.
+ GTEST_SKIP();
+ }
+ // Initialise Ozone in single process mode, as all tests do.
+ ui::OzonePlatform::InitParams params;
+ params.single_process = true;
+ ui::OzonePlatform::InitializeForUI(params);
+ }
#endif
+ }
+};
+
+} // namespace
+TEST_F(UserInputMonitorTest, CreatePlatformSpecific) {
#if defined(OS_LINUX) || defined(OS_CHROMEOS)
base::test::TaskEnvironment task_environment(
base::test::TaskEnvironment::MainThreadType::IO);
@@ -53,15 +73,7 @@ TEST(UserInputMonitorTest, CreatePlatformSpecific) {
base::RunLoop().RunUntilIdle();
}
-TEST(UserInputMonitorTest, CreatePlatformSpecificWithMapping) {
-#if defined(USE_OZONE)
- // TODO(crbug.com/1109112): enable those tests for Ozone.
- // Here, the only issue why they don't work is that the Ozone platform is not
- // initialised.
- if (features::IsUsingOzonePlatform())
- return;
-#endif
-
+TEST_F(UserInputMonitorTest, CreatePlatformSpecificWithMapping) {
#if defined(OS_LINUX) || defined(OS_CHROMEOS)
base::test::TaskEnvironment task_environment(
base::test::TaskEnvironment::MainThreadType::IO);
@@ -90,7 +102,7 @@ TEST(UserInputMonitorTest, CreatePlatformSpecificWithMapping) {
EXPECT_EQ(0u, ReadKeyPressMonitorCount(readonly_mapping));
}
-TEST(UserInputMonitorTest, ReadWriteKeyPressMonitorCount) {
+TEST_F(UserInputMonitorTest, ReadWriteKeyPressMonitorCount) {
std::unique_ptr<base::MappedReadOnlyRegion> shmem =
std::make_unique<base::MappedReadOnlyRegion>(
base::ReadOnlySharedMemoryRegion::Create(sizeof(uint32_t)));
@@ -102,4 +114,96 @@ TEST(UserInputMonitorTest, ReadWriteKeyPressMonitorCount) {
EXPECT_EQ(count, ReadKeyPressMonitorCount(readonly_mapping));
}
+#if defined(OS_WIN)
+
+//
+// Windows-specific scenarios which require simulating keyboard hook events.
+//
+
+TEST_F(UserInputMonitorTest, BlockMonitoringAfterMonitoringEnabled) {
+ base::test::TaskEnvironment task_environment(
+ base::test::TaskEnvironment::MainThreadType::UI);
+
+ std::unique_ptr<UserInputMonitor> monitor = UserInputMonitor::Create(
+ base::ThreadTaskRunnerHandle::Get(), base::ThreadTaskRunnerHandle::Get());
+
+ if (!monitor)
+ return;
+
+ monitor->EnableKeyPressMonitoring();
+ ui::SimulateKeyboardHookRegistered();
+ ui::SimulateKeyboardHookUnregistered();
+ monitor->DisableKeyPressMonitoring();
+
+ monitor.reset();
+ base::RunLoop().RunUntilIdle();
+}
+
+TEST_F(UserInputMonitorTest, BlockMonitoringBeforeMonitoringEnabled) {
+ base::test::TaskEnvironment task_environment(
+ base::test::TaskEnvironment::MainThreadType::UI);
+
+ std::unique_ptr<UserInputMonitor> monitor = UserInputMonitor::Create(
+ base::ThreadTaskRunnerHandle::Get(), base::ThreadTaskRunnerHandle::Get());
+
+ if (!monitor)
+ return;
+
+ ui::SimulateKeyboardHookRegistered();
+ monitor->EnableKeyPressMonitoring();
+ ui::SimulateKeyboardHookUnregistered();
+ monitor->DisableKeyPressMonitoring();
+
+ monitor.reset();
+ base::RunLoop().RunUntilIdle();
+}
+
+TEST_F(UserInputMonitorTest, UnblockMonitoringAfterMonitoringDisabled) {
+ base::test::TaskEnvironment task_environment(
+ base::test::TaskEnvironment::MainThreadType::UI);
+
+ std::unique_ptr<UserInputMonitor> monitor = UserInputMonitor::Create(
+ base::ThreadTaskRunnerHandle::Get(), base::ThreadTaskRunnerHandle::Get());
+
+ if (!monitor)
+ return;
+
+ monitor->EnableKeyPressMonitoring();
+ ui::SimulateKeyboardHookRegistered();
+ monitor->DisableKeyPressMonitoring();
+ ui::SimulateKeyboardHookUnregistered();
+
+ monitor.reset();
+ base::RunLoop().RunUntilIdle();
+}
+
+TEST_F(UserInputMonitorTest, BlockKeypressMonitoringWithSharedMemoryBuffer) {
+ base::test::TaskEnvironment task_environment(
+ base::test::TaskEnvironment::MainThreadType::UI);
+
+ std::unique_ptr<UserInputMonitor> monitor = UserInputMonitor::Create(
+ base::ThreadTaskRunnerHandle::Get(), base::ThreadTaskRunnerHandle::Get());
+
+ if (!monitor)
+ return;
+
+ base::ReadOnlySharedMemoryMapping readonly_mapping =
+ static_cast<UserInputMonitorBase*>(monitor.get())
+ ->EnableKeyPressMonitoringWithMapping()
+ .Map();
+ EXPECT_EQ(0u, ReadKeyPressMonitorCount(readonly_mapping));
+ ui::SimulateKeyboardHookRegistered();
+ EXPECT_EQ(0u, ReadKeyPressMonitorCount(readonly_mapping));
+ ui::SimulateKeyboardHookUnregistered();
+ EXPECT_EQ(0u, ReadKeyPressMonitorCount(readonly_mapping));
+ monitor->DisableKeyPressMonitoring();
+
+ monitor.reset();
+ base::RunLoop().RunUntilIdle();
+
+ // Check that the read-only region remains valid after disable.
+ EXPECT_EQ(0u, ReadKeyPressMonitorCount(readonly_mapping));
+}
+#endif // defined(OS_WIN)
+
} // namespace media
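
The shared-memory counter pattern exercised above, condensed into a hedged sketch (assumes a running TaskEnvironment; Create() may return null on platforms without a monitor implementation):

    std::unique_ptr<media::UserInputMonitor> monitor =
        media::UserInputMonitor::Create(base::ThreadTaskRunnerHandle::Get(),
                                        base::ThreadTaskRunnerHandle::Get());
    if (monitor) {
      base::ReadOnlySharedMemoryMapping mapping =
          static_cast<media::UserInputMonitorBase*>(monitor.get())
              ->EnableKeyPressMonitoringWithMapping()
              .Map();
      // The mapping holds a uint32_t key press count updated by the monitor.
      uint32_t presses = media::ReadKeyPressMonitorCount(mapping);
      monitor->DisableKeyPressMonitoring();
    }
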
diff --git a/chromium/media/base/user_input_monitor_win.cc b/chromium/media/base/user_input_monitor_win.cc
index 8187d23f282..6082070ace0 100644
--- a/chromium/media/base/user_input_monitor_win.cc
+++ b/chromium/media/base/user_input_monitor_win.cc
@@ -20,6 +20,8 @@
#include "third_party/skia/include/core/SkPoint.h"
#include "ui/events/keyboard_event_counter.h"
#include "ui/events/keycodes/keyboard_code_conversion_win.h"
+#include "ui/events/win/keyboard_hook_monitor.h"
+#include "ui/events/win/keyboard_hook_observer.h"
namespace media {
namespace {
@@ -41,7 +43,8 @@ std::unique_ptr<RAWINPUTDEVICE> GetRawInputDevices(HWND hwnd, DWORD flags) {
// UserInputMonitorWin since it needs to be deleted on the UI thread.
class UserInputMonitorWinCore
: public base::SupportsWeakPtr<UserInputMonitorWinCore>,
- public base::CurrentThread::DestructionObserver {
+ public base::CurrentThread::DestructionObserver,
+ public ui::KeyboardHookObserver {
public:
enum EventBitMask {
MOUSE_EVENT_MASK = 1,
@@ -55,6 +58,10 @@ class UserInputMonitorWinCore
// DestructionObserver overrides.
void WillDestroyCurrentMessageLoop() override;
+ // KeyboardHookObserver implementation.
+ void OnHookRegistered() override;
+ void OnHookUnregistered() override;
+
uint32_t GetKeyPressCount() const;
void StartMonitor();
void StartMonitorWithMapping(base::WritableSharedMemoryMapping mapping);
@@ -69,6 +76,9 @@ class UserInputMonitorWinCore
LPARAM lparam,
LRESULT* result);
+ void CreateRawInputWindow();
+ void DestroyRawInputWindow();
+
// Task runner on which |window_| is created.
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner_;
@@ -79,6 +89,9 @@ class UserInputMonitorWinCore
std::unique_ptr<base::win::MessageWindow> window_;
ui::KeyboardEventCounter counter_;
+ bool pause_monitoring_ = false;
+ bool start_monitoring_after_hook_removed_ = false;
+
DISALLOW_COPY_AND_ASSIGN(UserInputMonitorWinCore);
};
@@ -106,10 +119,17 @@ class UserInputMonitorWin : public UserInputMonitorBase {
UserInputMonitorWinCore::UserInputMonitorWinCore(
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner)
- : ui_task_runner_(ui_task_runner) {}
+ : ui_task_runner_(ui_task_runner) {
+ // Register this instance with the KeyboardHookMonitor to listen for changes
+ // in the KeyboardHook registration state. Since this instance may have been
+ // constructed after a hook was registered, check the current state as well.
+ ui::KeyboardHookMonitor::GetInstance()->AddObserver(this);
+ pause_monitoring_ = ui::KeyboardHookMonitor::GetInstance()->IsActive();
+}
UserInputMonitorWinCore::~UserInputMonitorWinCore() {
DCHECK(!window_);
+ ui::KeyboardHookMonitor::GetInstance()->RemoveObserver(this);
}
void UserInputMonitorWinCore::WillDestroyCurrentMessageLoop() {
@@ -124,6 +144,54 @@ uint32_t UserInputMonitorWinCore::GetKeyPressCount() const {
void UserInputMonitorWinCore::StartMonitor() {
DCHECK(ui_task_runner_->BelongsToCurrentThread());
+ if (pause_monitoring_) {
+ start_monitoring_after_hook_removed_ = true;
+ return;
+ }
+
+ CreateRawInputWindow();
+}
+
+void UserInputMonitorWinCore::StartMonitorWithMapping(
+ base::WritableSharedMemoryMapping mapping) {
+ StartMonitor();
+ key_press_count_mapping_ =
+ std::make_unique<base::WritableSharedMemoryMapping>(std::move(mapping));
+}
+
+void UserInputMonitorWinCore::StopMonitor() {
+ DCHECK(ui_task_runner_->BelongsToCurrentThread());
+
+ DestroyRawInputWindow();
+ start_monitoring_after_hook_removed_ = false;
+
+ key_press_count_mapping_.reset();
+}
+
+void UserInputMonitorWinCore::OnHookRegistered() {
+ DCHECK(ui_task_runner_->BelongsToCurrentThread());
+ DCHECK(!pause_monitoring_);
+ pause_monitoring_ = true;
+
+ // Don't destroy |key_press_count_mapping_| as this is a temporary block and
+ // we want to allow monitoring to continue using the same shared memory once
+ // monitoring is unblocked.
+ DestroyRawInputWindow();
+}
+
+void UserInputMonitorWinCore::OnHookUnregistered() {
+ DCHECK(ui_task_runner_->BelongsToCurrentThread());
+ DCHECK(pause_monitoring_);
+ pause_monitoring_ = false;
+
+ if (start_monitoring_after_hook_removed_) {
+ start_monitoring_after_hook_removed_ = false;
+ StartMonitor();
+ }
+}
+
+void UserInputMonitorWinCore::CreateRawInputWindow() {
+ DCHECK(ui_task_runner_->BelongsToCurrentThread());
if (window_)
return;
@@ -149,16 +217,8 @@ void UserInputMonitorWinCore::StartMonitor() {
base::CurrentThread::Get()->AddDestructionObserver(this);
}
-void UserInputMonitorWinCore::StartMonitorWithMapping(
- base::WritableSharedMemoryMapping mapping) {
- StartMonitor();
- key_press_count_mapping_ =
- std::make_unique<base::WritableSharedMemoryMapping>(std::move(mapping));
-}
-
-void UserInputMonitorWinCore::StopMonitor() {
+void UserInputMonitorWinCore::DestroyRawInputWindow() {
DCHECK(ui_task_runner_->BelongsToCurrentThread());
-
if (!window_)
return;
@@ -168,11 +228,8 @@ void UserInputMonitorWinCore::StopMonitor() {
if (!RegisterRawInputDevices(device.get(), 1, sizeof(*device))) {
PLOG(INFO) << "RegisterRawInputDevices() failed for RIDEV_REMOVE";
}
-
window_ = nullptr;
- key_press_count_mapping_.reset();
-
// Stop observing message loop destruction if no event is being monitored.
base::CurrentThread::Get()->RemoveDestructionObserver(this);
}
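
The observer logic above reduces to a small state machine; a standalone sketch of the same pause/resume bookkeeping (Chromium threading and raw-input details stubbed out):

    class HookAwareMonitor {
     public:
      void StartMonitor() {
        if (pause_monitoring_) {
          // A keyboard hook is active; defer until it is removed.
          start_monitoring_after_hook_removed_ = true;
          return;
        }
        CreateRawInputWindow();
      }
      void OnHookRegistered() {
        pause_monitoring_ = true;
        // Tear down raw input but keep any shared memory mapping alive so
        // monitoring can resume on the same buffer.
        DestroyRawInputWindow();
      }
      void OnHookUnregistered() {
        pause_monitoring_ = false;
        if (start_monitoring_after_hook_removed_) {
          start_monitoring_after_hook_removed_ = false;
          StartMonitor();
        }
      }

     private:
      void CreateRawInputWindow() {}   // Stub.
      void DestroyRawInputWindow() {}  // Stub.
      bool pause_monitoring_ = false;
      bool start_monitoring_after_hook_removed_ = false;
    };
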
diff --git a/chromium/media/base/video_decoder.cc b/chromium/media/base/video_decoder.cc
index 80ac2f5ab0e..a593bb3e6a1 100644
--- a/chromium/media/base/video_decoder.cc
+++ b/chromium/media/base/video_decoder.cc
@@ -29,6 +29,10 @@ int VideoDecoder::GetMaxDecodeRequests() const {
return 1;
}
+bool VideoDecoder::IsOptimizedForRTC() const {
+ return false;
+}
+
// static
int VideoDecoder::GetRecommendedThreadCount(int desired_threads) {
// If the thread count is specified on the command line, respect it so long as
diff --git a/chromium/media/base/video_decoder.h b/chromium/media/base/video_decoder.h
index b66186c625e..8c6cb290dc2 100644
--- a/chromium/media/base/video_decoder.h
+++ b/chromium/media/base/video_decoder.h
@@ -115,6 +115,10 @@ class MEDIA_EXPORT VideoDecoder : public Decoder {
// Returns maximum number of parallel decode requests.
virtual int GetMaxDecodeRequests() const;
+ // Returns true if and only if this decoder is optimized for decoding RTC
+ // streams. The default is false.
+ virtual bool IsOptimizedForRTC() const;
+
// Returns the recommended number of threads for software video decoding. If
// the --video-threads command line option is specified and is valid, that
// value is returned. Otherwise |desired_threads| is clamped to the number of
@@ -122,6 +126,12 @@ class MEDIA_EXPORT VideoDecoder : public Decoder {
// [|limits::kMinVideoDecodeThreads|, |limits::kMaxVideoDecodeThreads|].
static int GetRecommendedThreadCount(int desired_threads);
+ // Returns the type of the decoder for statistics recording purposes.
+ // For meta-decoders (those which wrap other decoders, e.g. MojoVideoDecoder)
+ // this should return the underlying type if it is known; otherwise it should
+ // return its own type.
+ virtual VideoDecoderType GetDecoderType() const = 0;
+
private:
DISALLOW_COPY_AND_ASSIGN(VideoDecoder);
};
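
For a concrete decoder, the two new virtuals look roughly like this (hypothetical class; the VideoDecoderType value shown is illustrative):

    class RtcTunedDecoder : public media::VideoDecoder {
     public:
      // Only RTC-optimized decoders need to override this; the base class
      // default is false.
      bool IsOptimizedForRTC() const override { return true; }
      media::VideoDecoderType GetDecoderType() const override {
        return media::VideoDecoderType::kUnknown;  // Illustrative value.
      }
      // Remaining pure-virtual Decoder/VideoDecoder methods elided.
    };
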
diff --git a/chromium/media/base/video_decoder_config.cc b/chromium/media/base/video_decoder_config.cc
index 85cc6a64396..79af2bfd372 100644
--- a/chromium/media/base/video_decoder_config.cc
+++ b/chromium/media/base/video_decoder_config.cc
@@ -117,7 +117,11 @@ std::string VideoDecoderConfig::AsHumanReadableString() const {
<< hdr_metadata()->mastering_metadata.primary_b.x() << ","
<< hdr_metadata()->mastering_metadata.primary_b.y() << ") wp("
<< hdr_metadata()->mastering_metadata.white_point.x() << ","
- << hdr_metadata()->mastering_metadata.white_point.y() << ")";
+ << hdr_metadata()->mastering_metadata.white_point.y()
+ << "), max_content_light_level="
+ << hdr_metadata()->max_content_light_level
+ << ", max_frame_average_light_level="
+ << hdr_metadata()->max_frame_average_light_level;
}
return s.str();
diff --git a/chromium/media/base/video_decoder_config.h b/chromium/media/base/video_decoder_config.h
index 5d04d524bad..144f7d61a15 100644
--- a/chromium/media/base/video_decoder_config.h
+++ b/chromium/media/base/video_decoder_config.h
@@ -167,6 +167,10 @@ class MEDIA_EXPORT VideoDecoderConfig {
// useful for decryptors that decrypts an encrypted stream to a clear stream.
void SetIsEncrypted(bool is_encrypted);
+ // Sets whether this config is for WebRTC or not.
+ void set_is_rtc(bool is_rtc) { is_rtc_ = is_rtc; }
+ bool is_rtc() const { return is_rtc_; }
+
private:
VideoCodec codec_ = kUnknownVideoCodec;
VideoCodecProfile profile_ = VIDEO_CODEC_PROFILE_UNKNOWN;
@@ -191,6 +195,7 @@ class MEDIA_EXPORT VideoDecoderConfig {
VideoColorSpace color_space_info_;
base::Optional<gfx::HDRMetadata> hdr_metadata_;
+ bool is_rtc_ = false;
// Not using DISALLOW_COPY_AND_ASSIGN here intentionally to allow the compiler
// generated copy constructor and assignment operator. Since the extra data is
diff --git a/chromium/media/base/video_encoder.h b/chromium/media/base/video_encoder.h
index 1513367c15c..d1a590a4086 100644
--- a/chromium/media/base/video_encoder.h
+++ b/chromium/media/base/video_encoder.h
@@ -34,6 +34,11 @@ struct MEDIA_EXPORT VideoEncoderOutput {
class MEDIA_EXPORT VideoEncoder {
public:
+ // TODO: Move this to a new file if there are more codec specific options.
+ struct MEDIA_EXPORT AvcOptions {
+ bool produce_annexb = false;
+ };
+
struct MEDIA_EXPORT Options {
Options();
Options(const Options&);
@@ -44,6 +49,9 @@ class MEDIA_EXPORT VideoEncoder {
gfx::Size frame_size;
base::Optional<int> keyframe_interval = 10000;
+
+ // Only used for H264 encoding.
+ AvcOptions avc;
};
// A sequence of codec specific bytes, commonly known as extradata.
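
Configuring the new codec-specific option alongside the existing ones (field names are from this header; the values are illustrative):

    media::VideoEncoder::Options options;
    options.frame_size = gfx::Size(1280, 720);
    options.keyframe_interval = 300;
    // Emit Annex B byte streams instead of length-prefixed (avcC) framing;
    // per the header comment this is only consulted for H.264 encoding.
    options.avc.produce_annexb = true;
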
diff --git a/chromium/media/base/video_frame.cc b/chromium/media/base/video_frame.cc
index f3833229a48..914a4a3a267 100644
--- a/chromium/media/base/video_frame.cc
+++ b/chromium/media/base/video_frame.cc
@@ -146,6 +146,7 @@ gfx::Size VideoFrame::SampleSize(VideoPixelFormat format, size_t plane) {
case PIXEL_FORMAT_XR30:
case PIXEL_FORMAT_XB30:
case PIXEL_FORMAT_BGRA:
+ case PIXEL_FORMAT_RGBAF16:
break;
}
}
@@ -178,6 +179,7 @@ static bool RequiresEvenSizeAllocation(VideoPixelFormat format) {
case PIXEL_FORMAT_XR30:
case PIXEL_FORMAT_XB30:
case PIXEL_FORMAT_BGRA:
+ case PIXEL_FORMAT_RGBAF16:
return false;
case PIXEL_FORMAT_NV12:
case PIXEL_FORMAT_NV21:
@@ -233,6 +235,9 @@ static base::Optional<VideoFrameLayout> GetDefaultLayout(
break;
case PIXEL_FORMAT_ARGB:
+ case PIXEL_FORMAT_XRGB:
+ case PIXEL_FORMAT_ABGR:
+ case PIXEL_FORMAT_XBGR:
planes = std::vector<ColorPlaneLayout>{ColorPlaneLayout(
coded_size.width() * 4, 0, coded_size.GetArea() * 4)};
break;
@@ -252,10 +257,8 @@ static base::Optional<VideoFrameLayout> GetDefaultLayout(
default:
// TODO(miu): This function should support any pixel format.
// http://crbug.com/555909 .
- DLOG(ERROR)
- << "Only PIXEL_FORMAT_I420, PIXEL_FORMAT_Y16, PIXEL_FORMAT_NV12, "
- "and PIXEL_FORMAT_ARGB formats are supported: "
- << VideoPixelFormatToString(format);
+ DLOG(ERROR) << "Unsupported pixel format"
+ << VideoPixelFormatToString(format);
return base::nullopt;
}
@@ -309,7 +312,7 @@ bool VideoFrame::IsValidConfig(VideoPixelFormat format,
return true;
// Make sure new formats are properly accounted for in the method.
- static_assert(PIXEL_FORMAT_MAX == 32,
+ static_assert(PIXEL_FORMAT_MAX == 33,
"Added pixel format, please review IsValidConfig()");
if (format == PIXEL_FORMAT_UNKNOWN) {
@@ -341,7 +344,7 @@ scoped_refptr<VideoFrame> VideoFrame::CreateVideoHoleFrame(
scoped_refptr<VideoFrame> frame =
new VideoFrame(*layout, StorageType::STORAGE_OPAQUE,
gfx::Rect(natural_size), natural_size, timestamp);
- frame->metadata()->overlay_plane_id = overlay_plane_id;
+ frame->metadata().overlay_plane_id = overlay_plane_id;
return frame;
}
@@ -368,7 +371,8 @@ scoped_refptr<VideoFrame> VideoFrame::WrapNativeTextures(
if (format != PIXEL_FORMAT_ARGB && format != PIXEL_FORMAT_XRGB &&
format != PIXEL_FORMAT_NV12 && format != PIXEL_FORMAT_I420 &&
format != PIXEL_FORMAT_ABGR && format != PIXEL_FORMAT_XR30 &&
- format != PIXEL_FORMAT_XB30 && format != PIXEL_FORMAT_P016LE) {
+ format != PIXEL_FORMAT_XB30 && format != PIXEL_FORMAT_P016LE &&
+ format != PIXEL_FORMAT_RGBAF16) {
DLOG(ERROR) << "Unsupported pixel format: "
<< VideoPixelFormatToString(format);
return nullptr;
@@ -434,6 +438,18 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalDataWithLayout(
return nullptr;
}
+ const auto& last_plane = layout.planes()[layout.planes().size() - 1];
+ const size_t required_size = last_plane.offset + last_plane.size;
+ if (data_size < required_size) {
+ DLOG(ERROR) << __func__ << " Provided data size is too small. Provided "
+ << data_size << " bytes, but " << required_size
+ << " bytes are required."
+ << ConfigToString(layout.format(), storage_type,
+ layout.coded_size(), visible_rect,
+ natural_size);
+ return nullptr;
+ }
+
scoped_refptr<VideoFrame> frame = new VideoFrame(
layout, storage_type, visible_rect, natural_size, timestamp);
@@ -845,15 +861,37 @@ scoped_refptr<VideoFrame> VideoFrame::WrapVideoFrame(
return nullptr;
}
+ size_t new_plane_count = NumPlanes(format);
+ base::Optional<VideoFrameLayout> new_layout;
+ if (format == frame->format()) {
+ new_layout = frame->layout();
+ } else {
+ std::vector<ColorPlaneLayout> new_planes = frame->layout().planes();
+ if (new_plane_count > new_planes.size()) {
+ DLOG(ERROR) << " Wrapping frame has more planes than old one."
+ << " old plane count: " << new_planes.size()
+ << " new plane count: " << new_plane_count;
+ return nullptr;
+ }
+ new_planes.resize(new_plane_count);
+ new_layout = VideoFrameLayout::CreateWithPlanes(format, frame->coded_size(),
+ new_planes);
+ }
+
+ if (!new_layout.has_value()) {
+ DLOG(ERROR) << " Can't create layout for the wrapping frame";
+ return nullptr;
+ }
+
scoped_refptr<VideoFrame> wrapping_frame(
- new VideoFrame(frame->layout(), frame->storage_type(), visible_rect,
+ new VideoFrame(new_layout.value(), frame->storage_type(), visible_rect,
natural_size, frame->timestamp()));
// Copy all metadata to the wrapped frame.
- wrapping_frame->metadata()->MergeMetadataFrom(frame->metadata());
+ wrapping_frame->metadata().MergeMetadataFrom(frame->metadata());
if (frame->IsMappable()) {
- for (size_t i = 0; i < NumPlanes(format); ++i) {
+ for (size_t i = 0; i < new_plane_count; ++i) {
wrapping_frame->data_[i] = frame->data_[i];
}
}
@@ -882,7 +920,7 @@ scoped_refptr<VideoFrame> VideoFrame::CreateEOSFrame() {
}
scoped_refptr<VideoFrame> frame = new VideoFrame(
*layout, STORAGE_UNKNOWN, gfx::Rect(), gfx::Size(), kNoTimestamp);
- frame->metadata()->end_of_stream = true;
+ frame->metadata().end_of_stream = true;
return frame;
}
@@ -940,6 +978,15 @@ size_t VideoFrame::AllocationSize(VideoPixelFormat format,
gfx::Size VideoFrame::PlaneSize(VideoPixelFormat format,
size_t plane,
const gfx::Size& coded_size) {
+ gfx::Size size = PlaneSizeInSamples(format, plane, coded_size);
+ size.set_width(size.width() * BytesPerElement(format, plane));
+ return size;
+}
+
+// static
+gfx::Size VideoFrame::PlaneSizeInSamples(VideoPixelFormat format,
+ size_t plane,
+ const gfx::Size& coded_size) {
DCHECK(IsValidPlane(format, plane));
int width = coded_size.width();
@@ -955,8 +1002,7 @@ gfx::Size VideoFrame::PlaneSize(VideoPixelFormat format,
const gfx::Size subsample = SampleSize(format, plane);
DCHECK(width % subsample.width() == 0);
DCHECK(height % subsample.height() == 0);
- return gfx::Size(BytesPerElement(format, plane) * width / subsample.width(),
- height / subsample.height());
+ return gfx::Size(width / subsample.width(), height / subsample.height());
}
// static
@@ -973,7 +1019,7 @@ int VideoFrame::PlaneHorizontalBitsPerPixel(VideoPixelFormat format,
int VideoFrame::PlaneBitsPerPixel(VideoPixelFormat format, size_t plane) {
DCHECK(IsValidPlane(format, plane));
return PlaneHorizontalBitsPerPixel(format, plane) /
- SampleSize(format, plane).height();
+ SampleSize(format, plane).height();
}
// static
@@ -986,6 +1032,8 @@ size_t VideoFrame::RowBytes(size_t plane, VideoPixelFormat format, int width) {
int VideoFrame::BytesPerElement(VideoPixelFormat format, size_t plane) {
DCHECK(IsValidPlane(format, plane));
switch (format) {
+ case PIXEL_FORMAT_RGBAF16:
+ return 8;
case PIXEL_FORMAT_ARGB:
case PIXEL_FORMAT_BGRA:
case PIXEL_FORMAT_XRGB:
@@ -1190,8 +1238,8 @@ uint8_t* VideoFrame::visible_data(size_t plane) {
static_cast<const VideoFrame*>(this)->visible_data(plane));
}
-const gpu::MailboxHolder&
-VideoFrame::mailbox_holder(size_t texture_index) const {
+const gpu::MailboxHolder& VideoFrame::mailbox_holder(
+ size_t texture_index) const {
DCHECK(HasTextures());
DCHECK(IsValidPlane(format(), texture_index));
return wrapped_frame_ ? wrapped_frame_->mailbox_holders_[texture_index]
@@ -1258,13 +1306,15 @@ gpu::SyncToken VideoFrame::UpdateReleaseSyncToken(SyncTokenClient* client) {
}
std::string VideoFrame::AsHumanReadableString() const {
- if (metadata()->end_of_stream)
+ if (metadata().end_of_stream)
return "end of stream";
std::ostringstream s;
s << ConfigToString(format(), storage_type_, coded_size(), visible_rect_,
natural_size_)
<< " timestamp:" << timestamp_.InMicroseconds();
+ if (HasTextures())
+ s << " textures: " << NumTextures();
return s.str();
}
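
A worked example of the split introduced above, for a 64x36 NV12 frame (NV12 uses 2x2-subsampled interleaved UV at two bytes per sample pair):

    // PlaneSizeInSamples: the Y plane is 64x36, the UV plane is 32x18.
    // PlaneSize multiplies the width by BytesPerElement: the Y plane stays
    // 64x36 (1 byte per sample) while the UV plane becomes 64x18 in bytes.
    gfx::Size y_bytes = media::VideoFrame::PlaneSize(
        media::PIXEL_FORMAT_NV12, media::VideoFrame::kYPlane,
        gfx::Size(64, 36));
    gfx::Size uv_samples = media::VideoFrame::PlaneSizeInSamples(
        media::PIXEL_FORMAT_NV12, media::VideoFrame::kUVPlane,
        gfx::Size(64, 36));
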
diff --git a/chromium/media/base/video_frame.h b/chromium/media/base/video_frame.h
index 4185e3ce506..5bf0cacebf1 100644
--- a/chromium/media/base/video_frame.h
+++ b/chromium/media/base/video_frame.h
@@ -346,6 +346,12 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
size_t plane,
const gfx::Size& coded_size);
+ // Returns the gfx::Size (in samples) of the given plane for the given coded
+ // size and format.
+ static gfx::Size PlaneSizeInSamples(VideoPixelFormat format,
+ size_t plane,
+ const gfx::Size& coded_size);
+
// Returns horizontal bits per pixel for given |plane| and |format|.
static int PlaneHorizontalBitsPerPixel(VideoPixelFormat format, size_t plane);
@@ -559,10 +565,8 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
//
// TODO(miu): Move some of the "extra" members of VideoFrame (below) into
// here as a later clean-up step.
- //
- // TODO(https://crbug.com/1096727): change the return type to const&.
- const VideoFrameMetadata* metadata() const { return &metadata_; }
- VideoFrameMetadata* metadata() { return &metadata_; }
+ const VideoFrameMetadata& metadata() const { return metadata_; }
+ VideoFrameMetadata& metadata() { return metadata_; }
void set_metadata(const VideoFrameMetadata& metadata) {
metadata_ = metadata;
}
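
The accessor change implies a mechanical migration at call sites, visible throughout this patch:

    // Before: frame->metadata()->end_of_stream = true;
    // After, with the reference-returning accessor:
    frame->metadata().end_of_stream = true;
    bool eos = frame->metadata().end_of_stream;
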
diff --git a/chromium/media/base/video_frame_feedback.h b/chromium/media/base/video_frame_feedback.h
index 3e64b3fded9..348412b1349 100644
--- a/chromium/media/base/video_frame_feedback.h
+++ b/chromium/media/base/video_frame_feedback.h
@@ -35,6 +35,10 @@ struct MEDIA_EXPORT VideoFrameFeedback {
max_framerate_fps == other.max_framerate_fps;
}
+ bool operator!=(const VideoFrameFeedback& other) const {
+ return !(*this == other);
+ }
+
// Combine constraints of two different sinks resulting in constraints fitting
// both of them.
void Combine(const VideoFrameFeedback& other);
diff --git a/chromium/media/base/video_frame_layout.cc b/chromium/media/base/video_frame_layout.cc
index ac6b633e240..dcf0dd23cc5 100644
--- a/chromium/media/base/video_frame_layout.cc
+++ b/chromium/media/base/video_frame_layout.cc
@@ -55,6 +55,7 @@ size_t VideoFrameLayout::NumPlanes(VideoPixelFormat format) {
case PIXEL_FORMAT_XBGR:
case PIXEL_FORMAT_XR30:
case PIXEL_FORMAT_XB30:
+ case PIXEL_FORMAT_RGBAF16:
return 1;
case PIXEL_FORMAT_NV12:
case PIXEL_FORMAT_NV21:
diff --git a/chromium/media/base/video_frame_metadata.cc b/chromium/media/base/video_frame_metadata.cc
index 6cd1c5dcfb0..cfe6cbc6747 100644
--- a/chromium/media/base/video_frame_metadata.cc
+++ b/chromium/media/base/video_frame_metadata.cc
@@ -21,11 +21,11 @@ VideoFrameMetadata::VideoFrameMetadata(const VideoFrameMetadata& other) =
default;
#define MERGE_FIELD(a, source) \
- if (source->a) \
- this->a = source->a
+ if (source.a) \
+ this->a = source.a
void VideoFrameMetadata::MergeMetadataFrom(
- const VideoFrameMetadata* metadata_source) {
+ const VideoFrameMetadata& metadata_source) {
MERGE_FIELD(allow_overlay, metadata_source);
MERGE_FIELD(capture_begin_time, metadata_source);
MERGE_FIELD(capture_end_time, metadata_source);
@@ -38,7 +38,7 @@ void VideoFrameMetadata::MergeMetadataFrom(
MERGE_FIELD(interactive_content, metadata_source);
MERGE_FIELD(reference_time, metadata_source);
MERGE_FIELD(read_lock_fences_enabled, metadata_source);
- MERGE_FIELD(rotation, metadata_source);
+ MERGE_FIELD(transformation, metadata_source);
MERGE_FIELD(texture_owner, metadata_source);
MERGE_FIELD(wants_promotion_hint, metadata_source);
MERGE_FIELD(protected_video, metadata_source);
@@ -57,6 +57,7 @@ void VideoFrameMetadata::MergeMetadataFrom(
MERGE_FIELD(receive_time, metadata_source);
MERGE_FIELD(wallclock_frame_duration, metadata_source);
MERGE_FIELD(maximum_composition_delay_in_frames, metadata_source);
+ MERGE_FIELD(hw_protected_validation_id, metadata_source);
}
} // namespace media
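
For reference, the rough expansion of one MERGE_FIELD line after the pointer-to-reference change:

    // MERGE_FIELD(allow_overlay, metadata_source) becomes:
    if (metadata_source.allow_overlay)
      this->allow_overlay = metadata_source.allow_overlay;
    // base::Optional<> fields are copied only when the source is set, and
    // plain bools only when the source is true, so unset or false source
    // fields never clobber the destination.
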
diff --git a/chromium/media/base/video_frame_metadata.h b/chromium/media/base/video_frame_metadata.h
index ded4b13adf6..3456e0703bd 100644
--- a/chromium/media/base/video_frame_metadata.h
+++ b/chromium/media/base/video_frame_metadata.h
@@ -46,7 +46,7 @@ struct MEDIA_EXPORT VideoFrameMetadata {
};
// Merges internal values from |metadata_source|.
- void MergeMetadataFrom(const VideoFrameMetadata* metadata_source);
+ void MergeMetadataFrom(const VideoFrameMetadata& metadata_source);
// Sources of VideoFrames use this marker to indicate that the associated
// VideoFrame can be overlaid, in which case its contents do not need to be
@@ -114,8 +114,8 @@ struct MEDIA_EXPORT VideoFrameMetadata {
// should use read lock fences.
bool read_lock_fences_enabled = false;
- // Indicates that the frame is rotated.
- base::Optional<VideoRotation> rotation;
+ // Indicates that the frame has a rotation and/or flip.
+ base::Optional<VideoTransformation> transformation;
// Android only: if set, then this frame is not suitable for overlay, even
// if ALLOW_OVERLAY is set. However, it allows us to process the overlay
@@ -134,6 +134,10 @@ struct MEDIA_EXPORT VideoFrameMetadata {
// PROTECTED_VIDEO is also set to true.
bool hw_protected = false;
+ // Identifier used to query if a HW protected video frame can still be
+ // properly displayed or not. Non-zero when valid.
+ uint32_t hw_protected_validation_id = 0;
+
// An UnguessableToken that identifies VideoOverlayFactory that created
// this VideoFrame. It's used by Cast to help with video hole punch.
base::Optional<base::UnguessableToken> overlay_plane_id;
@@ -165,7 +169,7 @@ struct MEDIA_EXPORT VideoFrameMetadata {
// The RTP timestamp associated with this video frame. Stored as a double
// since base::DictionaryValue doesn't have a uint32_t type.
//
- // https://w3c.github.io/webrtc-pc/#dom-rtcrtpcontributingsource
+ // https://w3c.github.io/webrtc-pc/#dom-rtcrtpcontributingsource-rtptimestamp
base::Optional<double> rtp_timestamp;
// For video frames coming from a remote source, this is the time the
diff --git a/chromium/media/base/video_frame_unittest.cc b/chromium/media/base/video_frame_unittest.cc
index 30f2bbc64e0..50836987e0d 100644
--- a/chromium/media/base/video_frame_unittest.cc
+++ b/chromium/media/base/video_frame_unittest.cc
@@ -59,8 +59,8 @@ media::VideoFrameMetadata GetFullVideoFrameMetadata() {
// gfx::Rects
metadata.capture_update_rect = gfx::Rect(12, 34, 360, 480);
- // media::VideoRotations
- metadata.rotation = media::VideoRotation::VIDEO_ROTATION_90;
+ // media::VideoTransformation
+ metadata.transformation = media::VIDEO_ROTATION_90;
// media::VideoFrameMetadata::CopyMode
metadata.copy_mode = media::VideoFrameMetadata::CopyMode::kCopyToNewTexture;
@@ -119,7 +119,7 @@ void VerifyVideoFrameMetadataEquality(const media::VideoFrameMetadata& a,
EXPECT_EQ(a.interactive_content, b.interactive_content);
EXPECT_EQ(a.reference_time, b.reference_time);
EXPECT_EQ(a.read_lock_fences_enabled, b.read_lock_fences_enabled);
- EXPECT_EQ(a.rotation, b.rotation);
+ EXPECT_EQ(a.transformation, b.transformation);
EXPECT_EQ(a.texture_owner, b.texture_owner);
EXPECT_EQ(a.wants_promotion_hint, b.wants_promotion_hint);
EXPECT_EQ(a.protected_video, b.protected_video);
@@ -290,7 +290,7 @@ TEST(VideoFrame, CreateFrame) {
// Test an empty frame.
frame = VideoFrame::CreateEOSFrame();
- EXPECT_TRUE(frame->metadata()->end_of_stream);
+ EXPECT_TRUE(frame->metadata().end_of_stream);
}
TEST(VideoFrame, CreateZeroInitializedFrame) {
@@ -326,7 +326,7 @@ TEST(VideoFrame, CreateBlackFrame) {
// Test basic properties.
EXPECT_EQ(0, frame->timestamp().InMicroseconds());
- EXPECT_FALSE(frame->metadata()->end_of_stream);
+ EXPECT_FALSE(frame->metadata().end_of_stream);
// Test |frame| properties.
EXPECT_EQ(PIXEL_FORMAT_I420, frame->format());
@@ -368,7 +368,7 @@ TEST(VideoFrame, WrapVideoFrame) {
gfx::Rect visible_rect(1, 1, 1, 1);
gfx::Size natural_size = visible_rect.size();
- wrapped_frame->metadata()->frame_duration = kFrameDuration;
+ wrapped_frame->metadata().frame_duration = kFrameDuration;
frame = media::VideoFrame::WrapVideoFrame(
wrapped_frame, wrapped_frame->format(), visible_rect, natural_size);
wrapped_frame->AddDestructionObserver(
@@ -382,12 +382,12 @@ TEST(VideoFrame, WrapVideoFrame) {
EXPECT_EQ(natural_size, frame->natural_size());
// Verify metadata was copied to the wrapped frame.
- EXPECT_EQ(*frame->metadata()->frame_duration, kFrameDuration);
+ EXPECT_EQ(*frame->metadata().frame_duration, kFrameDuration);
// Verify the metadata copy was a deep copy.
wrapped_frame->clear_metadata();
- EXPECT_NE(wrapped_frame->metadata()->frame_duration.has_value(),
- frame->metadata()->frame_duration.has_value());
+ EXPECT_NE(wrapped_frame->metadata().frame_duration.has_value(),
+ frame->metadata().frame_duration.has_value());
}
// Verify that |wrapped_frame| outlives |frame|.
@@ -725,6 +725,10 @@ TEST(VideoFrame, AllocationSize_OddSize) {
EXPECT_EQ(30u, VideoFrame::AllocationSize(format, size))
<< VideoPixelFormatToString(format);
break;
+ case PIXEL_FORMAT_RGBAF16:
+ EXPECT_EQ(120u, VideoFrame::AllocationSize(format, size))
+ << VideoPixelFormatToString(format);
+ break;
case PIXEL_FORMAT_MJPEG:
case PIXEL_FORMAT_UNKNOWN:
continue;
@@ -738,11 +742,11 @@ TEST(VideoFrameMetadata, MergeMetadata) {
VideoFrameMetadata empty_metadata;
// Merging empty metadata into full metadata should be a no-op.
- full_metadata.MergeMetadataFrom(&empty_metadata);
+ full_metadata.MergeMetadataFrom(empty_metadata);
VerifyVideoFrameMetadataEquality(full_metadata, reference_metadata);
// Merging full metadata into empty metadata should fill it up.
- empty_metadata.MergeMetadataFrom(&full_metadata);
+ empty_metadata.MergeMetadataFrom(full_metadata);
VerifyVideoFrameMetadataEquality(empty_metadata, reference_metadata);
}
@@ -761,7 +765,7 @@ TEST(VideoFrameMetadata, PartialMergeMetadata) {
partial_metadata.allow_overlay = false;
// Merging partial metadata into full metadata partially override it.
- full_metadata.MergeMetadataFrom(&partial_metadata);
+ full_metadata.MergeMetadataFrom(partial_metadata);
EXPECT_EQ(partial_metadata.capture_update_rect, kTempRect);
EXPECT_EQ(partial_metadata.reference_time, kTempTicks);
diff --git a/chromium/media/base/video_types.cc b/chromium/media/base/video_types.cc
index 6814a364db1..56b39a1ed76 100644
--- a/chromium/media/base/video_types.cc
+++ b/chromium/media/base/video_types.cc
@@ -73,6 +73,8 @@ std::string VideoPixelFormatToString(VideoPixelFormat format) {
return "PIXEL_FORMAT_XB30";
case PIXEL_FORMAT_BGRA:
return "PIXEL_FORMAT_BGRA";
+ case PIXEL_FORMAT_RGBAF16:
+ return "PIXEL_FORMAT_RGBAF16";
}
NOTREACHED() << "Invalid VideoPixelFormat provided: " << format;
return "";
@@ -128,6 +130,7 @@ bool IsYuvPlanar(VideoPixelFormat format) {
case PIXEL_FORMAT_XR30:
case PIXEL_FORMAT_XB30:
case PIXEL_FORMAT_BGRA:
+ case PIXEL_FORMAT_RGBAF16:
return false;
}
return false;
@@ -166,6 +169,7 @@ bool IsOpaque(VideoPixelFormat format) {
case PIXEL_FORMAT_ARGB:
case PIXEL_FORMAT_ABGR:
case PIXEL_FORMAT_BGRA:
+ case PIXEL_FORMAT_RGBAF16:
break;
}
return false;
@@ -209,6 +213,7 @@ size_t BitDepth(VideoPixelFormat format) {
return 12;
case PIXEL_FORMAT_Y16:
case PIXEL_FORMAT_P016LE:
+ case PIXEL_FORMAT_RGBAF16:
return 16;
}
NOTREACHED();
diff --git a/chromium/media/base/video_types.h b/chromium/media/base/video_types.h
index 2205036a07c..2187f23272c 100644
--- a/chromium/media/base/video_types.h
+++ b/chromium/media/base/video_types.h
@@ -77,9 +77,11 @@ enum VideoPixelFormat {
PIXEL_FORMAT_BGRA = 32, // 32bpp ARGB (byte-order), 1 plane.
+ PIXEL_FORMAT_RGBAF16 = 33, // Half float RGBA, 1 plane.
+
// Please update UMA histogram enumeration when adding new formats here.
PIXEL_FORMAT_MAX =
- PIXEL_FORMAT_BGRA, // Must always be equal to largest entry logged.
+ PIXEL_FORMAT_RGBAF16, // Must always be equal to largest entry logged.
};
// Returns the name of a Format as a string.
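
Size arithmetic for the new format, consistent with the 120u expectation in the AllocationSize_OddSize test above (one plane of four half floats is 8 bytes per pixel, and RGBAF16 needs no even-size padding):

    // A 15-pixel odd-sized frame: 15 * 8 == 120 bytes.
    size_t bytes = media::VideoFrame::AllocationSize(
        media::PIXEL_FORMAT_RGBAF16, gfx::Size(5, 3));
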
diff --git a/chromium/media/base/video_util.cc b/chromium/media/base/video_util.cc
index 3daa1cb27d5..5f26611aa07 100644
--- a/chromium/media/base/video_util.cc
+++ b/chromium/media/base/video_util.cc
@@ -7,12 +7,25 @@
#include <cmath>
#include "base/bind.h"
+#include "base/callback_helpers.h"
#include "base/check_op.h"
+#include "base/logging.h"
#include "base/notreached.h"
#include "base/numerics/safe_conversions.h"
#include "base/numerics/safe_math.h"
+#include "gpu/GLES2/gl2extchromium.h"
+#include "gpu/command_buffer/client/raster_interface.h"
+#include "media/base/status_codes.h"
#include "media/base/video_frame.h"
+#include "media/base/video_frame_pool.h"
#include "third_party/libyuv/include/libyuv.h"
+#include "third_party/skia/include/core/SkImage.h"
+#include "third_party/skia/include/core/SkRefCnt.h"
+#include "third_party/skia/include/core/SkSurface.h"
+#include "third_party/skia/include/core/SkYUVAPixmaps.h"
+#include "third_party/skia/include/gpu/GrDirectContext.h"
+#include "third_party/skia/include/gpu/gl/GrGLTypes.h"
+#include "ui/gfx/gpu_memory_buffer.h"
namespace media {
@@ -47,6 +60,202 @@ void FillRegionOutsideVisibleRect(uint8_t* data,
}
}
+std::pair<SkColorType, GrGLenum> GetSkiaAndGlColorTypesForPlane(
+ VideoPixelFormat format,
+ size_t plane) {
+ // TODO(eugene): There is some strange channel switch during RGB readback.
+ // When the frame's pixel format matches the GL and Skia color types we get
+ // reversed channels. But why?
+ switch (format) {
+ case PIXEL_FORMAT_NV12:
+ if (plane == VideoFrame::kUVPlane)
+ return {kR8G8_unorm_SkColorType, GL_RG8_EXT};
+ if (plane == VideoFrame::kYPlane)
+ return {kAlpha_8_SkColorType, GL_R8_EXT};
+ break;
+ case PIXEL_FORMAT_XBGR:
+ if (plane == VideoFrame::kARGBPlane)
+ return {kRGBA_8888_SkColorType, GL_RGBA8_OES};
+ break;
+ case PIXEL_FORMAT_ABGR:
+ if (plane == VideoFrame::kARGBPlane)
+ return {kRGBA_8888_SkColorType, GL_RGBA8_OES};
+ break;
+ case PIXEL_FORMAT_XRGB:
+ if (plane == VideoFrame::kARGBPlane)
+ return {kBGRA_8888_SkColorType, GL_BGRA8_EXT};
+ break;
+ case PIXEL_FORMAT_ARGB:
+ if (plane == VideoFrame::kARGBPlane)
+ return {kBGRA_8888_SkColorType, GL_BGRA8_EXT};
+ break;
+ default:
+ break;
+ }
+ NOTREACHED();
+ return {kUnknown_SkColorType, 0};
+}
+
+scoped_refptr<VideoFrame> ReadbackTextureBackedFrameToMemorySyncGLES(
+ const VideoFrame& txt_frame,
+ gpu::raster::RasterInterface* ri,
+ GrDirectContext* gr_context,
+ VideoFramePool* pool) {
+ DCHECK(gr_context);
+
+ if (txt_frame.NumTextures() > 2 || txt_frame.NumTextures() < 1) {
+ DLOG(ERROR) << "Readback is not possible for this frame: "
+ << txt_frame.AsHumanReadableString();
+ return nullptr;
+ }
+
+ VideoPixelFormat result_format = txt_frame.format();
+ if (txt_frame.NumTextures() == 1 && result_format == PIXEL_FORMAT_NV12) {
+ // Even though |txt_frame| format is NV12 and it is NV12 in GPU memory,
+ // the texture is an RGB view that is produced by a shader on the fly.
+ // So currently we can only read it back as RGB.
+ result_format = PIXEL_FORMAT_ARGB;
+ }
+
+ scoped_refptr<VideoFrame> result =
+ pool
+ ? pool->CreateFrame(result_format, txt_frame.coded_size(),
+ txt_frame.visible_rect(),
+ txt_frame.natural_size(), txt_frame.timestamp())
+ : VideoFrame::CreateFrame(
+ result_format, txt_frame.coded_size(), txt_frame.visible_rect(),
+ txt_frame.natural_size(), txt_frame.timestamp());
+ result->set_color_space(txt_frame.ColorSpace());
+ result->metadata().MergeMetadataFrom(txt_frame.metadata());
+
+ size_t planes = VideoFrame::NumPlanes(result->format());
+ for (size_t plane = 0; plane < planes; plane++) {
+ const gpu::MailboxHolder& holder = txt_frame.mailbox_holder(plane);
+ if (holder.mailbox.IsZero())
+ return nullptr;
+ ri->WaitSyncTokenCHROMIUM(holder.sync_token.GetConstData());
+
+ int width = VideoFrame::Columns(plane, result->format(),
+ result->coded_size().width());
+ int height = result->rows(plane);
+
+ auto texture_id = ri->CreateAndConsumeForGpuRaster(holder.mailbox);
+ if (holder.mailbox.IsSharedImage()) {
+ ri->BeginSharedImageAccessDirectCHROMIUM(
+ texture_id, GL_SHARED_IMAGE_ACCESS_MODE_READ_CHROMIUM);
+ }
+
+ auto cleanup_fn = [](GLuint texture_id, bool shared,
+ gpu::raster::RasterInterface* ri) {
+ if (shared)
+ ri->EndSharedImageAccessDirectCHROMIUM(texture_id);
+ ri->DeleteGpuRasterTexture(texture_id);
+ };
+ base::ScopedClosureRunner cleanup(base::BindOnce(
+ cleanup_fn, texture_id, holder.mailbox.IsSharedImage(), ri));
+
+ GrGLenum texture_format;
+ SkColorType sk_color_type;
+ std::tie(sk_color_type, texture_format) =
+ GetSkiaAndGlColorTypesForPlane(result->format(), plane);
+ GrGLTextureInfo gl_texture_info;
+ gl_texture_info.fID = texture_id;
+ gl_texture_info.fTarget = holder.texture_target;
+ gl_texture_info.fFormat = texture_format;
+
+ GrBackendTexture texture(width, height, GrMipMapped::kNo, gl_texture_info);
+ auto image = SkImage::MakeFromTexture(
+ gr_context, texture, kTopLeft_GrSurfaceOrigin, sk_color_type,
+ kOpaque_SkAlphaType, nullptr /* colorSpace */);
+
+ if (!image) {
+ DLOG(ERROR) << "Can't create SkImage from texture!"
+ << " plane:" << plane;
+ return nullptr;
+ }
+
+ auto info =
+ SkImageInfo::Make(width, height, sk_color_type, kOpaque_SkAlphaType);
+ SkPixmap pixmap(info, result->data(plane), result->row_bytes(plane));
+ if (!image->readPixels(gr_context, pixmap, 0, 0,
+ SkImage::kDisallow_CachingHint)) {
+ DLOG(ERROR) << "Plane readback failed."
+ << " plane:" << plane << " width: " << width
+ << " height: " << height
+ << " minRowBytes: " << info.minRowBytes();
+ return nullptr;
+ }
+ }
+
+ return result;
+}
+
+scoped_refptr<VideoFrame> ReadbackTextureBackedFrameToMemorySyncOOP(
+ const VideoFrame& txt_frame,
+ gpu::raster::RasterInterface* ri,
+ VideoFramePool* pool) {
+ if (txt_frame.NumTextures() > 2 || txt_frame.NumTextures() < 1) {
+ DLOG(ERROR) << "Readback is not possible for this frame: "
+ << txt_frame.AsHumanReadableString();
+ return nullptr;
+ }
+
+ VideoPixelFormat result_format = txt_frame.format();
+ if (txt_frame.NumTextures() == 1 && result_format == PIXEL_FORMAT_NV12) {
+ // Even though |txt_frame| format is NV12 and it is NV12 in GPU memory,
+ // the texture is an RGB view that is produced by a shader on the fly.
+ // So currently we can only read it back as RGB.
+ result_format = PIXEL_FORMAT_ARGB;
+ }
+
+ scoped_refptr<VideoFrame> result =
+ pool
+ ? pool->CreateFrame(result_format, txt_frame.coded_size(),
+ txt_frame.visible_rect(),
+ txt_frame.natural_size(), txt_frame.timestamp())
+ : VideoFrame::CreateFrame(
+ result_format, txt_frame.coded_size(), txt_frame.visible_rect(),
+ txt_frame.natural_size(), txt_frame.timestamp());
+ result->set_color_space(txt_frame.ColorSpace());
+ result->metadata().MergeMetadataFrom(txt_frame.metadata());
+
+ size_t planes = VideoFrame::NumPlanes(result->format());
+ for (size_t plane = 0; plane < planes; plane++) {
+ const gpu::MailboxHolder& holder = txt_frame.mailbox_holder(plane);
+ if (holder.mailbox.IsZero()) {
+ DLOG(ERROR) << "Can't readback video frame with Zero texture on plane "
+ << plane;
+ return nullptr;
+ }
+ ri->WaitSyncTokenCHROMIUM(holder.sync_token.GetConstData());
+
+ int width = VideoFrame::Columns(plane, result->format(),
+ result->coded_size().width());
+ int height = result->rows(plane);
+
+ GrGLenum texture_format;
+ SkColorType sk_color_type;
+ std::tie(sk_color_type, texture_format) =
+ GetSkiaAndGlColorTypesForPlane(result->format(), plane);
+
+ auto info =
+ SkImageInfo::Make(width, height, sk_color_type, kOpaque_SkAlphaType);
+
+ ri->ReadbackImagePixels(holder.mailbox, info, info.minRowBytes(), 0, 0,
+ result->data(plane));
+ if (ri->GetError() != GL_NO_ERROR) {
+ DLOG(ERROR) << "Plane readback failed."
+ << " plane:" << plane << " width: " << width
+ << " height: " << height
+ << " minRowBytes: " << info.minRowBytes()
+ << " error: " << ri->GetError();
+ return nullptr;
+ }
+ }
+
+ return result;
+}
+
} // namespace
double GetPixelAspectRatio(const gfx::Rect& visible_rect,
@@ -399,29 +608,42 @@ gfx::Size PadToMatchAspectRatio(const gfx::Size& size,
return gfx::Size(size.width(), RoundedDivision(x, target.width()));
}
-void CopyRGBToVideoFrame(const uint8_t* source,
- int stride,
- const gfx::Rect& region_in_frame,
- VideoFrame* frame) {
- const int kY = VideoFrame::kYPlane;
- const int kU = VideoFrame::kUPlane;
- const int kV = VideoFrame::kVPlane;
- CHECK_EQ(frame->stride(kU), frame->stride(kV));
- const int uv_stride = frame->stride(kU);
-
- if (region_in_frame != gfx::Rect(frame->coded_size())) {
- LetterboxVideoFrame(frame, region_in_frame);
- }
+scoped_refptr<VideoFrame> ConvertToMemoryMappedFrame(
+ scoped_refptr<VideoFrame> video_frame) {
+ DCHECK(video_frame);
+ DCHECK(video_frame->HasGpuMemoryBuffer());
- const int y_offset =
- region_in_frame.x() + (region_in_frame.y() * frame->stride(kY));
- const int uv_offset =
- region_in_frame.x() / 2 + (region_in_frame.y() / 2 * uv_stride);
+ auto* gmb = video_frame->GetGpuMemoryBuffer();
+ if (!gmb->Map())
+ return nullptr;
- libyuv::ARGBToI420(source, stride, frame->data(kY) + y_offset,
- frame->stride(kY), frame->data(kU) + uv_offset, uv_stride,
- frame->data(kV) + uv_offset, uv_stride,
- region_in_frame.width(), region_in_frame.height());
+ const size_t num_planes = VideoFrame::NumPlanes(video_frame->format());
+ uint8_t* plane_addrs[VideoFrame::kMaxPlanes] = {};
+ for (size_t i = 0; i < num_planes; i++)
+ plane_addrs[i] = static_cast<uint8_t*>(gmb->memory(i));
+
+ auto mapped_frame = VideoFrame::WrapExternalYuvDataWithLayout(
+ video_frame->layout(), video_frame->visible_rect(),
+ video_frame->natural_size(), plane_addrs[0], plane_addrs[1],
+ plane_addrs[2], video_frame->timestamp());
+
+ if (!mapped_frame) {
+ gmb->Unmap();
+ return nullptr;
+ }
+
+ mapped_frame->set_color_space(video_frame->ColorSpace());
+ mapped_frame->metadata().MergeMetadataFrom(video_frame->metadata());
+
+ // Pass |video_frame| so that it outlives |mapped_frame| and the mapped buffer
+ // is unmapped on destruction.
+ mapped_frame->AddDestructionObserver(base::BindOnce(
+ [](scoped_refptr<VideoFrame> frame) {
+ DCHECK(frame->HasGpuMemoryBuffer());
+ frame->GetGpuMemoryBuffer()->Unmap();
+ },
+ std::move(video_frame)));
+ return mapped_frame;
}
scoped_refptr<VideoFrame> WrapAsI420VideoFrame(
@@ -429,12 +651,8 @@ scoped_refptr<VideoFrame> WrapAsI420VideoFrame(
DCHECK_EQ(VideoFrame::STORAGE_OWNED_MEMORY, frame->storage_type());
DCHECK_EQ(PIXEL_FORMAT_I420A, frame->format());
- scoped_refptr<media::VideoFrame> wrapped_frame =
- media::VideoFrame::WrapVideoFrame(frame, PIXEL_FORMAT_I420,
- frame->visible_rect(),
- frame->natural_size());
- if (!wrapped_frame)
- return nullptr;
+ scoped_refptr<VideoFrame> wrapped_frame = VideoFrame::WrapVideoFrame(
+ frame, PIXEL_FORMAT_I420, frame->visible_rect(), frame->natural_size());
return wrapped_frame;
}
@@ -487,4 +705,287 @@ bool I420CopyWithPadding(const VideoFrame& src_frame, VideoFrame* dst_frame) {
return true;
}
+scoped_refptr<VideoFrame> ReadbackTextureBackedFrameToMemorySync(
+ const VideoFrame& txt_frame,
+ gpu::raster::RasterInterface* ri,
+ GrDirectContext* gr_context,
+ VideoFramePool* pool) {
+ DCHECK(ri);
+
+ if (gr_context) {
+ return ReadbackTextureBackedFrameToMemorySyncGLES(txt_frame, ri, gr_context,
+ pool);
+ }
+ return ReadbackTextureBackedFrameToMemorySyncOOP(txt_frame, ri, pool);
+}
+
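+// A hedged usage sketch of the dispatcher above (GPU context acquisition is
+// caller-specific and elided; passing a null GrDirectContext selects the
+// OOP-raster path, and the result is null on any readback failure):
+//
+//   scoped_refptr<VideoFrame> cpu_frame =
+//       ReadbackTextureBackedFrameToMemorySync(*gpu_frame, ri, gr_context,
+//                                              /*pool=*/nullptr);
+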
+Status ConvertAndScaleFrame(const VideoFrame& src_frame,
+ VideoFrame& dst_frame,
+ std::vector<uint8_t>& tmp_buf) {
+ constexpr auto kDefaultFiltering = libyuv::kFilterBox;
+ if (!src_frame.IsMappable() || !dst_frame.IsMappable())
+ return Status(StatusCode::kUnsupportedFrameFormatError);
+
+ if ((dst_frame.format() == PIXEL_FORMAT_I420 ||
+ dst_frame.format() == PIXEL_FORMAT_NV12) &&
+ (src_frame.format() == PIXEL_FORMAT_XBGR ||
+ src_frame.format() == PIXEL_FORMAT_XRGB ||
+ src_frame.format() == PIXEL_FORMAT_ABGR ||
+ src_frame.format() == PIXEL_FORMAT_ARGB)) {
+ // libyuv's RGB to YUV methods always output BT.601.
+ dst_frame.set_color_space(gfx::ColorSpace::CreateREC601());
+
+ size_t src_stride = src_frame.stride(VideoFrame::kARGBPlane);
+ const uint8_t* src_data = src_frame.visible_data(VideoFrame::kARGBPlane);
+ if (src_frame.visible_rect() != dst_frame.visible_rect()) {
+ size_t tmp_buffer_size = VideoFrame::AllocationSize(
+ src_frame.format(), dst_frame.coded_size());
+ if (tmp_buf.size() < tmp_buffer_size)
+ tmp_buf.resize(tmp_buffer_size);
+
+ size_t stride =
+ VideoFrame::RowBytes(VideoFrame::kARGBPlane, src_frame.format(),
+ dst_frame.visible_rect().width());
+ int error = libyuv::ARGBScale(
+ src_data, src_stride, src_frame.visible_rect().width(),
+ src_frame.visible_rect().height(), tmp_buf.data(), stride,
+ dst_frame.visible_rect().width(), dst_frame.visible_rect().height(),
+ kDefaultFiltering);
+ if (error)
+ return Status(StatusCode::kInvalidArgument);
+ src_data = tmp_buf.data();
+ src_stride = stride;
+ }
+
+ if (dst_frame.format() == PIXEL_FORMAT_I420) {
+ auto convert_fn = (src_frame.format() == PIXEL_FORMAT_XBGR ||
+ src_frame.format() == PIXEL_FORMAT_ABGR)
+ ? libyuv::ABGRToI420
+ : libyuv::ARGBToI420;
+ int error = convert_fn(
+ src_data, src_stride, dst_frame.visible_data(VideoFrame::kYPlane),
+ dst_frame.stride(VideoFrame::kYPlane),
+ dst_frame.visible_data(VideoFrame::kUPlane),
+ dst_frame.stride(VideoFrame::kUPlane),
+ dst_frame.visible_data(VideoFrame::kVPlane),
+ dst_frame.stride(VideoFrame::kVPlane),
+ dst_frame.visible_rect().width(), dst_frame.visible_rect().height());
+ return error ? Status(StatusCode::kInvalidArgument) : Status();
+ }
+
+ auto convert_fn = (src_frame.format() == PIXEL_FORMAT_XBGR ||
+ src_frame.format() == PIXEL_FORMAT_ABGR)
+ ? libyuv::ABGRToNV12
+ : libyuv::ARGBToNV12;
+ int error = convert_fn(
+ src_data, src_stride, dst_frame.visible_data(VideoFrame::kYPlane),
+ dst_frame.stride(VideoFrame::kYPlane),
+ dst_frame.visible_data(VideoFrame::kUVPlane),
+ dst_frame.stride(VideoFrame::kUVPlane),
+ dst_frame.visible_rect().width(), dst_frame.visible_rect().height());
+ return error ? Status(StatusCode::kInvalidArgument) : Status();
+ }
+
+ // Converting between YUV formats doesn't change the color space.
+ dst_frame.set_color_space(src_frame.ColorSpace());
+
+ // Both frames are I420, only scaling is required.
+ if (dst_frame.format() == PIXEL_FORMAT_I420 &&
+ src_frame.format() == PIXEL_FORMAT_I420) {
+ int error = libyuv::I420Scale(
+ src_frame.visible_data(VideoFrame::kYPlane),
+ src_frame.stride(VideoFrame::kYPlane),
+ src_frame.visible_data(VideoFrame::kUPlane),
+ src_frame.stride(VideoFrame::kUPlane),
+ src_frame.visible_data(VideoFrame::kVPlane),
+ src_frame.stride(VideoFrame::kVPlane), src_frame.visible_rect().width(),
+ src_frame.visible_rect().height(),
+ dst_frame.visible_data(VideoFrame::kYPlane),
+ dst_frame.stride(VideoFrame::kYPlane),
+ dst_frame.visible_data(VideoFrame::kUPlane),
+ dst_frame.stride(VideoFrame::kUPlane),
+ dst_frame.visible_data(VideoFrame::kVPlane),
+ dst_frame.stride(VideoFrame::kVPlane), dst_frame.visible_rect().width(),
+ dst_frame.visible_rect().height(), kDefaultFiltering);
+ return error ? Status(StatusCode::kInvalidArgument) : Status();
+ }
+
+ // Both frames are NV12, only scaling is required.
+ if (dst_frame.format() == PIXEL_FORMAT_NV12 &&
+ src_frame.format() == PIXEL_FORMAT_NV12) {
+ int error = libyuv::NV12Scale(
+ src_frame.visible_data(VideoFrame::kYPlane),
+ src_frame.stride(VideoFrame::kYPlane),
+ src_frame.visible_data(VideoFrame::kUVPlane),
+ src_frame.stride(VideoFrame::kUVPlane),
+ src_frame.visible_rect().width(), src_frame.visible_rect().height(),
+ dst_frame.visible_data(VideoFrame::kYPlane),
+ dst_frame.stride(VideoFrame::kYPlane),
+ dst_frame.visible_data(VideoFrame::kUVPlane),
+ dst_frame.stride(VideoFrame::kUVPlane),
+ dst_frame.visible_rect().width(), dst_frame.visible_rect().height(),
+ kDefaultFiltering);
+ return error ? Status(StatusCode::kInvalidArgument) : Status();
+ }
+
+ if (dst_frame.format() == PIXEL_FORMAT_I420 &&
+ src_frame.format() == PIXEL_FORMAT_NV12) {
+ if (src_frame.visible_rect() == dst_frame.visible_rect()) {
+      // Both frames have the same size; only NV12-to-I420 conversion is
+ // required.
+ int error = libyuv::NV12ToI420(
+ src_frame.visible_data(VideoFrame::kYPlane),
+ src_frame.stride(VideoFrame::kYPlane),
+ src_frame.visible_data(VideoFrame::kUVPlane),
+ src_frame.stride(VideoFrame::kUVPlane),
+ dst_frame.visible_data(VideoFrame::kYPlane),
+ dst_frame.stride(VideoFrame::kYPlane),
+ dst_frame.visible_data(VideoFrame::kUPlane),
+ dst_frame.stride(VideoFrame::kUPlane),
+ dst_frame.visible_data(VideoFrame::kVPlane),
+ dst_frame.stride(VideoFrame::kVPlane),
+ dst_frame.visible_rect().width(), dst_frame.visible_rect().height());
+ return error ? Status(StatusCode::kInvalidArgument) : Status();
+ } else {
+ // Both resize and NV12-to-I420 conversion are required.
+      // First, split the UV plane into U and V planes (I420 layout).
+ const int tmp_uv_width = (src_frame.visible_rect().width() + 1) / 2;
+ const int tmp_uv_height = (src_frame.visible_rect().height() + 1) / 2;
+ size_t tmp_buffer_size = tmp_uv_width * tmp_uv_height * 2;
+ if (tmp_buf.size() < tmp_buffer_size)
+ tmp_buf.resize(tmp_buffer_size);
+
+ uint8_t* tmp_u = tmp_buf.data();
+ uint8_t* tmp_v = tmp_u + tmp_uv_width * tmp_uv_height;
+ DCHECK_EQ(tmp_buf.data() + tmp_buffer_size,
+ tmp_v + (tmp_uv_width * tmp_uv_height));
+ libyuv::SplitUVPlane(src_frame.visible_data(VideoFrame::kUVPlane),
+ src_frame.stride(VideoFrame::kUVPlane), tmp_u,
+ tmp_uv_width, tmp_v, tmp_uv_width, tmp_uv_width,
+ tmp_uv_height);
+
+ // Second, scale resulting I420 frame into the destination.
+ int error = libyuv::I420Scale(
+ src_frame.visible_data(VideoFrame::kYPlane),
+ src_frame.stride(VideoFrame::kYPlane),
+ tmp_u, // Temporary U-plane for src UV-plane.
+ tmp_uv_width,
+ tmp_v, // Temporary V-plane for src UV-plane.
+ tmp_uv_width, src_frame.visible_rect().width(),
+ src_frame.visible_rect().height(),
+ dst_frame.visible_data(VideoFrame::kYPlane),
+ dst_frame.stride(VideoFrame::kYPlane),
+ dst_frame.visible_data(VideoFrame::kUPlane),
+ dst_frame.stride(VideoFrame::kUPlane),
+ dst_frame.visible_data(VideoFrame::kVPlane),
+ dst_frame.stride(VideoFrame::kVPlane),
+ dst_frame.visible_rect().width(), dst_frame.visible_rect().height(),
+ kDefaultFiltering);
+ return error ? Status(StatusCode::kInvalidArgument) : Status();
+ }
+ }
+
+ if (dst_frame.format() == PIXEL_FORMAT_NV12 &&
+ src_frame.format() == PIXEL_FORMAT_I420) {
+ if (src_frame.visible_rect() == dst_frame.visible_rect()) {
+      // Both frames have the same size; only I420-to-NV12 conversion is
+ // required.
+ int error = libyuv::I420ToNV12(
+ src_frame.visible_data(VideoFrame::kYPlane),
+ src_frame.stride(VideoFrame::kYPlane),
+ src_frame.visible_data(VideoFrame::kUPlane),
+ src_frame.stride(VideoFrame::kUPlane),
+ src_frame.visible_data(VideoFrame::kVPlane),
+ src_frame.stride(VideoFrame::kVPlane),
+ dst_frame.visible_data(VideoFrame::kYPlane),
+ dst_frame.stride(VideoFrame::kYPlane),
+ dst_frame.visible_data(VideoFrame::kUVPlane),
+ dst_frame.stride(VideoFrame::kUVPlane),
+ dst_frame.visible_rect().width(), dst_frame.visible_rect().height());
+ return error ? Status(StatusCode::kInvalidArgument) : Status();
+ } else {
+ // Both resize and I420-to-NV12 conversion are required.
+      // First, merge the U and V planes into one UV plane (NV12 layout).
+ const int tmp_uv_width = (src_frame.visible_rect().width() + 1) / 2;
+ const int tmp_uv_height = (src_frame.visible_rect().height() + 1) / 2;
+ size_t tmp_buffer_size = tmp_uv_width * tmp_uv_height * 2;
+ if (tmp_buf.size() < tmp_buffer_size)
+ tmp_buf.resize(tmp_buffer_size);
+
+ uint8_t* tmp_uv = tmp_buf.data();
+ size_t stride_uv = tmp_uv_width * 2;
+ libyuv::MergeUVPlane(src_frame.visible_data(VideoFrame::kUPlane),
+ src_frame.stride(VideoFrame::kUPlane),
+ src_frame.visible_data(VideoFrame::kVPlane),
+ src_frame.stride(VideoFrame::kVPlane),
+ tmp_uv, // Temporary for merged UV-plane
+ stride_uv, // Temporary stride
+ tmp_uv_width, tmp_uv_height);
+
+ // Second, scale resulting NV12 frame into the destination.
+ int error = libyuv::NV12Scale(
+ src_frame.visible_data(VideoFrame::kYPlane),
+ src_frame.stride(VideoFrame::kYPlane),
+ tmp_uv, // Temporary for merged UV-plane
+ stride_uv, // Temporary stride
+ src_frame.visible_rect().width(), src_frame.visible_rect().height(),
+ dst_frame.visible_data(VideoFrame::kYPlane),
+ dst_frame.stride(VideoFrame::kYPlane),
+ dst_frame.visible_data(VideoFrame::kUVPlane),
+ dst_frame.stride(VideoFrame::kUVPlane),
+ dst_frame.visible_rect().width(), dst_frame.visible_rect().height(),
+ kDefaultFiltering);
+ return error ? Status(StatusCode::kInvalidArgument) : Status();
+ }
+ }
+
+ return Status(StatusCode::kUnsupportedFrameFormatError)
+ .WithData("src", src_frame.AsHumanReadableString())
+ .WithData("dst", dst_frame.AsHumanReadableString());
+}
+
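For reference, a minimal usage sketch of the helper above (illustrative, not part of the patch): the wrapper name, the half-size target, and the reuse of StatusCode::kInvalidArgument for allocation failure are assumptions, not Chromium API facts.

#include <utility>
#include <vector>

#include "base/memory/scoped_refptr.h"
#include "media/base/status.h"
#include "media/base/video_frame.h"
#include "media/base/video_util.h"
#include "ui/gfx/geometry/rect.h"
#include "ui/gfx/geometry/size.h"

// Downscales a mapped I420/NV12/ARGB-family frame to a half-size I420 frame.
media::Status DownscaleToI420(const media::VideoFrame& src,
                              std::vector<uint8_t>& tmp_buf,
                              scoped_refptr<media::VideoFrame>* out) {
  const gfx::Size dst_size(src.visible_rect().width() / 2,
                           src.visible_rect().height() / 2);
  // CreateFrame allocates CPU-mappable memory, which the helper requires.
  auto dst = media::VideoFrame::CreateFrame(media::PIXEL_FORMAT_I420, dst_size,
                                            gfx::Rect(dst_size), dst_size,
                                            src.timestamp());
  if (!dst)
    return media::Status(media::StatusCode::kInvalidArgument);
  // |tmp_buf| is grown on demand and can be reused across calls to avoid
  // per-frame allocations.
  media::Status status = media::ConvertAndScaleFrame(src, *dst, tmp_buf);
  if (status.is_ok())
    *out = std::move(dst);
  return status;
}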
+scoped_refptr<VideoFrame> CreateFromSkImage(sk_sp<SkImage> sk_image,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ base::TimeDelta timestamp) {
+ DCHECK(!sk_image->isTextureBacked());
+
+ // TODO(crbug.com/1073995): Add F16 support.
+ auto sk_color_type = sk_image->colorType();
+ if (sk_color_type != kRGBA_8888_SkColorType &&
+ sk_color_type != kBGRA_8888_SkColorType) {
+ return nullptr;
+ }
+
+ SkPixmap pm;
+ const bool peek_result = sk_image->peekPixels(&pm);
+ DCHECK(peek_result);
+
+ const auto format =
+ sk_image->isOpaque()
+ ? (sk_color_type == kRGBA_8888_SkColorType ? PIXEL_FORMAT_XBGR
+ : PIXEL_FORMAT_XRGB)
+ : (sk_color_type == kRGBA_8888_SkColorType ? PIXEL_FORMAT_ABGR
+ : PIXEL_FORMAT_ARGB);
+
+ auto coded_size = gfx::Size(sk_image->width(), sk_image->height());
+ auto layout = VideoFrameLayout::CreateWithStrides(
+ format, coded_size, std::vector<int32_t>(1, pm.rowBytes()));
+ if (!layout)
+ return nullptr;
+
+ auto frame = VideoFrame::WrapExternalDataWithLayout(
+ *layout, visible_rect, natural_size,
+ // TODO(crbug.com/1161304): We should be able to wrap readonly memory in
+ // a VideoFrame instead of using writable_addr() here.
+ reinterpret_cast<uint8_t*>(pm.writable_addr()), pm.computeByteSize(),
+ timestamp);
+ if (!frame)
+ return nullptr;
+
+ frame->AddDestructionObserver(base::BindOnce(
+ base::DoNothing::Once<sk_sp<SkImage>>(), std::move(sk_image)));
+ return frame;
+}
+
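A zero-copy wrapping sketch for the function above (illustrative, not part of the patch); the wrapper name is hypothetical, and the nullptr behavior mirrors the checks inside CreateFromSkImage.

#include <utility>

#include "base/memory/scoped_refptr.h"
#include "base/time/time.h"
#include "media/base/video_frame.h"
#include "media/base/video_util.h"
#include "third_party/skia/include/core/SkImage.h"
#include "ui/gfx/geometry/rect.h"

// Wraps a raster SkImage in a VideoFrame without copying pixels. Returns
// nullptr for texture-backed or non-8888 images.
scoped_refptr<media::VideoFrame> WrapRasterImage(sk_sp<SkImage> image,
                                                 base::TimeDelta timestamp) {
  const gfx::Rect visible_rect(image->width(), image->height());
  return media::CreateFromSkImage(std::move(image), visible_rect,
                                  visible_rect.size(), timestamp);
}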
} // namespace media
diff --git a/chromium/media/base/video_util.h b/chromium/media/base/video_util.h
index 42e060a25b7..3590bf00e9a 100644
--- a/chromium/media/base/video_util.h
+++ b/chromium/media/base/video_util.h
@@ -7,13 +7,26 @@
#include <stdint.h>
+#include <vector>
+
#include "base/memory/ref_counted.h"
#include "media/base/media_export.h"
+#include "media/base/status.h"
+#include "third_party/skia/include/core/SkImage.h"
#include "ui/gfx/geometry/rect.h"
#include "ui/gfx/geometry/size.h"
+class GrDirectContext;
+
+namespace gpu {
+namespace raster {
+class RasterInterface;
+} // namespace raster
+} // namespace gpu
+
namespace media {
+class VideoFramePool;
class VideoFrame;
// Computes the pixel aspect ratio of a given |visible_rect| from its
@@ -134,14 +147,26 @@ MEDIA_EXPORT gfx::Size GetRectSizeFromOrigin(const gfx::Rect& rect);
MEDIA_EXPORT gfx::Size PadToMatchAspectRatio(const gfx::Size& size,
const gfx::Size& target);
-// Copy an RGB bitmap into the specified |region_in_frame| of a YUV video frame.
-// Fills the regions outside |region_in_frame| with black.
-MEDIA_EXPORT void CopyRGBToVideoFrame(const uint8_t* source,
- int stride,
- const gfx::Rect& region_in_frame,
- VideoFrame* frame);
+// A helper function to map a GpuMemoryBuffer-based VideoFrame. This function
+// maps the given GpuMemoryBuffer of |frame| as-is without converting the
+// pixel format. The returned VideoFrame owns |frame|.
+MEDIA_EXPORT scoped_refptr<VideoFrame> ConvertToMemoryMappedFrame(
+ scoped_refptr<VideoFrame> frame);
-// Converts a frame with YV12A format into I420 by dropping alpha channel.
+// This function synchronously reads pixel data from textures associated with
+// |txt_frame| and creates a new CPU-memory-backed frame. It's needed because
+// existing video encoders can't handle texture-backed frames.
+//
+// TODO(crbug.com/1162530): Combine this function with
+// media::ConvertAndScaleFrame and put it into a new class
+// media::FrameSizeAndFormatConverter.
+MEDIA_EXPORT scoped_refptr<VideoFrame> ReadbackTextureBackedFrameToMemorySync(
+ const VideoFrame& txt_frame,
+ gpu::raster::RasterInterface* ri,
+ GrDirectContext* gr_context,
+ VideoFramePool* pool = nullptr);
+
+// Converts an I420A frame into I420 by dropping the alpha channel.
MEDIA_EXPORT scoped_refptr<VideoFrame> WrapAsI420VideoFrame(
scoped_refptr<VideoFrame> frame);
@@ -164,6 +189,23 @@ MEDIA_EXPORT scoped_refptr<VideoFrame> WrapAsI420VideoFrame(
MEDIA_EXPORT bool I420CopyWithPadding(const VideoFrame& src_frame,
VideoFrame* dst_frame) WARN_UNUSED_RESULT;
+// Copy pixel data from |src_frame| to |dst_frame| applying scaling and pixel
+// format conversion as needed. Both frames need to be mappable and have either
+// I420 or NV12 pixel format.
+MEDIA_EXPORT Status ConvertAndScaleFrame(const VideoFrame& src_frame,
+ VideoFrame& dst_frame,
+ std::vector<uint8_t>& tmp_buf)
+ WARN_UNUSED_RESULT;
+
+// Backs a VideoFrame with an SkImage. The created frame takes a ref on the
+// provided SkImage to make this operation zero-copy. Only works with
+// CPU-backed images.
+MEDIA_EXPORT scoped_refptr<VideoFrame> CreateFromSkImage(
+ sk_sp<SkImage> sk_image,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ base::TimeDelta timestamp);
+
} // namespace media
#endif // MEDIA_BASE_VIDEO_UTIL_H_
diff --git a/chromium/media/base/video_util_unittest.cc b/chromium/media/base/video_util_unittest.cc
index 79af565e5b0..c6ae0616ddc 100644
--- a/chromium/media/base/video_util_unittest.cc
+++ b/chromium/media/base/video_util_unittest.cc
@@ -604,4 +604,30 @@ TEST_F(VideoUtilTest, I420CopyWithPadding) {
EXPECT_TRUE(VerifyCopyWithPadding(*src_frame, *dst_frame));
}
+TEST_F(VideoUtilTest, WrapAsI420VideoFrame) {
+ gfx::Size size(640, 480);
+ scoped_refptr<VideoFrame> src_frame =
+ VideoFrame::CreateFrame(PIXEL_FORMAT_I420A, size, gfx::Rect(size), size,
+ base::TimeDelta::FromDays(1));
+
+ scoped_refptr<VideoFrame> dst_frame = WrapAsI420VideoFrame(src_frame);
+ EXPECT_EQ(dst_frame->format(), PIXEL_FORMAT_I420);
+ EXPECT_EQ(dst_frame->timestamp(), src_frame->timestamp());
+ EXPECT_EQ(dst_frame->coded_size(), src_frame->coded_size());
+ EXPECT_EQ(dst_frame->visible_rect(), src_frame->visible_rect());
+ EXPECT_EQ(dst_frame->natural_size(), src_frame->natural_size());
+
+ std::vector<size_t> planes = {VideoFrame::kYPlane, VideoFrame::kUPlane,
+ VideoFrame::kVPlane};
+ for (auto plane : planes)
+ EXPECT_EQ(dst_frame->data(plane), src_frame->data(plane));
+
+ // Check that memory for planes is not released upon destruction of the
+ // original frame pointer (new frame holds a reference). This check relies on
+ // ASAN.
+ src_frame.reset();
+ for (auto plane : planes)
+ memset(dst_frame->data(plane), 1, dst_frame->stride(plane));
+}
+
} // namespace media
diff --git a/chromium/media/base/win/BUILD.gn b/chromium/media/base/win/BUILD.gn
index b4a7cb764a1..b78090ac9c1 100644
--- a/chromium/media/base/win/BUILD.gn
+++ b/chromium/media/base/win/BUILD.gn
@@ -6,6 +6,7 @@ assert(is_win)
config("delay_load_mf") {
ldflags = [
+ "/DELAYLOAD:d3d11.dll",
"/DELAYLOAD:mf.dll",
"/DELAYLOAD:mfplat.dll",
"/DELAYLOAD:mfreadwrite.dll",
@@ -15,6 +16,8 @@ config("delay_load_mf") {
component("media_foundation_util") {
defines = [ "MF_INITIALIZER_IMPLEMENTATION" ]
sources = [
+ "dxgi_device_manager.cc",
+ "dxgi_device_manager.h",
"mf_helpers.cc",
"mf_helpers.h",
"mf_initializer.cc",
@@ -31,9 +34,11 @@ component("media_foundation_util") {
"//media:shared_memory_support",
]
libs = [
+ "d3d11.lib",
"mf.lib",
"mfplat.lib",
"mfreadwrite.lib",
+ "dxguid.lib",
]
# MediaFoundation is not available on Windows N, so must be delay loaded.
diff --git a/chromium/media/base/win/dxgi_device_manager.cc b/chromium/media/base/win/dxgi_device_manager.cc
new file mode 100644
index 00000000000..75d102e4376
--- /dev/null
+++ b/chromium/media/base/win/dxgi_device_manager.cc
@@ -0,0 +1,143 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/win/dxgi_device_manager.h"
+
+#include <mfcaptureengine.h>
+#include <mfreadwrite.h>
+
+#include "base/win/windows_version.h"
+#include "media/base/win/mf_helpers.h"
+
+namespace media {
+
+DXGIDeviceScopedHandle::DXGIDeviceScopedHandle(
+ IMFDXGIDeviceManager* device_manager)
+ : device_manager_(device_manager) {}
+
+DXGIDeviceScopedHandle::~DXGIDeviceScopedHandle() {
+ if (device_handle_ == INVALID_HANDLE_VALUE) {
+ return;
+ }
+
+ HRESULT hr = device_manager_->CloseDeviceHandle(device_handle_);
+ LOG_IF(ERROR, FAILED(hr)) << "Failed to close device handle";
+ device_handle_ = INVALID_HANDLE_VALUE;
+}
+
+HRESULT DXGIDeviceScopedHandle::LockDevice(REFIID riid, void** device_out) {
+ HRESULT hr = S_OK;
+ if (device_handle_ == INVALID_HANDLE_VALUE) {
+ hr = device_manager_->OpenDeviceHandle(&device_handle_);
+ RETURN_ON_HR_FAILURE(
+ hr, "Failed to open device handle on MF DXGI device manager", hr);
+ }
+  // See
+  // https://docs.microsoft.com/en-us/windows/win32/api/mfobjects/nf-mfobjects-imfdxgidevicemanager-lockdevice
+  // for details of the LockDevice call.
+ hr = device_manager_->LockDevice(device_handle_, riid, device_out,
+ /*block=*/FALSE);
+ return hr;
+}
+
+Microsoft::WRL::ComPtr<ID3D11Device> DXGIDeviceScopedHandle::GetDevice() {
+ HRESULT hr = S_OK;
+ if (device_handle_ == INVALID_HANDLE_VALUE) {
+ hr = device_manager_->OpenDeviceHandle(&device_handle_);
+ RETURN_ON_HR_FAILURE(
+ hr, "Failed to open device handle on MF DXGI device manager", nullptr);
+ }
+ Microsoft::WRL::ComPtr<ID3D11Device> device;
+ hr = device_manager_->GetVideoService(device_handle_, IID_PPV_ARGS(&device));
+ RETURN_ON_HR_FAILURE(hr, "Failed to get device from MF DXGI device manager",
+ nullptr);
+ return device;
+}
+
+scoped_refptr<DXGIDeviceManager> DXGIDeviceManager::Create() {
+ if (base::win::GetVersion() < base::win::Version::WIN8 ||
+ (!::GetModuleHandle(L"mfplat.dll") && !::LoadLibrary(L"mfplat.dll"))) {
+    // The MF DXGI device manager is only supported on Win8 or later.
+    // Additionally, it is not supported when mfplat.dll isn't available.
+ DLOG(ERROR)
+ << "MF DXGI Device Manager not supported on current version of Windows";
+ return nullptr;
+ }
+ Microsoft::WRL::ComPtr<IMFDXGIDeviceManager> mf_dxgi_device_manager;
+ UINT d3d_device_reset_token = 0;
+ HRESULT hr = MFCreateDXGIDeviceManager(&d3d_device_reset_token,
+ &mf_dxgi_device_manager);
+ RETURN_ON_HR_FAILURE(hr, "Failed to create MF DXGI device manager", nullptr);
+ auto dxgi_device_manager = base::WrapRefCounted(new DXGIDeviceManager(
+ std::move(mf_dxgi_device_manager), d3d_device_reset_token));
+ if (dxgi_device_manager && FAILED(dxgi_device_manager->ResetDevice())) {
+ // If setting a device failed, ensure that an empty scoped_refptr is
+ // returned as the dxgi_device_manager is not usable without a device.
+ return nullptr;
+ }
+ return dxgi_device_manager;
+}
+
+DXGIDeviceManager::DXGIDeviceManager(
+ Microsoft::WRL::ComPtr<IMFDXGIDeviceManager> mf_dxgi_device_manager,
+ UINT d3d_device_reset_token)
+ : mf_dxgi_device_manager_(std::move(mf_dxgi_device_manager)),
+ d3d_device_reset_token_(d3d_device_reset_token) {}
+
+DXGIDeviceManager::~DXGIDeviceManager() = default;
+
+HRESULT DXGIDeviceManager::ResetDevice() {
+ Microsoft::WRL::ComPtr<ID3D11Device> d3d_device;
+ constexpr uint32_t kDeviceFlags =
+ D3D11_CREATE_DEVICE_VIDEO_SUPPORT | D3D11_CREATE_DEVICE_BGRA_SUPPORT;
+ HRESULT hr = D3D11CreateDevice(nullptr, D3D_DRIVER_TYPE_HARDWARE, nullptr,
+ kDeviceFlags, nullptr, 0, D3D11_SDK_VERSION,
+ &d3d_device, nullptr, nullptr);
+ RETURN_ON_HR_FAILURE(hr, "D3D11 device creation failed", hr);
+ hr = mf_dxgi_device_manager_->ResetDevice(d3d_device.Get(),
+ d3d_device_reset_token_);
+ RETURN_ON_HR_FAILURE(hr, "Failed to reset device on MF DXGI device manager",
+ hr);
+ return S_OK;
+}
+
+HRESULT DXGIDeviceManager::RegisterInCaptureEngineAttributes(
+ IMFAttributes* attributes) {
+ HRESULT hr = attributes->SetUnknown(MF_CAPTURE_ENGINE_D3D_MANAGER,
+ mf_dxgi_device_manager_.Get());
+ RETURN_ON_HR_FAILURE(
+ hr, "Failed to set MF_CAPTURE_ENGINE_D3D_MANAGER attribute", hr);
+ return S_OK;
+}
+
+HRESULT DXGIDeviceManager::RegisterInSourceReaderAttributes(
+ IMFAttributes* attributes) {
+ HRESULT hr = attributes->SetUnknown(MF_SOURCE_READER_D3D_MANAGER,
+ mf_dxgi_device_manager_.Get());
+ RETURN_ON_HR_FAILURE(
+ hr, "Failed to set MF_SOURCE_READER_D3D_MANAGER attribute", hr);
+ return S_OK;
+}
+
+HRESULT DXGIDeviceManager::RegisterWithMediaSource(
+ Microsoft::WRL::ComPtr<IMFMediaSource> media_source) {
+ Microsoft::WRL::ComPtr<IMFMediaSourceEx> source_ext;
+ HRESULT hr = media_source.As(&source_ext);
+ RETURN_ON_HR_FAILURE(hr, "Failed to query IMFMediaSourceEx", hr);
+ hr = source_ext->SetD3DManager(mf_dxgi_device_manager_.Get());
+ RETURN_ON_HR_FAILURE(hr, "Failed to set D3D manager", hr);
+ return S_OK;
+}
+
+Microsoft::WRL::ComPtr<ID3D11Device> DXGIDeviceManager::GetDevice() {
+ DXGIDeviceScopedHandle device_handle(mf_dxgi_device_manager_.Get());
+ return device_handle.GetDevice();
+}
+
+Microsoft::WRL::ComPtr<IMFDXGIDeviceManager>
+DXGIDeviceManager::GetMFDXGIDeviceManager() {
+ return mf_dxgi_device_manager_;
+}
+
+} // namespace media
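A usage sketch for the new manager (illustrative, not part of the patch): attaching it to Media Foundation source reader attributes so the reader can decode into D3D11 textures. AttachDXGIManager is a hypothetical helper name.

#include <mfidl.h>

#include "base/memory/scoped_refptr.h"
#include "media/base/win/dxgi_device_manager.h"

HRESULT AttachDXGIManager(IMFAttributes* reader_attributes) {
  scoped_refptr<media::DXGIDeviceManager> manager =
      media::DXGIDeviceManager::Create();
  if (!manager)
    return E_FAIL;  // Pre-Win8, or mfplat.dll is unavailable.
  // Sets MF_SOURCE_READER_D3D_MANAGER on |reader_attributes|.
  return manager->RegisterInSourceReaderAttributes(reader_attributes);
}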
diff --git a/chromium/media/base/win/dxgi_device_manager.h b/chromium/media/base/win/dxgi_device_manager.h
new file mode 100644
index 00000000000..d489b3160c2
--- /dev/null
+++ b/chromium/media/base/win/dxgi_device_manager.h
@@ -0,0 +1,76 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_WIN_DXGI_DEVICE_MANAGER_H_
+#define MEDIA_BASE_WIN_DXGI_DEVICE_MANAGER_H_
+
+#include <d3d11.h>
+#include <mfidl.h>
+#include <wrl/client.h>
+
+#include "base/memory/ref_counted.h"
+#include "base/memory/scoped_refptr.h"
+#include "media/base/win/mf_initializer_export.h"
+
+namespace media {
+
+// Wraps the usage of a device handle obtained from |device_manager|.
+class MF_INITIALIZER_EXPORT DXGIDeviceScopedHandle {
+ public:
+ explicit DXGIDeviceScopedHandle(IMFDXGIDeviceManager* device_manager);
+ DXGIDeviceScopedHandle(const DXGIDeviceScopedHandle&) = delete;
+ DXGIDeviceScopedHandle& operator=(const DXGIDeviceScopedHandle&) = delete;
+ ~DXGIDeviceScopedHandle();
+
+ HRESULT LockDevice(REFIID riid, void** device_out);
+ Microsoft::WRL::ComPtr<ID3D11Device> GetDevice();
+
+ private:
+ Microsoft::WRL::ComPtr<IMFDXGIDeviceManager> device_manager_;
+
+ HANDLE device_handle_ = INVALID_HANDLE_VALUE;
+};
+
+class MF_INITIALIZER_EXPORT DXGIDeviceManager
+ : public base::RefCounted<DXGIDeviceManager> {
+ public:
+ DXGIDeviceManager(const DXGIDeviceManager&) = delete;
+ DXGIDeviceManager& operator=(const DXGIDeviceManager&) = delete;
+
+ // Returns a DXGIDeviceManager with associated D3D device set, or nullptr on
+ // failure.
+ static scoped_refptr<DXGIDeviceManager> Create();
+
+  // Associates a new D3D device with the DXGI device manager.
+ virtual HRESULT ResetDevice();
+
+ // Registers this manager in capture engine attributes.
+ HRESULT RegisterInCaptureEngineAttributes(IMFAttributes* attributes);
+
+ // Registers this manager in source reader attributes.
+ HRESULT RegisterInSourceReaderAttributes(IMFAttributes* attributes);
+
+  // Registers this manager with a media source.
+ HRESULT RegisterWithMediaSource(
+ Microsoft::WRL::ComPtr<IMFMediaSource> media_source);
+
+  // Directly accesses the D3D device stored in the DXGI device manager.
+ virtual Microsoft::WRL::ComPtr<ID3D11Device> GetDevice();
+
+ Microsoft::WRL::ComPtr<IMFDXGIDeviceManager> GetMFDXGIDeviceManager();
+
+ protected:
+ friend class base::RefCounted<DXGIDeviceManager>;
+ DXGIDeviceManager(
+ Microsoft::WRL::ComPtr<IMFDXGIDeviceManager> mf_dxgi_device_manager,
+ UINT d3d_device_reset_token);
+ virtual ~DXGIDeviceManager();
+
+ Microsoft::WRL::ComPtr<IMFDXGIDeviceManager> mf_dxgi_device_manager_;
+ UINT d3d_device_reset_token_ = 0;
+};
+
+} // namespace media
+
+#endif // MEDIA_BASE_WIN_DXGI_DEVICE_MANAGER_H_
diff --git a/chromium/media/base/win/dxgi_device_scope_handle_unittest.cc b/chromium/media/base/win/dxgi_device_scope_handle_unittest.cc
index 5058d1bfd31..be16636583e 100644
--- a/chromium/media/base/win/dxgi_device_scope_handle_unittest.cc
+++ b/chromium/media/base/win/dxgi_device_scope_handle_unittest.cc
@@ -7,7 +7,7 @@
#include "base/win/windows_version.h"
#include "media/base/test_helpers.h"
-#include "media/base/win/mf_helpers.h"
+#include "media/base/win/dxgi_device_manager.h"
#include "media/base/win/mf_initializer.h"
namespace media {
@@ -66,7 +66,7 @@ class DXGIDeviceScopedHandleTest : public testing::Test {
const bool test_supported_;
};
-TEST_F(DXGIDeviceScopedHandleTest, UseDXGIDeviceScopedHandle) {
+TEST_F(DXGIDeviceScopedHandleTest, LockDevice) {
if (!test_supported_)
return;
@@ -88,4 +88,24 @@ TEST_F(DXGIDeviceScopedHandleTest, UseDXGIDeviceScopedHandle) {
ASSERT_HRESULT_SUCCEEDED(device_handle_3.LockDevice(IID_PPV_ARGS(&device3)));
}
+TEST_F(DXGIDeviceScopedHandleTest, GetDevice) {
+ if (!test_supported_)
+ return;
+
+ {
+ // Create DXGIDeviceScopedHandle in an inner scope.
+ DXGIDeviceScopedHandle device_handle_1(dxgi_device_man_.Get());
+ }
+ {
+ // Create DXGIDeviceScopedHandle in an inner scope with GetDevice call.
+ DXGIDeviceScopedHandle device_handle_2(dxgi_device_man_.Get());
+ ComPtr<ID3D11Device> device2 = device_handle_2.GetDevice();
+ EXPECT_NE(device2, nullptr);
+ }
+ // Use the device in an outer scope.
+ DXGIDeviceScopedHandle device_handle_3(dxgi_device_man_.Get());
+ ComPtr<ID3D11Device> device3 = device_handle_3.GetDevice();
+ EXPECT_NE(device3, nullptr);
+}
+
} // namespace media
\ No newline at end of file
diff --git a/chromium/media/base/win/hresult_status_helper.cc b/chromium/media/base/win/hresult_status_helper.cc
index e3d6a43ebc0..fe141bb5ac9 100644
--- a/chromium/media/base/win/hresult_status_helper.cc
+++ b/chromium/media/base/win/hresult_status_helper.cc
@@ -5,6 +5,7 @@
#include "media/base/win/hresult_status_helper.h"
#include "base/logging.h"
+#include "base/strings/string_util.h"
namespace media {
@@ -15,8 +16,12 @@ Status HresultToStatus(HRESULT hresult,
if (SUCCEEDED(hresult))
return OkStatus();
+ std::string sys_err = logging::SystemErrorCodeToString(hresult);
+ if (!base::IsStringUTF8AllowingNoncharacters(sys_err))
+ sys_err = "System error string is invalid";
+
return Status(code, message == nullptr ? "HRESULT" : message, location)
- .WithData("value", logging::SystemErrorCodeToString(hresult));
+ .WithData("value", sys_err);
}
} // namespace media
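A small sketch of the sanitized helper in use (illustrative, not part of the patch; it assumes the trailing code and location parameters keep their defaults). On localized systems FormatMessage can yield non-UTF-8 bytes, which would otherwise end up in serialized Status data.

#include "media/base/status.h"
#include "media/base/win/hresult_status_helper.h"

media::Status OpenDeviceResult(HRESULT hr) {
  if (SUCCEEDED(hr))
    return media::OkStatus();
  // The "value" datum attached by the helper is now guaranteed to be UTF-8.
  return media::HresultToStatus(hr, "Opening the capture device failed");
}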
diff --git a/chromium/media/base/win/mf_helpers.cc b/chromium/media/base/win/mf_helpers.cc
index 99a1f83970a..37cf1183709 100644
--- a/chromium/media/base/win/mf_helpers.cc
+++ b/chromium/media/base/win/mf_helpers.cc
@@ -4,6 +4,8 @@
#include "media/base/win/mf_helpers.h"
+#include <d3d11.h>
+
#include "base/check_op.h"
namespace media {
@@ -51,34 +53,6 @@ MediaBufferScopedPointer::~MediaBufferScopedPointer() {
CHECK(SUCCEEDED(hr));
}
-DXGIDeviceScopedHandle::DXGIDeviceScopedHandle(
- IMFDXGIDeviceManager* device_manager)
- : device_manager_(device_manager) {}
-
-DXGIDeviceScopedHandle::~DXGIDeviceScopedHandle() {
- if (device_handle_ != INVALID_HANDLE_VALUE) {
- HRESULT hr = device_manager_->CloseDeviceHandle(device_handle_);
- CHECK(SUCCEEDED(hr));
- device_handle_ = INVALID_HANDLE_VALUE;
- }
-}
-
-HRESULT DXGIDeviceScopedHandle::LockDevice(REFIID riid, void** device_out) {
- HRESULT hr;
- if (device_handle_ == INVALID_HANDLE_VALUE) {
- hr = device_manager_->OpenDeviceHandle(&device_handle_);
- if (FAILED(hr)) {
- return hr;
- }
- }
- // see
- // https://docs.microsoft.com/en-us/windows/win32/api/mfobjects/nf-mfobjects-imfdxgidevicemanager-lockdevice
- // for details of LockDevice call.
- hr = device_manager_->LockDevice(device_handle_, riid, device_out,
- /*block=*/FALSE);
- return hr;
-}
-
HRESULT CopyCoTaskMemWideString(LPCWSTR in_string, LPWSTR* out_string) {
if (!in_string || !out_string) {
return E_INVALIDARG;
@@ -94,4 +68,10 @@ HRESULT CopyCoTaskMemWideString(LPCWSTR in_string, LPWSTR* out_string) {
return S_OK;
}
+HRESULT SetDebugName(ID3D11DeviceChild* d3d11_device_child,
+ const char* debug_string) {
+ return d3d11_device_child->SetPrivateData(WKPDID_D3DDebugObjectName,
+ strlen(debug_string), debug_string);
+}
+
} // namespace media
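A brief sketch of the new SetDebugName helper (illustrative, not part of the patch); the texture and its debug name are hypothetical.

#include <d3d11.h>

#include "base/logging.h"
#include "media/base/win/mf_helpers.h"

// ID3D11Texture2D derives from ID3D11DeviceChild, so it can be passed
// directly. Failure is non-fatal and only affects debugging output.
void NameCaptureTexture(ID3D11Texture2D* texture) {
  HRESULT hr = media::SetDebugName(texture, "VideoCaptureTexture");
  DLOG_IF(WARNING, FAILED(hr)) << "SetDebugName failed: " << hr;
}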
diff --git a/chromium/media/base/win/mf_helpers.h b/chromium/media/base/win/mf_helpers.h
index aa56e5e437e..6bebf0310f5 100644
--- a/chromium/media/base/win/mf_helpers.h
+++ b/chromium/media/base/win/mf_helpers.h
@@ -13,6 +13,8 @@
#include "base/macros.h"
#include "media/base/win/mf_initializer_export.h"
+struct ID3D11DeviceChild;
+
namespace media {
// Helper function to print HRESULT to std::string.
@@ -62,6 +64,7 @@ class MF_INITIALIZER_EXPORT MediaBufferScopedPointer {
uint8_t* get() { return buffer_; }
DWORD current_length() const { return current_length_; }
+ DWORD max_length() const { return max_length_; }
private:
Microsoft::WRL::ComPtr<IMFMediaBuffer> media_buffer_;
@@ -72,24 +75,14 @@ class MF_INITIALIZER_EXPORT MediaBufferScopedPointer {
DISALLOW_COPY_AND_ASSIGN(MediaBufferScopedPointer);
};
-// Wrap around the usage of device handle from |device_manager|.
-class MF_INITIALIZER_EXPORT DXGIDeviceScopedHandle {
- public:
- explicit DXGIDeviceScopedHandle(IMFDXGIDeviceManager* device_manager);
- ~DXGIDeviceScopedHandle();
-
- HRESULT LockDevice(REFIID riid, void** device_out);
-
- private:
- Microsoft::WRL::ComPtr<IMFDXGIDeviceManager> device_manager_;
-
- HANDLE device_handle_ = INVALID_HANDLE_VALUE;
-};
-
// Copies |in_string| to |out_string| that is allocated with CoTaskMemAlloc().
MF_INITIALIZER_EXPORT HRESULT CopyCoTaskMemWideString(LPCWSTR in_string,
LPWSTR* out_string);
+// Sets the debug name of a D3D11 resource for use with ETW debugging tools.
+// D3D11 copies the string passed to this function, so the caller does not
+// need to keep |debug_string| alive.
+MF_INITIALIZER_EXPORT HRESULT
+SetDebugName(ID3D11DeviceChild* d3d11_device_child, const char* debug_string);
} // namespace media
#endif // MEDIA_BASE_WIN_MF_HELPERS_H_
diff --git a/chromium/media/blink/cache_util.cc b/chromium/media/blink/cache_util.cc
index 97618b30204..2e8c16c4cf3 100644
--- a/chromium/media/blink/cache_util.cc
+++ b/chromium/media/blink/cache_util.cc
@@ -69,8 +69,8 @@ uint32_t GetReasonsForUncacheability(const WebURLResponse& response) {
if (cache_control_header.substr(0, kMaxAgePrefixLen) == kMaxAgePrefix) {
int64_t max_age_seconds;
base::StringToInt64(
- base::StringPiece(cache_control_header.begin() + kMaxAgePrefixLen,
- cache_control_header.end()),
+ base::MakeStringPiece(cache_control_header.begin() + kMaxAgePrefixLen,
+ cache_control_header.end()),
&max_age_seconds);
if (TimeDelta::FromSeconds(max_age_seconds) < kMinimumAgeForUsefulness)
reasons |= kShortMaxAge;
@@ -105,8 +105,8 @@ base::TimeDelta GetCacheValidUntil(const WebURLResponse& response) {
if (cache_control_header.substr(0, kMaxAgePrefixLen) == kMaxAgePrefix) {
int64_t max_age_seconds;
base::StringToInt64(
- base::StringPiece(cache_control_header.begin() + kMaxAgePrefixLen,
- cache_control_header.end()),
+ base::MakeStringPiece(cache_control_header.begin() + kMaxAgePrefixLen,
+ cache_control_header.end()),
&max_age_seconds);
ret = std::min(ret, TimeDelta::FromSeconds(max_age_seconds));
diff --git a/chromium/media/blink/key_system_config_selector.cc b/chromium/media/blink/key_system_config_selector.cc
index 07d9b12b843..2618c8a1280 100644
--- a/chromium/media/blink/key_system_config_selector.cc
+++ b/chromium/media/blink/key_system_config_selector.cc
@@ -19,7 +19,9 @@
#include "media/base/logging_override_if_enabled.h"
#include "media/base/media_permission.h"
#include "media/base/mime_util.h"
+#include "media/media_buildflags.h"
#include "third_party/blink/public/platform/url_conversion.h"
+#include "third_party/blink/public/platform/web_content_settings_client.h"
#include "third_party/blink/public/platform/web_media_key_system_configuration.h"
#include "third_party/blink/public/platform/web_string.h"
#include "third_party/blink/public/platform/web_vector.h"
@@ -160,6 +162,32 @@ bool IsSupportedMediaType(const std::string& container_mime_type,
std::vector<std::string> codec_vector;
SplitCodecs(codecs, &codec_vector);
+#if BUILDFLAG(ENABLE_PLATFORM_HEVC) && BUILDFLAG(USE_CHROMEOS_PROTECTED_MEDIA)
+ // EME HEVC is supported on CrOS under these build flags, but it is not
+ // supported for clear playback. Remove the HEVC codec strings to avoid asking
+ // IsSupported*MediaFormat() about HEVC. EME support for HEVC profiles
+ // is described via KeySystemProperties::GetSupportedCodecs().
+ // TODO(1156282): Decouple the rest of clear vs EME codec support.
+ if (base::ToLowerASCII(container_mime_type) == "video/mp4" &&
+ !codec_vector.empty()) {
+ auto it = codec_vector.begin();
+ while (it != codec_vector.end()) {
+ VideoCodecProfile profile;
+ uint8_t level_idc;
+ if (ParseHEVCCodecId(*it, &profile, &level_idc))
+ codec_vector.erase(it);
+        it = codec_vector.erase(it);
+ ++it;
+ }
+
+ // Avoid calling IsSupported*MediaFormat() with an empty vector. For
+ // "video/mp4", this will return MaybeSupported, which we would otherwise
+ // consider "false" below.
+ if (codec_vector.empty())
+ return true;
+ }
+#endif
+
// AesDecryptor decrypts the stream in the demuxer before it reaches the
// decoder so check whether the media format is supported when clear.
SupportsType support_result =
@@ -227,6 +255,9 @@ class KeySystemConfigSelector::ConfigState {
return !are_hw_secure_codecs_required_;
case EmeConfigRule::HW_SECURE_CODECS_REQUIRED:
return !are_hw_secure_codecs_not_allowed_;
+ case EmeConfigRule::IDENTIFIER_AND_HW_SECURE_CODECS_REQUIRED:
+ return !is_identifier_not_allowed_ && IsPermissionPossible() &&
+ !are_hw_secure_codecs_not_allowed_;
case EmeConfigRule::SUPPORTED:
return true;
}
@@ -266,6 +297,10 @@ class KeySystemConfigSelector::ConfigState {
case EmeConfigRule::HW_SECURE_CODECS_REQUIRED:
are_hw_secure_codecs_required_ = true;
return;
+ case EmeConfigRule::IDENTIFIER_AND_HW_SECURE_CODECS_REQUIRED:
+ is_identifier_required_ = true;
+ are_hw_secure_codecs_required_ = true;
+ return;
case EmeConfigRule::SUPPORTED:
return;
}
@@ -302,9 +337,11 @@ class KeySystemConfigSelector::ConfigState {
KeySystemConfigSelector::KeySystemConfigSelector(
KeySystems* key_systems,
- MediaPermission* media_permission)
+ MediaPermission* media_permission,
+ blink::WebContentSettingsClient* content_settings_client)
: key_systems_(key_systems),
media_permission_(media_permission),
+ content_settings_client_(content_settings_client),
is_supported_media_type_cb_(base::BindRepeating(&IsSupportedMediaType)) {
DCHECK(key_systems_);
DCHECK(media_permission_);
@@ -603,8 +640,14 @@ KeySystemConfigSelector::GetSupportedConfiguration(
// 9. If persistent state requirement is "optional" and persisting state is
// not allowed according to restrictions, set persistent state requirement
// to "not-allowed".
+ const bool local_storage_allowed =
+ !content_settings_client_ ||
+ content_settings_client_->AllowStorageAccessSync(
+ blink::WebContentSettingsClient::StorageType::kLocalStorage);
EmeFeatureSupport persistent_state_support =
- key_systems_->GetPersistentStateSupport(key_system);
+ local_storage_allowed
+ ? key_systems_->GetPersistentStateSupport(key_system)
+ : EmeFeatureSupport::NOT_SUPPORTED;
if (persistent_state == EmeFeatureRequirement::kOptional) {
if (persistent_state_support == EmeFeatureSupport::INVALID ||
persistent_state_support == EmeFeatureSupport::NOT_SUPPORTED) {
diff --git a/chromium/media/blink/key_system_config_selector.h b/chromium/media/blink/key_system_config_selector.h
index 92fc3353e5b..3124bad021f 100644
--- a/chromium/media/blink/key_system_config_selector.h
+++ b/chromium/media/blink/key_system_config_selector.h
@@ -20,6 +20,7 @@
namespace blink {
+class WebContentSettingsClient;
struct WebMediaKeySystemConfiguration;
class WebString;
@@ -33,8 +34,10 @@ class MediaPermission;
class MEDIA_BLINK_EXPORT KeySystemConfigSelector {
public:
- KeySystemConfigSelector(KeySystems* key_systems,
- MediaPermission* media_permission);
+ KeySystemConfigSelector(
+ KeySystems* key_systems,
+ MediaPermission* media_permission,
+ blink::WebContentSettingsClient* content_settings_client);
~KeySystemConfigSelector();
@@ -110,7 +113,13 @@ class MEDIA_BLINK_EXPORT KeySystemConfigSelector {
encryption_scheme);
KeySystems* const key_systems_;
+
+ // These objects are unowned but their pointers are always valid. They have
+ // the same lifetime as RenderFrameImpl, and |this| also has the same lifetime
+ // as RenderFrameImpl. RenderFrameImpl owns content::MediaFactory which owns
+ // WebEncryptedMediaClientImpl which owns |this|.
MediaPermission* media_permission_;
+ blink::WebContentSettingsClient* content_settings_client_;
// A callback used to check whether a media type is supported. Only set in
// tests. If null the implementation will check the support using MimeUtil.
diff --git a/chromium/media/blink/key_system_config_selector_unittest.cc b/chromium/media/blink/key_system_config_selector_unittest.cc
index f0670a23e64..b93b4e644ec 100644
--- a/chromium/media/blink/key_system_config_selector_unittest.cc
+++ b/chromium/media/blink/key_system_config_selector_unittest.cc
@@ -15,6 +15,7 @@
#include "media/base/mime_util.h"
#include "media/blink/key_system_config_selector.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "third_party/blink/public/platform/web_content_settings_client.h"
#include "third_party/blink/public/platform/web_encrypted_media_types.h"
#include "third_party/blink/public/platform/web_media_key_system_configuration.h"
#include "third_party/blink/public/platform/web_string.h"
@@ -43,6 +44,8 @@ const char kRecommendIdentifierRobustness[] = "recommend_identifier";
const char kRequireIdentifierRobustness[] = "require_identifier";
const char kDisallowHwSecureCodecRobustness[] = "disallow_hw_secure_codec";
const char kRequireHwSecureCodecRobustness[] = "require_hw_secure_codec";
+const char kRequireHwSecureCodecAndIdentifierRobustness[] =
+ "require_hw_secure_codec_and_identifier";
const char kUnsupportedRobustness[] = "unsupported";
// Test container mime types. Supported types are prefixed with audio/video so
@@ -293,6 +296,8 @@ class FakeKeySystems : public KeySystems {
return EmeConfigRule::HW_SECURE_CODECS_NOT_ALLOWED;
if (requested_robustness == kRequireHwSecureCodecRobustness)
return EmeConfigRule::HW_SECURE_CODECS_REQUIRED;
+ if (requested_robustness == kRequireHwSecureCodecAndIdentifierRobustness)
+ return EmeConfigRule::IDENTIFIER_AND_HW_SECURE_CODECS_REQUIRED;
if (requested_robustness == kUnsupportedRobustness)
return EmeConfigRule::NOT_SUPPORTED;
@@ -360,20 +365,36 @@ class FakeMediaPermission : public MediaPermission {
bool is_encrypted_media_enabled = true;
};
+class FakeWebContentSettingsClient : public blink::WebContentSettingsClient {
+ public:
+ bool AllowStorageAccessSync(StorageType storage_type) override {
+ if (storage_type ==
+ blink::WebContentSettingsClient::StorageType::kLocalStorage) {
+ return local_storage_allowed_;
+ }
+ return true;
+ }
+
+ bool local_storage_allowed_ = true;
+};
+
} // namespace
class KeySystemConfigSelectorTest : public testing::Test {
public:
KeySystemConfigSelectorTest()
- : key_systems_(new FakeKeySystems()),
- media_permission_(new FakeMediaPermission()) {}
+ : key_systems_(std::make_unique<FakeKeySystems>()),
+ media_permission_(std::make_unique<FakeMediaPermission>()),
+ content_settings_client_(
+ std::make_unique<FakeWebContentSettingsClient>()) {}
void SelectConfig() {
media_permission_->requests = 0;
succeeded_count_ = 0;
not_supported_count_ = 0;
- KeySystemConfigSelector key_system_config_selector(key_systems_.get(),
- media_permission_.get());
+ KeySystemConfigSelector key_system_config_selector(
+ key_systems_.get(), media_permission_.get(),
+ content_settings_client_.get());
key_system_config_selector.SetIsSupportedMediaTypeCBForTesting(
base::BindRepeating(&IsSupportedMediaType));
@@ -430,6 +451,7 @@ class KeySystemConfigSelectorTest : public testing::Test {
std::unique_ptr<FakeKeySystems> key_systems_;
std::unique_ptr<FakeMediaPermission> media_permission_;
+ std::unique_ptr<FakeWebContentSettingsClient> content_settings_client_;
// Held values for the call to SelectConfig().
WebString key_system_ = WebString::FromUTF8(kSupportedKeySystem);
@@ -707,6 +729,17 @@ TEST_F(KeySystemConfigSelectorTest, PersistentState_Blocked) {
SelectConfigReturnsError();
}
+TEST_F(KeySystemConfigSelectorTest, PersistentState_BlockedByContentSettings) {
+ key_systems_->persistent_state = EmeFeatureSupport::ALWAYS_ENABLED;
+
+ auto config = UsableConfiguration();
+ config.persistent_state = MediaKeysRequirement::kRequired;
+ configs_.push_back(config);
+
+ content_settings_client_->local_storage_allowed_ = false;
+ SelectConfigReturnsError();
+}
+
// --- sessionTypes ---
TEST_F(KeySystemConfigSelectorTest, SessionTypes_Empty) {
@@ -1337,6 +1370,83 @@ TEST_F(KeySystemConfigSelectorTest,
SelectConfigReturnsError();
}
+// --- HW Secure and Identifier Robustness ---
+
+TEST_F(KeySystemConfigSelectorTest,
+ HwSecureCodecAndIdentifier_IncompatibleCodecAndRobustness) {
+ media_permission_->is_granted = true;
+ key_systems_->distinctive_identifier = EmeFeatureSupport::REQUESTABLE;
+
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(1);
+ video_capabilities[0].content_type = "a";
+ video_capabilities[0].mime_type = kSupportedVideoContainer;
+ video_capabilities[0].codecs = kDisallowHwSecureCodec;
+ video_capabilities[0].robustness =
+ kRequireHwSecureCodecAndIdentifierRobustness;
+
+ auto config = EmptyConfiguration();
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigReturnsError();
+}
+
+TEST_F(KeySystemConfigSelectorTest,
+ HwSecureCodecAndIdentifier_IncompatibleCapabilities) {
+ media_permission_->is_granted = true;
+ key_systems_->distinctive_identifier = EmeFeatureSupport::REQUESTABLE;
+
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(2);
+ video_capabilities[0].content_type = "require_hw_secure_codec";
+ video_capabilities[0].mime_type = kSupportedVideoContainer;
+ video_capabilities[0].codecs = kSupportedVideoCodec;
+ video_capabilities[0].robustness =
+ kRequireHwSecureCodecAndIdentifierRobustness;
+ video_capabilities[1].content_type = "disallow_hw_secure_codec";
+ video_capabilities[1].mime_type = kSupportedVideoContainer;
+ video_capabilities[1].codecs = kDisallowHwSecureCodec;
+ video_capabilities[1].robustness = kUnsupportedRobustness;
+
+ auto config = EmptyConfiguration();
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigRequestsPermissionAndReturnsConfig();
+ EXPECT_EQ(MediaKeysRequirement::kRequired, config_.distinctive_identifier);
+ ASSERT_EQ(1u, config_.video_capabilities.size());
+ EXPECT_EQ("require_hw_secure_codec",
+ config_.video_capabilities[0].content_type);
+ EXPECT_TRUE(cdm_config_.use_hw_secure_codecs);
+}
+
+TEST_F(KeySystemConfigSelectorTest,
+ HwSecureCodecAndIdentifier_UnsupportedCapabilityNotAffectingRules) {
+ media_permission_->is_granted = true;
+ key_systems_->distinctive_identifier = EmeFeatureSupport::REQUESTABLE;
+
+ std::vector<WebMediaKeySystemMediaCapability> video_capabilities(2);
+ video_capabilities[0].content_type = "unsupported_robustness";
+ video_capabilities[0].mime_type = kSupportedVideoContainer;
+ video_capabilities[0].codecs = kDisallowHwSecureCodec;
+ video_capabilities[0].robustness = kUnsupportedRobustness;
+ video_capabilities[1].content_type = "require_hw_secure_codec";
+ video_capabilities[1].mime_type = kSupportedVideoContainer;
+ video_capabilities[1].codecs = kRequireHwSecureCodec;
+ video_capabilities[1].robustness =
+ kRequireHwSecureCodecAndIdentifierRobustness;
+
+ auto config = EmptyConfiguration();
+ config.video_capabilities = video_capabilities;
+ configs_.push_back(config);
+
+ SelectConfigRequestsPermissionAndReturnsConfig();
+ EXPECT_EQ(MediaKeysRequirement::kRequired, config_.distinctive_identifier);
+ ASSERT_EQ(1u, config_.video_capabilities.size());
+ EXPECT_EQ("require_hw_secure_codec",
+ config_.video_capabilities[0].content_type);
+ EXPECT_TRUE(cdm_config_.use_hw_secure_codecs);
+}
+
// --- audioCapabilities ---
// These are handled by the same code as |videoCapabilities|, so only minimal
// additional testing is done.
diff --git a/chromium/media/blink/multibuffer_data_source_unittest.cc b/chromium/media/blink/multibuffer_data_source_unittest.cc
index 45d0fbc481e..a044e21a8db 100644
--- a/chromium/media/blink/multibuffer_data_source_unittest.cc
+++ b/chromium/media/blink/multibuffer_data_source_unittest.cc
@@ -229,7 +229,7 @@ class MultibufferDataSourceTest : public testing::Test {
GURL gurl(url);
data_source_.reset(new MockMultibufferDataSource(
base::ThreadTaskRunnerHandle::Get(),
- url_index_->GetByUrl(gurl, cors_mode), &host_));
+ url_index_->GetByUrl(gurl, cors_mode, UrlIndex::kNormal), &host_));
data_source_->SetPreload(preload_);
response_generator_.reset(new TestResponseGenerator(gurl, file_size));
@@ -991,7 +991,9 @@ TEST_F(MultibufferDataSourceTest, Http_ShareData) {
StrictMock<MockBufferedDataSourceHost> host2;
MockMultibufferDataSource source2(
base::ThreadTaskRunnerHandle::Get(),
- url_index_->GetByUrl(GURL(kHttpUrl), UrlData::CORS_UNSPECIFIED), &host2);
+ url_index_->GetByUrl(GURL(kHttpUrl), UrlData::CORS_UNSPECIFIED,
+ UrlIndex::kNormal),
+ &host2);
source2.SetPreload(preload_);
EXPECT_CALL(*this, OnInitialize(true));
@@ -1356,7 +1358,8 @@ TEST_F(MultibufferDataSourceTest, SeekPastEOF) {
GURL gurl(kHttpUrl);
data_source_.reset(new MockMultibufferDataSource(
base::ThreadTaskRunnerHandle::Get(),
- url_index_->GetByUrl(gurl, UrlData::CORS_UNSPECIFIED), &host_));
+ url_index_->GetByUrl(gurl, UrlData::CORS_UNSPECIFIED, UrlIndex::kNormal),
+ &host_));
data_source_->SetPreload(preload_);
response_generator_.reset(new TestResponseGenerator(gurl, kDataSize + 1));
@@ -1732,7 +1735,8 @@ TEST_F(MultibufferDataSourceTest, Http_CheckLoadingTransition) {
GURL gurl(kHttpUrl);
data_source_.reset(new MockMultibufferDataSource(
base::ThreadTaskRunnerHandle::Get(),
- url_index_->GetByUrl(gurl, UrlData::CORS_UNSPECIFIED), &host_));
+ url_index_->GetByUrl(gurl, UrlData::CORS_UNSPECIFIED, UrlIndex::kNormal),
+ &host_));
data_source_->SetPreload(preload_);
response_generator_.reset(new TestResponseGenerator(gurl, kDataSize * 1));
diff --git a/chromium/media/blink/resource_multibuffer_data_provider.cc b/chromium/media/blink/resource_multibuffer_data_provider.cc
index 4d252387b5e..43a8c40458d 100644
--- a/chromium/media/blink/resource_multibuffer_data_provider.cc
+++ b/chromium/media/blink/resource_multibuffer_data_provider.cc
@@ -236,8 +236,8 @@ void ResourceMultiBufferDataProvider::DidReceiveResponse(
scoped_refptr<UrlData> destination_url_data(url_data_);
if (!redirects_to_.is_empty()) {
- destination_url_data =
- url_data_->url_index()->GetByUrl(redirects_to_, cors_mode_);
+ destination_url_data = url_data_->url_index()->GetByUrl(
+ redirects_to_, cors_mode_, UrlIndex::kNormal);
redirects_to_ = GURL();
}
diff --git a/chromium/media/blink/resource_multibuffer_data_provider_unittest.cc b/chromium/media/blink/resource_multibuffer_data_provider_unittest.cc
index 998ac9ee579..151a463e28a 100644
--- a/chromium/media/blink/resource_multibuffer_data_provider_unittest.cc
+++ b/chromium/media/blink/resource_multibuffer_data_provider_unittest.cc
@@ -88,7 +88,8 @@ class ResourceMultiBufferDataProviderTest : public testing::Test {
void Initialize(const char* url, int first_position) {
want_frfr = false;
gurl_ = GURL(url);
- url_data_ = url_index_->GetByUrl(gurl_, UrlData::CORS_UNSPECIFIED);
+ url_data_ = url_index_->GetByUrl(gurl_, UrlData::CORS_UNSPECIFIED,
+ UrlIndex::kNormal);
url_data_->set_etag(kEtag);
DCHECK(url_data_);
url_data_->OnRedirect(
diff --git a/chromium/media/blink/url_index.cc b/chromium/media/blink/url_index.cc
index f07d975ed83..701a0977457 100644
--- a/chromium/media/blink/url_index.cc
+++ b/chromium/media/blink/url_index.cc
@@ -240,10 +240,13 @@ void UrlIndex::RemoveUrlData(const scoped_refptr<UrlData>& url_data) {
}
scoped_refptr<UrlData> UrlIndex::GetByUrl(const GURL& gurl,
- UrlData::CorsMode cors_mode) {
- auto i = indexed_data_.find(std::make_pair(gurl, cors_mode));
- if (i != indexed_data_.end() && i->second->Valid()) {
- return i->second;
+ UrlData::CorsMode cors_mode,
+ CacheMode cache_mode) {
+ if (cache_mode == kNormal) {
+ auto i = indexed_data_.find(std::make_pair(gurl, cors_mode));
+ if (i != indexed_data_.end() && i->second->Valid()) {
+ return i->second;
+ }
}
return NewUrlData(gurl, cors_mode);
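A sketch of what the new CacheMode parameter buys callers (illustrative, not part of the patch; the helper and the cache-bypass scenario are assumptions): kCacheDisabled skips the index lookup, so a fresh UrlData is always returned instead of a previously shared entry.

#include "base/memory/scoped_refptr.h"
#include "media/blink/url_index.h"
#include "url/gurl.h"

scoped_refptr<media::UrlData> GetFreshUrlData(media::UrlIndex* index,
                                              const GURL& url,
                                              bool cache_disabled) {
  return index->GetByUrl(url, media::UrlData::CORS_UNSPECIFIED,
                         cache_disabled ? media::UrlIndex::kCacheDisabled
                                        : media::UrlIndex::kNormal);
}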
diff --git a/chromium/media/blink/url_index.h b/chromium/media/blink/url_index.h
index d07f331f0c4..80eef9a7e8a 100644
--- a/chromium/media/blink/url_index.h
+++ b/chromium/media/blink/url_index.h
@@ -227,6 +227,8 @@ class MEDIA_BLINK_EXPORT UrlIndex {
UrlIndex(ResourceFetchContext* fetch_context, int block_shift);
virtual ~UrlIndex();
+ enum CacheMode { kNormal, kCacheDisabled };
+
// Look up an UrlData in the index and return it. If none is found,
// create a new one. Note that newly created UrlData entries are NOT
// added to the index, instead you must call TryInsert on them after
@@ -235,7 +237,8 @@ class MEDIA_BLINK_EXPORT UrlIndex {
// Because the returned UrlData has a raw reference to |this|, it must be
// released before |this| is destroyed.
scoped_refptr<UrlData> GetByUrl(const GURL& gurl,
- UrlData::CorsMode cors_mode);
+ UrlData::CorsMode cors_mode,
+ CacheMode cache_mode);
// Add the given UrlData to the index if possible. If a better UrlData
// is already present in the index, return it instead. (If not, we just
diff --git a/chromium/media/blink/url_index_unittest.cc b/chromium/media/blink/url_index_unittest.cc
index 018a23a02d8..0f3bcfa16fd 100644
--- a/chromium/media/blink/url_index_unittest.cc
+++ b/chromium/media/blink/url_index_unittest.cc
@@ -22,7 +22,8 @@ class UrlIndexTest : public testing::Test {
scoped_refptr<UrlData> GetByUrl(const GURL& gurl,
UrlData::CorsMode cors_mode) {
- scoped_refptr<UrlData> ret = url_index_.GetByUrl(gurl, cors_mode);
+ scoped_refptr<UrlData> ret =
+ url_index_.GetByUrl(gurl, cors_mode, UrlIndex::kNormal);
EXPECT_EQ(ret->url(), gurl);
EXPECT_EQ(ret->cors_mode(), cors_mode);
return ret;
@@ -155,4 +156,19 @@ TEST_F(UrlIndexTest, TryInsert) {
EXPECT_EQ(b, GetByUrl(url, UrlData::CORS_UNSPECIFIED));
}
+TEST_F(UrlIndexTest, GetByUrlCacheDisabled) {
+ GURL url("http://foo.bar.com");
+ UrlData::CorsMode cors = UrlData::CORS_UNSPECIFIED;
+
+ scoped_refptr<UrlData> url_data =
+ url_index_.GetByUrl(url, cors, UrlIndex::kNormal);
+ url_data->Use();
+ url_data->set_range_supported();
+ EXPECT_TRUE(url_data->Valid());
+ url_index_.TryInsert(url_data);
+
+ EXPECT_EQ(url_data, url_index_.GetByUrl(url, cors, UrlIndex::kNormal));
+ EXPECT_NE(url_data, url_index_.GetByUrl(url, cors, UrlIndex::kCacheDisabled));
+}
+
} // namespace media
diff --git a/chromium/media/blink/video_frame_compositor.cc b/chromium/media/blink/video_frame_compositor.cc
index ffb1eedfc60..e621acd3bea 100644
--- a/chromium/media/blink/video_frame_compositor.cc
+++ b/chromium/media/blink/video_frame_compositor.cc
@@ -5,12 +5,12 @@
#include "media/blink/video_frame_compositor.h"
#include "base/bind.h"
+#include "base/bind_post_task.h"
#include "base/callback_helpers.h"
#include "base/synchronization/waitable_event.h"
#include "base/time/default_tick_clock.h"
#include "base/trace_event/trace_event.h"
#include "components/viz/common/frame_sinks/begin_frame_args.h"
-#include "media/base/bind_to_current_loop.h"
#include "media/base/media_switches.h"
#include "media/base/video_frame.h"
#include "media/blink/webmediaplayer_params.h"
@@ -46,7 +46,7 @@ VideoFrameCompositor::VideoFrameCompositor(
task_runner_->PostTask(
FROM_HERE, base::BindOnce(&VideoFrameCompositor::InitializeSubmitter,
weak_ptr_factory_.GetWeakPtr()));
- update_submission_state_callback_ = BindToLoop(
+ update_submission_state_callback_ = base::BindPostTask(
task_runner_,
base::BindRepeating(&VideoFrameCompositor::SetIsSurfaceVisible,
weak_ptr_factory_.GetWeakPtr()));
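For context, a minimal sketch of base::BindPostTask, the base/ replacement for the removed media::BindToLoop (illustrative, not part of the patch): invoking the returned callback from any sequence posts the wrapped callback to |task_runner| instead of running it inline.

#include <utility>

#include "base/bind_post_task.h"
#include "base/callback.h"
#include "base/memory/scoped_refptr.h"
#include "base/sequenced_task_runner.h"

base::RepeatingCallback<void(bool)> MakeHoppingCallback(
    scoped_refptr<base::SequencedTaskRunner> task_runner,
    base::RepeatingCallback<void(bool)> target) {
  // Each Run() on the returned callback posts |target| with the bound
  // arguments to |task_runner|.
  return base::BindPostTask(std::move(task_runner), std::move(target));
}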
diff --git a/chromium/media/blink/video_frame_compositor.h b/chromium/media/blink/video_frame_compositor.h
index c6c3e0976e0..20fde8c10ba 100644
--- a/chromium/media/blink/video_frame_compositor.h
+++ b/chromium/media/blink/video_frame_compositor.h
@@ -255,7 +255,7 @@ class MEDIA_BLINK_EXPORT VideoFrameCompositor : public VideoRendererSink,
OnNewProcessedFrameCB new_processed_frame_cb_;
cc::UpdateSubmissionStateCB update_submission_state_callback_;
- // Callback used to satisfy video.rAF requests.
+ // Callback used to satisfy video.rVFC requests.
// Set on the main thread, fired on the compositor thread.
OnNewFramePresentedCB new_presented_frame_cb_ GUARDED_BY(current_frame_lock_);
diff --git a/chromium/media/blink/watch_time_reporter_unittest.cc b/chromium/media/blink/watch_time_reporter_unittest.cc
index 7626b30c7fd..b6f132ec4e9 100644
--- a/chromium/media/blink/watch_time_reporter_unittest.cc
+++ b/chromium/media/blink/watch_time_reporter_unittest.cc
@@ -265,7 +265,9 @@ class WatchTimeReporterTest
void AcquirePlaybackEventsRecorder(
mojo::PendingReceiver<mojom::PlaybackEventsRecorder> receiver)
override {}
- void Initialize(bool is_mse, mojom::MediaURLScheme url_scheme) override {}
+ void Initialize(bool is_mse,
+ mojom::MediaURLScheme url_scheme,
+ mojom::MediaStreamType media_stream_type) override {}
void OnError(PipelineStatus status) override {}
void SetIsEME() override {}
void SetTimeToMetadata(base::TimeDelta elapsed) override {}
@@ -277,8 +279,8 @@ class WatchTimeReporterTest
void SetHaveEnough() override {}
void SetHasAudio(AudioCodec audio_codec) override {}
void SetHasVideo(VideoCodec video_codec) override {}
- void SetVideoPipelineInfo(const PipelineDecoderInfo& info) override {}
- void SetAudioPipelineInfo(const PipelineDecoderInfo& info) override {}
+ void SetVideoPipelineInfo(const VideoDecoderInfo& info) override {}
+ void SetAudioPipelineInfo(const AudioDecoderInfo& info) override {}
private:
WatchTimeReporterTest* parent_;
@@ -310,7 +312,8 @@ class WatchTimeReporterTest
wtr_ = std::make_unique<blink::WatchTimeReporter>(
mojom::PlaybackProperties::New(has_audio_, has_video_, false, false,
- is_mse, is_encrypted, false),
+ is_mse, is_encrypted, false,
+ mojom::MediaStreamType::kNone),
initial_video_size,
base::BindRepeating(&WatchTimeReporterTest::GetCurrentMediaTime,
base::Unretained(this)),
@@ -1106,8 +1109,8 @@ TEST_P(WatchTimeReporterTest, WatchTimeReporterSecondaryProperties) {
has_video_ ? kCodecH264 : kUnknownVideoCodec,
has_audio_ ? AudioCodecProfile::kXHE_AAC : AudioCodecProfile::kUnknown,
has_video_ ? H264PROFILE_MAIN : VIDEO_CODEC_PROFILE_UNKNOWN,
- has_audio_ ? "FirstAudioDecoder" : "",
- has_video_ ? "FirstVideoDecoder" : "",
+ has_audio_ ? AudioDecoderType::kMojo : AudioDecoderType::kUnknown,
+ has_video_ ? VideoDecoderType::kMojo : VideoDecoderType::kUnknown,
has_audio_ ? EncryptionScheme::kCenc : EncryptionScheme::kUnencrypted,
has_video_ ? EncryptionScheme::kCbcs : EncryptionScheme::kUnencrypted,
has_video_ ? gfx::Size(800, 600) : gfx::Size());
@@ -1143,7 +1146,8 @@ TEST_P(WatchTimeReporterTest, SecondaryProperties_SizeIncreased) {
.Times((has_audio_ && has_video_) ? 3 : 2);
wtr_->UpdateSecondaryProperties(mojom::SecondaryPlaybackProperties::New(
kUnknownAudioCodec, kUnknownVideoCodec, AudioCodecProfile::kUnknown,
- VIDEO_CODEC_PROFILE_UNKNOWN, "", "", EncryptionScheme::kUnencrypted,
+ VIDEO_CODEC_PROFILE_UNKNOWN, AudioDecoderType::kUnknown,
+ VideoDecoderType::kUnknown, EncryptionScheme::kUnencrypted,
EncryptionScheme::kUnencrypted, kSizeJustRight));
EXPECT_TRUE(IsMonitoring());
@@ -1165,7 +1169,8 @@ TEST_P(WatchTimeReporterTest, SecondaryProperties_SizeDecreased) {
.Times((has_audio_ && has_video_) ? 3 : 2);
wtr_->UpdateSecondaryProperties(mojom::SecondaryPlaybackProperties::New(
kUnknownAudioCodec, kUnknownVideoCodec, AudioCodecProfile::kUnknown,
- VIDEO_CODEC_PROFILE_UNKNOWN, "", "", EncryptionScheme::kUnencrypted,
+ VIDEO_CODEC_PROFILE_UNKNOWN, AudioDecoderType::kUnknown,
+ VideoDecoderType::kUnknown, EncryptionScheme::kUnencrypted,
EncryptionScheme::kUnencrypted, kSizeTooSmall));
EXPECT_WATCH_TIME_FINALIZED();
CycleReportingTimer();
diff --git a/chromium/media/blink/webcontentdecryptionmodule_impl.cc b/chromium/media/blink/webcontentdecryptionmodule_impl.cc
index b267d9d7d0c..b1f01ee9074 100644
--- a/chromium/media/blink/webcontentdecryptionmodule_impl.cc
+++ b/chromium/media/blink/webcontentdecryptionmodule_impl.cc
@@ -28,6 +28,7 @@ namespace media {
namespace {
+const char kCreateSessionSessionTypeUMAName[] = "CreateSession.SessionType";
const char kSetServerCertificateUMAName[] = "SetServerCertificate";
const char kGetStatusForPolicyUMAName[] = "GetStatusForPolicy";
@@ -121,6 +122,9 @@ WebContentDecryptionModuleImpl::~WebContentDecryptionModuleImpl() = default;
std::unique_ptr<blink::WebContentDecryptionModuleSession>
WebContentDecryptionModuleImpl::CreateSession(
blink::WebEncryptedMediaSessionType session_type) {
+ base::UmaHistogramEnumeration(
+ adapter_->GetKeySystemUMAPrefix() + kCreateSessionSessionTypeUMAName,
+ session_type);
return adapter_->CreateSession(session_type);
}
diff --git a/chromium/media/blink/webencryptedmediaclient_impl.cc b/chromium/media/blink/webencryptedmediaclient_impl.cc
index 64562475873..dbbbbf97507 100644
--- a/chromium/media/blink/webencryptedmediaclient_impl.cc
+++ b/chromium/media/blink/webencryptedmediaclient_impl.cc
@@ -100,9 +100,12 @@ class WebEncryptedMediaClientImpl::Reporter {
WebEncryptedMediaClientImpl::WebEncryptedMediaClientImpl(
CdmFactory* cdm_factory,
- MediaPermission* media_permission)
+ MediaPermission* media_permission,
+ blink::WebContentSettingsClient* content_settings_client)
: cdm_factory_(cdm_factory),
- key_system_config_selector_(KeySystems::GetInstance(), media_permission) {
+ key_system_config_selector_(KeySystems::GetInstance(),
+ media_permission,
+ content_settings_client) {
DCHECK(cdm_factory_);
}
diff --git a/chromium/media/blink/webencryptedmediaclient_impl.h b/chromium/media/blink/webencryptedmediaclient_impl.h
index f6d8e8d09db..564fcf1aae1 100644
--- a/chromium/media/blink/webencryptedmediaclient_impl.h
+++ b/chromium/media/blink/webencryptedmediaclient_impl.h
@@ -19,6 +19,7 @@
namespace blink {
class WebContentDecryptionModuleResult;
+class WebContentSettingsClient;
struct WebMediaKeySystemConfiguration;
class WebSecurityOrigin;
@@ -33,8 +34,10 @@ class MediaPermission;
class MEDIA_BLINK_EXPORT WebEncryptedMediaClientImpl
: public blink::WebEncryptedMediaClient {
public:
- WebEncryptedMediaClientImpl(CdmFactory* cdm_factory,
- MediaPermission* media_permission);
+ WebEncryptedMediaClientImpl(
+ CdmFactory* cdm_factory,
+ MediaPermission* media_permission,
+ blink::WebContentSettingsClient* content_settings_client);
~WebEncryptedMediaClientImpl() override;
// WebEncryptedMediaClient implementation.
diff --git a/chromium/media/blink/webmediaplayer_impl.cc b/chromium/media/blink/webmediaplayer_impl.cc
index a08e0f048e2..7fa36146a48 100644
--- a/chromium/media/blink/webmediaplayer_impl.cc
+++ b/chromium/media/blink/webmediaplayer_impl.cc
@@ -65,7 +65,6 @@
#include "third_party/blink/public/platform/web_media_player_encrypted_media_client.h"
#include "third_party/blink/public/platform/web_media_player_source.h"
#include "third_party/blink/public/platform/web_media_source.h"
-#include "third_party/blink/public/platform/web_rect.h"
#include "third_party/blink/public/platform/web_runtime_features.h"
#include "third_party/blink/public/platform/web_security_origin.h"
#include "third_party/blink/public/platform/web_size.h"
@@ -86,7 +85,6 @@
#endif
using blink::WebMediaPlayer;
-using blink::WebRect;
using blink::WebString;
using gpu::gles2::GLES2Interface;
@@ -430,12 +428,12 @@ WebMediaPlayerImpl::WebMediaPlayerImpl(
auto on_audio_source_provider_set_client_callback = base::BindOnce(
[](base::WeakPtr<WebMediaPlayerImpl> self,
- blink::WebMediaPlayerDelegate* const delegate, int delegate_id) {
+ blink::WebMediaPlayerClient* const client) {
if (!self)
return;
- delegate->DidDisableAudioOutputSinkChanges(self->delegate_id_);
+ client->DidDisableAudioOutputSinkChanges();
},
- weak_this_, delegate_, delegate_id_);
+ weak_this_, client_);
// TODO(xhwang): When we use an external Renderer, many methods won't work,
// e.g. GetCurrentFrameFromCompositor(). See http://crbug.com/434861
@@ -553,7 +551,8 @@ WebMediaPlayerImpl::~WebMediaPlayerImpl() {
WebMediaPlayer::LoadTiming WebMediaPlayerImpl::Load(
LoadType load_type,
const blink::WebMediaPlayerSource& source,
- CorsMode cors_mode) {
+ CorsMode cors_mode,
+ bool is_cache_disabled) {
// Only URL or MSE blob URL is supported.
DCHECK(source.IsURL());
blink::WebURL url = source.GetAsURL();
@@ -563,10 +562,11 @@ WebMediaPlayer::LoadTiming WebMediaPlayerImpl::Load(
bool is_deferred = false;
if (defer_load_cb_) {
- is_deferred = defer_load_cb_.Run(base::BindOnce(
- &WebMediaPlayerImpl::DoLoad, weak_this_, load_type, url, cors_mode));
+ is_deferred = defer_load_cb_.Run(
+ base::BindOnce(&WebMediaPlayerImpl::DoLoad, weak_this_, load_type, url,
+ cors_mode, is_cache_disabled));
} else {
- DoLoad(load_type, url, cors_mode);
+ DoLoad(load_type, url, cors_mode, is_cache_disabled);
}
return is_deferred ? LoadTiming::kDeferred : LoadTiming::kImmediate;
@@ -668,8 +668,6 @@ void WebMediaPlayerImpl::BecameDominantVisibleContent(bool is_dominant) {
void WebMediaPlayerImpl::SetIsEffectivelyFullscreen(
blink::WebFullscreenVideoStatus fullscreen_video_status) {
- delegate_->SetIsEffectivelyFullscreen(delegate_id_, fullscreen_video_status);
-
if (power_status_helper_) {
// We don't care about pip, so anything that's "not fullscreen" is good
// enough for us.
@@ -714,7 +712,7 @@ void WebMediaPlayerImpl::OnDisplayTypeChanged(blink::DisplayType display_type) {
// Resumes playback if it was paused when hidden.
if (paused_when_hidden_) {
paused_when_hidden_ = false;
- OnPlay();
+ client_->ResumePlayback();
}
break;
}
@@ -722,7 +720,8 @@ void WebMediaPlayerImpl::OnDisplayTypeChanged(blink::DisplayType display_type) {
void WebMediaPlayerImpl::DoLoad(LoadType load_type,
const blink::WebURL& url,
- CorsMode cors_mode) {
+ CorsMode cors_mode,
+ bool is_cache_disabled) {
TRACE_EVENT1("media", "WebMediaPlayerImpl::DoLoad", "id", media_log_->id());
DVLOG(1) << __func__;
DCHECK(main_task_runner_->BelongsToCurrentThread());
@@ -781,7 +780,8 @@ void WebMediaPlayerImpl::DoLoad(LoadType load_type,
media_metrics_provider_->Initialize(
load_type == kLoadTypeMediaSource,
load_type == kLoadTypeURL ? blink::GetMediaURLScheme(loaded_url_)
- : mojom::MediaURLScheme::kUnknown);
+ : mojom::MediaURLScheme::kUnknown,
+ mojom::MediaStreamType::kNone);
if (demuxer_override_ || load_type == kLoadTypeMediaSource) {
// If a demuxer override was specified or a Media Source pipeline will be
@@ -818,8 +818,9 @@ void WebMediaPlayerImpl::DoLoad(LoadType load_type,
return;
}
- auto url_data =
- url_index_->GetByUrl(url, static_cast<UrlData::CorsMode>(cors_mode));
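+  // Map the caller's cache-disabled bit onto UrlIndex's cache mode so the
+  // underlying fetch can bypass the media cache.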
+ auto url_data = url_index_->GetByUrl(
+ url, static_cast<UrlData::CorsMode>(cors_mode),
+ is_cache_disabled ? UrlIndex::kCacheDisabled : UrlIndex::kNormal);
mb_data_source_ = new MultibufferDataSource(
main_task_runner_, std::move(url_data), media_log_.get(),
buffered_data_source_host_.get(),
@@ -975,7 +976,7 @@ void WebMediaPlayerImpl::DoSeek(base::TimeDelta time, bool time_updated) {
// Send the seek updates only when the seek pipeline hasn't started,
// OnPipelineSeeked is not called yet.
if (!seeking_)
- delegate_->DidSeek(delegate_id_);
+ client_->DidSeek();
// TODO(sandersd): Move |seeking_| to PipelineController.
// TODO(sandersd): Do we want to reset the idle timer here?
@@ -1017,13 +1018,16 @@ void WebMediaPlayerImpl::SetVolume(double volume) {
pipeline_controller_->SetVolume(volume_ * volume_multiplier_);
if (watch_time_reporter_)
watch_time_reporter_->OnVolumeChange(volume);
- delegate_->DidPlayerMutedStatusChange(delegate_id_, volume == 0.0);
+ client_->DidPlayerMutedStatusChange(volume == 0.0);
if (delegate_has_audio_ != HasUnmutedAudio()) {
delegate_has_audio_ = HasUnmutedAudio();
- delegate_->DidMediaMetadataChange(
- delegate_id_, delegate_has_audio_, HasVideo(),
- DurationToMediaContentType(GetPipelineMediaDuration()));
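+    // Compute the content type once and fan the change out to both the
+    // client and the delegate.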
+ MediaContentType content_type =
+ DurationToMediaContentType(GetPipelineMediaDuration());
+ client_->DidMediaMetadataChange(delegate_has_audio_, HasVideo(),
+ content_type);
+ delegate_->DidMediaMetadataChange(delegate_id_, delegate_has_audio_,
+ HasVideo(), content_type);
}
// The play state is updated because the player might have left the autoplay
@@ -1047,6 +1051,11 @@ void WebMediaPlayerImpl::SetPreservesPitch(bool preserves_pitch) {
pipeline_controller_->SetPreservesPitch(preserves_pitch);
}
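+// Plumbs the autoplay-initiated flag straight through to the pipeline; no
+// state is kept on this class.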
+void WebMediaPlayerImpl::SetAutoplayInitiated(bool autoplay_initiated) {
+ DCHECK(main_task_runner_->BelongsToCurrentThread());
+ pipeline_controller_->SetAutoplayInitiated(autoplay_initiated);
+}
+
void WebMediaPlayerImpl::OnRequestPictureInPicture() {
if (!surface_layer_for_video_enabled_)
ActivateSurfaceLayerForVideo();
@@ -1055,7 +1064,7 @@ void WebMediaPlayerImpl::OnRequestPictureInPicture() {
DCHECK(bridge_->GetSurfaceId().is_valid());
}
-void WebMediaPlayerImpl::SetSinkId(
+bool WebMediaPlayerImpl::SetSinkId(
const blink::WebString& sink_id,
blink::WebSetSinkIdCompleteCallback completion_callback) {
DCHECK(main_task_runner_->BelongsToCurrentThread());
@@ -1067,7 +1076,7 @@ void WebMediaPlayerImpl::SetSinkId(
media_task_runner_->PostTask(
FROM_HERE, base::BindOnce(&SetSinkIdOnMediaThread, audio_source_provider_,
sink_id_utf8, std::move(callback)));
- delegate_->DidAudioOutputSinkChange(delegate_id_, sink_id_utf8);
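+  // The request is now in flight on the media thread; the wrapped completion
+  // callback reports the actual outcome.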
+ return true;
}
STATIC_ASSERT_ENUM(WebMediaPlayer::kPreloadNone, MultibufferDataSource::NONE);
@@ -1334,10 +1343,8 @@ bool WebMediaPlayerImpl::DidLoadingProgress() {
}
void WebMediaPlayerImpl::Paint(cc::PaintCanvas* canvas,
- const blink::WebRect& rect,
- cc::PaintFlags& flags,
- int already_uploaded_id,
- VideoFrameUploadMetadata* out_metadata) {
+ const gfx::Rect& rect,
+ cc::PaintFlags& flags) {
DCHECK(main_task_runner_->BelongsToCurrentThread());
TRACE_EVENT0("media", "WebMediaPlayerImpl:paint");
@@ -1347,16 +1354,9 @@ void WebMediaPlayerImpl::Paint(cc::PaintCanvas* canvas,
if (video_frame && video_frame->HasTextures()) {
if (!raster_context_provider_)
return; // Unable to get/create a shared main thread context.
- if (!raster_context_provider_->GrContext())
- return; // The context has been lost since and can't setup a GrContext.
- }
- if (out_metadata && video_frame) {
- // WebGL last-uploaded-frame-metadata API enabled. https://crbug.com/639174
- ComputeFrameUploadMetadata(video_frame.get(), already_uploaded_id,
- out_metadata);
- if (out_metadata->skipped) {
- // Skip uploading this frame.
- return;
+ if (!raster_context_provider_->GrContext() &&
+ !raster_context_provider_->ContextCapabilities().supports_oop_raster) {
+      return;  // Context lost, and no OOP-raster support to fall back on.
}
}
video_renderer_.Paint(
@@ -1369,6 +1369,12 @@ scoped_refptr<VideoFrame> WebMediaPlayerImpl::GetCurrentFrame() {
return GetCurrentFrameFromCompositor();
}
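+// Hands out the player's PaintCanvasVideoRenderer so callers can drive frame
+// painting and uploads directly, replacing the Copy*/PrepareVideoFrameForWebGL
+// helpers removed below.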
+media::PaintCanvasVideoRenderer*
+WebMediaPlayerImpl::GetPaintCanvasVideoRenderer() {
+ DCHECK(main_task_runner_->BelongsToCurrentThread());
+ return &video_renderer_;
+}
+
bool WebMediaPlayerImpl::WouldTaintOrigin() const {
if (demuxer_found_hls_) {
// HLS manifests might pull segments from a different origin. We can't know
@@ -1418,89 +1424,6 @@ bool WebMediaPlayerImpl::HasAvailableVideoFrame() const {
return has_first_frame_;
}
-bool WebMediaPlayerImpl::CopyVideoTextureToPlatformTexture(
- gpu::gles2::GLES2Interface* gl,
- unsigned int target,
- unsigned int texture,
- unsigned internal_format,
- unsigned format,
- unsigned type,
- int level,
- bool premultiply_alpha,
- bool flip_y,
- int already_uploaded_id,
- VideoFrameUploadMetadata* out_metadata) {
- DCHECK(main_task_runner_->BelongsToCurrentThread());
- TRACE_EVENT0("media", "WebMediaPlayerImpl:copyVideoTextureToPlatformTexture");
-
- scoped_refptr<VideoFrame> video_frame = GetCurrentFrameFromCompositor();
- if (!video_frame || !video_frame->HasTextures()) {
- return false;
- }
-
- if (out_metadata) {
- // WebGL last-uploaded-frame-metadata API is enabled.
- // https://crbug.com/639174
- ComputeFrameUploadMetadata(video_frame.get(), already_uploaded_id,
- out_metadata);
- if (out_metadata->skipped) {
- // Skip uploading this frame.
- return true;
- }
- }
-
- return video_renderer_.CopyVideoFrameTexturesToGLTexture(
- raster_context_provider_.get(), gl, video_frame.get(), target, texture,
- internal_format, format, type, level, premultiply_alpha, flip_y);
-}
-
-bool WebMediaPlayerImpl::PrepareVideoFrameForWebGL(
- gpu::gles2::GLES2Interface* gl,
- unsigned target,
- unsigned texture,
- int already_uploaded_id,
- WebMediaPlayer::VideoFrameUploadMetadata* out_metadata) {
- DCHECK(main_task_runner_->BelongsToCurrentThread());
- TRACE_EVENT0("media", "WebMediaPlayerImpl::PrepareVideoFrameForWebGL");
-
- // TODO(crbug.com/776222): How to deal with protected frames.
- scoped_refptr<VideoFrame> video_frame = GetCurrentFrameFromCompositor();
- if (!video_frame.get() || !video_frame->HasTextures()) {
- return false;
- }
- if (out_metadata) {
- // WebGL last-uploaded-frame-metadata API is enabled.
- ComputeFrameUploadMetadata(video_frame.get(), already_uploaded_id,
- out_metadata);
- if (out_metadata->skipped) {
- // Skip uploading this frame.
- return true;
- }
- }
-
- return video_renderer_.PrepareVideoFrameForWebGL(
- raster_context_provider_.get(), gl, video_frame.get(), target, texture);
-}
-
-// static
-void WebMediaPlayerImpl::ComputeFrameUploadMetadata(
- VideoFrame* frame,
- int already_uploaded_id,
- VideoFrameUploadMetadata* out_metadata) {
- DCHECK(out_metadata);
- DCHECK(frame);
- out_metadata->frame_id = frame->unique_id();
- out_metadata->visible_rect = frame->visible_rect();
- out_metadata->timestamp = frame->timestamp();
- if (frame->metadata()->frame_duration.has_value()) {
- out_metadata->expected_timestamp =
- frame->timestamp() + *frame->metadata()->frame_duration;
- };
- bool skip_possible = already_uploaded_id != -1;
- bool same_frame_id = frame->unique_id() == already_uploaded_id;
- out_metadata->skipped = skip_possible && same_frame_id;
-}
-
void WebMediaPlayerImpl::SetContentDecryptionModule(
blink::WebContentDecryptionModule* cdm,
blink::WebContentDecryptionModuleResult result) {
@@ -2013,9 +1936,12 @@ void WebMediaPlayerImpl::OnMetadata(const PipelineMetadata& metadata) {
observer_->OnMetadataChanged(pipeline_metadata_);
delegate_has_audio_ = HasUnmutedAudio();
- delegate_->DidMediaMetadataChange(
- delegate_id_, delegate_has_audio_, HasVideo(),
- DurationToMediaContentType(GetPipelineMediaDuration()));
+ MediaContentType content_type =
+ DurationToMediaContentType(GetPipelineMediaDuration());
+ client_->DidMediaMetadataChange(delegate_has_audio_, HasVideo(),
+ content_type);
+ delegate_->DidMediaMetadataChange(delegate_id_, delegate_has_audio_,
+ HasVideo(), content_type);
// It could happen that the demuxer successfully completed initialization
// (implying it had determined media metadata), but then removed all audio and
@@ -2265,7 +2191,7 @@ void WebMediaPlayerImpl::OnBufferingStateChangeInternal(
!seeking_) {
underflow_timer_ = std::make_unique<base::ElapsedTimer>();
watch_time_reporter_->OnUnderflow();
- delegate_->DidBufferUnderflow(delegate_id_);
+ client_->DidBufferUnderflow();
if (playback_events_recorder_)
playback_events_recorder_->OnBuffering();
@@ -2303,9 +2229,12 @@ void WebMediaPlayerImpl::OnDurationChange() {
client_->DurationChanged();
- delegate_->DidMediaMetadataChange(
- delegate_id_, delegate_has_audio_, HasVideo(),
- DurationToMediaContentType(GetPipelineMediaDuration()));
+ MediaContentType content_type =
+ DurationToMediaContentType(GetPipelineMediaDuration());
+ client_->DidMediaMetadataChange(delegate_has_audio_, HasVideo(),
+ content_type);
+ delegate_->DidMediaMetadataChange(delegate_id_, delegate_has_audio_,
+ HasVideo(), content_type);
if (watch_time_reporter_)
watch_time_reporter_->OnDurationChanged(GetPipelineMediaDuration());
@@ -2400,7 +2329,7 @@ void WebMediaPlayerImpl::OnVideoNaturalSizeChange(const gfx::Size& size) {
if (observer_)
observer_->OnMetadataChanged(pipeline_metadata_);
- delegate_->DidPlayerSizeChange(delegate_id_, NaturalSize());
+ client_->DidPlayerSizeChange(NaturalSize());
}
void WebMediaPlayerImpl::OnVideoOpacityChange(bool opaque) {
@@ -2437,6 +2366,11 @@ void WebMediaPlayerImpl::OnAudioConfigChange(const AudioDecoderConfig& config) {
if (observer_)
observer_->OnMetadataChanged(pipeline_metadata_);
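+  // Surface the (possibly new) audio codec to the metrics provider.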
+ if (codec_change) {
+ media_metrics_provider_->SetHasAudio(
+ pipeline_metadata_.audio_decoder_config.codec());
+ }
+
if (codec_change || codec_profile_change)
UpdateSecondaryProperties();
}
@@ -2455,6 +2389,11 @@ void WebMediaPlayerImpl::OnVideoConfigChange(const VideoDecoderConfig& config) {
if (observer_)
observer_->OnMetadataChanged(pipeline_metadata_);
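+  // Surface the (possibly new) video codec to the metrics provider.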
+ if (codec_change) {
+ media_metrics_provider_->SetHasVideo(
+ pipeline_metadata_.video_decoder_config.codec());
+ }
+
if (codec_change || codec_profile_change)
UpdateSecondaryProperties();
@@ -2466,12 +2405,12 @@ void WebMediaPlayerImpl::OnVideoAverageKeyframeDistanceUpdate() {
UpdateBackgroundVideoOptimizationState();
}
-void WebMediaPlayerImpl::OnAudioDecoderChange(const PipelineDecoderInfo& info) {
+void WebMediaPlayerImpl::OnAudioDecoderChange(const AudioDecoderInfo& info) {
media_metrics_provider_->SetAudioPipelineInfo(info);
- if (info.decoder_name == audio_decoder_name_)
+ if (info.decoder_type == audio_decoder_type_)
return;
- audio_decoder_name_ = info.decoder_name;
+ audio_decoder_type_ = info.decoder_type;
// If there's no current reporter, there's nothing to be done.
if (!watch_time_reporter_)
@@ -2480,12 +2419,12 @@ void WebMediaPlayerImpl::OnAudioDecoderChange(const PipelineDecoderInfo& info) {
UpdateSecondaryProperties();
}
-void WebMediaPlayerImpl::OnVideoDecoderChange(const PipelineDecoderInfo& info) {
+void WebMediaPlayerImpl::OnVideoDecoderChange(const VideoDecoderInfo& info) {
media_metrics_provider_->SetVideoPipelineInfo(info);
- if (info.decoder_name == video_decoder_name_)
+ if (info.decoder_type == video_decoder_type_)
return;
- video_decoder_name_ = info.decoder_name;
+ video_decoder_type_ = info.decoder_type;
// If there's no current reporter, there's nothing to be done.
if (!watch_time_reporter_)
@@ -2550,7 +2489,7 @@ void WebMediaPlayerImpl::OnFrameShown() {
if (paused_when_hidden_) {
paused_when_hidden_ = false;
- OnPlay(); // Calls UpdatePlayState() so return afterwards.
+    client_->ResumePlayback();  // Calls UpdatePlayState(), so return afterwards.
return;
}
@@ -2572,41 +2511,6 @@ void WebMediaPlayerImpl::OnIdleTimeout() {
UpdatePlayState();
}
-void WebMediaPlayerImpl::OnPlay() {
- client_->RequestPlay();
-}
-
-void WebMediaPlayerImpl::OnPause() {
- client_->RequestPause();
-}
-
-void WebMediaPlayerImpl::OnMuted(bool muted) {
- client_->RequestMuted(muted);
-}
-
-void WebMediaPlayerImpl::OnSeekForward(double seconds) {
- DCHECK_GE(seconds, 0) << "Attempted to seek by a negative number of seconds";
- client_->RequestSeek(CurrentTime() + seconds);
-}
-
-void WebMediaPlayerImpl::OnSeekBackward(double seconds) {
- DCHECK_GE(seconds, 0) << "Attempted to seek by a negative number of seconds";
- client_->RequestSeek(CurrentTime() - seconds);
-}
-
-void WebMediaPlayerImpl::OnEnterPictureInPicture() {
- client_->RequestEnterPictureInPicture();
-}
-
-void WebMediaPlayerImpl::OnExitPictureInPicture() {
- client_->RequestExitPictureInPicture();
-}
-
-void WebMediaPlayerImpl::OnSetAudioSink(const std::string& sink_id) {
- SetSinkId(WebString::FromASCII(sink_id),
- base::DoNothing::Once<base::Optional<blink::WebSetSinkIdError>>());
-}
-
void WebMediaPlayerImpl::OnVolumeMultiplierUpdate(double multiplier) {
volume_multiplier_ = multiplier;
SetVolume(volume_);
@@ -2671,10 +2575,10 @@ void WebMediaPlayerImpl::OnRemotePlayStateChange(MediaStatus::State state) {
if (state == MediaStatus::State::PLAYING && Paused()) {
DVLOG(1) << __func__ << " requesting PLAY.";
- client_->RequestPlay();
+ client_->ResumePlayback();
} else if (state == MediaStatus::State::PAUSED && !Paused()) {
DVLOG(1) << __func__ << " requesting PAUSE.";
- client_->RequestPause();
+ client_->PausePlayback();
}
}
#endif // defined(OS_ANDROID)
@@ -2915,8 +2819,9 @@ void WebMediaPlayerImpl::StartPipeline() {
// base::Unretained is safe because |this| owns memory_pressure_listener_.
memory_pressure_listener_ =
std::make_unique<base::MemoryPressureListener>(
- FROM_HERE, base::Bind(&WebMediaPlayerImpl::OnMemoryPressure,
- base::Unretained(this)));
+ FROM_HERE,
+ base::BindRepeating(&WebMediaPlayerImpl::OnMemoryPressure,
+ base::Unretained(this)));
}
}
@@ -3049,8 +2954,8 @@ void WebMediaPlayerImpl::OnTimeUpdate() {
DVLOG(2) << __func__ << "(" << new_position.ToString() << ")";
media_position_state_ = new_position;
- delegate_->DidPlayerMediaPositionStateChange(delegate_id_,
- media_position_state_);
+ client_->DidPlayerMediaPositionStateChange(effective_playback_rate, duration,
+ current_time);
}
void WebMediaPlayerImpl::SetDelegateState(DelegateState new_state,
@@ -3071,11 +2976,13 @@ void WebMediaPlayerImpl::SetDelegateState(DelegateState new_state,
break;
case DelegateState::PLAYING: {
if (HasVideo())
- delegate_->DidPlayerSizeChange(delegate_id_, NaturalSize());
+ client_->DidPlayerSizeChange(NaturalSize());
+ client_->DidPlayerStartPlaying();
delegate_->DidPlay(delegate_id_);
break;
}
case DelegateState::PAUSED:
+ client_->DidPlayerPaused(ended_);
delegate_->DidPause(delegate_id_, ended_);
break;
}
@@ -3385,7 +3292,8 @@ void WebMediaPlayerImpl::ScheduleIdlePauseTimer() {
// Idle timeout chosen arbitrarily.
background_pause_timer_.Start(FROM_HERE, base::TimeDelta::FromSeconds(5),
- this, &WebMediaPlayerImpl::OnPause);
+ client_,
+                                &blink::WebMediaPlayerClient::PausePlayback);
}
void WebMediaPlayerImpl::CreateWatchTimeReporter() {
@@ -3403,7 +3311,8 @@ void WebMediaPlayerImpl::CreateWatchTimeReporter() {
watch_time_reporter_ = std::make_unique<blink::WatchTimeReporter>(
mojom::PlaybackProperties::New(
pipeline_metadata_.has_audio, has_video, false, false,
- !!chunk_demuxer_, is_encrypted_, embedded_media_experience_enabled_),
+ !!chunk_demuxer_, is_encrypted_, embedded_media_experience_enabled_,
+ mojom::MediaStreamType::kNone),
pipeline_metadata_.natural_size,
base::BindRepeating(&WebMediaPlayerImpl::GetCurrentTimeInternal,
base::Unretained(this)),
@@ -3452,7 +3361,7 @@ void WebMediaPlayerImpl::UpdateSecondaryProperties() {
pipeline_metadata_.video_decoder_config.codec(),
pipeline_metadata_.audio_decoder_config.profile(),
pipeline_metadata_.video_decoder_config.profile(),
- audio_decoder_name_, video_decoder_name_,
+ audio_decoder_type_, video_decoder_type_,
pipeline_metadata_.audio_decoder_config.encryption_scheme(),
pipeline_metadata_.video_decoder_config.encryption_scheme(),
pipeline_metadata_.natural_size));
@@ -3631,11 +3540,12 @@ void WebMediaPlayerImpl::UpdateBackgroundVideoOptimizationState() {
if (IsHidden()) {
if (ShouldPausePlaybackWhenHidden()) {
PauseVideoIfNeeded();
- } else if (update_background_status_cb_.IsCancelled()) {
+ } else if (is_background_status_change_cancelled_) {
// Only trigger updates when we don't have one already scheduled.
update_background_status_cb_.Reset(
- base::Bind(&WebMediaPlayerImpl::DisableVideoTrackIfNeeded,
- base::Unretained(this)));
+ base::BindOnce(&WebMediaPlayerImpl::DisableVideoTrackIfNeeded,
+ base::Unretained(this)));
+ is_background_status_change_cancelled_ = false;
// Defer disable track until we're sure the clip will be backgrounded for
// some time. Resuming may take half a second, so frequent tab switches
@@ -3647,6 +3557,7 @@ void WebMediaPlayerImpl::UpdateBackgroundVideoOptimizationState() {
}
} else {
update_background_status_cb_.Cancel();
+ is_background_status_change_cancelled_ = true;
EnableVideoTrackIfNeeded();
}
}
@@ -3660,9 +3571,10 @@ void WebMediaPlayerImpl::PauseVideoIfNeeded() {
seeking_ || paused_)
return;
- // OnPause() will set |paused_when_hidden_| to false and call
- // UpdatePlayState(), so set the flag to true after and then return.
- OnPause();
+  // client_->PausePlayback() resets |paused_when_hidden_| to false and calls
+  // UpdatePlayState(), so set the flag to true afterwards and then return.
+ client_->PausePlayback();
paused_when_hidden_ = true;
}
@@ -3854,10 +3766,6 @@ bool WebMediaPlayerImpl::IsInPictureInPicture() const {
return client_->GetDisplayType() == blink::DisplayType::kPictureInPicture;
}
-void WebMediaPlayerImpl::OnPictureInPictureAvailabilityChanged(bool available) {
- delegate_->DidPictureInPictureAvailabilityChange(delegate_id_, available);
-}
-
void WebMediaPlayerImpl::MaybeSetContainerNameForMetrics() {
// Pipeline startup failed before even getting a demuxer setup.
if (!demuxer_)
diff --git a/chromium/media/blink/webmediaplayer_impl.h b/chromium/media/blink/webmediaplayer_impl.h
index 995d07a7ea2..bcb8e52a6dd 100644
--- a/chromium/media/blink/webmediaplayer_impl.h
+++ b/chromium/media/blink/webmediaplayer_impl.h
@@ -75,12 +75,6 @@ namespace cc {
class VideoLayer;
}
-namespace gpu {
-namespace gles2 {
-class GLES2Interface;
-}
-} // namespace gpu
-
namespace media {
class CdmContextRef;
class ChunkDemuxer;
@@ -122,7 +116,8 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
WebMediaPlayer::LoadTiming Load(LoadType load_type,
const blink::WebMediaPlayerSource& source,
- CorsMode cors_mode) override;
+ CorsMode cors_mode,
+ bool is_cache_disabled) override;
// Playback controls.
void Play() override;
@@ -132,9 +127,10 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
void SetVolume(double volume) override;
void SetLatencyHint(double seconds) override;
void SetPreservesPitch(bool preserves_pitch) override;
+ void SetAutoplayInitiated(bool autoplay_initiated) override;
void OnRequestPictureInPicture() override;
void OnTimeUpdate() override;
- void SetSinkId(
+ bool SetSinkId(
const blink::WebString& sink_id,
blink::WebSetSinkIdCompleteCallback completion_callback) override;
void SetPoster(const blink::WebURL& poster) override;
@@ -144,13 +140,12 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
// paint() the current video frame into |canvas|. This is used to support
// various APIs and functionalities, including but not limited to: <canvas>,
- // WebGL texImage2D, ImageBitmap, printing and capturing capabilities.
+ // ImageBitmap, printing and capturing capabilities.
void Paint(cc::PaintCanvas* canvas,
- const blink::WebRect& rect,
- cc::PaintFlags& flags,
- int already_uploaded_id,
- VideoFrameUploadMetadata* out_metadata) override;
+ const gfx::Rect& rect,
+ cc::PaintFlags& flags) override;
scoped_refptr<VideoFrame> GetCurrentFrame() override;
+ media::PaintCanvasVideoRenderer* GetPaintCanvasVideoRenderer() override;
// True if the loaded media has a playable video/audio track.
bool HasVideo() const override;
@@ -177,9 +172,6 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
bool PausedWhenHidden() const override;
- // Informed when picture-in-picture availability changed.
- void OnPictureInPictureAvailabilityChanged(bool available) override;
-
// Internal states of loading and network.
// TODO(hclam): Ask the pipeline about the state rather than having reading
// them from members which would cause race conditions.
@@ -202,31 +194,6 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
bool HasAvailableVideoFrame() const override;
- bool CopyVideoTextureToPlatformTexture(
- gpu::gles2::GLES2Interface* gl,
- unsigned int target,
- unsigned int texture,
- unsigned internal_format,
- unsigned format,
- unsigned type,
- int level,
- bool premultiply_alpha,
- bool flip_y,
- int already_uploaded_id,
- VideoFrameUploadMetadata* out_metadata) override;
-
- bool PrepareVideoFrameForWebGL(
- gpu::gles2::GLES2Interface* gl,
- unsigned target,
- unsigned texture,
- int already_uploaded_id,
- WebMediaPlayer::VideoFrameUploadMetadata* out_metadata) override;
-
- static void ComputeFrameUploadMetadata(
- VideoFrame* frame,
- int already_uploaded_id,
- VideoFrameUploadMetadata* out_metadata);
-
scoped_refptr<blink::WebAudioSourceProviderImpl> GetAudioSourceProvider()
override;
@@ -247,14 +214,6 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
void OnFrameClosed() override;
void OnFrameShown() override;
void OnIdleTimeout() override;
- void OnPlay() override;
- void OnPause() override;
- void OnMuted(bool muted) override;
- void OnSeekForward(double seconds) override;
- void OnSeekBackward(double seconds) override;
- void OnEnterPictureInPicture() override;
- void OnExitPictureInPicture() override;
- void OnSetAudioSink(const std::string& sink_id) override;
void OnVolumeMultiplierUpdate(double multiplier) override;
void OnBecamePersistentVideo(bool value) override;
void OnPowerExperimentState(bool state) override;
@@ -369,8 +328,8 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
void OnVideoOpacityChange(bool opaque) override;
void OnVideoFrameRateChange(base::Optional<int> fps) override;
void OnVideoAverageKeyframeDistanceUpdate() override;
- void OnAudioDecoderChange(const PipelineDecoderInfo& info) override;
- void OnVideoDecoderChange(const PipelineDecoderInfo& info) override;
+ void OnAudioDecoderChange(const AudioDecoderInfo& info) override;
+ void OnVideoDecoderChange(const VideoDecoderInfo& info) override;
// Simplified watch time reporting.
void OnSimpleWatchTimerTick();
@@ -381,7 +340,10 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
// Called after |defer_load_cb_| has decided to allow the load. If
// |defer_load_cb_| is null this is called immediately.
- void DoLoad(LoadType load_type, const blink::WebURL& url, CorsMode cors_mode);
+ void DoLoad(LoadType load_type,
+ const blink::WebURL& url,
+ CorsMode cors_mode,
+ bool is_cache_disabled);
// Called after asynchronous initialization of a data source completed.
void DataSourceInitialized(bool success);
@@ -898,8 +860,8 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
// Monitors the watch time of the played content.
std::unique_ptr<blink::WatchTimeReporter> watch_time_reporter_;
- std::string audio_decoder_name_;
- std::string video_decoder_name_;
+ AudioDecoderType audio_decoder_type_ = AudioDecoderType::kUnknown;
+ VideoDecoderType video_decoder_type_ = VideoDecoderType::kUnknown;
// The time at which DoLoad() is executed.
base::TimeTicks load_start_time_;
@@ -1000,7 +962,13 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
OverlayInfo overlay_info_;
- base::CancelableClosure update_background_status_cb_;
+ base::CancelableOnceClosure update_background_status_cb_;
+
+ // We cannot use `update_background_status_cb_.IsCancelled()` as that changes
+ // when the callback is run, even if not explicitly cancelled. This is
+ // initialized to true to keep in line with the existing behavior of
+  // base::CancelableOnceClosure.
+ bool is_background_status_change_cancelled_ = true;
mojo::Remote<mojom::MediaMetricsProvider> media_metrics_provider_;
mojo::Remote<mojom::PlaybackEventsRecorder> playback_events_recorder_;
diff --git a/chromium/media/blink/webmediaplayer_impl_unittest.cc b/chromium/media/blink/webmediaplayer_impl_unittest.cc
index 6aca18bc32b..d20b72412b1 100644
--- a/chromium/media/blink/webmediaplayer_impl_unittest.cc
+++ b/chromium/media/blink/webmediaplayer_impl_unittest.cc
@@ -29,6 +29,7 @@
#include "cc/layers/layer.h"
#include "components/viz/test/test_context_provider.h"
#include "media/base/decoder_buffer.h"
+#include "media/base/media_content_type.h"
#include "media/base/media_log.h"
#include "media/base/media_switches.h"
#include "media/base/memory_dump_provider_proxy.h"
@@ -53,6 +54,7 @@
#include "mojo/public/cpp/bindings/remote.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "third_party/blink/public/common/tokens/tokens.h"
#include "third_party/blink/public/platform/media/webmediaplayer_delegate.h"
#include "third_party/blink/public/platform/scheduler/web_thread_scheduler.h"
#include "third_party/blink/public/platform/web_fullscreen_video_status.h"
@@ -69,6 +71,7 @@
#include "third_party/blink/public/web/web_testing_support.h"
#include "third_party/blink/public/web/web_view.h"
#include "third_party/blink/public/web/web_widget.h"
+#include "ui/gfx/geometry/size.h"
#include "url/gurl.h"
using ::base::test::RunClosure;
@@ -138,7 +141,6 @@ class MockWebMediaPlayerClient : public blink::WebMediaPlayerClient {
MOCK_METHOD1(AddTextTrack, void(blink::WebInbandTextTrack*));
MOCK_METHOD1(RemoveTextTrack, void(blink::WebInbandTextTrack*));
MOCK_METHOD1(MediaSourceOpened, void(blink::WebMediaSource*));
- MOCK_METHOD1(RequestSeek, void(double));
MOCK_METHOD2(RemotePlaybackCompatibilityChanged,
void(const blink::WebURL&, bool));
MOCK_METHOD1(OnBecamePersistentVideo, void(bool));
@@ -154,11 +156,19 @@ class MockWebMediaPlayerClient : public blink::WebMediaPlayerClient {
MOCK_METHOD0(PictureInPictureStopped, void());
MOCK_METHOD0(OnPictureInPictureStateChange, void());
MOCK_CONST_METHOD0(CouldPlayIfEnoughData, bool());
- MOCK_METHOD0(RequestPlay, void());
- MOCK_METHOD0(RequestPause, void());
- MOCK_METHOD1(RequestMuted, void(bool));
- MOCK_METHOD0(RequestEnterPictureInPicture, void());
- MOCK_METHOD0(RequestExitPictureInPicture, void());
+ MOCK_METHOD0(ResumePlayback, void());
+ MOCK_METHOD0(PausePlayback, void());
+ MOCK_METHOD0(DidPlayerStartPlaying, void());
+ MOCK_METHOD1(DidPlayerPaused, void(bool));
+ MOCK_METHOD1(DidPlayerMutedStatusChange, void(bool));
+ MOCK_METHOD3(DidMediaMetadataChange,
+ void(bool, bool, media::MediaContentType));
+ MOCK_METHOD3(DidPlayerMediaPositionStateChange,
+               void(double, base::TimeDelta, base::TimeDelta));
+ MOCK_METHOD0(DidDisableAudioOutputSinkChanges, void());
+ MOCK_METHOD1(DidPlayerSizeChange, void(const gfx::Size&));
+ MOCK_METHOD0(DidBufferUnderflow, void());
+ MOCK_METHOD0(DidSeek, void());
MOCK_METHOD0(GetFeatures, Features(void));
MOCK_METHOD0(OnRequestVideoFrameCallback, void());
MOCK_METHOD0(GetTextTrackMetadata, std::vector<blink::TextTrackMetadata>());
@@ -221,10 +231,6 @@ class MockWebMediaPlayerDelegate : public blink::WebMediaPlayerDelegate {
return is_idle_;
}
- void DidPlayerMutedStatusChange(int delegate_id, bool muted) override {
- DCHECK_EQ(player_id_, delegate_id);
- }
-
void ClearStaleFlag(int player_id) override {
DCHECK_EQ(player_id_, player_id);
is_stale_ = false;
@@ -235,22 +241,6 @@ class MockWebMediaPlayerDelegate : public blink::WebMediaPlayerDelegate {
return is_stale_;
}
- void SetIsEffectivelyFullscreen(
- int player_id,
- blink::WebFullscreenVideoStatus fullscreen_video_status) override {
- DCHECK_EQ(player_id_, player_id);
- }
-
- void DidPlayerSizeChange(int player_id, const gfx::Size& size) override {
- DCHECK_EQ(player_id_, player_id);
- }
-
- void DidBufferUnderflow(int player_id) override {
- DCHECK_EQ(player_id_, player_id);
- }
-
- void DidSeek(int player_id) override { DCHECK_EQ(player_id_, player_id); }
-
bool IsFrameHidden() override { return is_hidden_; }
bool IsFrameClosed() override { return is_closed_; }
@@ -278,15 +268,6 @@ class MockWebMediaPlayerDelegate : public blink::WebMediaPlayerDelegate {
int player_id() { return player_id_; }
- MOCK_METHOD2(DidPlayerMediaPositionStateChange,
- void(int, const media_session::MediaPosition&));
-
- MOCK_METHOD2(DidPictureInPictureAvailabilityChange, void(int, bool));
-
- MOCK_METHOD2(DidAudioOutputSinkChange, void(int, const std::string&));
-
- MOCK_METHOD1(DidDisableAudioOutputSinkChanges, void(int));
-
private:
Observer* observer_ = nullptr;
int player_id_ = 1234;
@@ -346,12 +327,12 @@ class WebMediaPlayerImplTest
/*opener=*/nullptr,
mojo::NullAssociatedReceiver(),
*agent_group_scheduler)),
- web_local_frame_(blink::WebLocalFrame::CreateMainFrame(
- web_view_,
- &web_frame_client_,
- nullptr,
- base::UnguessableToken::Create(),
- nullptr)),
+ web_local_frame_(
+ blink::WebLocalFrame::CreateMainFrame(web_view_,
+ &web_frame_client_,
+ nullptr,
+ blink::LocalFrameToken(),
+ nullptr)),
context_provider_(viz::TestContextProvider::Create()),
audio_parameters_(TestAudioParameters::Normal()),
memory_dump_manager_(
@@ -440,7 +421,8 @@ class WebMediaPlayerImplTest
// Initialize provider since none of the tests below actually go through the
// full loading/pipeline initialize phase. If this ever changes the provider
// will start DCHECK failing.
- provider->Initialize(false, mojom::MediaURLScheme::kHttp);
+ provider->Initialize(false, mojom::MediaURLScheme::kHttp,
+ mojom::MediaStreamType::kNone);
audio_sink_ = base::WrapRefCounted(new NiceMock<MockAudioRendererSink>());
@@ -653,7 +635,7 @@ class WebMediaPlayerImplTest
bool IsVideoTrackDisabled() const { return wmpi_->video_track_disabled_; }
bool IsDisableVideoTrackPending() const {
- return !wmpi_->update_background_status_cb_.IsCancelled();
+ return !wmpi_->is_background_status_change_cancelled_;
}
gfx::Size GetNaturalSize() const {
@@ -723,7 +705,8 @@ class WebMediaPlayerImplTest
wmpi_->Load(blink::WebMediaPlayer::kLoadTypeURL,
blink::WebMediaPlayerSource(blink::WebURL(kTestURL)),
- blink::WebMediaPlayer::kCorsModeUnspecified);
+ blink::WebMediaPlayer::kCorsModeUnspecified,
+ /*is_cache_disabled=*/false);
base::RunLoop().RunUntilIdle();
@@ -987,7 +970,8 @@ TEST_F(WebMediaPlayerImplTest, LoadAndDestroyDataUrl) {
wmpi_->Load(blink::WebMediaPlayer::kLoadTypeURL,
blink::WebMediaPlayerSource(blink::WebURL(kMp3DataUrl)),
- blink::WebMediaPlayer::kCorsModeUnspecified);
+ blink::WebMediaPlayer::kCorsModeUnspecified,
+ /*is_cache_disabled=*/false);
base::RunLoop().RunUntilIdle();
@@ -1567,11 +1551,8 @@ TEST_F(WebMediaPlayerImplTest, MediaPositionState_Playing) {
wmpi_->SetRate(1.0);
Play();
- EXPECT_CALL(delegate_,
- DidPlayerMediaPositionStateChange(
- delegate_.player_id(),
- media_session::MediaPosition(1.0, kAudioOnlyTestFileDuration,
- base::TimeDelta())));
+ EXPECT_CALL(client_, DidPlayerMediaPositionStateChange(
+ 1.0, kAudioOnlyTestFileDuration, base::TimeDelta()));
wmpi_->OnTimeUpdate();
}
@@ -1582,11 +1563,8 @@ TEST_F(WebMediaPlayerImplTest, MediaPositionState_Paused) {
wmpi_->SetRate(1.0);
// The effective playback rate is 0.0 while paused.
- EXPECT_CALL(delegate_,
- DidPlayerMediaPositionStateChange(
- delegate_.player_id(),
- media_session::MediaPosition(0.0, kAudioOnlyTestFileDuration,
- base::TimeDelta())));
+ EXPECT_CALL(client_, DidPlayerMediaPositionStateChange(
+ 0.0, kAudioOnlyTestFileDuration, base::TimeDelta()));
wmpi_->OnTimeUpdate();
}
@@ -1598,21 +1576,17 @@ TEST_F(WebMediaPlayerImplTest, MediaPositionState_PositionChange) {
Play();
testing::Sequence sequence;
- EXPECT_CALL(delegate_, DidPlayerMediaPositionStateChange(
- delegate_.player_id(),
- media_session::MediaPosition(
- 0.0, kAudioOnlyTestFileDuration,
- base::TimeDelta::FromSecondsD(0.1))))
+ EXPECT_CALL(client_, DidPlayerMediaPositionStateChange(
+ 0.0, kAudioOnlyTestFileDuration,
+ base::TimeDelta::FromSecondsD(0.1)))
.InSequence(sequence);
wmpi_->Seek(0.1);
wmpi_->OnTimeUpdate();
// If we load enough data to resume playback the position should be updated.
- EXPECT_CALL(delegate_, DidPlayerMediaPositionStateChange(
- delegate_.player_id(),
- media_session::MediaPosition(
- 0.5, kAudioOnlyTestFileDuration,
- base::TimeDelta::FromSecondsD(0.1))))
+ EXPECT_CALL(client_, DidPlayerMediaPositionStateChange(
+ 0.5, kAudioOnlyTestFileDuration,
+ base::TimeDelta::FromSecondsD(0.1)))
.InSequence(sequence);
SetReadyState(blink::WebMediaPlayer::kReadyStateHaveFutureData);
wmpi_->OnTimeUpdate();
@@ -1629,11 +1603,8 @@ TEST_F(WebMediaPlayerImplTest, MediaPositionState_Underflow) {
Play();
// Underflow will set the effective playback rate to 0.0.
- EXPECT_CALL(delegate_,
- DidPlayerMediaPositionStateChange(
- delegate_.player_id(),
- media_session::MediaPosition(0.0, kAudioOnlyTestFileDuration,
- base::TimeDelta())));
+ EXPECT_CALL(client_, DidPlayerMediaPositionStateChange(
+ 0.0, kAudioOnlyTestFileDuration, base::TimeDelta()));
SetReadyState(blink::WebMediaPlayer::kReadyStateHaveCurrentData);
wmpi_->OnTimeUpdate();
}
@@ -1645,16 +1616,14 @@ TEST_F(WebMediaPlayerImplTest, MediaPositionState_InfiniteCurrentTime) {
SetDuration(kInfiniteDuration);
wmpi_->OnTimeUpdate();
- EXPECT_CALL(delegate_, DidPlayerMediaPositionStateChange(
- delegate_.player_id(),
- media_session::MediaPosition(
- 0.0, kInfiniteDuration, kInfiniteDuration)));
+ EXPECT_CALL(client_, DidPlayerMediaPositionStateChange(0.0, kInfiniteDuration,
+ kInfiniteDuration));
wmpi_->Seek(kInfiniteDuration.InSecondsF());
wmpi_->OnTimeUpdate();
- testing::Mock::VerifyAndClearExpectations(&delegate_);
+ testing::Mock::VerifyAndClearExpectations(&client_);
- EXPECT_CALL(delegate_, DidPlayerMediaPositionStateChange(_, _)).Times(0);
+ EXPECT_CALL(client_, DidPlayerMediaPositionStateChange(_, _, _)).Times(0);
wmpi_->OnTimeUpdate();
}
@@ -1739,6 +1708,7 @@ ACTION(ReportHaveEnough) {
BUFFERING_CHANGE_REASON_UNKNOWN);
}
+#if defined(OS_WIN)
TEST_F(WebMediaPlayerImplTest, FallbackToMediaFoundationRenderer) {
InitializeWebMediaPlayerImpl();
// To avoid PreloadMetadataLazyLoad.
@@ -1778,6 +1748,7 @@ TEST_F(WebMediaPlayerImplTest, FallbackToMediaFoundationRenderer) {
LoadAndWaitForReadyState(kEncryptedVideoOnlyTestFile,
blink::WebMediaPlayer::kReadyStateHaveCurrentData);
}
+#endif // defined(OS_WIN)
TEST_F(WebMediaPlayerImplTest, VideoConfigChange) {
InitializeWebMediaPlayerImpl();
@@ -2174,7 +2145,8 @@ TEST_F(WebMediaPlayerImplTest, DISABLED_DemuxerOverride) {
EXPECT_FALSE(IsSuspended());
wmpi_->Load(blink::WebMediaPlayer::kLoadTypeURL,
blink::WebMediaPlayerSource(blink::WebURL(GURL("data://test"))),
- blink::WebMediaPlayer::kCorsModeUnspecified);
+ blink::WebMediaPlayer::kCorsModeUnspecified,
+ /*is_cache_disabled=*/false);
base::RunLoop().RunUntilIdle();
EXPECT_TRUE(IsSuspended());
}
@@ -2330,7 +2302,8 @@ TEST_P(WebMediaPlayerImplBackgroundBehaviorTest, VideoOnly) {
EXPECT_EQ(should_pause, ShouldPausePlaybackWhenHidden());
}
-TEST_P(WebMediaPlayerImplBackgroundBehaviorTest, AudioVideo) {
+// TODO(crbug.com/1177112): Re-enable this test.
+TEST_P(WebMediaPlayerImplBackgroundBehaviorTest, DISABLED_AudioVideo) {
SetMetadata(true, true);
// Optimization requirements are the same for all platforms.
diff --git a/chromium/media/blink/webmediasource_impl.cc b/chromium/media/blink/webmediasource_impl.cc
index 67ef90307ad..7079c498350 100644
--- a/chromium/media/blink/webmediasource_impl.cc
+++ b/chromium/media/blink/webmediasource_impl.cc
@@ -5,7 +5,9 @@
#include "media/blink/webmediasource_impl.h"
#include "base/guid.h"
+#include "media/base/audio_decoder_config.h"
#include "media/base/mime_util.h"
+#include "media/base/video_decoder_config.h"
#include "media/blink/websourcebuffer_impl.h"
#include "media/filters/chunk_demuxer.h"
#include "third_party/blink/public/platform/web_string.h"
@@ -31,19 +33,47 @@ WebMediaSourceImpl::WebMediaSourceImpl(ChunkDemuxer* demuxer)
WebMediaSourceImpl::~WebMediaSourceImpl() = default;
-WebMediaSource::AddStatus WebMediaSourceImpl::AddSourceBuffer(
+std::unique_ptr<blink::WebSourceBuffer> WebMediaSourceImpl::AddSourceBuffer(
const blink::WebString& content_type,
const blink::WebString& codecs,
- blink::WebSourceBuffer** source_buffer) {
+ WebMediaSource::AddStatus& out_status /* out */) {
std::string id = base::GenerateGUID();
- WebMediaSource::AddStatus result = static_cast<WebMediaSource::AddStatus>(
+ out_status = static_cast<WebMediaSource::AddStatus>(
demuxer_->AddId(id, content_type.Utf8(), codecs.Utf8()));
- if (result == WebMediaSource::kAddStatusOk)
- *source_buffer = new WebSourceBufferImpl(id, demuxer_);
+ if (out_status == WebMediaSource::kAddStatusOk)
+ return std::make_unique<WebSourceBufferImpl>(id, demuxer_);
- return result;
+ return nullptr;
+}
+
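+// Overloads that create a SourceBuffer from a raw decoder config rather than
+// a MIME type/codec string (WebCodecs-style buffering; see crbug.com/1144908).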
+std::unique_ptr<blink::WebSourceBuffer> WebMediaSourceImpl::AddSourceBuffer(
+ std::unique_ptr<AudioDecoderConfig> audio_config,
+ WebMediaSource::AddStatus& out_status /* out */) {
+ std::string id = base::GenerateGUID();
+
+ out_status = static_cast<WebMediaSource::AddStatus>(
+ demuxer_->AddId(id, std::move(audio_config)));
+
+ if (out_status == WebMediaSource::kAddStatusOk)
+ return std::make_unique<WebSourceBufferImpl>(id, demuxer_);
+
+ return nullptr;
+}
+
+std::unique_ptr<blink::WebSourceBuffer> WebMediaSourceImpl::AddSourceBuffer(
+ std::unique_ptr<VideoDecoderConfig> video_config,
+ WebMediaSource::AddStatus& out_status /* out */) {
+ std::string id = base::GenerateGUID();
+
+ out_status = static_cast<WebMediaSource::AddStatus>(
+ demuxer_->AddId(id, std::move(video_config)));
+
+ if (out_status == WebMediaSource::kAddStatusOk)
+ return std::make_unique<WebSourceBufferImpl>(id, demuxer_);
+
+ return nullptr;
}
double WebMediaSourceImpl::Duration() {
diff --git a/chromium/media/blink/webmediasource_impl.h b/chromium/media/blink/webmediasource_impl.h
index 2e6d1fd125e..e9c4a35339f 100644
--- a/chromium/media/blink/webmediasource_impl.h
+++ b/chromium/media/blink/webmediasource_impl.h
@@ -13,7 +13,9 @@
#include "third_party/blink/public/platform/web_media_source.h"
namespace media {
+class AudioDecoderConfig;
class ChunkDemuxer;
+class VideoDecoderConfig;
class MEDIA_BLINK_EXPORT WebMediaSourceImpl : public blink::WebMediaSource {
public:
@@ -21,9 +23,16 @@ class MEDIA_BLINK_EXPORT WebMediaSourceImpl : public blink::WebMediaSource {
~WebMediaSourceImpl() override;
// blink::WebMediaSource implementation.
- AddStatus AddSourceBuffer(const blink::WebString& content_type,
- const blink::WebString& codecs,
- blink::WebSourceBuffer** source_buffer) override;
+ std::unique_ptr<blink::WebSourceBuffer> AddSourceBuffer(
+ const blink::WebString& content_type,
+ const blink::WebString& codecs,
+ AddStatus& out_status /* out */) override;
+ std::unique_ptr<blink::WebSourceBuffer> AddSourceBuffer(
+ std::unique_ptr<AudioDecoderConfig> audio_config,
+ AddStatus& out_status /* out */) override;
+ std::unique_ptr<blink::WebSourceBuffer> AddSourceBuffer(
+ std::unique_ptr<VideoDecoderConfig> video_config,
+ AddStatus& out_status /* out */) override;
double Duration() override;
void SetDuration(double duration) override;
void MarkEndOfStream(EndOfStreamStatus status) override;
diff --git a/chromium/media/blink/websourcebuffer_impl.cc b/chromium/media/blink/websourcebuffer_impl.cc
index aa5cf793034..1ea1357f6b5 100644
--- a/chromium/media/blink/websourcebuffer_impl.cc
+++ b/chromium/media/blink/websourcebuffer_impl.cc
@@ -146,6 +146,23 @@ bool WebSourceBufferImpl::Append(const unsigned char* data,
return success;
}
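+// Appends buffers that arrive already parsed (no byte-stream parsing),
+// mirroring ::Append for the config-based WebCodecs path (crbug.com/1144908).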
+bool WebSourceBufferImpl::AppendChunks(
+ std::unique_ptr<media::StreamParser::BufferQueue> buffer_queue,
+ double* timestamp_offset) {
+ base::TimeDelta old_offset = timestamp_offset_;
+ bool success =
+ demuxer_->AppendChunks(id_, std::move(buffer_queue), append_window_start_,
+ append_window_end_, &timestamp_offset_);
+
+ // Like in ::Append, timestamp_offset may be updated by coded frame
+ // processing.
+  // TODO(crbug.com/1144908): Consider refactoring this common bit into a
+  // helper.
+ if (timestamp_offset && old_offset != timestamp_offset_)
+ *timestamp_offset = timestamp_offset_.InSecondsF();
+
+ return success;
+}
+
void WebSourceBufferImpl::ResetParserState() {
demuxer_->ResetParserState(id_,
append_window_start_, append_window_end_,
diff --git a/chromium/media/blink/websourcebuffer_impl.h b/chromium/media/blink/websourcebuffer_impl.h
index 808aa486ceb..0d79492d45b 100644
--- a/chromium/media/blink/websourcebuffer_impl.h
+++ b/chromium/media/blink/websourcebuffer_impl.h
@@ -36,6 +36,9 @@ class WebSourceBufferImpl : public blink::WebSourceBuffer {
bool Append(const unsigned char* data,
unsigned length,
double* timestamp_offset) override;
+ bool AppendChunks(
+ std::unique_ptr<media::StreamParser::BufferQueue> buffer_queue,
+ double* timestamp_offset) override;
void ResetParserState() override;
void Remove(double start, double end) override;
bool CanChangeType(const blink::WebString& content_type,
diff --git a/chromium/media/capabilities/in_memory_video_decode_stats_db_impl.cc b/chromium/media/capabilities/in_memory_video_decode_stats_db_impl.cc
index 1b5477f2f9f..c6901cd122d 100644
--- a/chromium/media/capabilities/in_memory_video_decode_stats_db_impl.cc
+++ b/chromium/media/capabilities/in_memory_video_decode_stats_db_impl.cc
@@ -16,7 +16,6 @@
#include "base/strings/stringprintf.h"
#include "base/task/post_task.h"
#include "media/base/bind_to_current_loop.h"
-#include "media/capabilities/video_decode_stats_db_impl.h"
#include "media/capabilities/video_decode_stats_db_provider.h"
namespace media {
@@ -29,9 +28,6 @@ InMemoryVideoDecodeStatsDBImpl::InMemoryVideoDecodeStatsDBImpl(
InMemoryVideoDecodeStatsDBImpl::~InMemoryVideoDecodeStatsDBImpl() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
-
- if (seed_db_)
- seed_db_->set_dependent_db(nullptr);
}
void InMemoryVideoDecodeStatsDBImpl::Initialize(InitializeCB init_cb) {
@@ -42,7 +38,7 @@ void InMemoryVideoDecodeStatsDBImpl::Initialize(InitializeCB init_cb) {
// Tracking down crbug.com/1114128. Suspect we're double initializing somehow,
  // so this should show who the caller is at the time of the second
  // initialize call.
- CHECK(!seed_db_) << __func__ << " Already have a seed_db_?";
+ DCHECK(!seed_db_) << __func__ << " Already have a seed_db_?";
// Fetch an *initialized* seed DB.
if (seed_db_provider_) {
@@ -65,13 +61,9 @@ void InMemoryVideoDecodeStatsDBImpl::OnGotSeedDB(InitializeCB init_cb,
DVLOG(2) << __func__ << (db ? " has" : " null") << " seed db";
db_init_ = true;
-
- CHECK(!seed_db_) << __func__ << " Already have a seed_db_?";
+ DCHECK(!seed_db_) << __func__ << " Already have a seed_db_?";
seed_db_ = db;
- if (seed_db_)
- seed_db_->set_dependent_db(this);
-
// Hard coding success = true. There are rare cases (e.g. disk corruption)
// where an incognito profile may fail to acquire a reference to the base
// profile's DB. But this just means incognito is in the same boat as guest
diff --git a/chromium/media/capabilities/in_memory_video_decode_stats_db_unittest.cc b/chromium/media/capabilities/in_memory_video_decode_stats_db_unittest.cc
index 6c39f4fa286..7899c78a8c8 100644
--- a/chromium/media/capabilities/in_memory_video_decode_stats_db_unittest.cc
+++ b/chromium/media/capabilities/in_memory_video_decode_stats_db_unittest.cc
@@ -11,7 +11,6 @@
#include "base/test/gtest_util.h"
#include "base/test/task_environment.h"
#include "media/capabilities/in_memory_video_decode_stats_db_impl.h"
-#include "media/capabilities/video_decode_stats_db_impl.h"
#include "media/capabilities/video_decode_stats_db_provider.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -391,14 +390,4 @@ TEST_F(SeededInMemoryDBTest, SeedReadFailureOnAppendingingStats) {
task_environment_.RunUntilIdle();
}
-TEST_F(SeededInMemoryDBTest, SeedDBTearDownRace) {
- ::testing::FLAGS_gtest_death_test_style = "threadsafe";
-
- // Establish depends-on connection from InMemoryDB to SeedDB.
- InitializeEmptyDB();
-
- // Clearing the seed-db dependency should trigger a crash.
- EXPECT_CHECK_DEATH(seed_db_.reset());
-}
-
} // namespace media
diff --git a/chromium/media/capabilities/video_decode_stats_db.cc b/chromium/media/capabilities/video_decode_stats_db.cc
index 74c54417118..954b8008e59 100644
--- a/chromium/media/capabilities/video_decode_stats_db.cc
+++ b/chromium/media/capabilities/video_decode_stats_db.cc
@@ -123,9 +123,4 @@ bool operator!=(const VideoDecodeStatsDB::DecodeStatsEntry& x,
return !(x == y);
}
-VideoDecodeStatsDB::~VideoDecodeStatsDB() {
- // Tracking down crash. See https://crbug/865321.
- CHECK(!dependent_db_) << __func__ << " Destroying before dependent_db_!";
-}
-
} // namespace media
diff --git a/chromium/media/capabilities/video_decode_stats_db.h b/chromium/media/capabilities/video_decode_stats_db.h
index fc42e7842af..128214a9353 100644
--- a/chromium/media/capabilities/video_decode_stats_db.h
+++ b/chromium/media/capabilities/video_decode_stats_db.h
@@ -72,7 +72,7 @@ class MEDIA_EXPORT VideoDecodeStatsDB {
uint64_t frames_power_efficient;
};
- virtual ~VideoDecodeStatsDB();
+ virtual ~VideoDecodeStatsDB() = default;
// Run asynchronous initialization of database. Initialization must complete
// before calling other APIs. Initialization must be RE-RUN after calling
@@ -101,22 +101,6 @@ class MEDIA_EXPORT VideoDecodeStatsDB {
// Clear all statistics from the DB.
virtual void ClearStats(base::OnceClosure clear_done_cb) = 0;
-
- // Tracking down root cause of crash probable UAF (https://crbug/865321).
- // We will CHECK if a |dependent_db_| is found to be set during destruction.
- // Dependent DB should always be destroyed and unhooked before |this|.
- void set_dependent_db(VideoDecodeStatsDB* dependent) {
- // One of these should be non-null.
- CHECK(!dependent_db_ || !dependent);
- // They shouldn't already match.
- CHECK(dependent_db_ != dependent);
-
- dependent_db_ = dependent;
- }
-
- private:
- // See set_dependent_db().
- VideoDecodeStatsDB* dependent_db_ = nullptr;
};
MEDIA_EXPORT bool operator==(const VideoDecodeStatsDB::VideoDescKey& x,
diff --git a/chromium/media/capture/BUILD.gn b/chromium/media/capture/BUILD.gn
index 03b6d3ec832..ab303fa3a11 100644
--- a/chromium/media/capture/BUILD.gn
+++ b/chromium/media/capture/BUILD.gn
@@ -124,9 +124,12 @@ component("capture_lib") {
"video/video_capture_buffer_tracker_factory_impl.h",
"video/video_capture_device_client.cc",
"video/video_capture_device_client.h",
+ "video/video_capture_metrics.cc",
+ "video/video_capture_metrics.h",
"video/video_capture_system.h",
"video/video_capture_system_impl.cc",
"video/video_capture_system_impl.h",
+ "video/video_frame_receiver.cc",
"video/video_frame_receiver.h",
"video/video_frame_receiver_on_task_runner.cc",
"video/video_frame_receiver_on_task_runner.h",
@@ -188,8 +191,11 @@ component("capture_lib") {
"video/mac/video_capture_device_factory_mac.mm",
"video/mac/video_capture_device_mac.h",
"video/mac/video_capture_device_mac.mm",
+ "video/mac/video_capture_metrics_mac.h",
+ "video/mac/video_capture_metrics_mac.mm",
]
deps += [
+ "//components/crash/core/common:crash_key",
"//services/video_capture/public/uma",
"//third_party/decklink",
]
@@ -211,6 +217,8 @@ component("capture_lib") {
"video/win/capability_list_win.h",
"video/win/filter_base_win.cc",
"video/win/filter_base_win.h",
+ "video/win/gpu_memory_buffer_tracker.cc",
+ "video/win/gpu_memory_buffer_tracker.h",
"video/win/metrics.cc",
"video/win/metrics.h",
"video/win/pin_base_win.cc",
@@ -220,6 +228,8 @@ component("capture_lib") {
"video/win/sink_filter_win.h",
"video/win/sink_input_pin_win.cc",
"video/win/sink_input_pin_win.h",
+ "video/win/video_capture_buffer_tracker_factory_win.cc",
+ "video/win/video_capture_buffer_tracker_factory_win.h",
"video/win/video_capture_device_factory_win.cc",
"video/win/video_capture_device_factory_win.h",
"video/win/video_capture_device_mf_win.cc",
@@ -228,19 +238,15 @@ component("capture_lib") {
"video/win/video_capture_device_utils_win.h",
"video/win/video_capture_device_win.cc",
"video/win/video_capture_device_win.h",
- "video/win/video_capture_dxgi_device_manager.cc",
- "video/win/video_capture_dxgi_device_manager.h",
]
deps += [ "//media/base/win:media_foundation_util" ]
libs = [
- "d3d11.lib",
"mf.lib",
"mfplat.lib",
"mfreadwrite.lib",
"mfuuid.lib",
]
ldflags = [
- "/DELAYLOAD:d3d11.dll",
"/DELAYLOAD:mf.dll",
"/DELAYLOAD:mfplat.dll",
"/DELAYLOAD:mfreadwrite.dll",
@@ -269,7 +275,7 @@ component("capture_lib") {
]
}
- if (is_ash) {
+ if (is_chromeos_ash) {
sources += [
"video/chromeos/camera_3a_controller.cc",
"video/chromeos/camera_3a_controller.h",
@@ -287,8 +293,6 @@ component("capture_lib") {
"video/chromeos/camera_device_delegate.h",
"video/chromeos/camera_hal_delegate.cc",
"video/chromeos/camera_hal_delegate.h",
- "video/chromeos/camera_hal_dispatcher_impl.cc",
- "video/chromeos/camera_hal_dispatcher_impl.h",
"video/chromeos/camera_metadata_utils.cc",
"video/chromeos/camera_metadata_utils.h",
"video/chromeos/capture_metadata_dispatcher.h",
@@ -306,8 +310,12 @@ component("capture_lib") {
"video/chromeos/scoped_video_capture_jpeg_decoder.h",
"video/chromeos/stream_buffer_manager.cc",
"video/chromeos/stream_buffer_manager.h",
+ "video/chromeos/token_manager.cc",
+ "video/chromeos/token_manager.h",
"video/chromeos/vendor_tag_ops_delegate.cc",
"video/chromeos/vendor_tag_ops_delegate.h",
+ "video/chromeos/video_capture_device_chromeos_delegate.cc",
+ "video/chromeos/video_capture_device_chromeos_delegate.h",
"video/chromeos/video_capture_device_chromeos_halv3.cc",
"video/chromeos/video_capture_device_chromeos_halv3.h",
"video/chromeos/video_capture_device_factory_chromeos.cc",
@@ -324,16 +332,28 @@ component("capture_lib") {
]
public_deps += [ "//media/capture/video/chromeos/public" ]
deps += [
+ "//ash/constants",
"//build/config/linux/libdrm",
- "//chromeos/dbus/power",
"//components/chromeos_camera:mojo_mjpeg_decode_accelerator",
"//components/chromeos_camera/common",
+ "//components/device_event_log",
"//gpu/ipc/common:common",
"//media/capture/video/chromeos/mojom:cros_camera",
"//third_party/libsync",
]
}
+ if (is_chromeos_ash) {
+ sources += [
+ "video/chromeos/ash/camera_hal_dispatcher_impl.cc",
+ "video/chromeos/ash/camera_hal_dispatcher_impl.h",
+ "video/chromeos/ash/power_manager_client_proxy.cc",
+ "video/chromeos/ash/power_manager_client_proxy.h",
+ ]
+
+ deps += [ "//chromeos/dbus/power" ]
+ }
+
if (is_fuchsia) {
sources += [
"video/fuchsia/video_capture_device_factory_fuchsia.cc",
@@ -388,7 +408,7 @@ source_set("test_support") {
]
}
- if (is_ash) {
+ if (is_chromeos_ash) {
sources += [
"video/chromeos/mock_camera_module.cc",
"video/chromeos/mock_camera_module.h",
@@ -419,6 +439,7 @@ test("capture_unittests") {
"video/file_video_capture_device_unittest.cc",
"video/video_capture_device_client_unittest.cc",
"video/video_capture_device_unittest.cc",
+ "video/video_capture_metrics_unittest.cc",
"video_capture_types_unittest.cc",
]
@@ -447,7 +468,7 @@ test("capture_unittests") {
]
}
- if (is_ash) {
+ if (is_chromeos_ash) {
sources += [ "video/linux/camera_config_chromeos_unittest.cc" ]
}
@@ -485,6 +506,7 @@ test("capture_unittests") {
"video/mac/video_capture_device_avfoundation_mac_unittest.mm",
"video/mac/video_capture_device_factory_mac_unittest.mm",
"video/mac/video_capture_device_mac_unittest.mm",
+ "video/mac/video_capture_metrics_mac_unittest.mm",
]
frameworks = [
"AVFoundation.framework",
@@ -492,10 +514,14 @@ test("capture_unittests") {
"CoreVideo.framework",
"IOSurface.framework",
]
+ deps += [ "//third_party/ocmock" ]
}
if (is_win) {
sources += [
+ "video/win/d3d_capture_test_utils.cc",
+ "video/win/d3d_capture_test_utils.h",
+ "video/win/gpu_memory_buffer_tracker_unittest.cc",
"video/win/video_capture_device_factory_win_unittest.cc",
"video/win/video_capture_device_mf_win_unittest.cc",
]
@@ -518,17 +544,18 @@ test("capture_unittests") {
}
# TODO(https://crbug.com/1043007): use is_linux.
- if (is_ash) {
+ if (is_chromeos_ash) {
deps += [ "//media/gpu/test:local_gpu_memory_buffer_manager" ]
}
- if (is_ash) {
+ if (is_chromeos_ash) {
sources += [
+ "video/chromeos/ash/camera_hal_dispatcher_impl_unittest.cc",
"video/chromeos/camera_3a_controller_unittest.cc",
"video/chromeos/camera_device_delegate_unittest.cc",
"video/chromeos/camera_hal_delegate_unittest.cc",
- "video/chromeos/camera_hal_dispatcher_impl_unittest.cc",
"video/chromeos/request_manager_unittest.cc",
+ "video/chromeos/token_manager_unittest.cc",
]
deps += [
"//build/config/linux/libdrm",
diff --git a/chromium/media/capture/content/OWNERS b/chromium/media/capture/content/OWNERS
index 90320c9bd1b..866a0313dc6 100644
--- a/chromium/media/capture/content/OWNERS
+++ b/chromium/media/capture/content/OWNERS
@@ -1,2 +1,2 @@
-miu@chromium.org
+jophba@chromium.org
mfoltz@chromium.org
diff --git a/chromium/media/capture/content/android/BUILD.gn b/chromium/media/capture/content/android/BUILD.gn
index 5343fa2d0c8..879405e2073 100644
--- a/chromium/media/capture/content/android/BUILD.gn
+++ b/chromium/media/capture/content/android/BUILD.gn
@@ -37,7 +37,7 @@ android_library("screen_capture_java") {
deps = [
"//base:base_java",
"//base:jni_java",
- "//third_party/android_deps:androidx_annotation_annotation_java",
+ "//third_party/androidx:androidx_annotation_annotation_java",
]
sources = [ "java/src/org/chromium/media/ScreenCapture.java" ]
annotation_processor_deps = [ "//base/android/jni_generator:jni_processor" ]
diff --git a/chromium/media/capture/content/android/thread_safe_capture_oracle.cc b/chromium/media/capture/content/android/thread_safe_capture_oracle.cc
index 57144977f4b..af63b851bc3 100644
--- a/chromium/media/capture/content/android/thread_safe_capture_oracle.cc
+++ b/chromium/media/capture/content/android/thread_safe_capture_oracle.cc
@@ -221,18 +221,18 @@ void ThreadSafeCaptureOracle::DidCaptureFrame(
if (!should_deliver_frame || !client_)
return;
- frame->metadata()->frame_rate = params_.requested_format.frame_rate;
- frame->metadata()->capture_begin_time = capture->begin_time;
- frame->metadata()->capture_end_time = base::TimeTicks::Now();
- frame->metadata()->frame_duration = capture->frame_duration;
- frame->metadata()->reference_time = reference_time;
+ frame->metadata().frame_rate = params_.requested_format.frame_rate;
+ frame->metadata().capture_begin_time = capture->begin_time;
+ frame->metadata().capture_end_time = base::TimeTicks::Now();
+ frame->metadata().frame_duration = capture->frame_duration;
+ frame->metadata().reference_time = reference_time;
media::VideoCaptureFormat format(frame->coded_size(),
params_.requested_format.frame_rate,
frame->format());
client_->OnIncomingCapturedBufferExt(
std::move(capture->buffer), format, frame->ColorSpace(), reference_time,
- frame->timestamp(), frame->visible_rect(), *frame->metadata());
+ frame->timestamp(), frame->visible_rect(), frame->metadata());
}
void ThreadSafeCaptureOracle::OnConsumerReportingUtilization(
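The metadata() calls above reflect an accessor that now returns a reference instead of a pointer. As a minimal sketch, assuming VideoFrame stores the struct in a metadata_ member (the real definition is outside this diff):

  // Sketch only: the frame hands out its metadata by reference, so call
  // sites assign fields in place instead of dereferencing a pointer.
  VideoFrameMetadata& VideoFrame::metadata() { return metadata_; }
  const VideoFrameMetadata& VideoFrame::metadata() const { return metadata_; }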
diff --git a/chromium/media/capture/mojom/video_capture.mojom b/chromium/media/capture/mojom/video_capture.mojom
index 22602278948..1b999399f60 100644
--- a/chromium/media/capture/mojom/video_capture.mojom
+++ b/chromium/media/capture/mojom/video_capture.mojom
@@ -69,9 +69,8 @@ interface VideoCaptureObserver {
// video frames via calls to OnBufferReady().
OnNewBuffer(int32 buffer_id, media.mojom.VideoBufferHandle buffer_handle);
- // |buffer_id| has video capture data with |info| containing the associated
- // VideoFrame constituent parts.
- OnBufferReady(int32 buffer_id, VideoFrameInfo info);
+ // |buffer| and |scaled_buffers| have capture data ready for consumption.
+ OnBufferReady(ReadyBuffer buffer, array<ReadyBuffer> scaled_buffers);
// The buffer handle previously registered for |buffer_id| via OnNewBuffer(),
// is no longer going to be used by the Browser/Host.
diff --git a/chromium/media/capture/mojom/video_capture_types.mojom b/chromium/media/capture/mojom/video_capture_types.mojom
index 8d4f4c68d38..dc58b87da3c 100644
--- a/chromium/media/capture/mojom/video_capture_types.mojom
+++ b/chromium/media/capture/mojom/video_capture_types.mojom
@@ -43,6 +43,7 @@ enum VideoCapturePixelFormat {
XR30,
XB30,
BGRA,
+ RGBAF16,
};
enum ResolutionChangePolicy {
@@ -230,6 +231,8 @@ enum VideoCaptureError {
kFuchsiaUnsupportedPixelFormat,
kFuchsiaFailedToMapSysmemBuffer,
kCrosHalV3DeviceContextDuplicatedClient,
+ kDesktopCaptureDeviceMacFailedStreamCreate,
+ kDesktopCaptureDeviceMacFailedStreamStart,
};
enum VideoCaptureFrameDropReason {
@@ -322,6 +325,13 @@ struct VideoFrameInfo{
PlaneStrides? strides;
};
+// Represents a buffer that is ready for consumption. |buffer_id| has video
+// capture data and |info| contains the associated VideoFrame constituent parts.
+struct ReadyBuffer {
+ int32 buffer_id;
+ VideoFrameInfo info;
+};
+
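On the C++ side, the generated binding for this struct is a ReadyBufferPtr carrying the same two fields, and the revised OnBufferReady() in video_capture.mojom above delivers one primary buffer plus any scaled representations. A hedged sketch of a receiver; Observer and Consume() are schematic names, not part of this patch:

  // Sketch against the generated mojom bindings: array<ReadyBuffer> maps
  // to std::vector<media::mojom::ReadyBufferPtr>.
  void Observer::OnBufferReady(
      media::mojom::ReadyBufferPtr buffer,
      std::vector<media::mojom::ReadyBufferPtr> scaled_buffers) {
    Consume(buffer->buffer_id, std::move(buffer->info));
    for (auto& scaled : scaled_buffers)
      Consume(scaled->buffer_id, std::move(scaled->info));
  }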
// Represents information about a capture device.
// |device_id| represents a unique id of a physical device. Since the same
// physical device may be accessible through different APIs |capture_api|
diff --git a/chromium/media/capture/mojom/video_capture_types_mojom_traits.cc b/chromium/media/capture/mojom/video_capture_types_mojom_traits.cc
index d94dd8abe7d..50fd6931568 100644
--- a/chromium/media/capture/mojom/video_capture_types_mojom_traits.cc
+++ b/chromium/media/capture/mojom/video_capture_types_mojom_traits.cc
@@ -147,6 +147,8 @@ EnumTraits<media::mojom::VideoCapturePixelFormat,
return media::mojom::VideoCapturePixelFormat::XR30;
case media::VideoPixelFormat::PIXEL_FORMAT_XB30:
return media::mojom::VideoCapturePixelFormat::XB30;
+ case media::VideoPixelFormat::PIXEL_FORMAT_RGBAF16:
+ return media::mojom::VideoCapturePixelFormat::RGBAF16;
}
NOTREACHED();
return media::mojom::VideoCapturePixelFormat::I420;
@@ -248,6 +250,9 @@ bool EnumTraits<media::mojom::VideoCapturePixelFormat,
case media::mojom::VideoCapturePixelFormat::BGRA:
*output = media::PIXEL_FORMAT_BGRA;
return true;
+ case media::mojom::VideoCapturePixelFormat::RGBAF16:
+ *output = media::PIXEL_FORMAT_RGBAF16;
+ return true;
}
NOTREACHED();
return false;
@@ -704,6 +709,12 @@ EnumTraits<media::mojom::VideoCaptureError, media::VideoCaptureError>::ToMojom(
case media::VideoCaptureError::kCrosHalV3DeviceContextDuplicatedClient:
return media::mojom::VideoCaptureError::
kCrosHalV3DeviceContextDuplicatedClient;
+ case media::VideoCaptureError::kDesktopCaptureDeviceMacFailedStreamCreate:
+ return media::mojom::VideoCaptureError::
+ kDesktopCaptureDeviceMacFailedStreamCreate;
+ case media::VideoCaptureError::kDesktopCaptureDeviceMacFailedStreamStart:
+ return media::mojom::VideoCaptureError::
+ kDesktopCaptureDeviceMacFailedStreamStart;
}
NOTREACHED();
return media::mojom::VideoCaptureError::kNone;
@@ -1251,6 +1262,16 @@ bool EnumTraits<media::mojom::VideoCaptureError, media::VideoCaptureError>::
*output =
media::VideoCaptureError::kCrosHalV3DeviceContextDuplicatedClient;
return true;
+ case media::mojom::VideoCaptureError::
+ kDesktopCaptureDeviceMacFailedStreamCreate:
+ *output =
+ media::VideoCaptureError::kDesktopCaptureDeviceMacFailedStreamCreate;
+ return true;
+ case media::mojom::VideoCaptureError::
+ kDesktopCaptureDeviceMacFailedStreamStart:
+ *output =
+ media::VideoCaptureError::kDesktopCaptureDeviceMacFailedStreamStart;
+ return true;
}
NOTREACHED();
return false;
diff --git a/chromium/media/capture/video/DEPS b/chromium/media/capture/video/DEPS
index 1ddde61f105..aa779c908c9 100644
--- a/chromium/media/capture/video/DEPS
+++ b/chromium/media/capture/video/DEPS
@@ -1,5 +1,6 @@
include_rules = [
"+chromeos/dbus",
+ "+components/device_event_log",
"+mojo/public/cpp",
"+third_party/libyuv",
]
diff --git a/chromium/media/capture/video/android/BUILD.gn b/chromium/media/capture/video/android/BUILD.gn
index d2c29492525..038445788ef 100644
--- a/chromium/media/capture/video/android/BUILD.gn
+++ b/chromium/media/capture/video/android/BUILD.gn
@@ -52,7 +52,7 @@ android_library("capture_java") {
deps = [
"//base:base_java",
"//base:jni_java",
- "//third_party/android_deps:androidx_annotation_annotation_java",
+ "//third_party/androidx:androidx_annotation_annotation_java",
]
annotation_processor_deps = [ "//base/android/jni_generator:jni_processor" ]
diff --git a/chromium/media/capture/video/android/video_capture_device_factory_android.cc b/chromium/media/capture/video/android/video_capture_device_factory_android.cc
index ff95a22a29d..0708e1ecdf4 100644
--- a/chromium/media/capture/video/android/video_capture_device_factory_android.cc
+++ b/chromium/media/capture/video/android/video_capture_device_factory_android.cc
@@ -82,8 +82,10 @@ void VideoCaptureDeviceFactoryAndroid::GetDevicesInfo(
const std::string device_id =
base::android::ConvertJavaStringToUTF8(device_id_jstring);
- const int capture_api_type =
- Java_VideoCaptureFactory_getCaptureApiType(env, camera_index);
+ const VideoCaptureApi capture_api_type = static_cast<VideoCaptureApi>(
+ Java_VideoCaptureFactory_getCaptureApiType(env, camera_index));
+ if (capture_api_type == VideoCaptureApi::UNKNOWN)
+ continue;
VideoCaptureControlSupport control_support;
const int facing_mode =
Java_VideoCaptureFactory_getFacingMode(env, camera_index);
@@ -101,9 +103,8 @@ void VideoCaptureDeviceFactoryAndroid::GetDevicesInfo(
// currently only used for USB model identifiers, so this implementation
// just indicates an unknown device model (by not providing one).
VideoCaptureDeviceInfo device_info(VideoCaptureDeviceDescriptor(
- display_name, device_id, "" /*model_id*/,
- static_cast<VideoCaptureApi>(capture_api_type), control_support,
- VideoCaptureTransportType::OTHER_TRANSPORT,
+ display_name, device_id, "" /*model_id*/, capture_api_type,
+ control_support, VideoCaptureTransportType::OTHER_TRANSPORT,
static_cast<VideoFacingMode>(facing_mode)));
auto it = supported_formats_cache_.find(device_id);
diff --git a/chromium/media/capture/video/chromeos/DEPS b/chromium/media/capture/video/chromeos/DEPS
index 156f915bf69..1d9cd2a7d89 100644
--- a/chromium/media/capture/video/chromeos/DEPS
+++ b/chromium/media/capture/video/chromeos/DEPS
@@ -1,5 +1,5 @@
include_rules = [
- "+chromeos/dbus",
+ "+ash/constants/ash_features.h",
"+components/chromeos_camera",
"+third_party/libsync",
]
diff --git a/chromium/media/capture/video/chromeos/ash/DEPS b/chromium/media/capture/video/chromeos/ash/DEPS
new file mode 100644
index 00000000000..09b7125849a
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/ash/DEPS
@@ -0,0 +1,3 @@
+include_rules = [
+ "+chromeos/dbus",
+]
diff --git a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc b/chromium/media/capture/video/chromeos/ash/camera_hal_dispatcher_impl.cc
index 39fa08efe27..fe9613b55d1 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc
+++ b/chromium/media/capture/video/chromeos/ash/camera_hal_dispatcher_impl.cc
@@ -2,8 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/capture/video/chromeos/camera_hal_dispatcher_impl.h"
-
+#include "media/capture/video/chromeos/ash/camera_hal_dispatcher_impl.h"
#include <fcntl.h>
#include <grp.h>
#include <poll.h>
@@ -16,6 +15,7 @@
#include "base/files/file.h"
#include "base/files/file_path.h"
#include "base/files/file_util.h"
+#include "base/notreached.h"
#include "base/posix/eintr_wrapper.h"
#include "base/rand_util.h"
#include "base/single_thread_task_runner.h"
@@ -23,6 +23,8 @@
#include "base/strings/string_number_conversions.h"
#include "base/synchronization/waitable_event.h"
#include "base/trace_event/trace_event.h"
+#include "components/device_event_log/device_event_log.h"
+#include "media/base/bind_to_current_loop.h"
#include "media/capture/video/chromeos/mojom/camera_common.mojom.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
#include "mojo/public/cpp/platform/named_platform_channel.h"
@@ -50,7 +52,8 @@ std::string GenerateRandomToken() {
// to here, and the write side will be closed in such a case.
bool WaitForSocketReadable(int raw_socket_fd, int raw_cancel_fd) {
struct pollfd fds[2] = {
- {raw_socket_fd, POLLIN, 0}, {raw_cancel_fd, POLLIN, 0},
+ {raw_socket_fd, POLLIN, 0},
+ {raw_cancel_fd, POLLIN, 0},
};
if (HANDLE_EINTR(poll(fds, base::size(fds), -1)) <= 0) {
@@ -67,11 +70,22 @@ bool WaitForSocketReadable(int raw_socket_fd, int raw_cancel_fd) {
return true;
}
+bool HasCrosCameraTest() {
+ static constexpr char kCrosCameraTestPath[] =
+ "/usr/local/bin/cros_camera_test";
+
+ base::FilePath path(kCrosCameraTestPath);
+ return base::PathExists(path);
+}
+
class MojoCameraClientObserver : public CameraClientObserver {
public:
explicit MojoCameraClientObserver(
- mojo::PendingRemote<cros::mojom::CameraHalClient> client)
- : client_(std::move(client)) {}
+ mojo::PendingRemote<cros::mojom::CameraHalClient> client,
+ cros::mojom::CameraClientType type,
+ base::UnguessableToken auth_token)
+ : CameraClientObserver(type, std::move(auth_token)),
+ client_(std::move(client)) {}
void OnChannelCreated(
mojo::PendingRemote<cros::mojom::CameraModule> camera_module) override {
@@ -89,6 +103,33 @@ class MojoCameraClientObserver : public CameraClientObserver {
CameraClientObserver::~CameraClientObserver() = default;
+bool CameraClientObserver::Authenticate(TokenManager* token_manager) {
+ auto authenticated_type =
+ token_manager->AuthenticateClient(type_, auth_token_);
+ if (!authenticated_type) {
+ return false;
+ }
+ type_ = authenticated_type.value();
+ return true;
+}
+
+FailedCameraHalServerCallbacks::FailedCameraHalServerCallbacks()
+ : callbacks_(this) {}
+FailedCameraHalServerCallbacks::~FailedCameraHalServerCallbacks() = default;
+
+mojo::PendingRemote<cros::mojom::CameraHalServerCallbacks>
+FailedCameraHalServerCallbacks::GetRemote() {
+ return callbacks_.BindNewPipeAndPassRemote();
+}
+
+void FailedCameraHalServerCallbacks::CameraDeviceActivityChange(
+ int32_t camera_id,
+ bool opened,
+ cros::mojom::CameraClientType type) {}
+
+void FailedCameraHalServerCallbacks::CameraPrivacySwitchStateChange(
+ cros::mojom::CameraPrivacySwitchState state) {}
+
// static
CameraHalDispatcherImpl* CameraHalDispatcherImpl::GetInstance() {
return base::Singleton<CameraHalDispatcherImpl>::get();
@@ -119,10 +160,25 @@ bool CameraHalDispatcherImpl::Start(
if (!StartThreads()) {
return false;
}
+ // This event is for adding the camera category to the categories list.
+ TRACE_EVENT0("camera", "CameraHalDispatcherImpl");
+ base::trace_event::TraceLog::GetInstance()->AddEnabledStateObserver(this);
+
jda_factory_ = std::move(jda_factory);
jea_factory_ = std::move(jea_factory);
base::WaitableEvent started(base::WaitableEvent::ResetPolicy::MANUAL,
base::WaitableEvent::InitialState::NOT_SIGNALED);
+ // It's important that we generate tokens before creating the socket: once
+ // the socket is available, everyone connecting to it will start fetching
+ // tokens.
+ if (!token_manager_.GenerateServerToken()) {
+ LOG(ERROR) << "Failed to generate authentication token for server";
+ return false;
+ }
+ if (HasCrosCameraTest() && !token_manager_.GenerateTestClientToken()) {
+ LOG(ERROR) << "Failed to generate token for test client";
+ return false;
+ }
blocking_io_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&CameraHalDispatcherImpl::CreateSocket,
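The comment above pins down the ordering: tokens must exist before the socket accepts connections, because registration is now authenticated. From the client's side the handshake looks roughly like this (a sketch using the interface added in this patch; result codes follow the -errno convention used elsewhere in the file):

  // Sketch only: fetch a trusted-client token, then register with it.
  auto type = cros::mojom::CameraClientType::TESTING;
  dispatcher->RegisterClientWithToken(
      std::move(client), type, dispatcher->GetTokenForTrustedClient(type),
      base::BindOnce([](int32_t result) {
        if (result != 0)
          LOG(ERROR) << "Client registration failed: " << -result;
      }));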
@@ -132,14 +188,16 @@ bool CameraHalDispatcherImpl::Start(
}
void CameraHalDispatcherImpl::AddClientObserver(
- std::unique_ptr<CameraClientObserver> observer) {
+ std::unique_ptr<CameraClientObserver> observer,
+ base::OnceCallback<void(int32_t)> result_callback) {
// If |proxy_thread_| fails to start in Start() then CameraHalDelegate will
// not be created, and this function will not be called.
DCHECK(proxy_thread_.IsRunning());
proxy_thread_.task_runner()->PostTask(
FROM_HERE,
base::BindOnce(&CameraHalDispatcherImpl::AddClientObserverOnProxyThread,
- base::Unretained(this), std::move(observer)));
+ base::Unretained(this), std::move(observer),
+ std::move(result_callback)));
}
bool CameraHalDispatcherImpl::IsStarted() {
@@ -147,13 +205,58 @@ bool CameraHalDispatcherImpl::IsStarted() {
proxy_fd_.is_valid();
}
+void CameraHalDispatcherImpl::AddActiveClientObserver(
+ CameraActiveClientObserver* observer) {
+ base::AutoLock lock(opened_camera_id_map_lock_);
+ for (auto& opened_camera_id_pair : opened_camera_id_map_) {
+ const auto& camera_client_type = opened_camera_id_pair.first;
+ const auto& camera_id_set = opened_camera_id_pair.second;
+ if (!camera_id_set.empty()) {
+ observer->OnActiveClientChange(camera_client_type, /*is_active=*/true);
+ }
+ }
+ active_client_observers_->AddObserver(observer);
+}
+
+void CameraHalDispatcherImpl::RemoveActiveClientObserver(
+ CameraActiveClientObserver* observer) {
+ active_client_observers_->RemoveObserver(observer);
+}
+
+cros::mojom::CameraPrivacySwitchState
+CameraHalDispatcherImpl::AddCameraPrivacySwitchObserver(
+ CameraPrivacySwitchObserver* observer) {
+ privacy_switch_observers_->AddObserver(observer);
+
+ base::AutoLock lock(privacy_switch_state_lock_);
+ return current_privacy_switch_state_;
+}
+
+void CameraHalDispatcherImpl::RemoveCameraPrivacySwitchObserver(
+ CameraPrivacySwitchObserver* observer) {
+ privacy_switch_observers_->RemoveObserver(observer);
+}
+
+void CameraHalDispatcherImpl::RegisterPluginVmToken(
+ const base::UnguessableToken& token) {
+ token_manager_.RegisterPluginVmToken(token);
+}
+
+void CameraHalDispatcherImpl::UnregisterPluginVmToken(
+ const base::UnguessableToken& token) {
+ token_manager_.UnregisterPluginVmToken(token);
+}
+
CameraHalDispatcherImpl::CameraHalDispatcherImpl()
: proxy_thread_("CameraProxyThread"),
- blocking_io_thread_("CameraBlockingIOThread") {
- // This event is for adding camera category to categories list.
- TRACE_EVENT0("camera", "CameraHalDispatcherImpl");
- base::trace_event::TraceLog::GetInstance()->AddEnabledStateObserver(this);
-}
+ blocking_io_thread_("CameraBlockingIOThread"),
+ camera_hal_server_callbacks_(this),
+ active_client_observers_(
+ new base::ObserverListThreadSafe<CameraActiveClientObserver>()),
+ current_privacy_switch_state_(
+ cros::mojom::CameraPrivacySwitchState::UNKNOWN),
+ privacy_switch_observers_(
+ new base::ObserverListThreadSafe<CameraPrivacySwitchObserver>()) {}
CameraHalDispatcherImpl::~CameraHalDispatcherImpl() {
VLOG(1) << "Stopping CameraHalDispatcherImpl...";
@@ -164,23 +267,41 @@ CameraHalDispatcherImpl::~CameraHalDispatcherImpl() {
proxy_thread_.Stop();
}
blocking_io_thread_.Stop();
- base::trace_event::TraceLog::GetInstance()->RemoveEnabledStateObserver(this);
- VLOG(1) << "CameraHalDispatcherImpl stopped";
+ CAMERA_LOG(EVENT) << "CameraHalDispatcherImpl stopped";
}
void CameraHalDispatcherImpl::RegisterServer(
mojo::PendingRemote<cros::mojom::CameraHalServer> camera_hal_server) {
DCHECK(proxy_task_runner_->BelongsToCurrentThread());
+ LOG(ERROR) << "CameraHalDispatcher::RegisterServer is deprecated. "
+ "CameraHalServer will not be registered.";
+}
+
+void CameraHalDispatcherImpl::RegisterServerWithToken(
+ mojo::PendingRemote<cros::mojom::CameraHalServer> camera_hal_server,
+ const base::UnguessableToken& token,
+ RegisterServerWithTokenCallback callback) {
+ DCHECK(proxy_task_runner_->BelongsToCurrentThread());
if (camera_hal_server_) {
LOG(ERROR) << "Camera HAL server is already registered";
+ std::move(callback).Run(-EALREADY,
+ failed_camera_hal_server_callbacks_.GetRemote());
+ return;
+ }
+ if (!token_manager_.AuthenticateServer(token)) {
+ LOG(ERROR) << "Failed to authenticate server";
+ std::move(callback).Run(-EPERM,
+ failed_camera_hal_server_callbacks_.GetRemote());
return;
}
camera_hal_server_.Bind(std::move(camera_hal_server));
camera_hal_server_.set_disconnect_handler(
base::BindOnce(&CameraHalDispatcherImpl::OnCameraHalServerConnectionError,
base::Unretained(this)));
- VLOG(1) << "Camera HAL server registered";
+ CAMERA_LOG(EVENT) << "Camera HAL server registered";
+ std::move(callback).Run(
+ 0, camera_hal_server_callbacks_.BindNewPipeAndPassRemote());
// Set up the Mojo channels for clients which registered before the server
// registers.
@@ -191,13 +312,24 @@ void CameraHalDispatcherImpl::RegisterServer(
void CameraHalDispatcherImpl::RegisterClient(
mojo::PendingRemote<cros::mojom::CameraHalClient> client) {
- // RegisterClient can be called locally by ArcCameraBridge. Unretained
- // reference is safe here because CameraHalDispatcherImpl owns
+ NOTREACHED() << "RegisterClient() is disabled";
+}
+
+void CameraHalDispatcherImpl::RegisterClientWithToken(
+ mojo::PendingRemote<cros::mojom::CameraHalClient> client,
+ cros::mojom::CameraClientType type,
+ const base::UnguessableToken& auth_token,
+ RegisterClientWithTokenCallback callback) {
+ base::UnguessableToken client_auth_token = auth_token;
+ // Unretained reference is safe here because CameraHalDispatcherImpl owns
// |proxy_thread_|.
proxy_task_runner_->PostTask(
FROM_HERE,
- base::BindOnce(&CameraHalDispatcherImpl::RegisterClientOnProxyThread,
- base::Unretained(this), std::move(client)));
+ base::BindOnce(
+ &CameraHalDispatcherImpl::RegisterClientWithTokenOnProxyThread,
+ base::Unretained(this), std::move(client), type,
+ std::move(client_auth_token),
+ media::BindToCurrentLoop(std::move(callback))));
}
void CameraHalDispatcherImpl::GetJpegDecodeAccelerator(
@@ -212,6 +344,64 @@ void CameraHalDispatcherImpl::GetJpegEncodeAccelerator(
jea_factory_.Run(std::move(jea_receiver));
}
+void CameraHalDispatcherImpl::CameraDeviceActivityChange(
+ int32_t camera_id,
+ bool opened,
+ cros::mojom::CameraClientType type) {
+ VLOG(1) << type << (opened ? " opened " : " closed ") << "camera "
+ << camera_id;
+ base::AutoLock lock(opened_camera_id_map_lock_);
+ auto& camera_id_set = opened_camera_id_map_[type];
+ if (opened) {
+ auto result = camera_id_set.insert(camera_id);
+ if (!result.second) { // No element inserted.
+ LOG(WARNING) << "Received duplicated open notification for camera "
+ << camera_id;
+ return;
+ }
+ if (camera_id_set.size() == 1) {
+ VLOG(1) << type << " is active";
+ active_client_observers_->Notify(
+ FROM_HERE, &CameraActiveClientObserver::OnActiveClientChange, type,
+ /*is_active=*/true);
+ }
+ } else {
+ auto it = camera_id_set.find(camera_id);
+ if (it == camera_id_set.end()) {
+ // This can happen if something happened to the client process and it
+ // simultaneously lost connections to both CameraHalDispatcher and
+ // CameraHalServer.
+ LOG(WARNING) << "Received close notification for camera " << camera_id
+ << " which is not opened";
+ return;
+ }
+ camera_id_set.erase(it);
+ if (camera_id_set.empty()) {
+ VLOG(1) << type << " is inactive";
+ active_client_observers_->Notify(
+ FROM_HERE, &CameraActiveClientObserver::OnActiveClientChange, type,
+ /*is_active=*/false);
+ }
+ }
+}
+
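An observer of these activity events only needs to implement OnActiveClientChange() from CameraActiveClientObserver (declared in the new header below); AddActiveClientObserver() above replays the current state on registration. A minimal sketch with a logging body standing in for real handling:

  // Sketch only: notifications fan out through
  // base::ObserverListThreadSafe, so this runs on the sequence the
  // observer registered from.
  class ActivityLogger : public media::CameraActiveClientObserver {
   public:
    void OnActiveClientChange(cros::mojom::CameraClientType type,
                              bool is_active) override {
      VLOG(1) << type << (is_active ? " is active" : " is inactive");
    }
  };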
+void CameraHalDispatcherImpl::CameraPrivacySwitchStateChange(
+ cros::mojom::CameraPrivacySwitchState state) {
+ DCHECK(proxy_task_runner_->BelongsToCurrentThread());
+
+ base::AutoLock lock(privacy_switch_state_lock_);
+ current_privacy_switch_state_ = state;
+ privacy_switch_observers_->Notify(
+ FROM_HERE,
+ &CameraPrivacySwitchObserver::OnCameraPrivacySwitchStatusChanged,
+ current_privacy_switch_state_);
+}
+
+base::UnguessableToken CameraHalDispatcherImpl::GetTokenForTrustedClient(
+ cros::mojom::CameraClientType type) {
+ return token_manager_.GetTokenForTrustedClient(type);
+}
+
void CameraHalDispatcherImpl::OnTraceLogEnabled() {
proxy_task_runner_->PostTask(
FROM_HERE,
@@ -347,33 +537,46 @@ void CameraHalDispatcherImpl::StartServiceLoop(base::ScopedFD socket_fd,
}
}
-void CameraHalDispatcherImpl::RegisterClientOnProxyThread(
- mojo::PendingRemote<cros::mojom::CameraHalClient> client) {
+void CameraHalDispatcherImpl::RegisterClientWithTokenOnProxyThread(
+ mojo::PendingRemote<cros::mojom::CameraHalClient> client,
+ cros::mojom::CameraClientType type,
+ base::UnguessableToken auth_token,
+ RegisterClientWithTokenCallback callback) {
DCHECK(proxy_task_runner_->BelongsToCurrentThread());
- auto client_observer =
- std::make_unique<MojoCameraClientObserver>(std::move(client));
+ auto client_observer = std::make_unique<MojoCameraClientObserver>(
+ std::move(client), type, std::move(auth_token));
client_observer->client().set_disconnect_handler(base::BindOnce(
&CameraHalDispatcherImpl::OnCameraHalClientConnectionError,
base::Unretained(this), base::Unretained(client_observer.get())));
- AddClientObserver(std::move(client_observer));
- VLOG(1) << "Camera HAL client registered";
+ AddClientObserverOnProxyThread(std::move(client_observer),
+ std::move(callback));
}
void CameraHalDispatcherImpl::AddClientObserverOnProxyThread(
- std::unique_ptr<CameraClientObserver> observer) {
+ std::unique_ptr<CameraClientObserver> observer,
+ base::OnceCallback<void(int32_t)> result_callback) {
DCHECK(proxy_task_runner_->BelongsToCurrentThread());
+ if (!observer->Authenticate(&token_manager_)) {
+ LOG(ERROR) << "Failed to authenticate camera client observer";
+ std::move(result_callback).Run(-EPERM);
+ return;
+ }
if (camera_hal_server_) {
EstablishMojoChannel(observer.get());
}
client_observers_.insert(std::move(observer));
+ std::move(result_callback).Run(0);
+ CAMERA_LOG(EVENT) << "Camera HAL client registered";
}
void CameraHalDispatcherImpl::EstablishMojoChannel(
CameraClientObserver* client_observer) {
DCHECK(proxy_task_runner_->BelongsToCurrentThread());
mojo::PendingRemote<cros::mojom::CameraModule> camera_module;
+ const auto& type = client_observer->GetType();
+ CAMERA_LOG(EVENT) << "Establishing server channel for " << type;
camera_hal_server_->CreateChannel(
- camera_module.InitWithNewPipeAndPassReceiver());
+ camera_module.InitWithNewPipeAndPassReceiver(), type);
client_observer->OnChannelCreated(std::move(camera_module));
}
@@ -388,22 +591,58 @@ void CameraHalDispatcherImpl::OnPeerConnected(
void CameraHalDispatcherImpl::OnCameraHalServerConnectionError() {
DCHECK(proxy_task_runner_->BelongsToCurrentThread());
- VLOG(1) << "Camera HAL server connection lost";
+ base::AutoLock lock(opened_camera_id_map_lock_);
+ CAMERA_LOG(EVENT) << "Camera HAL server connection lost";
camera_hal_server_.reset();
+ camera_hal_server_callbacks_.reset();
+ for (auto& opened_camera_id_pair : opened_camera_id_map_) {
+ auto camera_client_type = opened_camera_id_pair.first;
+ const auto& camera_id_set = opened_camera_id_pair.second;
+ if (!camera_id_set.empty()) {
+ active_client_observers_->Notify(
+ FROM_HERE, &CameraActiveClientObserver::OnActiveClientChange,
+ camera_client_type, /*is_active=*/false);
+ }
+ }
+ opened_camera_id_map_.clear();
+
+ base::AutoLock privacy_lock(privacy_switch_state_lock_);
+ current_privacy_switch_state_ =
+ cros::mojom::CameraPrivacySwitchState::UNKNOWN;
+ privacy_switch_observers_->Notify(
+ FROM_HERE,
+ &CameraPrivacySwitchObserver::OnCameraPrivacySwitchStatusChanged,
+ current_privacy_switch_state_);
}
void CameraHalDispatcherImpl::OnCameraHalClientConnectionError(
CameraClientObserver* client_observer) {
DCHECK(proxy_task_runner_->BelongsToCurrentThread());
+ base::AutoLock lock(opened_camera_id_map_lock_);
+ auto camera_client_type = client_observer->GetType();
+ auto opened_it = opened_camera_id_map_.find(camera_client_type);
+ if (opened_it == opened_camera_id_map_.end()) {
+ // This can happen if this camera client never opened a camera.
+ return;
+ }
+ const auto& camera_id_set = opened_it->second;
+ if (!camera_id_set.empty()) {
+ active_client_observers_->Notify(
+ FROM_HERE, &CameraActiveClientObserver::OnActiveClientChange,
+ camera_client_type, /*is_active=*/false);
+ }
+ opened_camera_id_map_.erase(opened_it);
+
auto it = client_observers_.find(client_observer);
if (it != client_observers_.end()) {
client_observers_.erase(it);
- VLOG(1) << "Camera HAL client connection lost";
+ CAMERA_LOG(EVENT) << "Camera HAL client connection lost";
}
}
void CameraHalDispatcherImpl::StopOnProxyThread() {
DCHECK(proxy_task_runner_->BelongsToCurrentThread());
+ base::trace_event::TraceLog::GetInstance()->RemoveEnabledStateObserver(this);
// TODO(crbug.com/1053569): Remove these lines once the issue is solved.
base::File::Info info;
@@ -422,6 +661,7 @@ void CameraHalDispatcherImpl::StopOnProxyThread() {
// Close |cancel_pipe_| to quit the loop in WaitForIncomingConnection.
cancel_pipe_.reset();
client_observers_.clear();
+ camera_hal_server_callbacks_.reset();
camera_hal_server_.reset();
receiver_set_.Clear();
}
@@ -446,4 +686,8 @@ void CameraHalDispatcherImpl::OnTraceLogDisabledOnProxyThread() {
camera_hal_server_->SetTracingEnabled(false);
}
+TokenManager* CameraHalDispatcherImpl::GetTokenManagerForTesting() {
+ return &token_manager_;
+}
+
} // namespace media
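On the server side, RegisterServerWithToken() always responds with a bound CameraHalServerCallbacks remote, falling back to FailedCameraHalServerCallbacks (declared in the new header below) on failure so the HAL process can handle the error instead of crashing on an unbound remote. A sketch of a caller; the result code is 0, -EPERM, or -EALREADY per the implementation above:

  // Sketch only: register the HAL server with its pre-generated token.
  dispatcher->RegisterServerWithToken(
      std::move(server), server_token,
      base::BindOnce(
          [](int32_t result,
             mojo::PendingRemote<cros::mojom::CameraHalServerCallbacks>
                 callbacks) {
            if (result != 0)
              LOG(ERROR) << "Server registration rejected: " << -result;
          }));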
diff --git a/chromium/media/capture/video/chromeos/ash/camera_hal_dispatcher_impl.h b/chromium/media/capture/video/chromeos/ash/camera_hal_dispatcher_impl.h
new file mode 100644
index 00000000000..47f56d92dfb
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/ash/camera_hal_dispatcher_impl.h
@@ -0,0 +1,283 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_ASH_CAMERA_HAL_DISPATCHER_IMPL_H_
+#define MEDIA_CAPTURE_VIDEO_CHROMEOS_ASH_CAMERA_HAL_DISPATCHER_IMPL_H_
+
+#include <memory>
+#include <set>
+
+#include "base/containers/flat_map.h"
+#include "base/containers/flat_set.h"
+#include "base/containers/unique_ptr_adapters.h"
+#include "base/files/scoped_file.h"
+#include "base/memory/scoped_refptr.h"
+#include "base/memory/singleton.h"
+#include "base/observer_list_threadsafe.h"
+#include "base/observer_list_types.h"
+#include "base/synchronization/lock.h"
+#include "base/synchronization/waitable_event.h"
+#include "base/thread_annotations.h"
+#include "base/threading/thread.h"
+#include "base/unguessable_token.h"
+#include "components/chromeos_camera/common/jpeg_encode_accelerator.mojom.h"
+#include "components/chromeos_camera/common/mjpeg_decode_accelerator.mojom.h"
+#include "media/capture/capture_export.h"
+#include "media/capture/video/chromeos/mojom/cros_camera_service.mojom.h"
+#include "media/capture/video/chromeos/token_manager.h"
+#include "media/capture/video/chromeos/video_capture_device_factory_chromeos.h"
+#include "media/capture/video/video_capture_device_factory.h"
+#include "mojo/public/cpp/bindings/pending_receiver.h"
+#include "mojo/public/cpp/bindings/pending_remote.h"
+#include "mojo/public/cpp/bindings/receiver_set.h"
+#include "mojo/public/cpp/bindings/remote.h"
+#include "mojo/public/cpp/platform/platform_channel_server_endpoint.h"
+
+namespace base {
+
+class SingleThreadTaskRunner;
+class WaitableEvent;
+
+} // namespace base
+
+namespace media {
+
+using MojoJpegEncodeAcceleratorFactoryCB = base::RepeatingCallback<void(
+ mojo::PendingReceiver<chromeos_camera::mojom::JpegEncodeAccelerator>)>;
+
+class CAPTURE_EXPORT CameraClientObserver {
+ public:
+ CameraClientObserver(cros::mojom::CameraClientType type,
+ base::UnguessableToken auth_token)
+ : type_(type), auth_token_(auth_token) {}
+ virtual ~CameraClientObserver();
+ virtual void OnChannelCreated(
+ mojo::PendingRemote<cros::mojom::CameraModule> camera_module) = 0;
+
+ cros::mojom::CameraClientType GetType() { return type_; }
+ const base::UnguessableToken GetAuthToken() { return auth_token_; }
+
+ bool Authenticate(TokenManager* token_manager);
+
+ private:
+ cros::mojom::CameraClientType type_;
+ base::UnguessableToken auth_token_;
+};
+
+class CAPTURE_EXPORT CameraActiveClientObserver : public base::CheckedObserver {
+ public:
+ virtual void OnActiveClientChange(cros::mojom::CameraClientType type,
+ bool is_active) = 0;
+};
+
+// A class that provides a no-op remote to a CameraHalServer that failed
+// registration. When CameraHalServer calls
+// CameraHalDispatcher::RegisterServerWithToken to register itself, a
+// PendingRemote<CameraHalServerCallbacks> is returned. Returning an unbound
+// pending remote would crash CameraHalServer immediately, which would prevent
+// it from handling authentication failures.
+// TODO(b/170075468): Modify RegisterServerWithToken to return an optional
+// CameraHalServerCallbacks instead.
+class FailedCameraHalServerCallbacks
+ : public cros::mojom::CameraHalServerCallbacks {
+ private:
+ friend class CameraHalDispatcherImpl;
+
+ FailedCameraHalServerCallbacks();
+ ~FailedCameraHalServerCallbacks() final;
+
+ mojo::PendingRemote<cros::mojom::CameraHalServerCallbacks> GetRemote();
+
+ // CameraHalServerCallbacks implementations.
+ void CameraDeviceActivityChange(int32_t camera_id,
+ bool opened,
+ cros::mojom::CameraClientType type) final;
+ void CameraPrivacySwitchStateChange(
+ cros::mojom::CameraPrivacySwitchState state) final;
+
+ mojo::Receiver<cros::mojom::CameraHalServerCallbacks> callbacks_;
+};
+
+class CAPTURE_EXPORT CameraPrivacySwitchObserver
+ : public base::CheckedObserver {
+ public:
+ virtual void OnCameraPrivacySwitchStatusChanged(
+ cros::mojom::CameraPrivacySwitchState state) = 0;
+
+ protected:
+ ~CameraPrivacySwitchObserver() override = default;
+};
+
+// The CameraHalDispatcherImpl hosts and waits on the unix domain socket
+// /var/run/camera3.sock. CameraHalServer and CameraHalClients connect to the
+// unix domain socket to create the initial Mojo connections with the
+// CameraHalDispatcherImpl, and CameraHalDispatcherImpl then creates and
+// dispatches the Mojo channels between CameraHalServer and CameraHalClients to
+// establish direct Mojo connections between the CameraHalServer and the
+// CameraHalClients.
+//
+// For general documentation about the CameraHalDispatcher Mojo interface see the
+// comments in mojo/cros_camera_service.mojom.
+class CAPTURE_EXPORT CameraHalDispatcherImpl final
+ : public cros::mojom::CameraHalDispatcher,
+ public cros::mojom::CameraHalServerCallbacks,
+ public base::trace_event::TraceLog::EnabledStateObserver {
+ public:
+ static CameraHalDispatcherImpl* GetInstance();
+
+ bool Start(MojoMjpegDecodeAcceleratorFactoryCB jda_factory,
+ MojoJpegEncodeAcceleratorFactoryCB jea_factory);
+
+ void AddClientObserver(std::unique_ptr<CameraClientObserver> observer,
+ base::OnceCallback<void(int32_t)> result_callback);
+
+ bool IsStarted();
+
+ // Adds an observer that watches for active camera client changes. The
+ // observer is immediately notified of the currently active clients.
+ void AddActiveClientObserver(CameraActiveClientObserver* observer);
+
+ // Removes the observer. A previously-added observer must be removed before
+ // being destroyed.
+ void RemoveActiveClientObserver(CameraActiveClientObserver* observer);
+
+ // Adds an observer to get notified when the camera privacy switch status
+ // changes. Please note that for some devices, the signal will only be
+ // detectable when the camera is currently on due to hardware limitations.
+ // Returns the current state of the camera privacy switch.
+ cros::mojom::CameraPrivacySwitchState AddCameraPrivacySwitchObserver(
+ CameraPrivacySwitchObserver* observer);
+
+ // Removes the observer. A previously-added observer must be removed before
+ // being destroyed.
+ void RemoveCameraPrivacySwitchObserver(CameraPrivacySwitchObserver* observer);
+
+ // Called by vm_permission_service to register the token used for pluginvm.
+ void RegisterPluginVmToken(const base::UnguessableToken& token);
+ void UnregisterPluginVmToken(const base::UnguessableToken& token);
+
+ // CameraHalDispatcher implementations.
+ void RegisterServer(
+ mojo::PendingRemote<cros::mojom::CameraHalServer> server) final;
+ void RegisterServerWithToken(
+ mojo::PendingRemote<cros::mojom::CameraHalServer> server,
+ const base::UnguessableToken& token,
+ RegisterServerWithTokenCallback callback) final;
+ void RegisterClient(
+ mojo::PendingRemote<cros::mojom::CameraHalClient> client) final;
+ void RegisterClientWithToken(
+ mojo::PendingRemote<cros::mojom::CameraHalClient> client,
+ cros::mojom::CameraClientType type,
+ const base::UnguessableToken& auth_token,
+ RegisterClientWithTokenCallback callback) final;
+ void GetJpegDecodeAccelerator(
+ mojo::PendingReceiver<chromeos_camera::mojom::MjpegDecodeAccelerator>
+ jda_receiver) final;
+ void GetJpegEncodeAccelerator(
+ mojo::PendingReceiver<chromeos_camera::mojom::JpegEncodeAccelerator>
+ jea_receiver) final;
+
+ // CameraHalServerCallbacks implementations.
+ void CameraDeviceActivityChange(int32_t camera_id,
+ bool opened,
+ cros::mojom::CameraClientType type) final;
+ void CameraPrivacySwitchStateChange(
+ cros::mojom::CameraPrivacySwitchState state) final;
+
+ base::UnguessableToken GetTokenForTrustedClient(
+ cros::mojom::CameraClientType type);
+
+ // base::trace_event::TraceLog::EnabledStateObserver implementation.
+ void OnTraceLogEnabled() final;
+ void OnTraceLogDisabled() final;
+
+ private:
+ friend struct base::DefaultSingletonTraits<CameraHalDispatcherImpl>;
+ // Allow the test to construct the class directly.
+ friend class CameraHalDispatcherImplTest;
+
+ CameraHalDispatcherImpl();
+ ~CameraHalDispatcherImpl() final;
+
+ bool StartThreads();
+
+ // Creates the unix domain socket for the camera client processes and the
+ // camera HALv3 adapter process to connect.
+ void CreateSocket(base::WaitableEvent* started);
+
+ // Waits for incoming connections (from the HAL process or from client processes).
+ // Runs on |blocking_io_thread_|.
+ void StartServiceLoop(base::ScopedFD socket_fd, base::WaitableEvent* started);
+
+ void RegisterClientWithTokenOnProxyThread(
+ mojo::PendingRemote<cros::mojom::CameraHalClient> client,
+ cros::mojom::CameraClientType type,
+ base::UnguessableToken token,
+ RegisterClientWithTokenCallback callback);
+
+ void AddClientObserverOnProxyThread(
+ std::unique_ptr<CameraClientObserver> observer,
+ base::OnceCallback<void(int32_t)> result_callback);
+
+ void EstablishMojoChannel(CameraClientObserver* client_observer);
+
+ // Handler for incoming Mojo connection on the unix domain socket.
+ void OnPeerConnected(mojo::ScopedMessagePipeHandle message_pipe);
+
+ // Mojo connection error handlers.
+ void OnCameraHalServerConnectionError();
+ void OnCameraHalClientConnectionError(CameraClientObserver* client);
+
+ void StopOnProxyThread();
+
+ void OnTraceLogEnabledOnProxyThread();
+ void OnTraceLogDisabledOnProxyThread();
+
+ TokenManager* GetTokenManagerForTesting();
+
+ base::ScopedFD proxy_fd_;
+ base::ScopedFD cancel_pipe_;
+
+ base::Thread proxy_thread_;
+ base::Thread blocking_io_thread_;
+ scoped_refptr<base::SingleThreadTaskRunner> proxy_task_runner_;
+ scoped_refptr<base::SingleThreadTaskRunner> blocking_io_task_runner_;
+
+ mojo::ReceiverSet<cros::mojom::CameraHalDispatcher> receiver_set_;
+
+ mojo::Remote<cros::mojom::CameraHalServer> camera_hal_server_;
+
+ mojo::Receiver<cros::mojom::CameraHalServerCallbacks>
+ camera_hal_server_callbacks_;
+ FailedCameraHalServerCallbacks failed_camera_hal_server_callbacks_;
+
+ std::set<std::unique_ptr<CameraClientObserver>, base::UniquePtrComparator>
+ client_observers_;
+
+ MojoMjpegDecodeAcceleratorFactoryCB jda_factory_;
+
+ MojoJpegEncodeAcceleratorFactoryCB jea_factory_;
+
+ TokenManager token_manager_;
+
+ base::Lock opened_camera_id_map_lock_;
+ base::flat_map<cros::mojom::CameraClientType, base::flat_set<int32_t>>
+ opened_camera_id_map_ GUARDED_BY(opened_camera_id_map_lock_);
+
+ scoped_refptr<base::ObserverListThreadSafe<CameraActiveClientObserver>>
+ active_client_observers_;
+
+ base::Lock privacy_switch_state_lock_;
+ cros::mojom::CameraPrivacySwitchState current_privacy_switch_state_
+ GUARDED_BY(privacy_switch_state_lock_);
+
+ scoped_refptr<base::ObserverListThreadSafe<CameraPrivacySwitchObserver>>
+ privacy_switch_observers_;
+
+ DISALLOW_COPY_AND_ASSIGN(CameraHalDispatcherImpl);
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_ASH_CAMERA_HAL_DISPATCHER_IMPL_H_
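Typical browser-side bring-up of this singleton is small. A sketch, assuming the embedder supplies the two accelerator factory callbacks:

  // Sketch only: start the dispatcher once; clients then register through
  // RegisterClientWithToken() or AddClientObserver().
  auto* dispatcher = media::CameraHalDispatcherImpl::GetInstance();
  if (!dispatcher->IsStarted() &&
      !dispatcher->Start(std::move(jda_factory), std::move(jea_factory))) {
    LOG(ERROR) << "Failed to start CameraHalDispatcher";
  }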
diff --git a/chromium/media/capture/video/chromeos/ash/camera_hal_dispatcher_impl_unittest.cc b/chromium/media/capture/video/chromeos/ash/camera_hal_dispatcher_impl_unittest.cc
new file mode 100644
index 00000000000..933d0e45440
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/ash/camera_hal_dispatcher_impl_unittest.cc
@@ -0,0 +1,423 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/chromeos/ash/camera_hal_dispatcher_impl.h"
+
+#include <memory>
+#include <utility>
+
+#include "base/bind.h"
+#include "base/posix/safe_strerror.h"
+#include "base/run_loop.h"
+#include "base/single_thread_task_runner.h"
+#include "base/synchronization/waitable_event.h"
+#include "base/test/task_environment.h"
+#include "media/capture/video/chromeos/mojom/camera_common.mojom.h"
+#include "media/capture/video/chromeos/mojom/cros_camera_service.mojom.h"
+#include "mojo/public/cpp/bindings/pending_receiver.h"
+#include "mojo/public/cpp/bindings/pending_remote.h"
+#include "mojo/public/cpp/bindings/receiver.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using testing::_;
+using testing::InvokeWithoutArgs;
+
+namespace media {
+namespace {
+
+class MockCameraHalServer : public cros::mojom::CameraHalServer {
+ public:
+ MockCameraHalServer() = default;
+
+ ~MockCameraHalServer() = default;
+
+ void CreateChannel(
+ mojo::PendingReceiver<cros::mojom::CameraModule> camera_module_receiver,
+ cros::mojom::CameraClientType camera_client_type) override {
+ DoCreateChannel(std::move(camera_module_receiver), camera_client_type);
+ }
+ MOCK_METHOD2(DoCreateChannel,
+ void(mojo::PendingReceiver<cros::mojom::CameraModule>
+ camera_module_receiver,
+ cros::mojom::CameraClientType camera_client_type));
+
+ MOCK_METHOD1(SetTracingEnabled, void(bool enabled));
+
+ mojo::PendingRemote<cros::mojom::CameraHalServer> GetPendingRemote() {
+ return receiver_.BindNewPipeAndPassRemote();
+ }
+
+ private:
+ mojo::Receiver<cros::mojom::CameraHalServer> receiver_{this};
+ DISALLOW_COPY_AND_ASSIGN(MockCameraHalServer);
+};
+
+class MockCameraHalClient : public cros::mojom::CameraHalClient {
+ public:
+ MockCameraHalClient() = default;
+
+ ~MockCameraHalClient() = default;
+
+ void SetUpChannel(
+ mojo::PendingRemote<cros::mojom::CameraModule> camera_module) override {
+ DoSetUpChannel(std::move(camera_module));
+ }
+ MOCK_METHOD1(
+ DoSetUpChannel,
+ void(mojo::PendingRemote<cros::mojom::CameraModule> camera_module));
+
+ mojo::PendingRemote<cros::mojom::CameraHalClient> GetPendingRemote() {
+ return receiver_.BindNewPipeAndPassRemote();
+ }
+
+ private:
+ mojo::Receiver<cros::mojom::CameraHalClient> receiver_{this};
+ DISALLOW_COPY_AND_ASSIGN(MockCameraHalClient);
+};
+
+class MockCameraActiveClientObserver : public CameraActiveClientObserver {
+ public:
+ void OnActiveClientChange(cros::mojom::CameraClientType type,
+ bool is_active) override {
+ DoOnActiveClientChange(type, is_active);
+ }
+ MOCK_METHOD2(DoOnActiveClientChange,
+ void(cros::mojom::CameraClientType, bool));
+};
+
+} // namespace
+
+class CameraHalDispatcherImplTest : public ::testing::Test {
+ public:
+ CameraHalDispatcherImplTest()
+ : register_client_event_(base::WaitableEvent::ResetPolicy::AUTOMATIC) {}
+
+ ~CameraHalDispatcherImplTest() override = default;
+
+ void SetUp() override {
+ dispatcher_ = new CameraHalDispatcherImpl();
+ EXPECT_TRUE(dispatcher_->StartThreads());
+ }
+
+ void TearDown() override { delete dispatcher_; }
+
+ scoped_refptr<base::SingleThreadTaskRunner> GetProxyTaskRunner() {
+ return dispatcher_->proxy_task_runner_;
+ }
+
+ void DoLoop() {
+ run_loop_.reset(new base::RunLoop());
+ run_loop_->Run();
+ }
+
+ void QuitRunLoop() {
+ if (run_loop_) {
+ run_loop_->Quit();
+ }
+ }
+
+ static void RegisterServer(
+ CameraHalDispatcherImpl* dispatcher,
+ mojo::PendingRemote<cros::mojom::CameraHalServer> server,
+ cros::mojom::CameraHalDispatcher::RegisterServerWithTokenCallback
+ callback) {
+ auto token = base::UnguessableToken::Create();
+ dispatcher->GetTokenManagerForTesting()->AssignServerTokenForTesting(token);
+ dispatcher->RegisterServerWithToken(std::move(server), std::move(token),
+ std::move(callback));
+ }
+
+ static void RegisterClientWithToken(
+ CameraHalDispatcherImpl* dispatcher,
+ mojo::PendingRemote<cros::mojom::CameraHalClient> client,
+ cros::mojom::CameraClientType type,
+ const base::UnguessableToken& token,
+ cros::mojom::CameraHalDispatcher::RegisterClientWithTokenCallback
+ callback) {
+ dispatcher->RegisterClientWithToken(std::move(client), type, token,
+ std::move(callback));
+ }
+
+ void OnRegisteredServer(
+ int32_t result,
+ mojo::PendingRemote<cros::mojom::CameraHalServerCallbacks> callbacks) {
+ if (result != 0) {
+ ADD_FAILURE() << "Failed to register server: "
+ << base::safe_strerror(-result);
+ QuitRunLoop();
+ }
+ }
+
+ void OnRegisteredClient(int32_t result) {
+ last_register_client_result_ = result;
+ if (result != 0) {
+ // If registration fails, CameraHalClient::SetUpChannel() will not be
+ // called, and we need to quit the run loop here.
+ QuitRunLoop();
+ }
+ register_client_event_.Signal();
+ }
+
+ protected:
+ // We can't use std::unique_ptr here because the constructor and destructor of
+ // CameraHalDispatcherImpl are private.
+ CameraHalDispatcherImpl* dispatcher_;
+ base::WaitableEvent register_client_event_;
+ int32_t last_register_client_result_;
+
+ private:
+ base::test::TaskEnvironment task_environment_;
+ std::unique_ptr<base::RunLoop> run_loop_;
+ DISALLOW_COPY_AND_ASSIGN(CameraHalDispatcherImplTest);
+};
+
+// Test that the CameraHalDispatcherImpl correctly re-establishes a Mojo channel
+// for the client when the server crashes.
+TEST_F(CameraHalDispatcherImplTest, ServerConnectionError) {
+ // First verify that the CameraHalDispatcherImpl establishes a Mojo channel
+ // between the server and the client.
+ auto mock_server = std::make_unique<MockCameraHalServer>();
+ auto mock_client = std::make_unique<MockCameraHalClient>();
+
+ EXPECT_CALL(*mock_server, DoCreateChannel(_, _)).Times(1);
+ EXPECT_CALL(*mock_client, DoSetUpChannel(_))
+ .Times(1)
+ .WillOnce(
+ InvokeWithoutArgs(this, &CameraHalDispatcherImplTest::QuitRunLoop));
+
+ auto server = mock_server->GetPendingRemote();
+ GetProxyTaskRunner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &CameraHalDispatcherImplTest::RegisterServer,
+ base::Unretained(dispatcher_), std::move(server),
+ base::BindOnce(&CameraHalDispatcherImplTest::OnRegisteredServer,
+ base::Unretained(this))));
+ auto client = mock_client->GetPendingRemote();
+ auto type = cros::mojom::CameraClientType::TESTING;
+ GetProxyTaskRunner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &CameraHalDispatcherImplTest::RegisterClientWithToken,
+ base::Unretained(dispatcher_), std::move(client), type,
+ dispatcher_->GetTokenForTrustedClient(type),
+ base::BindOnce(&CameraHalDispatcherImplTest::OnRegisteredClient,
+ base::Unretained(this))));
+
+ // Wait until the client gets the established Mojo channel.
+ DoLoop();
+
+ // The client registration callback may be called after
+ // CameraHalClient::SetUpChannel(). Use a waitable event to make sure we have
+ // the result.
+ register_client_event_.Wait();
+ ASSERT_EQ(last_register_client_result_, 0);
+
+ // Re-create a new server to simulate a server crash.
+ mock_server = std::make_unique<MockCameraHalServer>();
+
+ // Make sure we create a new Mojo channel from the new server to the same
+ // client.
+ EXPECT_CALL(*mock_server, DoCreateChannel(_, _)).Times(1);
+ EXPECT_CALL(*mock_client, DoSetUpChannel(_))
+ .Times(1)
+ .WillOnce(
+ InvokeWithoutArgs(this, &CameraHalDispatcherImplTest::QuitRunLoop));
+
+ server = mock_server->GetPendingRemote();
+ GetProxyTaskRunner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &CameraHalDispatcherImplTest::RegisterServer,
+ base::Unretained(dispatcher_), std::move(server),
+ base::BindOnce(&CameraHalDispatcherImplTest::OnRegisteredServer,
+ base::Unretained(this))));
+
+ // Wait until the client gets the newly established Mojo channel.
+ DoLoop();
+}
+
+// Test that the CameraHalDispatcherImpl correctly re-establishes a Mojo channel
+// for the client when the client reconnects after crash.
+TEST_F(CameraHalDispatcherImplTest, ClientConnectionError) {
+ // First verify that the CameraHalDispatcherImpl establishes a Mojo channel
+ // between the server and the client.
+ auto mock_server = std::make_unique<MockCameraHalServer>();
+ auto mock_client = std::make_unique<MockCameraHalClient>();
+
+ EXPECT_CALL(*mock_server, DoCreateChannel(_, _)).Times(1);
+ EXPECT_CALL(*mock_client, DoSetUpChannel(_))
+ .Times(1)
+ .WillOnce(
+ InvokeWithoutArgs(this, &CameraHalDispatcherImplTest::QuitRunLoop));
+
+ auto server = mock_server->GetPendingRemote();
+ GetProxyTaskRunner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &CameraHalDispatcherImplTest::RegisterServer,
+ base::Unretained(dispatcher_), std::move(server),
+ base::BindOnce(&CameraHalDispatcherImplTest::OnRegisteredServer,
+ base::Unretained(this))));
+ auto client = mock_client->GetPendingRemote();
+ auto type = cros::mojom::CameraClientType::TESTING;
+ GetProxyTaskRunner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &CameraHalDispatcherImplTest::RegisterClientWithToken,
+ base::Unretained(dispatcher_), std::move(client), type,
+ dispatcher_->GetTokenForTrustedClient(type),
+ base::BindOnce(&CameraHalDispatcherImplTest::OnRegisteredClient,
+ base::Unretained(this))));
+
+ // Wait until the client gets the established Mojo channel.
+ DoLoop();
+
+ // The client registration callback may be called after
+ // CameraHalClient::SetUpChannel(). Use a waitable event to make sure we have
+ // the result.
+ register_client_event_.Wait();
+ ASSERT_EQ(last_register_client_result_, 0);
+
+ // Re-create a new client to simulate a client crash.
+ mock_client = std::make_unique<MockCameraHalClient>();
+
+ // Make sure we re-create the Mojo channel from the same server to the new
+ // client.
+ EXPECT_CALL(*mock_server, DoCreateChannel(_, _)).Times(1);
+ EXPECT_CALL(*mock_client, DoSetUpChannel(_))
+ .Times(1)
+ .WillOnce(
+ InvokeWithoutArgs(this, &CameraHalDispatcherImplTest::QuitRunLoop));
+
+ client = mock_client->GetPendingRemote();
+ type = cros::mojom::CameraClientType::TESTING;
+ GetProxyTaskRunner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &CameraHalDispatcherImplTest::RegisterClientWithToken,
+ base::Unretained(dispatcher_), std::move(client), type,
+ dispatcher_->GetTokenForTrustedClient(type),
+ base::BindOnce(&CameraHalDispatcherImplTest::OnRegisteredClient,
+ base::Unretained(this))));
+
+ // Wait until the client gets the newly established Mojo channel.
+ DoLoop();
+
+ // Make sure the client is still successfully registered.
+ register_client_event_.Wait();
+ ASSERT_EQ(last_register_client_result_, 0);
+}
+
+// Test that trusted camera HAL clients (e.g., Chrome, Android, Testing) can be
+// registered successfully.
+TEST_F(CameraHalDispatcherImplTest, RegisterClientSuccess) {
+ // First verify that the CameraHalDispatcherImpl establishes a Mojo channel
+ // between the server and the client.
+ auto mock_server = std::make_unique<MockCameraHalServer>();
+
+ auto server = mock_server->GetPendingRemote();
+ GetProxyTaskRunner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &CameraHalDispatcherImplTest::RegisterServer,
+ base::Unretained(dispatcher_), std::move(server),
+ base::BindOnce(&CameraHalDispatcherImplTest::OnRegisteredServer,
+ base::Unretained(this))));
+
+ for (auto type : TokenManager::kTrustedClientTypes) {
+ auto mock_client = std::make_unique<MockCameraHalClient>();
+ EXPECT_CALL(*mock_server, DoCreateChannel(_, _)).Times(1);
+ EXPECT_CALL(*mock_client, DoSetUpChannel(_))
+ .Times(1)
+ .WillOnce(
+ InvokeWithoutArgs(this, &CameraHalDispatcherImplTest::QuitRunLoop));
+
+ auto client = mock_client->GetPendingRemote();
+ GetProxyTaskRunner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &CameraHalDispatcherImplTest::RegisterClientWithToken,
+ base::Unretained(dispatcher_), std::move(client), type,
+ dispatcher_->GetTokenForTrustedClient(type),
+ base::BindOnce(&CameraHalDispatcherImplTest::OnRegisteredClient,
+ base::Unretained(this))));
+
+ // Wait until the client gets the established Mojo channel.
+ DoLoop();
+
+ // The client registration callback may be called after
+ // CameraHalClient::SetUpChannel(). Use a waitable event to make sure we
+ // have the result.
+ register_client_event_.Wait();
+ ASSERT_EQ(last_register_client_result_, 0);
+ }
+}
+
+// Test that CameraHalClient registration fails when a wrong (empty) token is
+// provided.
+TEST_F(CameraHalDispatcherImplTest, RegisterClientFail) {
+ // First verify that the CameraHalDispatcherImpl establishes a Mojo channel
+ // between the server and the client.
+ auto mock_server = std::make_unique<MockCameraHalServer>();
+ auto mock_client = std::make_unique<MockCameraHalClient>();
+
+ auto server = mock_server->GetPendingRemote();
+ GetProxyTaskRunner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &CameraHalDispatcherImplTest::RegisterServer,
+ base::Unretained(dispatcher_), std::move(server),
+ base::BindOnce(&CameraHalDispatcherImplTest::OnRegisteredServer,
+ base::Unretained(this))));
+
+ // Use an empty token to make sure authentication fails.
+ base::UnguessableToken empty_token;
+ auto client = mock_client->GetPendingRemote();
+ GetProxyTaskRunner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &CameraHalDispatcherImplTest::RegisterClientWithToken,
+ base::Unretained(dispatcher_), std::move(client),
+ cros::mojom::CameraClientType::TESTING, empty_token,
+ base::BindOnce(&CameraHalDispatcherImplTest::OnRegisteredClient,
+ base::Unretained(this))));
+
+ // We do not need to enter a run loop here because
+ // CameraHalClient::SetUpChannel() isn't expected to be called; we only need
+ // to wait for the callback from CameraHalDispatcher::RegisterClientWithToken.
+ register_client_event_.Wait();
+ ASSERT_EQ(last_register_client_result_, -EPERM);
+}
+
+// Test that CameraHalDispatcherImpl correctly fires CameraActiveClientObserver
+// when a camera device is opened or closed by a client.
+TEST_F(CameraHalDispatcherImplTest, CameraActiveClientObserverTest) {
+ MockCameraActiveClientObserver observer;
+ dispatcher_->AddActiveClientObserver(&observer);
+
+ EXPECT_CALL(observer, DoOnActiveClientChange(
+ cros::mojom::CameraClientType::TESTING, true))
+ .Times(1)
+ .WillOnce(
+ InvokeWithoutArgs(this, &CameraHalDispatcherImplTest::QuitRunLoop));
+ dispatcher_->CameraDeviceActivityChange(
+ /*camera_id=*/0, /*opened=*/true, cros::mojom::CameraClientType::TESTING);
+
+ DoLoop();
+
+ EXPECT_CALL(observer, DoOnActiveClientChange(
+ cros::mojom::CameraClientType::TESTING, false))
+ .Times(1)
+ .WillOnce(
+ InvokeWithoutArgs(this, &CameraHalDispatcherImplTest::QuitRunLoop));
+ dispatcher_->CameraDeviceActivityChange(
+ /*camera_id=*/0, /*opened=*/false,
+ cros::mojom::CameraClientType::TESTING);
+
+ DoLoop();
+}
+
+} // namespace media
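The tests above share one synchronization idiom worth noting: a gmock expectation quits the run loop when the Mojo round trip completes, and a WaitableEvent separately covers the registration callback, which may land after SetUpChannel(). Distilled, using the fixture members defined above:

  // Sketch of the idiom: first wait for the channel, then for the result.
  EXPECT_CALL(*mock_client, DoSetUpChannel(_))
      .WillOnce(
          InvokeWithoutArgs(this, &CameraHalDispatcherImplTest::QuitRunLoop));
  DoLoop();                       // wait for the channel to be set up
  register_client_event_.Wait();  // then for the registration result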
diff --git a/chromium/media/capture/video/chromeos/ash/power_manager_client_proxy.cc b/chromium/media/capture/video/chromeos/ash/power_manager_client_proxy.cc
new file mode 100644
index 00000000000..321b9b3e82c
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/ash/power_manager_client_proxy.cc
@@ -0,0 +1,84 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/chromeos/ash/power_manager_client_proxy.h"
+
+namespace media {
+
+PowerManagerClientProxy::PowerManagerClientProxy() = default;
+
+void PowerManagerClientProxy::Init(
+ base::WeakPtr<Observer> observer,
+ const std::string& debug_info,
+ scoped_refptr<base::SingleThreadTaskRunner> observer_task_runner,
+ scoped_refptr<base::SingleThreadTaskRunner> dbus_task_runner) {
+ observer_ = std::move(observer);
+ debug_info_ = debug_info;
+ observer_task_runner_ = std::move(observer_task_runner);
+ dbus_task_runner_ = std::move(dbus_task_runner);
+
+ dbus_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&PowerManagerClientProxy::InitOnDBusThread, this));
+}
+
+void PowerManagerClientProxy::Shutdown() {
+ dbus_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&PowerManagerClientProxy::ShutdownOnDBusThread, this));
+}
+
+void PowerManagerClientProxy::UnblockSuspend(
+ const base::UnguessableToken& unblock_suspend_token) {
+ dbus_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&PowerManagerClientProxy::UnblockSuspendOnDBusThread, this,
+ unblock_suspend_token));
+}
+
+PowerManagerClientProxy::~PowerManagerClientProxy() = default;
+
+void PowerManagerClientProxy::InitOnDBusThread() {
+ DCHECK(dbus_task_runner_->RunsTasksInCurrentSequence());
+ chromeos::PowerManagerClient::Get()->AddObserver(this);
+}
+
+void PowerManagerClientProxy::ShutdownOnDBusThread() {
+ DCHECK(dbus_task_runner_->RunsTasksInCurrentSequence());
+ chromeos::PowerManagerClient::Get()->RemoveObserver(this);
+}
+
+void PowerManagerClientProxy::UnblockSuspendOnDBusThread(
+ const base::UnguessableToken& unblock_suspend_token) {
+ DCHECK(dbus_task_runner_->RunsTasksInCurrentSequence());
+ chromeos::PowerManagerClient::Get()->UnblockSuspend(unblock_suspend_token);
+}
+
+void PowerManagerClientProxy::SuspendImminentOnObserverThread(
+ base::UnguessableToken unblock_suspend_token) {
+ DCHECK(observer_task_runner_->RunsTasksInCurrentSequence());
+ // TODO(b/175168296): Ensure that the weak pointer |observer| is dereferenced
+ // and invalidated on the same thread.
+ if (observer_) {
+ observer_->SuspendImminent();
+ }
+ UnblockSuspend(std::move(unblock_suspend_token));
+}
+
+void PowerManagerClientProxy::SuspendImminent(
+ power_manager::SuspendImminent::Reason reason) {
+ auto token = base::UnguessableToken::Create();
+ chromeos::PowerManagerClient::Get()->BlockSuspend(token, debug_info_);
+ observer_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&PowerManagerClientProxy::SuspendImminentOnObserverThread,
+ this, std::move(token)));
+}
+
+void PowerManagerClientProxy::SuspendDone(base::TimeDelta sleep_duration) {
+ observer_task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&Observer::SuspendDone, observer_));
+}
+
+} // namespace media
diff --git a/chromium/media/capture/video/chromeos/ash/power_manager_client_proxy.h b/chromium/media/capture/video/chromeos/ash/power_manager_client_proxy.h
new file mode 100644
index 00000000000..ced96772f45
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/ash/power_manager_client_proxy.h
@@ -0,0 +1,66 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_ASH_POWER_MANAGER_CLIENT_PROXY_H_
+#define MEDIA_CAPTURE_VIDEO_CHROMEOS_ASH_POWER_MANAGER_CLIENT_PROXY_H_
+
+#include "base/memory/weak_ptr.h"
+#include "base/single_thread_task_runner.h"
+#include "base/unguessable_token.h"
+#include "chromeos/dbus/power/power_manager_client.h"
+
+namespace media {
+
+class PowerManagerClientProxy
+ : public base::RefCountedThreadSafe<PowerManagerClientProxy>,
+ public chromeos::PowerManagerClient::Observer {
+ public:
+ class Observer {
+ public:
+ virtual void SuspendDone() = 0;
+ virtual void SuspendImminent() = 0;
+ };
+
+ PowerManagerClientProxy();
+ PowerManagerClientProxy(const PowerManagerClientProxy&) = delete;
+ PowerManagerClientProxy& operator=(const PowerManagerClientProxy&) = delete;
+
+ void Init(base::WeakPtr<Observer> observer,
+ const std::string& debug_info,
+ scoped_refptr<base::SingleThreadTaskRunner> observer_task_runner,
+ scoped_refptr<base::SingleThreadTaskRunner> dbus_task_runner);
+
+ void Shutdown();
+
+ void UnblockSuspend(const base::UnguessableToken& unblock_suspend_token);
+
+ private:
+ friend class base::RefCountedThreadSafe<PowerManagerClientProxy>;
+
+ ~PowerManagerClientProxy() override;
+
+ void InitOnDBusThread();
+
+ void ShutdownOnDBusThread();
+
+ void UnblockSuspendOnDBusThread(
+ const base::UnguessableToken& unblock_suspend_token);
+
+ void SuspendImminentOnObserverThread(
+ base::UnguessableToken unblock_suspend_token);
+
+ // chromeos::PowerManagerClient::Observer:
+ void SuspendImminent(power_manager::SuspendImminent::Reason reason) final;
+
+ void SuspendDone(base::TimeDelta sleep_duration) final;
+
+ base::WeakPtr<Observer> observer_;
+ std::string debug_info_;
+ scoped_refptr<base::SingleThreadTaskRunner> observer_task_runner_;
+ scoped_refptr<base::SingleThreadTaskRunner> dbus_task_runner_;
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_ASH_POWER_MANAGER_CLIENT_PROXY_H_
diff --git a/chromium/media/capture/video/chromeos/camera_3a_controller.cc b/chromium/media/capture/video/chromeos/camera_3a_controller.cc
index 16079425ae8..4edbf5153f7 100644
--- a/chromium/media/capture/video/chromeos/camera_3a_controller.cc
+++ b/chromium/media/capture/video/chromeos/camera_3a_controller.cc
@@ -7,6 +7,7 @@
#include <utility>
#include "base/bind.h"
+#include "base/containers/contains.h"
#include "base/numerics/ranges.h"
#include "media/capture/video/chromeos/camera_metadata_utils.h"
@@ -51,8 +52,7 @@ Camera3AController::Camera3AController(
ANDROID_CONTROL_AWB_STATE_INACTIVE),
awb_mode_set_(false),
set_point_of_interest_running_(false),
- ae_locked_for_point_of_interest_(false),
- zero_shutter_lag_enabled_(false) {
+ ae_locked_for_point_of_interest_(false) {
DCHECK(task_runner_->BelongsToCurrentThread());
capture_metadata_dispatcher_->AddResultMetadataObserver(this);
@@ -157,6 +157,29 @@ Camera3AController::Camera3AController(
base::checked_cast<uint8_t>(ae_mode_));
Set3AMode(cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AWB_MODE,
base::checked_cast<uint8_t>(awb_mode_));
+
+ // Enable face detection if it's available.
+ auto face_modes = GetMetadataEntryAsSpan<uint8_t>(
+ static_metadata, cros::mojom::CameraMetadataTag::
+ ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES);
+ // We don't need face landmarks and ids, so using SIMPLE mode instead of FULL
+ // mode should be enough.
+ const auto face_mode_simple = cros::mojom::AndroidStatisticsFaceDetectMode::
+ ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE;
+ if (base::Contains(face_modes,
+ base::checked_cast<uint8_t>(face_mode_simple))) {
+ SetRepeatingCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_STATISTICS_FACE_DETECT_MODE,
+ face_mode_simple);
+ }
+
+ auto request_keys = GetMetadataEntryAsSpan<int32_t>(
+ static_metadata_,
+ cros::mojom::CameraMetadataTag::ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
+ zero_shutter_lag_supported_ = base::Contains(
+ request_keys,
+ static_cast<int32_t>(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_ENABLE_ZSL));
}
Camera3AController::~Camera3AController() {
@@ -184,7 +207,7 @@ void Camera3AController::Stabilize3AForStillCapture(
return;
}
- if (Is3AStabilized() || zero_shutter_lag_enabled_) {
+ if (Is3AStabilized() || zero_shutter_lag_supported_) {
std::move(on_3a_stabilized_callback).Run();
return;
}
@@ -427,6 +450,36 @@ void Camera3AController::SetExposureTime(bool enable_auto,
DVLOG(1) << "Setting AE mode to: " << ae_mode_;
}
+void Camera3AController::SetFocusDistance(bool enable_auto,
+ float focus_distance_diopters) {
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ if (enable_auto) {
+ if (!available_af_modes_.count(
+ cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_AUTO)) {
+ LOG(WARNING) << "Don't support ANDROID_CONTROL_AF_MODE_AUTO";
+ return;
+ }
+ af_mode_ = cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_AUTO;
+ capture_metadata_dispatcher_->UnsetRepeatingCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_LENS_FOCUS_DISTANCE);
+ } else {
+ if (!available_af_modes_.count(
+ cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF)) {
+ LOG(WARNING) << "Don't support ANDROID_CONTROL_AE_MODE_OFF";
+ return;
+ }
+ af_mode_ = cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF;
+ SetRepeatingCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_LENS_FOCUS_DISTANCE,
+ focus_distance_diopters);
+ }
+
+ Set3AMode(cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE,
+ base::checked_cast<uint8_t>(af_mode_));
+ DVLOG(1) << "Setting AF mode to: " << af_mode_;
+}
+
bool Camera3AController::IsPointOfInterestSupported() {
return point_of_interest_supported_;
}
@@ -553,10 +606,6 @@ void Camera3AController::SetPointOfInterestUnlockAe() {
ClearRepeatingCaptureMetadata();
}
-void Camera3AController::UpdateZeroShutterLagAvailability(bool enabled) {
- zero_shutter_lag_enabled_ = enabled;
-}
-
base::WeakPtr<Camera3AController> Camera3AController::GetWeakPtr() {
DCHECK(task_runner_->BelongsToCurrentThread());
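As a usage note: the new setter takes diopters, while the web-facing constrainable property is in meters. A hypothetical pair of helpers (not in the tree) showing the conversion; calls must be made on the controller's task runner, which the method DCHECKs.

// Illustrative only; assumes a valid Camera3AController on its own thread.
void SetManualFocusMeters(Camera3AController* controller, float meters) {
  DCHECK_GT(meters, 0.0f);
  // Android metadata expects diopters == 1 / meters.
  controller->SetFocusDistance(/*enable_auto=*/false, 1.0f / meters);
}

void RestoreContinuousFocus(Camera3AController* controller) {
  // The distance argument is ignored when |enable_auto| is true.
  controller->SetFocusDistance(/*enable_auto=*/true, 0.0f);
}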
diff --git a/chromium/media/capture/video/chromeos/camera_3a_controller.h b/chromium/media/capture/video/chromeos/camera_3a_controller.h
index 03ff69669f1..e87c8c2264c 100644
--- a/chromium/media/capture/video/chromeos/camera_3a_controller.h
+++ b/chromium/media/capture/video/chromeos/camera_3a_controller.h
@@ -50,16 +50,17 @@ class CAPTURE_EXPORT Camera3AController final
// only effective if |enable_auto| is set to false
void SetExposureTime(bool enable_auto, int64_t exposure_time_nanoseconds);
+ // Sets the focus distance. |enable_auto| enables auto-focus mode;
+ // |focus_distance_diopters| is only effective if |enable_auto| is false.
+ void SetFocusDistance(bool enable_auto, float focus_distance_diopters);
+
bool IsPointOfInterestSupported();
// Set point of interest. The coordinate system is based on the active
// pixel array.
void SetPointOfInterest(gfx::Point point);
- // Updates the availability of Zero-Shutter Lag (ZSL). We skip 3A (AE, AF,
- // AWB) if ZSL is enabled.
- void UpdateZeroShutterLagAvailability(bool enabled);
-
base::WeakPtr<Camera3AController> GetWeakPtr();
private:
@@ -98,6 +99,7 @@ class CAPTURE_EXPORT Camera3AController final
bool ae_region_supported_;
bool af_region_supported_;
bool point_of_interest_supported_;
+ bool zero_shutter_lag_supported_;
CaptureMetadataDispatcher* capture_metadata_dispatcher_;
const scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
@@ -127,8 +129,6 @@ class CAPTURE_EXPORT Camera3AController final
bool ae_locked_for_point_of_interest_;
- bool zero_shutter_lag_enabled_;
-
base::TimeDelta latest_sensor_timestamp_;
std::unordered_set<cros::mojom::CameraMetadataTag> repeating_metadata_tags_;
diff --git a/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.cc b/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.cc
index b2fa774a4af..d9ddaf31832 100644
--- a/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.cc
+++ b/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.cc
@@ -6,19 +6,43 @@
#include <string>
+#include "base/callback_helpers.h"
#include "base/command_line.h"
+#include "media/base/bind_to_current_loop.h"
#include "media/base/media_switches.h"
#include "media/capture/video/chromeos/public/cros_features.h"
#include "media/capture/video/chromeos/video_capture_device_chromeos_halv3.h"
namespace media {
-CameraAppDeviceBridgeImpl::CameraAppDeviceBridgeImpl() {}
+namespace {
+
+void InvalidateDevicePtrsOnDeviceIpcThread(
+ base::WeakPtr<CameraAppDeviceImpl> device,
+ base::OnceClosure callback) {
+ if (device) {
+ device->InvalidatePtrs(std::move(callback));
+ }
+}
+
+} // namespace
+
+CameraAppDeviceBridgeImpl::CameraAppDeviceBridgeImpl() {
+ const base::CommandLine* command_line =
+ base::CommandLine::ForCurrentProcess();
+ bool use_fake_camera =
+ command_line->HasSwitch(switches::kUseFakeDeviceForMediaStream);
+ bool use_file_camera =
+ command_line->HasSwitch(switches::kUseFileForFakeVideoCapture);
+ is_supported_ =
+ ShouldUseCrosCameraService() && !use_fake_camera && !use_file_camera;
+}
CameraAppDeviceBridgeImpl::~CameraAppDeviceBridgeImpl() = default;
-void CameraAppDeviceBridgeImpl::SetIsSupported(bool is_supported) {
- is_supported_ = is_supported;
+// static
+CameraAppDeviceBridgeImpl* CameraAppDeviceBridgeImpl::GetInstance() {
+ return base::Singleton<CameraAppDeviceBridgeImpl>::get();
}
void CameraAppDeviceBridgeImpl::BindReceiver(
@@ -26,29 +50,85 @@ void CameraAppDeviceBridgeImpl::BindReceiver(
receivers_.Add(this, std::move(receiver));
}
-void CameraAppDeviceBridgeImpl::OnDeviceClosed(const std::string& device_id) {
- auto it = camera_app_devices_.find(device_id);
- if (it != camera_app_devices_.end()) {
- camera_app_devices_.erase(it);
+void CameraAppDeviceBridgeImpl::OnVideoCaptureDeviceCreated(
+ const std::string& device_id,
+ scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner) {
+ base::AutoLock lock(task_runner_map_lock_);
+ DCHECK_EQ(ipc_task_runners_.count(device_id), 0u);
+ ipc_task_runners_.emplace(device_id, ipc_task_runner);
+}
+
+void CameraAppDeviceBridgeImpl::OnVideoCaptureDeviceClosing(
+ const std::string& device_id) {
+ base::AutoLock lock(task_runner_map_lock_);
+ DCHECK_EQ(ipc_task_runners_.count(device_id), 1u);
+ ipc_task_runners_[device_id]->PostTask(
+ FROM_HERE, base::BindOnce(&InvalidateDevicePtrsOnDeviceIpcThread,
+ GetWeakCameraAppDevice(device_id),
+ base::DoNothing::Once()));
+ ipc_task_runners_.erase(device_id);
+}
+
+void CameraAppDeviceBridgeImpl::OnDeviceMojoDisconnected(
+ const std::string& device_id) {
+ auto remove_device = media::BindToCurrentLoop(
+ base::BindOnce(&CameraAppDeviceBridgeImpl::RemoveCameraAppDevice,
+ base::Unretained(this), device_id));
+ {
+ base::AutoLock lock(task_runner_map_lock_);
+ auto it = ipc_task_runners_.find(device_id);
+ if (it != ipc_task_runners_.end()) {
+ it->second->PostTask(
+ FROM_HERE, base::BindOnce(&InvalidateDevicePtrsOnDeviceIpcThread,
+ GetWeakCameraAppDevice(device_id),
+ std::move(remove_device)));
+ return;
+ }
}
+ std::move(remove_device).Run();
}
void CameraAppDeviceBridgeImpl::SetCameraInfoGetter(
CameraInfoGetter camera_info_getter) {
+ base::AutoLock lock(camera_info_getter_lock_);
camera_info_getter_ = std::move(camera_info_getter);
}
void CameraAppDeviceBridgeImpl::UnsetCameraInfoGetter() {
+ base::AutoLock lock(camera_info_getter_lock_);
camera_info_getter_ = {};
}
-CameraAppDeviceImpl* CameraAppDeviceBridgeImpl::GetCameraAppDevice(
+void CameraAppDeviceBridgeImpl::SetVirtualDeviceController(
+ VirtualDeviceController virtual_device_controller) {
+ base::AutoLock lock(virtual_device_controller_lock_);
+ virtual_device_controller_ = std::move(virtual_device_controller);
+}
+
+void CameraAppDeviceBridgeImpl::UnsetVirtualDeviceController() {
+ base::AutoLock lock(virtual_device_controller_lock_);
+ virtual_device_controller_ = {};
+}
+
+base::WeakPtr<CameraAppDeviceImpl>
+CameraAppDeviceBridgeImpl::GetWeakCameraAppDevice(
const std::string& device_id) {
+ base::AutoLock lock(device_map_lock_);
auto it = camera_app_devices_.find(device_id);
- if (it != camera_app_devices_.end()) {
- return it->second.get();
+ if (it == camera_app_devices_.end()) {
+ return nullptr;
}
- return CreateCameraAppDevice(device_id);
+ return it->second->GetWeakPtr();
+}
+
+void CameraAppDeviceBridgeImpl::RemoveCameraAppDevice(
+ const std::string& device_id) {
+ base::AutoLock lock(device_map_lock_);
+ auto it = camera_app_devices_.find(device_id);
+ if (it == camera_app_devices_.end()) {
+ return;
+ }
+ camera_app_devices_.erase(it);
}
void CameraAppDeviceBridgeImpl::GetCameraAppDevice(
@@ -56,16 +136,31 @@ void CameraAppDeviceBridgeImpl::GetCameraAppDevice(
GetCameraAppDeviceCallback callback) {
DCHECK(is_supported_);
- mojo::PendingRemote<cros::mojom::CameraAppDevice> device;
- GetCameraAppDevice(device_id)->BindReceiver(
- device.InitWithNewPipeAndPassReceiver());
+ mojo::PendingRemote<cros::mojom::CameraAppDevice> device_remote;
+ auto* device = GetOrCreateCameraAppDevice(device_id);
+ DCHECK(device);
+
+ device->BindReceiver(device_remote.InitWithNewPipeAndPassReceiver());
std::move(callback).Run(cros::mojom::GetCameraAppDeviceStatus::SUCCESS,
- std::move(device));
+ std::move(device_remote));
}
-media::CameraAppDeviceImpl* CameraAppDeviceBridgeImpl::CreateCameraAppDevice(
+media::CameraAppDeviceImpl*
+CameraAppDeviceBridgeImpl::GetOrCreateCameraAppDevice(
const std::string& device_id) {
+ base::AutoLock lock(device_map_lock_);
+ auto it = camera_app_devices_.find(device_id);
+ if (it != camera_app_devices_.end()) {
+ return it->second.get();
+ }
+
+ base::AutoLock camera_info_lock(camera_info_getter_lock_);
+ // VideoCaptureDeviceFactory is created before CameraAppDeviceBridge is
+ // bound and is only destroyed when the video capture service dies, so
+ // |camera_info_getter_| is guaranteed to be valid here.
DCHECK(camera_info_getter_);
+
auto device_info = camera_info_getter_.Run(device_id);
auto device_impl = std::make_unique<media::CameraAppDeviceImpl>(
device_id, std::move(device_info));
@@ -77,4 +172,18 @@ void CameraAppDeviceBridgeImpl::IsSupported(IsSupportedCallback callback) {
std::move(callback).Run(is_supported_);
}
+void CameraAppDeviceBridgeImpl::SetMultipleStreamsEnabled(
+ const std::string& device_id,
+ bool enabled,
+ SetMultipleStreamsEnabledCallback callback) {
+ base::AutoLock lock(virtual_device_controller_lock_);
+ if (!virtual_device_controller_) {
+ std::move(callback).Run(false);
+ return;
+ }
+
+ virtual_device_controller_.Run(device_id, enabled);
+ std::move(callback).Run(true);
+}
+
} // namespace media
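The singleton now pairs every cross-thread field with its own base::Lock and a GUARDED_BY() annotation, so clang's thread-safety analysis rejects unlocked access at compile time. A reduced sketch of the idiom, using an invented class:

#include <map>

#include "base/synchronization/lock.h"
#include "base/thread_annotations.h"

// Invented example of the per-field locking idiom used above.
class Registry {
 public:
  void Set(int id, int value) {
    base::AutoLock lock(map_lock_);
    map_[id] = value;
  }

  bool Get(int id, int* out) {
    base::AutoLock lock(map_lock_);
    auto it = map_.find(id);
    if (it == map_.end())
      return false;
    *out = it->second;
    return true;
  }

 private:
  base::Lock map_lock_;
  std::map<int, int> map_ GUARDED_BY(map_lock_);  // analysis enforces the lock
};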
diff --git a/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.h b/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.h
index 42a1972d1d9..10e2646f738 100644
--- a/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.h
+++ b/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.h
@@ -7,6 +7,7 @@
#include <string>
+#include "base/memory/singleton.h"
#include "media/capture/capture_export.h"
#include "media/capture/video/chromeos/camera_app_device_impl.h"
#include "media/capture/video/chromeos/mojom/camera_app.mojom.h"
@@ -14,30 +15,47 @@
namespace media {
-// A bridge class which helps to construct the connection of CameraAppDevice
-// between remote side (Chrome) and receiver side (Video Capture Service).
+// A singleton bridge between the Chrome Camera App and the Video Capture
+// Service which constructs CameraAppDevice instances for communication
+// between the two components.
class CAPTURE_EXPORT CameraAppDeviceBridgeImpl
: public cros::mojom::CameraAppDeviceBridge {
public:
using CameraInfoGetter =
base::RepeatingCallback<cros::mojom::CameraInfoPtr(const std::string&)>;
+ using VirtualDeviceController =
+ base::RepeatingCallback<void(const std::string&, bool)>;
CameraAppDeviceBridgeImpl();
~CameraAppDeviceBridgeImpl() override;
- void SetIsSupported(bool is_supported);
+ static CameraAppDeviceBridgeImpl* GetInstance();
void BindReceiver(
mojo::PendingReceiver<cros::mojom::CameraAppDeviceBridge> receiver);
- void OnDeviceClosed(const std::string& device_id);
+ void OnVideoCaptureDeviceCreated(
+ const std::string& device_id,
+ scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner);
+
+ void OnVideoCaptureDeviceClosing(const std::string& device_id);
+
+ void OnDeviceMojoDisconnected(const std::string& device_id);
void SetCameraInfoGetter(CameraInfoGetter camera_info_getter);
void UnsetCameraInfoGetter();
- CameraAppDeviceImpl* GetCameraAppDevice(const std::string& device_id);
+ void SetVirtualDeviceController(
+ VirtualDeviceController virtual_device_controller);
+
+ void UnsetVirtualDeviceController();
+
+ base::WeakPtr<CameraAppDeviceImpl> GetWeakCameraAppDevice(
+ const std::string& device_id);
+
+ void RemoveCameraAppDevice(const std::string& device_id);
// cros::mojom::CameraAppDeviceBridge implementations.
void GetCameraAppDevice(const std::string& device_id,
@@ -45,21 +63,38 @@ class CAPTURE_EXPORT CameraAppDeviceBridgeImpl
void IsSupported(IsSupportedCallback callback) override;
+ void SetMultipleStreamsEnabled(
+ const std::string& device_id,
+ bool enabled,
+ SetMultipleStreamsEnabledCallback callback) override;
+
private:
- CameraAppDeviceImpl* CreateCameraAppDevice(const std::string& device_id);
+ friend struct base::DefaultSingletonTraits<CameraAppDeviceBridgeImpl>;
+
+ CameraAppDeviceImpl* GetOrCreateCameraAppDevice(const std::string& device_id);
bool is_supported_;
- CameraInfoGetter camera_info_getter_;
+ base::Lock camera_info_getter_lock_;
+ CameraInfoGetter camera_info_getter_ GUARDED_BY(camera_info_getter_lock_);
+
+ base::Lock virtual_device_controller_lock_;
+ VirtualDeviceController virtual_device_controller_
+ GUARDED_BY(virtual_device_controller_lock_);
mojo::ReceiverSet<cros::mojom::CameraAppDeviceBridge> receivers_;
+ base::Lock device_map_lock_;
base::flat_map<std::string, std::unique_ptr<media::CameraAppDeviceImpl>>
- camera_app_devices_;
+ camera_app_devices_ GUARDED_BY(device_map_lock_);
+
+ base::Lock task_runner_map_lock_;
+ base::flat_map<std::string, scoped_refptr<base::SingleThreadTaskRunner>>
+ ipc_task_runners_ GUARDED_BY(task_runner_map_lock_);
DISALLOW_COPY_AND_ASSIGN(CameraAppDeviceBridgeImpl);
};
} // namespace media
-#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_APP_DEVICE_BRIDGE_IMPL_H_ \ No newline at end of file
+#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_APP_DEVICE_BRIDGE_IMPL_H_
diff --git a/chromium/media/capture/video/chromeos/camera_app_device_impl.cc b/chromium/media/capture/video/chromeos/camera_app_device_impl.cc
index d81afe5b3e5..3aba77a914c 100644
--- a/chromium/media/capture/video/chromeos/camera_app_device_impl.cc
+++ b/chromium/media/capture/video/chromeos/camera_app_device_impl.cc
@@ -4,6 +4,8 @@
#include "media/capture/video/chromeos/camera_app_device_impl.h"
+#include "media/base/bind_to_current_loop.h"
+#include "media/capture/video/chromeos/camera_app_device_bridge_impl.h"
#include "media/capture/video/chromeos/camera_metadata_utils.h"
namespace media {
@@ -61,20 +63,36 @@ CameraAppDeviceImpl::CameraAppDeviceImpl(const std::string& device_id,
cros::mojom::CameraInfoPtr camera_info)
: device_id_(device_id),
camera_info_(std::move(camera_info)),
- task_runner_(base::ThreadTaskRunnerHandle::Get()),
capture_intent_(cros::mojom::CaptureIntent::DEFAULT),
next_metadata_observer_id_(0),
- next_camera_event_observer_id_(0),
- weak_ptr_factory_(
- std::make_unique<base::WeakPtrFactory<CameraAppDeviceImpl>>(this)) {}
+ next_camera_event_observer_id_(0) {}
CameraAppDeviceImpl::~CameraAppDeviceImpl() {
- task_runner_->DeleteSoon(FROM_HERE, std::move(weak_ptr_factory_));
+ // If the instance is bound, then this instance should only be destroyed when
+ // the mojo connection is dropped, which also happens on the mojo thread.
+ DCHECK(!mojo_task_runner_ || mojo_task_runner_->BelongsToCurrentThread());
+
+ // All the weak pointers of |weak_ptr_factory_| should be invalidated on the
+ // camera device IPC thread before destroying CameraAppDeviceImpl.
+ DCHECK(!weak_ptr_factory_.HasWeakPtrs());
}
void CameraAppDeviceImpl::BindReceiver(
mojo::PendingReceiver<cros::mojom::CameraAppDevice> receiver) {
receivers_.Add(this, std::move(receiver));
+ receivers_.set_disconnect_handler(
+ base::BindRepeating(&CameraAppDeviceImpl::OnMojoConnectionError,
+ weak_ptr_factory_for_mojo_.GetWeakPtr()));
+ mojo_task_runner_ = base::ThreadTaskRunnerHandle::Get();
+}
+
+base::WeakPtr<CameraAppDeviceImpl> CameraAppDeviceImpl::GetWeakPtr() {
+ return weak_ptr_factory_.GetWeakPtr();
+}
+
+void CameraAppDeviceImpl::InvalidatePtrs(base::OnceClosure callback) {
+ weak_ptr_factory_.InvalidateWeakPtrs();
+ std::move(callback).Run();
}
void CameraAppDeviceImpl::ConsumeReprocessOptions(
@@ -130,26 +148,30 @@ void CameraAppDeviceImpl::OnResultMetadataAvailable(
}
void CameraAppDeviceImpl::OnShutterDone() {
- base::AutoLock lock(camera_event_observers_lock_);
-
- for (auto& observer : camera_event_observers_) {
- observer.second->OnShutterDone();
- }
+ mojo_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&CameraAppDeviceImpl::NotifyShutterDoneOnMojoThread,
+ weak_ptr_factory_for_mojo_.GetWeakPtr()));
}
void CameraAppDeviceImpl::GetCameraInfo(GetCameraInfoCallback callback) {
+ DCHECK(mojo_task_runner_->BelongsToCurrentThread());
DCHECK(camera_info_);
+
std::move(callback).Run(camera_info_.Clone());
}
void CameraAppDeviceImpl::SetReprocessOption(
cros::mojom::Effect effect,
SetReprocessOptionCallback reprocess_result_callback) {
+ DCHECK(mojo_task_runner_->BelongsToCurrentThread());
+
ReprocessTask task;
task.effect = effect;
- task.callback = base::BindOnce(&CameraAppDeviceImpl::SetReprocessResult,
- weak_ptr_factory_->GetWeakPtr(),
- std::move(reprocess_result_callback));
+ task.callback = media::BindToCurrentLoop(
+ base::BindOnce(&CameraAppDeviceImpl::SetReprocessResultOnMojoThread,
+ weak_ptr_factory_for_mojo_.GetWeakPtr(),
+ std::move(reprocess_result_callback)));
if (effect == cros::mojom::Effect::PORTRAIT_MODE) {
auto e = BuildMetadataEntry(
@@ -165,6 +187,8 @@ void CameraAppDeviceImpl::SetReprocessOption(
void CameraAppDeviceImpl::SetFpsRange(const gfx::Range& fps_range,
SetFpsRangeCallback callback) {
+ DCHECK(mojo_task_runner_->BelongsToCurrentThread());
+
const int entry_length = 2;
auto& static_metadata = camera_info_->static_camera_characteristics;
@@ -198,6 +222,8 @@ void CameraAppDeviceImpl::SetFpsRange(const gfx::Range& fps_range,
void CameraAppDeviceImpl::SetStillCaptureResolution(
const gfx::Size& resolution,
SetStillCaptureResolutionCallback callback) {
+ DCHECK(mojo_task_runner_->BelongsToCurrentThread());
+
base::AutoLock lock(still_capture_resolution_lock_);
still_capture_resolution_ = resolution;
std::move(callback).Run();
@@ -206,6 +232,8 @@ void CameraAppDeviceImpl::SetStillCaptureResolution(
void CameraAppDeviceImpl::SetCaptureIntent(
cros::mojom::CaptureIntent capture_intent,
SetCaptureIntentCallback callback) {
+ DCHECK(mojo_task_runner_->BelongsToCurrentThread());
+
base::AutoLock lock(capture_intent_lock_);
capture_intent_ = capture_intent;
std::move(callback).Run();
@@ -215,6 +243,8 @@ void CameraAppDeviceImpl::AddResultMetadataObserver(
mojo::PendingRemote<cros::mojom::ResultMetadataObserver> observer,
cros::mojom::StreamType stream_type,
AddResultMetadataObserverCallback callback) {
+ DCHECK(mojo_task_runner_->BelongsToCurrentThread());
+
base::AutoLock lock(metadata_observers_lock_);
uint32_t id = next_metadata_observer_id_++;
@@ -228,6 +258,8 @@ void CameraAppDeviceImpl::AddResultMetadataObserver(
void CameraAppDeviceImpl::RemoveResultMetadataObserver(
uint32_t id,
RemoveResultMetadataObserverCallback callback) {
+ DCHECK(mojo_task_runner_->BelongsToCurrentThread());
+
base::AutoLock lock(metadata_observers_lock_);
if (metadata_observers_.erase(id) == 0) {
@@ -245,7 +277,7 @@ void CameraAppDeviceImpl::RemoveResultMetadataObserver(
void CameraAppDeviceImpl::AddCameraEventObserver(
mojo::PendingRemote<cros::mojom::CameraEventObserver> observer,
AddCameraEventObserverCallback callback) {
- base::AutoLock lock(camera_event_observers_lock_);
+ DCHECK(mojo_task_runner_->BelongsToCurrentThread());
uint32_t id = next_camera_event_observer_id_++;
camera_event_observers_[id] =
@@ -256,7 +288,7 @@ void CameraAppDeviceImpl::AddCameraEventObserver(
void CameraAppDeviceImpl::RemoveCameraEventObserver(
uint32_t id,
RemoveCameraEventObserverCallback callback) {
- base::AutoLock lock(camera_event_observers_lock_);
+ DCHECK(mojo_task_runner_->BelongsToCurrentThread());
bool is_success = camera_event_observers_.erase(id) == 1;
std::move(callback).Run(is_success);
@@ -274,17 +306,26 @@ void CameraAppDeviceImpl::DisableEeNr(ReprocessTask* task) {
task->extra_metadata.push_back(std::move(nr_entry));
}
-void CameraAppDeviceImpl::SetReprocessResult(
+void CameraAppDeviceImpl::OnMojoConnectionError() {
+ CameraAppDeviceBridgeImpl::GetInstance()->OnDeviceMojoDisconnected(
+ device_id_);
+}
+
+void CameraAppDeviceImpl::SetReprocessResultOnMojoThread(
SetReprocessOptionCallback callback,
const int32_t status,
media::mojom::BlobPtr blob) {
- auto callback_on_mojo_thread = base::BindOnce(
- [](const int32_t status, media::mojom::BlobPtr blob,
- SetReprocessOptionCallback callback) {
- std::move(callback).Run(status, std::move(blob));
- },
- status, std::move(blob), std::move(callback));
- task_runner_->PostTask(FROM_HERE, std::move(callback_on_mojo_thread));
+ DCHECK(mojo_task_runner_->BelongsToCurrentThread());
+
+ std::move(callback).Run(status, std::move(blob));
+}
+
+void CameraAppDeviceImpl::NotifyShutterDoneOnMojoThread() {
+ DCHECK(mojo_task_runner_->BelongsToCurrentThread());
+
+ for (auto& observer : camera_event_observers_) {
+ observer.second->OnShutterDone();
+ }
}
} // namespace media
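A base::WeakPtr may only be dereferenced and invalidated on one sequence, which is why the class above now keeps |weak_ptr_factory_| for the camera IPC thread and |weak_ptr_factory_for_mojo_| for the Mojo thread. A stripped-down sketch of the idiom (class and method names invented):

// Invented example: one WeakPtrFactory per thread that consumes the pointers.
class DualThreadObject {
 public:
  // Pointers from this factory are handed only to the IPC thread.
  base::WeakPtr<DualThreadObject> GetIpcWeakPtr() {
    return ipc_weak_factory_.GetWeakPtr();
  }
  // Pointers from this factory are handed only to the Mojo thread.
  base::WeakPtr<DualThreadObject> GetMojoWeakPtr() {
    return mojo_weak_factory_.GetWeakPtr();
  }
  // Must run on the IPC thread, mirroring InvalidatePtrs() above.
  void InvalidateIpcPtrs() { ipc_weak_factory_.InvalidateWeakPtrs(); }

 private:
  base::WeakPtrFactory<DualThreadObject> ipc_weak_factory_{this};
  base::WeakPtrFactory<DualThreadObject> mojo_weak_factory_{this};
};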
diff --git a/chromium/media/capture/video/chromeos/camera_app_device_impl.h b/chromium/media/capture/video/chromeos/camera_app_device_impl.h
index a0853f0ac2e..4bdd9991899 100644
--- a/chromium/media/capture/video/chromeos/camera_app_device_impl.h
+++ b/chromium/media/capture/video/chromeos/camera_app_device_impl.h
@@ -66,6 +66,12 @@ class CAPTURE_EXPORT CameraAppDeviceImpl : public cros::mojom::CameraAppDevice {
void BindReceiver(
mojo::PendingReceiver<cros::mojom::CameraAppDevice> receiver);
+ // All the weak pointers should be dereferenced and invalidated on the
+ // camera device IPC thread.
+ base::WeakPtr<CameraAppDeviceImpl> GetWeakPtr();
+
+ void InvalidatePtrs(base::OnceClosure callback);
+
// Consumes all the pending reprocess tasks if there is any and eventually
// generates a ReprocessTaskQueue which contains:
// 1. A regular capture task with |take_photo_callback|.
@@ -121,9 +127,13 @@ class CAPTURE_EXPORT CameraAppDeviceImpl : public cros::mojom::CameraAppDevice {
private:
static void DisableEeNr(ReprocessTask* task);
- void SetReprocessResult(SetReprocessOptionCallback callback,
- const int32_t status,
- media::mojom::BlobPtr blob);
+ void OnMojoConnectionError();
+
+ void SetReprocessResultOnMojoThread(SetReprocessOptionCallback callback,
+ const int32_t status,
+ media::mojom::BlobPtr blob);
+
+ void NotifyShutterDoneOnMojoThread();
std::string device_id_;
@@ -131,7 +141,8 @@ class CAPTURE_EXPORT CameraAppDeviceImpl : public cros::mojom::CameraAppDevice {
cros::mojom::CameraInfoPtr camera_info_;
- const scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
+ // Used for calls that must run on the Mojo thread.
+ scoped_refptr<base::SingleThreadTaskRunner> mojo_task_runner_;
// The queue will be enqueued and dequeued from different threads.
base::Lock reprocess_tasks_lock_;
@@ -159,14 +170,17 @@ class CAPTURE_EXPORT CameraAppDeviceImpl : public cros::mojom::CameraAppDevice {
base::flat_map<cros::mojom::StreamType, base::flat_set<uint32_t>>
stream_metadata_observer_ids_ GUARDED_BY(metadata_observers_lock_);
- // Those maps will be changed and used from different threads.
- base::Lock camera_event_observers_lock_;
- uint32_t next_camera_event_observer_id_
- GUARDED_BY(camera_event_observers_lock_);
+ uint32_t next_camera_event_observer_id_;
base::flat_map<uint32_t, mojo::Remote<cros::mojom::CameraEventObserver>>
- camera_event_observers_ GUARDED_BY(camera_event_observers_lock_);
+ camera_event_observers_;
+
+ // The weak pointers should be dereferenced and invalidated on the camera
+ // device IPC thread.
+ base::WeakPtrFactory<CameraAppDeviceImpl> weak_ptr_factory_{this};
- std::unique_ptr<base::WeakPtrFactory<CameraAppDeviceImpl>> weak_ptr_factory_;
+ // The weak pointers should be dereferenced and invalidated on the Mojo
+ // thread.
+ base::WeakPtrFactory<CameraAppDeviceImpl> weak_ptr_factory_for_mojo_{this};
DISALLOW_COPY_AND_ASSIGN(CameraAppDeviceImpl);
};
diff --git a/chromium/media/capture/video/chromeos/camera_app_device_provider_impl.cc b/chromium/media/capture/video/chromeos/camera_app_device_provider_impl.cc
index 0cca22186c4..53defd23bee 100644
--- a/chromium/media/capture/video/chromeos/camera_app_device_provider_impl.cc
+++ b/chromium/media/capture/video/chromeos/camera_app_device_provider_impl.cc
@@ -54,4 +54,27 @@ void CameraAppDeviceProviderImpl::IsSupported(IsSupportedCallback callback) {
bridge_->IsSupported(std::move(callback));
}
-} // namespace media \ No newline at end of file
+void CameraAppDeviceProviderImpl::SetMultipleStreamsEnabled(
+ const std::string& source_id,
+ bool enabled,
+ SetMultipleStreamsEnabledCallback callback) {
+ mapping_callback_.Run(
+ source_id,
+ media::BindToCurrentLoop(base::BindOnce(
+ &CameraAppDeviceProviderImpl::SetMultipleStreamsEnabledWithDeviceId,
+ weak_ptr_factory_.GetWeakPtr(), enabled, std::move(callback))));
+}
+
+void CameraAppDeviceProviderImpl::SetMultipleStreamsEnabledWithDeviceId(
+ bool enabled,
+ SetMultipleStreamsEnabledCallback callback,
+ const base::Optional<std::string>& device_id) {
+ if (!device_id.has_value()) {
+ std::move(callback).Run(false);
+ return;
+ }
+
+ bridge_->SetMultipleStreamsEnabled(*device_id, enabled, std::move(callback));
+}
+
+} // namespace media
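The provider resolves a blink source id to a camera device id through an asynchronous mapping callback that may answer on another thread; media::BindToCurrentLoop() re-posts the continuation to the thread that initiated the call. A reduced sketch with invented names:

// Invented class mirroring the callback-trampoline shape used above.
class Provider {
 public:
  using MappingCallback = base::RepeatingCallback<void(
      const std::string&,
      base::OnceCallback<void(const base::Optional<std::string>&)>)>;

  explicit Provider(MappingCallback mapping) : mapping_(std::move(mapping)) {}

  void DoWithDeviceId(const std::string& source_id,
                      base::OnceCallback<void(bool)> done) {
    // The mapper may reply on any thread; BindToCurrentLoop() bounces the
    // reply back to the thread this method ran on before |done| is touched.
    mapping_.Run(source_id,
                 media::BindToCurrentLoop(base::BindOnce(
                     &Provider::OnDeviceId, weak_factory_.GetWeakPtr(),
                     std::move(done))));
  }

 private:
  void OnDeviceId(base::OnceCallback<void(bool)> done,
                  const base::Optional<std::string>& device_id) {
    std::move(done).Run(device_id.has_value());
  }

  MappingCallback mapping_;
  base::WeakPtrFactory<Provider> weak_factory_{this};
};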
diff --git a/chromium/media/capture/video/chromeos/camera_app_device_provider_impl.h b/chromium/media/capture/video/chromeos/camera_app_device_provider_impl.h
index 615d6bdbaac..6bf036ef614 100644
--- a/chromium/media/capture/video/chromeos/camera_app_device_provider_impl.h
+++ b/chromium/media/capture/video/chromeos/camera_app_device_provider_impl.h
@@ -32,12 +32,21 @@ class CAPTURE_EXPORT CameraAppDeviceProviderImpl
void GetCameraAppDevice(const std::string& source_id,
GetCameraAppDeviceCallback callback) override;
void IsSupported(IsSupportedCallback callback) override;
+ void SetMultipleStreamsEnabled(
+ const std::string& device_id,
+ bool enabled,
+ SetMultipleStreamsEnabledCallback callback) override;
private:
void GetCameraAppDeviceWithDeviceId(
GetCameraAppDeviceCallback callback,
const base::Optional<std::string>& device_id);
+ void SetMultipleStreamsEnabledWithDeviceId(
+ bool enable,
+ SetMultipleStreamsEnabledCallback callback,
+ const base::Optional<std::string>& device_id);
+
mojo::Remote<cros::mojom::CameraAppDeviceBridge> bridge_;
DeviceIdMappingCallback mapping_callback_;
@@ -51,4 +60,4 @@ class CAPTURE_EXPORT CameraAppDeviceProviderImpl
} // namespace media
-#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_APP_DEVICE_PROVIDER_IMPL_H_ \ No newline at end of file
+#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_APP_DEVICE_PROVIDER_IMPL_H_
diff --git a/chromium/media/capture/video/chromeos/camera_device_context.cc b/chromium/media/capture/video/chromeos/camera_device_context.cc
index 2de1625b056..6f5f6599821 100644
--- a/chromium/media/capture/video/chromeos/camera_device_context.cc
+++ b/chromium/media/capture/video/chromeos/camera_device_context.cc
@@ -153,4 +153,9 @@ bool CameraDeviceContext::ReserveVideoCaptureBufferFromPool(
return result == VideoCaptureDevice::Client::ReserveResult::kSucceeded;
}
+bool CameraDeviceContext::HasClient() {
+ base::AutoLock lock(client_lock_);
+ return !clients_.empty();
+}
+
} // namespace media
diff --git a/chromium/media/capture/video/chromeos/camera_device_delegate.cc b/chromium/media/capture/video/chromeos/camera_device_delegate.cc
index ab213906718..36164f2f071 100644
--- a/chromium/media/capture/video/chromeos/camera_device_delegate.cc
+++ b/chromium/media/capture/video/chromeos/camera_device_delegate.cc
@@ -11,6 +11,7 @@
#include <utility>
#include <vector>
+#include "ash/constants/ash_features.h"
#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/no_destructor.h"
@@ -22,6 +23,7 @@
#include "media/capture/mojom/image_capture_types.h"
#include "media/capture/video/blob_utils.h"
#include "media/capture/video/chromeos/camera_3a_controller.h"
+#include "media/capture/video/chromeos/camera_app_device_bridge_impl.h"
#include "media/capture/video/chromeos/camera_buffer_factory.h"
#include "media/capture/video/chromeos/camera_hal_delegate.h"
#include "media/capture/video/chromeos/camera_metadata_utils.h"
@@ -210,6 +212,8 @@ StreamType StreamIdToStreamType(uint64_t stream_id) {
return StreamType::kYUVInput;
case 3:
return StreamType::kYUVOutput;
+ case 4:
+ return StreamType::kRecordingOutput;
default:
return StreamType::kUnknown;
}
@@ -225,6 +229,8 @@ std::string StreamTypeToString(StreamType stream_type) {
return std::string("StreamType::kYUVInput");
case StreamType::kYUVOutput:
return std::string("StreamType::kYUVOutput");
+ case StreamType::kRecordingOutput:
+ return std::string("StreamType::kRecordingOutput");
default:
return std::string("Unknown StreamType value: ") +
base::NumberToString(static_cast<int32_t>(stream_type));
@@ -270,19 +276,15 @@ ResultMetadata::~ResultMetadata() = default;
CameraDeviceDelegate::CameraDeviceDelegate(
VideoCaptureDeviceDescriptor device_descriptor,
scoped_refptr<CameraHalDelegate> camera_hal_delegate,
- scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner,
- CameraAppDeviceImpl* camera_app_device,
- ClientType client_type)
+ scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner)
: device_descriptor_(device_descriptor),
camera_hal_delegate_(std::move(camera_hal_delegate)),
- ipc_task_runner_(std::move(ipc_task_runner)),
- camera_app_device_(camera_app_device),
- client_type_(client_type) {}
+ ipc_task_runner_(std::move(ipc_task_runner)) {}
CameraDeviceDelegate::~CameraDeviceDelegate() = default;
void CameraDeviceDelegate::AllocateAndStart(
- const VideoCaptureParams& params,
+ const base::flat_map<ClientType, VideoCaptureParams>& params,
CameraDeviceContext* device_context) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
@@ -291,7 +293,10 @@ void CameraDeviceDelegate::AllocateAndStart(
is_set_awb_mode_ = false;
is_set_brightness_ = false;
is_set_contrast_ = false;
+ is_set_exposure_compensation_ = false;
is_set_exposure_time_ = false;
+ is_set_focus_distance_ = false;
+ is_set_iso_ = false;
is_set_pan_ = false;
is_set_saturation_ = false;
is_set_sharpness_ = false;
@@ -426,6 +431,13 @@ void CameraDeviceDelegate::SetPhotoOptions(
   // Sets the vendor tag with the given |name| and |value|. Returns true if
   // the vendor tag is set and false otherwise.
+ auto to_uint8_vector = [](int32_t value) {
+ std::vector<uint8_t> temp(sizeof(int32_t));
+ auto* temp_ptr = reinterpret_cast<int32_t*>(temp.data());
+ *temp_ptr = value;
+ return temp;
+ };
+
auto set_vendor_int = [&](const std::string& name, bool has_field,
double value, bool is_set) {
const VendorTagInfo* info =
@@ -436,11 +448,8 @@ void CameraDeviceDelegate::SetPhotoOptions(
}
return false;
}
- std::vector<uint8_t> temp(sizeof(int32_t));
- auto* temp_ptr = reinterpret_cast<int32_t*>(temp.data());
- *temp_ptr = value;
request_manager_->SetRepeatingCaptureMetadata(info->tag, info->type, 1,
- std::move(temp));
+ to_uint8_vector(value));
return true;
};
is_set_brightness_ = set_vendor_int(kBrightness, settings->has_brightness,
@@ -524,6 +533,48 @@ void CameraDeviceDelegate::SetPhotoOptions(
is_set_exposure_time_ = false;
}
+ if (settings->has_focus_mode &&
+ settings->focus_mode == mojom::MeteringMode::MANUAL &&
+ settings->has_focus_distance) {
+ // |settings->focus_distance| is in meters, but the Android metadata
+ // expects diopters (1 / meters).
+ float focus_distance_diopters = 1.0f / settings->focus_distance;
+ camera_3a_controller_->SetFocusDistance(false, focus_distance_diopters);
+ is_set_focus_distance_ = true;
+ } else if (is_set_focus_distance_) {
+ camera_3a_controller_->SetFocusDistance(true, 0);
+ is_set_focus_distance_ = false;
+ }
+
+ if (settings->has_iso) {
+ request_manager_->SetRepeatingCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_SENSOR_SENSITIVITY,
+ cros::mojom::EntryType::TYPE_INT32, 1, to_uint8_vector(settings->iso));
+ is_set_iso_ = true;
+ if (!is_set_exposure_time_) {
+ LOG(WARNING) << "set iso doesn't work due to auto exposure time";
+ }
+ } else if (is_set_iso_) {
+ request_manager_->UnsetRepeatingCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_SENSOR_SENSITIVITY);
+ is_set_iso_ = false;
+ }
+
+ if (settings->has_exposure_compensation) {
+ int metadata_exposure_compensation =
+ std::round(settings->exposure_compensation / ae_compensation_step_);
+ request_manager_->SetRepeatingCaptureMetadata(
+ cros::mojom::CameraMetadataTag::
+ ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+ cros::mojom::EntryType::TYPE_INT32, 1,
+ to_uint8_vector(metadata_exposure_compensation));
+ is_set_exposure_compensation_ = true;
+ } else if (is_set_exposure_compensation_) {
+ request_manager_->UnsetRepeatingCaptureMetadata(
+ cros::mojom::CameraMetadataTag::
+ ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION);
+ is_set_exposure_compensation_ = false;
+ }
+
// If there is callback of SetPhotoOptions(), the streams might being
// reconfigured and we should notify them once the reconfiguration is done.
auto on_reconfigured_callback = base::BindOnce(
@@ -543,6 +594,18 @@ void CameraDeviceDelegate::SetPhotoOptions(
result_metadata_frame_number_for_photo_state_ = current_request_frame_number_;
}
+void CameraDeviceDelegate::ReconfigureStreams(
+ const base::flat_map<ClientType, VideoCaptureParams>& params) {
+ DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+ chrome_capture_params_ = params;
+ if (request_manager_) {
+ // ReconfigureStreams is used for video recording, which does not require
+ // a photo stream.
+ request_manager_->StopPreview(base::BindOnce(
+ &CameraDeviceDelegate::OnFlushed, GetWeakPtr(), false, base::nullopt));
+ }
+}
+
void CameraDeviceDelegate::SetRotation(int rotation) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
DCHECK(rotation >= 0 && rotation < 360 && rotation % 90 == 0);
@@ -570,11 +633,11 @@ bool CameraDeviceDelegate::MaybeReconfigureForPhotoStream(
gfx::Size new_blob_resolution(static_cast<int32_t>(settings->width),
static_cast<int32_t>(settings->height));
request_manager_->StopPreview(
- base::BindOnce(&CameraDeviceDelegate::OnFlushed, GetWeakPtr(),
+ base::BindOnce(&CameraDeviceDelegate::OnFlushed, GetWeakPtr(), true,
std::move(new_blob_resolution)));
} else {
request_manager_->StopPreview(base::BindOnce(
- &CameraDeviceDelegate::OnFlushed, GetWeakPtr(), base::nullopt));
+ &CameraDeviceDelegate::OnFlushed, GetWeakPtr(), true, base::nullopt));
}
return true;
}
@@ -595,7 +658,7 @@ void CameraDeviceDelegate::TakePhotoImpl() {
// Trigger the reconfigure process if it not yet triggered.
if (on_reconfigured_callbacks_.empty()) {
request_manager_->StopPreview(base::BindOnce(
- &CameraDeviceDelegate::OnFlushed, GetWeakPtr(), base::nullopt));
+ &CameraDeviceDelegate::OnFlushed, GetWeakPtr(), true, base::nullopt));
}
auto on_reconfigured_callback = base::BindOnce(
[](base::WeakPtr<Camera3AController> controller,
@@ -631,6 +694,7 @@ void CameraDeviceDelegate::OnMojoConnectionError() {
}
void CameraDeviceDelegate::OnFlushed(
+ bool require_photo,
base::Optional<gfx::Size> new_blob_resolution,
int32_t result) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
@@ -642,7 +706,7 @@ void CameraDeviceDelegate::OnFlushed(
return;
}
device_context_->SetState(CameraDeviceContext::State::kInitialized);
- ConfigureStreams(true, std::move(new_blob_resolution));
+ ConfigureStreams(require_photo, std::move(new_blob_resolution));
}
void CameraDeviceDelegate::OnClosed(int32_t result) {
@@ -710,13 +774,15 @@ void CameraDeviceDelegate::Initialize() {
DCHECK_EQ(device_context_->GetState(), CameraDeviceContext::State::kStarting);
mojo::PendingRemote<cros::mojom::Camera3CallbackOps> callback_ops;
+ // Assumes the buffer_type will be the same for all |chrome_capture_params|.
request_manager_ = std::make_unique<RequestManager>(
+ device_descriptor_.device_id,
callback_ops.InitWithNewPipeAndPassReceiver(),
std::make_unique<StreamCaptureInterfaceImpl>(GetWeakPtr()),
- device_context_, chrome_capture_params_.buffer_type,
+ device_context_,
+ chrome_capture_params_[ClientType::kPreviewClient].buffer_type,
std::make_unique<CameraBufferFactory>(),
- base::BindRepeating(&RotateAndBlobify), ipc_task_runner_,
- camera_app_device_, client_type_);
+ base::BindRepeating(&RotateAndBlobify), ipc_task_runner_);
camera_3a_controller_ = std::make_unique<Camera3AController>(
static_metadata_, request_manager_.get(), ipc_task_runner_);
device_ops_->Initialize(
@@ -744,10 +810,13 @@ void CameraDeviceDelegate::OnInitialized(int32_t result) {
}
device_context_->SetState(CameraDeviceContext::State::kInitialized);
bool require_photo = [&] {
- if (camera_app_device_ == nullptr) {
+ auto camera_app_device =
+ CameraAppDeviceBridgeImpl::GetInstance()->GetWeakCameraAppDevice(
+ device_descriptor_.device_id);
+ if (!camera_app_device) {
return false;
}
- auto capture_intent = camera_app_device_->GetCaptureIntent();
+ auto capture_intent = camera_app_device->GetCaptureIntent();
switch (capture_intent) {
case cros::mojom::CaptureIntent::DEFAULT:
return false;
@@ -770,27 +839,33 @@ void CameraDeviceDelegate::ConfigureStreams(
DCHECK_EQ(device_context_->GetState(),
CameraDeviceContext::State::kInitialized);
- // Set up context for preview stream.
- cros::mojom::Camera3StreamPtr preview_stream =
- cros::mojom::Camera3Stream::New();
- preview_stream->id = static_cast<uint64_t>(StreamType::kPreviewOutput);
- preview_stream->stream_type =
- cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT;
- preview_stream->width =
- chrome_capture_params_.requested_format.frame_size.width();
- preview_stream->height =
- chrome_capture_params_.requested_format.frame_size.height();
- preview_stream->format =
- cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_YCbCr_420_888;
- preview_stream->usage = cros::mojom::GRALLOC_USAGE_HW_COMPOSER |
- cros::mojom::GRALLOC_USAGE_HW_VIDEO_ENCODER;
- preview_stream->data_space = 0;
- preview_stream->rotation =
- cros::mojom::Camera3StreamRotation::CAMERA3_STREAM_ROTATION_0;
-
cros::mojom::Camera3StreamConfigurationPtr stream_config =
cros::mojom::Camera3StreamConfiguration::New();
- stream_config->streams.push_back(std::move(preview_stream));
+ for (const auto& param : chrome_capture_params_) {
+ // Set up context for preview stream and record stream.
+ cros::mojom::Camera3StreamPtr stream = cros::mojom::Camera3Stream::New();
+ StreamType stream_type = (param.first == ClientType::kPreviewClient)
+ ? StreamType::kPreviewOutput
+ : StreamType::kRecordingOutput;
+ // TODO(henryhsu): PreviewClient should remove HW_VIDEO_ENCODER usage when
+ // multiple streams are enabled.
+ auto usage = (param.first == ClientType::kPreviewClient)
+ ? (cros::mojom::GRALLOC_USAGE_HW_COMPOSER |
+ cros::mojom::GRALLOC_USAGE_HW_VIDEO_ENCODER)
+ : cros::mojom::GRALLOC_USAGE_HW_VIDEO_ENCODER;
+ stream->id = static_cast<uint64_t>(stream_type);
+ stream->stream_type = cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT;
+ stream->width = param.second.requested_format.frame_size.width();
+ stream->height = param.second.requested_format.frame_size.height();
+ stream->format =
+ cros::mojom::HalPixelFormat::HAL_PIXEL_FORMAT_YCbCr_420_888;
+ stream->usage = usage;
+ stream->data_space = 0;
+ stream->rotation =
+ cros::mojom::Camera3StreamRotation::CAMERA3_STREAM_ROTATION_0;
+
+ stream_config->streams.push_back(std::move(stream));
+ }
// Set up context for still capture stream. We set still capture stream to the
// JPEG stream configuration with maximum supported resolution.
@@ -898,22 +973,12 @@ void CameraDeviceDelegate::OnConfiguredStreams(
return;
}
- bool zero_shutter_lag_enabled = false;
- for (const auto& stream : updated_config->streams) {
- if (stream->usage & cros::mojom::GRALLOC_USAGE_ZERO_SHUTTER_LAG_ENABLED) {
- zero_shutter_lag_enabled = true;
- break;
- }
- }
- camera_3a_controller_->UpdateZeroShutterLagAvailability(
- zero_shutter_lag_enabled);
-
current_blob_resolution_.SetSize(blob_resolution.width(),
blob_resolution.height());
- request_manager_->SetUpStreamsAndBuffers(
- chrome_capture_params_.requested_format, static_metadata_,
- std::move(updated_config->streams));
+ request_manager_->SetUpStreamsAndBuffers(chrome_capture_params_,
+ static_metadata_,
+ std::move(updated_config->streams));
device_context_->SetState(CameraDeviceContext::State::kStreamConfigured);
// Kick off the preview stream.
@@ -1001,9 +1066,12 @@ void CameraDeviceDelegate::ConstructDefaultRequestSettings(
if (stream_type == StreamType::kPreviewOutput) {
// CCA uses the same stream for preview and video recording. Choose proper
// template here so the underlying camera HAL can set 3A tuning accordingly.
+ auto camera_app_device =
+ CameraAppDeviceBridgeImpl::GetInstance()->GetWeakCameraAppDevice(
+ device_descriptor_.device_id);
auto request_template =
- camera_app_device_ && camera_app_device_->GetCaptureIntent() ==
- cros::mojom::CaptureIntent::VIDEO_RECORD
+ camera_app_device && camera_app_device->GetCaptureIntent() ==
+ cros::mojom::CaptureIntent::VIDEO_RECORD
? cros::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_VIDEO_RECORD
: cros::mojom::Camera3RequestTemplate::CAMERA3_TEMPLATE_PREVIEW;
device_ops_->ConstructDefaultRequestSettings(
@@ -1040,51 +1108,29 @@ void CameraDeviceDelegate::OnConstructedDefaultPreviewRequestSettings(
return;
}
- if (camera_app_device_) {
- OnGotFpsRange(std::move(settings), camera_app_device_->GetFpsRange());
- } else {
- OnGotFpsRange(std::move(settings), {});
- }
-}
-
-void CameraDeviceDelegate::OnConstructedDefaultStillCaptureRequestSettings(
- cros::mojom::CameraMetadataPtr settings) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
- while (!take_photo_callbacks_.empty()) {
- auto take_photo_callback = base::BindOnce(
- &TakePhotoCallbackBundle, std::move(take_photo_callbacks_.front()),
- base::BindOnce(&Camera3AController::SetAutoFocusModeForStillCapture,
- camera_3a_controller_->GetWeakPtr()));
- if (camera_app_device_) {
- camera_app_device_->ConsumeReprocessOptions(
- std::move(take_photo_callback),
- media::BindToCurrentLoop(base::BindOnce(
- &RequestManager::TakePhoto, request_manager_->GetWeakPtr(),
- settings.Clone())));
- } else {
- request_manager_->TakePhoto(
- settings.Clone(), CameraAppDeviceImpl::GetSingleShotReprocessOptions(
- std::move(take_photo_callback)));
- }
- take_photo_callbacks_.pop();
- }
-}
-
-void CameraDeviceDelegate::OnGotFpsRange(
- cros::mojom::CameraMetadataPtr settings,
- base::Optional<gfx::Range> specified_fps_range) {
device_context_->SetState(CameraDeviceContext::State::kCapturing);
camera_3a_controller_->SetAutoFocusModeForStillCapture();
- if (specified_fps_range.has_value()) {
+
+ auto camera_app_device =
+ CameraAppDeviceBridgeImpl::GetInstance()->GetWeakCameraAppDevice(
+ device_descriptor_.device_id);
+ auto specified_fps_range =
+ camera_app_device ? camera_app_device->GetFpsRange() : base::nullopt;
+ if (specified_fps_range) {
SetFpsRangeInMetadata(&settings, specified_fps_range->GetMin(),
specified_fps_range->GetMax());
} else {
+ // Assumes the frame_rate will be the same for all |chrome_capture_params|.
int32_t requested_frame_rate =
- std::round(chrome_capture_params_.requested_format.frame_rate);
+ std::round(chrome_capture_params_[ClientType::kPreviewClient]
+ .requested_format.frame_rate);
bool prefer_constant_frame_rate =
- camera_app_device_ && camera_app_device_->GetCaptureIntent() ==
- cros::mojom::CaptureIntent::VIDEO_RECORD;
+ base::FeatureList::IsEnabled(
+ chromeos::features::kPreferConstantFrameRate) ||
+ (camera_app_device && camera_app_device->GetCaptureIntent() ==
+ cros::mojom::CaptureIntent::VIDEO_RECORD);
int32_t target_min, target_max;
std::tie(target_min, target_max) = GetTargetFrameRateRange(
static_metadata_, requested_frame_rate, prefer_constant_frame_rate);
@@ -1110,8 +1156,38 @@ void CameraDeviceDelegate::OnGotFpsRange(
}
}
+void CameraDeviceDelegate::OnConstructedDefaultStillCaptureRequestSettings(
+ cros::mojom::CameraMetadataPtr settings) {
+ DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+
+ auto camera_app_device =
+ CameraAppDeviceBridgeImpl::GetInstance()->GetWeakCameraAppDevice(
+ device_descriptor_.device_id);
+
+ while (!take_photo_callbacks_.empty()) {
+ auto take_photo_callback = base::BindOnce(
+ &TakePhotoCallbackBundle, std::move(take_photo_callbacks_.front()),
+ base::BindOnce(&Camera3AController::SetAutoFocusModeForStillCapture,
+ camera_3a_controller_->GetWeakPtr()));
+ if (camera_app_device) {
+ camera_app_device->ConsumeReprocessOptions(
+ std::move(take_photo_callback),
+ media::BindToCurrentLoop(base::BindOnce(
+ &RequestManager::TakePhoto, request_manager_->GetWeakPtr(),
+ settings.Clone())));
+ } else {
+ request_manager_->TakePhoto(
+ settings.Clone(), CameraAppDeviceImpl::GetSingleShotReprocessOptions(
+ std::move(take_photo_callback)));
+ }
+ take_photo_callbacks_.pop();
+ }
+}
+
gfx::Size CameraDeviceDelegate::GetBlobResolution(
base::Optional<gfx::Size> new_blob_resolution) {
+ DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+
std::vector<gfx::Size> blob_resolutions;
GetStreamResolutions(
static_metadata_, cros::mojom::Camera3StreamType::CAMERA3_STREAM_OUTPUT,
@@ -1128,9 +1204,12 @@ gfx::Size CameraDeviceDelegate::GetBlobResolution(
return *new_blob_resolution;
}
- if (camera_app_device_) {
+ auto camera_app_device =
+ CameraAppDeviceBridgeImpl::GetInstance()->GetWeakCameraAppDevice(
+ device_descriptor_.device_id);
+ if (camera_app_device) {
auto specified_capture_resolution =
- camera_app_device_->GetStillCaptureResolution();
+ camera_app_device->GetStillCaptureResolution();
if (!specified_capture_resolution.IsEmpty() &&
base::Contains(blob_resolutions, specified_capture_resolution)) {
return specified_capture_resolution;
@@ -1299,6 +1378,33 @@ void CameraDeviceDelegate::OnResultMetadataAvailable(
if (awb_mode.size() == 1)
result_metadata_.awb_mode = awb_mode[0];
+ result_metadata_.af_mode.reset();
+ auto af_mode = GetMetadataEntryAsSpan<uint8_t>(
+ result_metadata, cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_MODE);
+ if (af_mode.size() == 1)
+ result_metadata_.af_mode = af_mode[0];
+
+ result_metadata_.focus_distance.reset();
+ auto focus_distance = GetMetadataEntryAsSpan<float>(
+ result_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_LENS_FOCUS_DISTANCE);
+ if (focus_distance.size() == 1)
+ result_metadata_.focus_distance = focus_distance[0];
+
+ result_metadata_.sensitivity.reset();
+ auto sensitivity = GetMetadataEntryAsSpan<int32_t>(
+ result_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_SENSOR_SENSITIVITY);
+ if (sensitivity.size() == 1)
+ result_metadata_.sensitivity = sensitivity[0];
+
+ result_metadata_.ae_compensation.reset();
+ auto ae_compensation = GetMetadataEntryAsSpan<int32_t>(
+ result_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION);
+ if (ae_compensation.size() == 1)
+ result_metadata_.ae_compensation = ae_compensation[0];
+
result_metadata_frame_number_ = frame_number;
   // We need to wait for new result metadata that reflects the new settings.
if (result_metadata_frame_number_ >
@@ -1470,6 +1576,96 @@ void CameraDeviceDelegate::DoGetPhotoState(
result_metadata_.exposure_time.value() / (100 * kMicroToNano);
}
+ auto af_available_modes = GetMetadataEntryAsSpan<uint8_t>(
+ static_metadata_,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AF_AVAILABLE_MODES);
+ bool support_manual_focus_distance = false;
+ if (af_available_modes.size() > 1 && result_metadata_.af_mode) {
+ support_manual_focus_distance = base::Contains(
+ af_available_modes,
+ static_cast<uint8_t>(
+ cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF));
+ }
+
+ auto minimum_focus_distance = GetMetadataEntryAsSpan<float>(
+ static_metadata_,
+ cros::mojom::CameraMetadataTag::ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE);
+ // If the lens is fixed-focus, minimum_focus_distance will be 0.
+ if (support_manual_focus_distance && minimum_focus_distance.size() == 1 &&
+ minimum_focus_distance[0] != 0 && result_metadata_.focus_distance) {
+ photo_state->supported_focus_modes.push_back(mojom::MeteringMode::MANUAL);
+ photo_state->supported_focus_modes.push_back(
+ mojom::MeteringMode::CONTINUOUS);
+ if (result_metadata_.af_mode ==
+ static_cast<uint8_t>(
+ cros::mojom::AndroidControlAfMode::ANDROID_CONTROL_AF_MODE_OFF))
+ photo_state->current_focus_mode = mojom::MeteringMode::MANUAL;
+ else
+ photo_state->current_focus_mode = mojom::MeteringMode::CONTINUOUS;
+
+ // |photo_state->focus_distance| is in meters, while the metadata value is
+ // in diopters.
+ photo_state->focus_distance->min =
+ std::roundf(100.0 / minimum_focus_distance[0]) / 100.0;
+ photo_state->focus_distance->max = std::numeric_limits<double>::infinity();
+ photo_state->focus_distance->step = 0.01;
+ if (result_metadata_.focus_distance.value() == 0) {
+ photo_state->focus_distance->current =
+ std::numeric_limits<double>::infinity();
+ } else {
+ // Make sure |current| is a value reachable as |min| + 0.01 * N. Since
+ // |min| is already a multiple of the 0.01 step, it is enough to round
+ // |current| to the nearest 0.01.
+ double meters = 1.0 / result_metadata_.focus_distance.value();
+ photo_state->focus_distance->current = std::roundf(meters * 100) / 100.0;
+ }
+ }
+
+ auto sensitivity_range = GetMetadataEntryAsSpan<int32_t>(
+ static_metadata_,
+ cros::mojom::CameraMetadataTag::ANDROID_SENSOR_INFO_SENSITIVITY_RANGE);
+ if (sensitivity_range.size() == 2 && result_metadata_.sensitivity) {
+ photo_state->iso->min = sensitivity_range[0];
+ photo_state->iso->max = sensitivity_range[1];
+ photo_state->iso->step = 1;
+ photo_state->iso->current = result_metadata_.sensitivity.value();
+ }
+
+ auto ae_compensation_range = GetMetadataEntryAsSpan<int32_t>(
+ static_metadata_,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_COMPENSATION_RANGE);
+ ae_compensation_step_ = 0.0;
+ if (ae_compensation_range.size() == 2) {
+ if (ae_compensation_range[0] != 0 || ae_compensation_range[1] != 0) {
+ auto ae_compensation_step = GetMetadataEntryAsSpan<Rational>(
+ static_metadata_,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_COMPENSATION_STEP);
+ if (ae_compensation_step.size() == 1) {
+ if (ae_compensation_step[0].numerator == 0 ||
+ ae_compensation_step[0].denominator == 0) {
+ LOG(WARNING) << "AE_COMPENSATION_STEP: numerator:"
+ << ae_compensation_step[0].numerator << ", denominator:"
+ << ae_compensation_step[0].denominator;
+ } else {
+ ae_compensation_step_ =
+ static_cast<float>(ae_compensation_step[0].numerator) /
+ static_cast<float>(ae_compensation_step[0].denominator);
+ photo_state->exposure_compensation->min =
+ ae_compensation_range[0] * ae_compensation_step_;
+ photo_state->exposure_compensation->max =
+ ae_compensation_range[1] * ae_compensation_step_;
+ photo_state->exposure_compensation->step = ae_compensation_step_;
+ if (result_metadata_.ae_compensation)
+ photo_state->exposure_compensation->current =
+ result_metadata_.ae_compensation.value() *
+ ae_compensation_step_;
+ else
+ photo_state->exposure_compensation->current = 0;
+ }
+ }
+ }
+ }
+
std::move(callback).Run(std::move(photo_state));
}
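To make the unit conversions in DoGetPhotoState() and SetPhotoOptions() concrete, a worked sketch with invented metadata values:

// Worked example (invented values) mirroring the arithmetic above.
// AE compensation: the HAL reports the step as a rational, e.g. 1/3 EV, and
// the range in steps, e.g. [-6, 6].
const float kStep = 1.0f / 3.0f;  // ANDROID_CONTROL_AE_COMPENSATION_STEP
const int kRangeMin = -6;         // ANDROID_CONTROL_AE_COMPENSATION_RANGE
const int kRangeMax = 6;
// Web-facing range in EV: min = -6 * (1/3) = -2.0, max = +2.0, step ~ 0.33.
// A request of +1.0 EV maps back to std::round(1.0 / kStep) == 3 steps.

// Focus distance: metadata is in diopters, the web API in meters.
const float kMinFocusDiopters = 10.0f;  // ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE
// min meters = std::roundf(100.0 / 10.0) / 100.0 = 0.10 m.
// A reported distance of 2.5 diopters reads back as
// std::roundf(100 * (1 / 2.5)) / 100.0 = 0.40 m.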
diff --git a/chromium/media/capture/video/chromeos/camera_device_delegate.h b/chromium/media/capture/video/chromeos/camera_device_delegate.h
index 01a37cfd67e..3f821f96b54 100644
--- a/chromium/media/capture/video/chromeos/camera_device_delegate.h
+++ b/chromium/media/capture/video/chromeos/camera_device_delegate.h
@@ -8,6 +8,7 @@
#include <memory>
#include <queue>
+#include "base/containers/flat_map.h"
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
#include "base/optional.h"
@@ -34,7 +35,19 @@ enum class StreamType : uint64_t {
kJpegOutput = 1,
kYUVInput = 2,
kYUVOutput = 3,
- kUnknown,
+ kRecordingOutput = 4,
+ kUnknown = 5,
+};
+
+// Maps each StreamType to the ClientType it belongs to; the array is indexed
+// by the StreamType value.
+constexpr std::array<ClientType, static_cast<int>(StreamType::kUnknown)>
+ kStreamClientTypeMap = {
+ ClientType::kPreviewClient, // kPreviewOutput
+ ClientType::kPreviewClient, // kJpegOutput
+ ClientType::kPreviewClient, // kYUVInput
+ ClientType::kPreviewClient, // kYUVOutput
+ ClientType::kVideoClient, // kRecordingOutput
};
// The metadata might be large so clone a whole metadata might be relatively
@@ -44,12 +57,16 @@ struct ResultMetadata {
~ResultMetadata();
base::Optional<uint8_t> ae_mode;
+ base::Optional<int32_t> ae_compensation;
+ base::Optional<uint8_t> af_mode;
base::Optional<uint8_t> awb_mode;
base::Optional<int32_t> brightness;
base::Optional<int32_t> contrast;
base::Optional<int64_t> exposure_time;
+ base::Optional<float> focus_distance;
base::Optional<int32_t> pan;
base::Optional<int32_t> saturation;
+ base::Optional<int32_t> sensitivity;
base::Optional<int32_t> sharpness;
base::Optional<int32_t> tilt;
base::Optional<int32_t> zoom;
@@ -92,27 +109,34 @@ class CAPTURE_EXPORT StreamCaptureInterface {
// AllocateAndStart of VideoCaptureDeviceArcChromeOS runs on. All the methods
// in CameraDeviceDelegate run on |ipc_task_runner_| and hence all the
// access to member variables is sequenced.
+//
+// CameraDeviceDelegate supports multiple clients.
+// It uses the first client for the preview and photo streams, and the second
+// client for the recording stream. The second client is a virtual camera
+// device which is only used in CCA.
class CAPTURE_EXPORT CameraDeviceDelegate final
: public CaptureMetadataDispatcher::ResultMetadataObserver {
public:
CameraDeviceDelegate(
VideoCaptureDeviceDescriptor device_descriptor,
scoped_refptr<CameraHalDelegate> camera_hal_delegate,
- scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner,
- CameraAppDeviceImpl* camera_app_device,
- ClientType client_type);
+ scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner);
~CameraDeviceDelegate() final;
// Delegation methods for the VideoCaptureDevice interface.
- void AllocateAndStart(const VideoCaptureParams& params,
- CameraDeviceContext* device_context);
+ void AllocateAndStart(
+ const base::flat_map<ClientType, VideoCaptureParams>& params,
+ CameraDeviceContext* device_context);
void StopAndDeAllocate(base::OnceClosure device_close_callback);
void TakePhoto(VideoCaptureDevice::TakePhotoCallback callback);
void GetPhotoState(VideoCaptureDevice::GetPhotoStateCallback callback);
void SetPhotoOptions(mojom::PhotoSettingsPtr settings,
VideoCaptureDevice::SetPhotoOptionsCallback callback);
+ void ReconfigureStreams(
+ const base::flat_map<ClientType, VideoCaptureParams>& params);
+
// Sets the frame rotation angle in |rotation_|. |rotation_| is clockwise
// rotation in degrees, and is passed to |client_| along with the captured
// frames.
@@ -134,8 +158,10 @@ class CAPTURE_EXPORT CameraDeviceDelegate final
// Mojo connection error handler.
void OnMojoConnectionError();
- // Reconfigure streams for picture taking.
- void OnFlushed(base::Optional<gfx::Size> new_blob_resolution, int32_t result);
+ // Reconfigure streams for picture taking and recording.
+ void OnFlushed(bool require_photo,
+ base::Optional<gfx::Size> new_blob_resolution,
+ int32_t result);
// Callback method for the Close Mojo IPC call. This method resets the Mojo
// connection and closes the camera device.
@@ -188,9 +214,6 @@ class CAPTURE_EXPORT CameraDeviceDelegate final
void OnConstructedDefaultStillCaptureRequestSettings(
cros::mojom::CameraMetadataPtr settings);
- void OnGotFpsRange(cros::mojom::CameraMetadataPtr settings,
- base::Optional<gfx::Range> specified_fps_range);
-
gfx::Size GetBlobResolution(base::Optional<gfx::Size> new_blob_resolution);
// StreamCaptureInterface implementations. These methods are called by
@@ -222,7 +245,8 @@ class CAPTURE_EXPORT CameraDeviceDelegate final
const scoped_refptr<CameraHalDelegate> camera_hal_delegate_;
- VideoCaptureParams chrome_capture_params_;
+  // Maps each client type to its video capture parameters.
+ base::flat_map<ClientType, VideoCaptureParams> chrome_capture_params_;
CameraDeviceContext* device_context_;
@@ -246,13 +270,16 @@ class CAPTURE_EXPORT CameraDeviceDelegate final
std::queue<base::OnceClosure> on_reconfigured_callbacks_;
- CameraAppDeviceImpl* camera_app_device_; // Weak.
+ base::WeakPtr<CameraAppDeviceImpl> camera_app_device_;
// States of SetPhotoOptions
bool is_set_awb_mode_;
bool is_set_brightness_;
bool is_set_contrast_;
+ bool is_set_exposure_compensation_;
bool is_set_exposure_time_;
+ bool is_set_focus_distance_;
+ bool is_set_iso_;
bool is_set_pan_;
bool is_set_saturation_;
bool is_set_sharpness_;
@@ -261,6 +288,8 @@ class CAPTURE_EXPORT CameraDeviceDelegate final
std::vector<base::OnceClosure> get_photo_state_queue_;
bool use_digital_zoom_;
+ float ae_compensation_step_;
+
// We reply GetPhotoState when |result_metadata_frame_number_| >
// |result_metadata_frame_number_for_photo_state_|. Otherwise javascript API
// getSettings() will get non-updated settings.
@@ -272,8 +301,6 @@ class CAPTURE_EXPORT CameraDeviceDelegate final
ResultMetadata result_metadata_;
gfx::Rect active_array_size_;
- ClientType client_type_;
-
base::WeakPtrFactory<CameraDeviceDelegate> weak_ptr_factory_{this};
DISALLOW_IMPLICIT_CONSTRUCTORS(CameraDeviceDelegate);
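The new AllocateAndStart() signature above takes one VideoCaptureParams per client. A hypothetical caller might populate the map like this (a sketch only; Chromium capture types assumed, formats illustrative):

```cpp
// One entry per client, mirroring the new per-client parameter map.
base::flat_map<ClientType, VideoCaptureParams> capture_params;

VideoCaptureParams preview_params;
preview_params.requested_format = {gfx::Size(1280, 720), 30.0f,
                                   PIXEL_FORMAT_I420};
capture_params[ClientType::kPreviewClient] = preview_params;  // preview + photo

VideoCaptureParams recording_params;
recording_params.requested_format = {gfx::Size(1920, 1080), 30.0f,
                                     PIXEL_FORMAT_I420};
capture_params[ClientType::kVideoClient] = recording_params;  // kRecordingOutput

camera_device_delegate->AllocateAndStart(capture_params, device_context);
```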
diff --git a/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc b/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc
index 01ed4b806da..1c1bb4ad006 100644
--- a/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc
+++ b/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc
@@ -103,6 +103,15 @@ class MockCameraDevice : public cros::mojom::Camera3DeviceOps {
void Close(CloseCallback callback) override { DoClose(callback); }
MOCK_METHOD1(DoClose, void(CloseCallback& callback));
+ void ConfigureStreamsAndGetAllocatedBuffers(
+ cros::mojom::Camera3StreamConfigurationPtr config,
+ ConfigureStreamsAndGetAllocatedBuffersCallback callback) override {
+ DoConfigureStreamsAndGetAllocatedBuffers(config, callback);
+ }
+ MOCK_METHOD2(DoConfigureStreamsAndGetAllocatedBuffers,
+ void(cros::mojom::Camera3StreamConfigurationPtr& config,
+ ConfigureStreamsAndGetAllocatedBuffersCallback& callback));
+
private:
DISALLOW_COPY_AND_ASSIGN(MockCameraDevice);
};
@@ -111,11 +120,13 @@ constexpr int32_t kJpegMaxBufferSize = 1024;
constexpr size_t kDefaultWidth = 1280, kDefaultHeight = 720;
constexpr int32_t kDefaultMinFrameRate = 1, kDefaultMaxFrameRate = 30;
-VideoCaptureParams GetDefaultCaptureParams() {
+base::flat_map<ClientType, VideoCaptureParams> GetDefaultCaptureParams() {
VideoCaptureParams params;
+ base::flat_map<ClientType, VideoCaptureParams> capture_params;
params.requested_format = {gfx::Size(kDefaultWidth, kDefaultHeight),
float{kDefaultMaxFrameRate}, PIXEL_FORMAT_I420};
- return params;
+ capture_params[ClientType::kPreviewClient] = params;
+ return capture_params;
}
} // namespace
@@ -162,7 +173,7 @@ class CameraDeviceDelegateTest : public ::testing::Test {
camera_device_delegate_ = std::make_unique<CameraDeviceDelegate>(
devices_info[0].descriptor, camera_hal_delegate_,
- device_delegate_thread_.task_runner(), nullptr, client_type_);
+ device_delegate_thread_.task_runner());
}
void GetNumberOfFakeCameras(
@@ -354,7 +365,7 @@ class CameraDeviceDelegateTest : public ::testing::Test {
.Times(1)
.WillOnce(
Invoke(this, &CameraDeviceDelegateTest::GetNumberOfFakeCameras));
- EXPECT_CALL(mock_camera_module_, DoSetCallbacks(_, _)).Times(1);
+ EXPECT_CALL(mock_camera_module_, DoSetCallbacksAssociated(_, _)).Times(1);
EXPECT_CALL(mock_camera_module_, DoGetVendorTagOps(_, _))
.Times(1)
.WillOnce(Invoke(this, &CameraDeviceDelegateTest::GetFakeVendorTagOps));
@@ -381,12 +392,11 @@ class CameraDeviceDelegateTest : public ::testing::Test {
.Times(1)
.WillOnce(
Invoke(this, &CameraDeviceDelegateTest::ConfigureFakeStreams));
- EXPECT_CALL(
- mock_gpu_memory_buffer_manager_,
- CreateGpuMemoryBuffer(
- _, gfx::BufferFormat::YUV_420_BIPLANAR,
- gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE,
- gpu::kNullSurfaceHandle))
+ EXPECT_CALL(mock_gpu_memory_buffer_manager_,
+ CreateGpuMemoryBuffer(
+ _, gfx::BufferFormat::YUV_420_BIPLANAR,
+ gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE,
+ gpu::kNullSurfaceHandle))
.Times(1)
.WillOnce(Invoke(&unittest_internal::MockGpuMemoryBufferManager::
CreateFakeGpuMemoryBuffer));
@@ -398,13 +408,12 @@ class CameraDeviceDelegateTest : public ::testing::Test {
.Times(AtMost(1))
.WillOnce(Invoke(&unittest_internal::MockGpuMemoryBufferManager::
CreateFakeGpuMemoryBuffer));
- EXPECT_CALL(
- mock_gpu_memory_buffer_manager_,
- CreateGpuMemoryBuffer(
- gfx::Size(kDefaultWidth, kDefaultHeight),
- gfx::BufferFormat::YUV_420_BIPLANAR,
- gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE,
- gpu::kNullSurfaceHandle))
+ EXPECT_CALL(mock_gpu_memory_buffer_manager_,
+ CreateGpuMemoryBuffer(
+ gfx::Size(kDefaultWidth, kDefaultHeight),
+ gfx::BufferFormat::YUV_420_BIPLANAR,
+ gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE,
+ gpu::kNullSurfaceHandle))
.Times(1)
.WillOnce(Invoke(&unittest_internal::MockGpuMemoryBufferManager::
CreateFakeGpuMemoryBuffer));
diff --git a/chromium/media/capture/video/chromeos/camera_hal_delegate.cc b/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
index a0bee569637..3ce1c4c0583 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
+++ b/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
@@ -19,11 +19,14 @@
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_piece.h"
#include "base/strings/string_split.h"
+#include "base/strings/string_util.h"
#include "base/system/system_monitor.h"
-#include "media/capture/video/chromeos/camera_app_device_bridge_impl.h"
+#include "base/unguessable_token.h"
+#include "components/device_event_log/device_event_log.h"
+#include "media/capture/video/chromeos/ash/camera_hal_dispatcher_impl.h"
#include "media/capture/video/chromeos/camera_buffer_factory.h"
-#include "media/capture/video/chromeos/camera_hal_dispatcher_impl.h"
#include "media/capture/video/chromeos/camera_metadata_utils.h"
+#include "media/capture/video/chromeos/video_capture_device_chromeos_delegate.h"
#include "media/capture/video/chromeos/video_capture_device_chromeos_halv3.h"
namespace media {
@@ -31,6 +34,7 @@ namespace media {
namespace {
constexpr int32_t kDefaultFps = 30;
+constexpr char kVirtualPrefix[] = "VIRTUAL_";
constexpr base::TimeDelta kEventWaitTimeoutSecs =
base::TimeDelta::FromSeconds(1);
@@ -38,8 +42,11 @@ constexpr base::TimeDelta kEventWaitTimeoutSecs =
class LocalCameraClientObserver : public CameraClientObserver {
public:
explicit LocalCameraClientObserver(
- scoped_refptr<CameraHalDelegate> camera_hal_delegate)
- : camera_hal_delegate_(std::move(camera_hal_delegate)) {}
+ scoped_refptr<CameraHalDelegate> camera_hal_delegate,
+ cros::mojom::CameraClientType type,
+ base::UnguessableToken auth_token)
+ : CameraClientObserver(type, std::move(auth_token)),
+ camera_hal_delegate_(std::move(camera_hal_delegate)) {}
void OnChannelCreated(
mojo::PendingRemote<cros::mojom::CameraModule> camera_module) override {
@@ -117,7 +124,8 @@ base::flat_set<int32_t> GetAvailableFramerates(
CameraHalDelegate::CameraHalDelegate(
scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner)
- : camera_module_has_been_set_(
+ : authenticated_(false),
+ camera_module_has_been_set_(
base::WaitableEvent::ResetPolicy::MANUAL,
base::WaitableEvent::InitialState::NOT_SIGNALED),
builtin_camera_info_updated_(
@@ -138,9 +146,27 @@ CameraHalDelegate::CameraHalDelegate(
CameraHalDelegate::~CameraHalDelegate() = default;
-void CameraHalDelegate::RegisterCameraClient() {
- CameraHalDispatcherImpl::GetInstance()->AddClientObserver(
- std::make_unique<LocalCameraClientObserver>(this));
+bool CameraHalDelegate::RegisterCameraClient() {
+ auto* dispatcher = CameraHalDispatcherImpl::GetInstance();
+ auto type = cros::mojom::CameraClientType::CHROME;
+ dispatcher->AddClientObserver(
+ std::make_unique<LocalCameraClientObserver>(
+ this, type, dispatcher->GetTokenForTrustedClient(type)),
+ base::BindOnce(&CameraHalDelegate::OnRegisteredCameraHalClient,
+ base::Unretained(this)));
+ camera_hal_client_registered_.Wait();
+ return authenticated_;
+}
+
+void CameraHalDelegate::OnRegisteredCameraHalClient(int32_t result) {
+ if (result != 0) {
+ LOG(ERROR) << "Failed to register camera HAL client";
+ camera_hal_client_registered_.Signal();
+ return;
+ }
+ CAMERA_LOG(EVENT) << "Registered camera HAL client";
+ authenticated_ = true;
+ camera_hal_client_registered_.Signal();
}
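RegisterCameraClient() above blocks on |camera_hal_client_registered_| until OnRegisteredCameraHalClient() records the result and signals. A standalone model of that blocking hand-off, using std::future in place of base::WaitableEvent (a sketch, not the patch's code):

```cpp
#include <cstdint>
#include <cstdio>
#include <future>
#include <thread>

bool RegisterBlocking() {
  std::promise<int32_t> registered;          // plays camera_hal_client_registered_
  std::future<int32_t> result = registered.get_future();

  // Stand-in for the dispatcher delivering OnRegisteredCameraHalClient(0).
  std::thread dispatcher([&registered] { registered.set_value(0); });

  bool authenticated = (result.get() == 0);  // blocks, like Wait()
  dispatcher.join();
  return authenticated;
}

int main() {
  std::printf("authenticated: %d\n", RegisterBlocking());
  return 0;
}
```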
void CameraHalDelegate::SetCameraModule(
@@ -158,8 +184,7 @@ void CameraHalDelegate::Reset() {
std::unique_ptr<VideoCaptureDevice> CameraHalDelegate::CreateDevice(
scoped_refptr<base::SingleThreadTaskRunner> task_runner_for_screen_observer,
- const VideoCaptureDeviceDescriptor& device_descriptor,
- CameraAppDeviceBridgeImpl* camera_app_device_bridge) {
+ const VideoCaptureDeviceDescriptor& device_descriptor) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (!UpdateBuiltInCameraInfo()) {
return nullptr;
@@ -170,35 +195,17 @@ std::unique_ptr<VideoCaptureDevice> CameraHalDelegate::CreateDevice(
return nullptr;
}
- if (camera_app_device_bridge) {
- auto* camera_app_device = camera_app_device_bridge->GetCameraAppDevice(
- device_descriptor.device_id);
- // Since the cleanup callback will be triggered when VideoCaptureDevice died
- // and |camera_app_device_bridge| is actually owned by
- // VideoCaptureServiceImpl, it should be safe to assume
- // |camera_app_device_bridge| is still valid here.
- auto cleanup_callback = base::BindOnce(
- [](const std::string& device_id, CameraAppDeviceBridgeImpl* bridge) {
- bridge->OnDeviceClosed(device_id);
- },
- device_descriptor.device_id, camera_app_device_bridge);
- return std::make_unique<VideoCaptureDeviceChromeOSHalv3>(
- std::move(task_runner_for_screen_observer), device_descriptor, this,
- camera_app_device, std::move(cleanup_callback));
- } else {
- return std::make_unique<VideoCaptureDeviceChromeOSHalv3>(
- std::move(task_runner_for_screen_observer), device_descriptor, this,
- nullptr, base::DoNothing());
- }
+ auto* delegate =
+ GetVCDDelegate(task_runner_for_screen_observer, device_descriptor);
+ return std::make_unique<VideoCaptureDeviceChromeOSHalv3>(delegate,
+ device_descriptor);
}
void CameraHalDelegate::GetSupportedFormats(
- int camera_id,
+ const cros::mojom::CameraInfoPtr& camera_info,
VideoCaptureFormats* supported_formats) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- const cros::mojom::CameraInfoPtr& camera_info = camera_info_[camera_id];
-
base::flat_set<int32_t> candidate_fps_set =
GetAvailableFramerates(camera_info);
@@ -244,7 +251,7 @@ void CameraHalDelegate::GetSupportedFormats(
// There's no consumer information here to determine the buffer usage, so
// hard-code the usage that all the clients should be using.
constexpr gfx::BufferUsage kClientBufferUsage =
- gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE;
+ gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE;
const ChromiumPixelFormat cr_format =
camera_buffer_factory_->ResolveStreamBufferFormat(hal_format,
kClientBufferUsage);
@@ -257,8 +264,9 @@ void CameraHalDelegate::GetSupportedFormats(
continue;
}
- VLOG(1) << "Supported format: " << width << "x" << height
- << " fps=" << fps << " format=" << cr_format.video_format;
+ CAMERA_LOG(EVENT) << "Supported format: " << width << "x" << height
+ << " fps=" << fps
+ << " format=" << cr_format.video_format;
supported_formats->emplace_back(gfx::Size(width, height), fps,
cr_format.video_format);
}
@@ -287,6 +295,7 @@ void CameraHalDelegate::GetDevicesInfo(
{
base::AutoLock info_lock(camera_info_lock_);
base::AutoLock id_map_lock(device_id_to_camera_id_lock_);
+ base::AutoLock virtual_lock(enable_virtual_device_lock_);
for (const auto& it : camera_info_) {
int camera_id = it.first;
const cros::mojom::CameraInfoPtr& camera_info = it.second;
@@ -334,6 +343,12 @@ void CameraHalDelegate::GetDevicesInfo(
// Mojo validates the input parameters for us so we don't need to
// worry about malformed values.
}
+ case cros::mojom::CameraFacing::CAMERA_FACING_VIRTUAL_BACK:
+ case cros::mojom::CameraFacing::CAMERA_FACING_VIRTUAL_FRONT:
+ case cros::mojom::CameraFacing::CAMERA_FACING_VIRTUAL_EXTERNAL:
+ // |camera_info_| should not have these facing types.
+ LOG(ERROR) << "Invalid facing type: " << camera_info->facing;
+ break;
}
auto* vid = get_vendor_string("com.google.usb.vendorId");
auto* pid = get_vendor_string("com.google.usb.productId");
@@ -343,10 +358,22 @@ void CameraHalDelegate::GetDevicesInfo(
desc.set_control_support(GetControlSupport(camera_info));
device_id_to_camera_id_[desc.device_id] = camera_id;
devices_info.emplace_back(desc);
- GetSupportedFormats(camera_id, &devices_info.back().supported_formats);
+ GetSupportedFormats(camera_info_[camera_id],
+ &devices_info.back().supported_formats);
+
+ // Create a virtual device when multiple streams are enabled.
+ if (enable_virtual_device_[camera_id]) {
+ desc.facing = VideoFacingMode::MEDIA_VIDEO_FACING_NONE;
+ desc.device_id =
+ std::string(kVirtualPrefix) + base::NumberToString(camera_id);
+ desc.set_display_name("Virtual Camera");
+ device_id_to_camera_id_[desc.device_id] = camera_id;
+ devices_info.emplace_back(desc);
+ GetSupportedFormats(camera_info_[camera_id],
+ &devices_info.back().supported_formats);
+ }
}
}
-
// TODO(shik): Report external camera first when lid is closed.
// TODO(jcliang): Remove this after JS API supports query camera facing
// (http://crbug.com/543997).
@@ -404,7 +431,36 @@ cros::mojom::CameraInfoPtr CameraHalDelegate::GetCameraInfoFromDeviceId(
if (it == camera_info_.end()) {
return {};
}
- return it->second.Clone();
+ auto info = it->second.Clone();
+ if (base::StartsWith(device_id, std::string(kVirtualPrefix))) {
+ switch (it->second->facing) {
+ case cros::mojom::CameraFacing::CAMERA_FACING_BACK:
+ info->facing = cros::mojom::CameraFacing::CAMERA_FACING_VIRTUAL_BACK;
+ break;
+ case cros::mojom::CameraFacing::CAMERA_FACING_FRONT:
+ info->facing = cros::mojom::CameraFacing::CAMERA_FACING_VIRTUAL_FRONT;
+ break;
+ case cros::mojom::CameraFacing::CAMERA_FACING_EXTERNAL:
+ info->facing =
+ cros::mojom::CameraFacing::CAMERA_FACING_VIRTUAL_EXTERNAL;
+ break;
+ default:
+ break;
+ }
+ }
+ return info;
+}
+
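GetCameraInfoFromDeviceId() above distinguishes virtual devices purely by the "VIRTUAL_" id prefix introduced in GetDevicesInfo(). A standalone model of that naming convention (a sketch; base::StartsWith is replaced by its std equivalent):

```cpp
#include <cassert>
#include <string>

constexpr char kVirtualPrefix[] = "VIRTUAL_";

// The virtual device reuses the camera id with the "VIRTUAL_" prefix.
std::string VirtualDeviceId(int camera_id) {
  return std::string(kVirtualPrefix) + std::to_string(camera_id);
}

bool IsVirtualDeviceId(const std::string& device_id) {
  return device_id.rfind(kVirtualPrefix, 0) == 0;  // StartsWith equivalent
}

int main() {
  assert(VirtualDeviceId(1) == "VIRTUAL_1");
  assert(IsVirtualDeviceId("VIRTUAL_1") && !IsVirtualDeviceId("1"));
  return 0;
}
```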
+void CameraHalDelegate::EnableVirtualDevice(const std::string& device_id,
+ bool enable) {
+ if (base::StartsWith(device_id, std::string(kVirtualPrefix))) {
+ return;
+ }
+ auto camera_id = GetCameraIdFromDeviceId(device_id);
+ if (camera_id != -1) {
+ base::AutoLock lock(enable_virtual_device_lock_);
+ enable_virtual_device_[camera_id] = enable;
+ }
}
const VendorTagInfo* CameraHalDelegate::GetVendorTagInfoByName(
@@ -436,6 +492,27 @@ int CameraHalDelegate::GetCameraIdFromDeviceId(const std::string& device_id) {
return it->second;
}
+VideoCaptureDeviceChromeOSDelegate* CameraHalDelegate::GetVCDDelegate(
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner_for_screen_observer,
+ const VideoCaptureDeviceDescriptor& device_descriptor) {
+ auto camera_id = GetCameraIdFromDeviceId(device_descriptor.device_id);
+ auto it = vcd_delegate_map_.find(camera_id);
+ if (it == vcd_delegate_map_.end() || it->second->HasDeviceClient() == 0) {
+ auto cleanup_callback = base::BindOnce(
+ [](int camera_id,
+ base::flat_map<int,
+ std::unique_ptr<VideoCaptureDeviceChromeOSDelegate>>*
+ vcd_delegate_map) { vcd_delegate_map->erase(camera_id); },
+ camera_id, &vcd_delegate_map_);
+ auto delegate = std::make_unique<VideoCaptureDeviceChromeOSDelegate>(
+ std::move(task_runner_for_screen_observer), device_descriptor, this,
+ std::move(cleanup_callback));
+ vcd_delegate_map_[camera_id] = std::move(delegate);
+ return vcd_delegate_map_[camera_id].get();
+ }
+ return it->second.get();
+}
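GetVCDDelegate() above creates at most one delegate per camera id and hands the delegate a cleanup callback that erases its own map entry, so a later client gets a fresh instance. A standalone model of that create-or-reuse pattern (a sketch with stand-in types):

```cpp
#include <cassert>
#include <functional>
#include <map>
#include <memory>

struct FakeDelegate {
  explicit FakeDelegate(std::function<void()> cleanup)
      : cleanup(std::move(cleanup)) {}
  std::function<void()> cleanup;
};

std::map<int, std::unique_ptr<FakeDelegate>> delegate_map;

FakeDelegate* GetOrCreateDelegate(int camera_id) {
  auto it = delegate_map.find(camera_id);
  if (it != delegate_map.end())
    return it->second.get();
  auto delegate = std::make_unique<FakeDelegate>(
      [camera_id] { delegate_map.erase(camera_id); });
  FakeDelegate* raw = delegate.get();
  delegate_map[camera_id] = std::move(delegate);
  return raw;
}

int main() {
  FakeDelegate* d = GetOrCreateDelegate(1);
  assert(GetOrCreateDelegate(1) == d);   // both clients share one delegate
  auto cleanup = std::move(d->cleanup);  // move out first: erase destroys |d|
  cleanup();
  assert(delegate_map.empty());          // cleanup erased its own entry
  return 0;
}
```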
+
void CameraHalDelegate::SetCameraModuleOnIpcThread(
mojo::PendingRemote<cros::mojom::CameraModule> camera_module) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
@@ -462,6 +539,7 @@ void CameraHalDelegate::ResetMojoInterfaceOnIpcThread() {
external_camera_info_updated_.Signal();
// Clear all cached camera info, especially external cameras.
+ base::AutoLock lock(camera_info_lock_);
camera_info_.clear();
pending_external_camera_info_.clear();
}
@@ -495,18 +573,20 @@ void CameraHalDelegate::UpdateBuiltInCameraInfoOnIpcThread() {
void CameraHalDelegate::OnGotNumberOfCamerasOnIpcThread(int32_t num_cameras) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+
+ base::AutoLock lock(camera_info_lock_);
if (num_cameras < 0) {
builtin_camera_info_updated_.Signal();
LOG(ERROR) << "Failed to get number of cameras: " << num_cameras;
return;
}
- VLOG(1) << "Number of built-in cameras: " << num_cameras;
+ CAMERA_LOG(EVENT) << "Number of built-in cameras: " << num_cameras;
num_builtin_cameras_ = num_cameras;
// Per camera HAL v3 specification SetCallbacks() should be called after the
// first time GetNumberOfCameras() is called, and before other CameraModule
// functions are called.
- camera_module_->SetCallbacks(
- camera_module_callbacks_.BindNewPipeAndPassRemote(),
+ camera_module_->SetCallbacksAssociated(
+ camera_module_callbacks_.BindNewEndpointAndPassRemote(),
base::BindOnce(&CameraHalDelegate::OnSetCallbacksOnIpcThread, this));
camera_module_->GetVendorTagOps(
@@ -516,6 +596,8 @@ void CameraHalDelegate::OnGotNumberOfCamerasOnIpcThread(int32_t num_cameras) {
void CameraHalDelegate::OnSetCallbacksOnIpcThread(int32_t result) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
+
+ base::AutoLock lock(camera_info_lock_);
if (result) {
num_builtin_cameras_ = 0;
builtin_camera_info_updated_.Signal();
@@ -567,6 +649,7 @@ void CameraHalDelegate::OnGotCameraInfoOnIpcThread(
// |camera_info_| might contain some entries for external cameras as well,
// we should check all built-in cameras explicitly.
bool all_updated = [&]() {
+ camera_info_lock_.AssertAcquired();
for (size_t i = 0; i < num_builtin_cameras_; i++) {
if (camera_info_.find(i) == camera_info_.end()) {
return false;
@@ -606,7 +689,8 @@ void CameraHalDelegate::CameraDeviceStatusChange(
int32_t camera_id,
cros::mojom::CameraDeviceStatus new_status) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
- VLOG(1) << "camera_id = " << camera_id << ", new_status = " << new_status;
+ CAMERA_LOG(EVENT) << "camera_id = " << camera_id
+ << ", new_status = " << new_status;
base::AutoLock lock(camera_info_lock_);
auto it = camera_info_.find(camera_id);
switch (new_status) {
diff --git a/chromium/media/capture/video/chromeos/camera_hal_delegate.h b/chromium/media/capture/video/chromeos/camera_hal_delegate.h
index aa0dacf2006..2aba005ad11 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_delegate.h
+++ b/chromium/media/capture/video/chromeos/camera_hal_delegate.h
@@ -9,6 +9,7 @@
#include <string>
#include <unordered_map>
+#include "base/containers/flat_map.h"
#include "base/macros.h"
#include "base/sequence_checker.h"
#include "base/single_thread_task_runner.h"
@@ -20,15 +21,16 @@
#include "media/capture/video/chromeos/vendor_tag_ops_delegate.h"
#include "media/capture/video/video_capture_device_factory.h"
#include "media/capture/video_capture_types.h"
+#include "mojo/public/cpp/bindings/associated_receiver.h"
+#include "mojo/public/cpp/bindings/pending_associated_receiver.h"
#include "mojo/public/cpp/bindings/pending_receiver.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
-#include "mojo/public/cpp/bindings/receiver.h"
#include "mojo/public/cpp/bindings/remote.h"
namespace media {
-class CameraAppDeviceBridgeImpl;
class CameraBufferFactory;
+class VideoCaptureDeviceChromeOSDelegate;
// CameraHalDelegate is the component which does Mojo IPCs to the camera HAL
// process on Chrome OS to access the module-level camera functionalities such
@@ -47,7 +49,8 @@ class CAPTURE_EXPORT CameraHalDelegate final
scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner);
// Registers the camera client observer to the CameraHalDispatcher instance.
- void RegisterCameraClient();
+ // Returns true if successful, false if failed (e.g., authentication failure).
+ bool RegisterCameraClient();
void SetCameraModule(
mojo::PendingRemote<cros::mojom::CameraModule> camera_module);
@@ -62,8 +65,7 @@ class CAPTURE_EXPORT CameraHalDelegate final
std::unique_ptr<VideoCaptureDevice> CreateDevice(
scoped_refptr<base::SingleThreadTaskRunner>
task_runner_for_screen_observer,
- const VideoCaptureDeviceDescriptor& device_descriptor,
- CameraAppDeviceBridgeImpl* app_device_bridge);
+ const VideoCaptureDeviceDescriptor& device_descriptor);
void GetDevicesInfo(
VideoCaptureDeviceFactory::GetDevicesInfoCallback callback);
@@ -89,14 +91,23 @@ class CAPTURE_EXPORT CameraHalDelegate final
const VendorTagInfo* GetVendorTagInfoByName(const std::string& full_name);
+ void EnableVirtualDevice(const std::string& device_id, bool enable);
+
private:
friend class base::RefCountedThreadSafe<CameraHalDelegate>;
~CameraHalDelegate() final;
- void GetSupportedFormats(int camera_id,
+ void OnRegisteredCameraHalClient(int32_t result);
+
+ void GetSupportedFormats(const cros::mojom::CameraInfoPtr& camera_info,
VideoCaptureFormats* supported_formats);
+ VideoCaptureDeviceChromeOSDelegate* GetVCDDelegate(
+ scoped_refptr<base::SingleThreadTaskRunner>
+ task_runner_for_screen_observer,
+ const VideoCaptureDeviceDescriptor& device_descriptor);
+
void SetCameraModuleOnIpcThread(
mojo::PendingRemote<cros::mojom::CameraModule> camera_module);
@@ -144,6 +155,9 @@ class CAPTURE_EXPORT CameraHalDelegate final
void TorchModeStatusChange(int32_t camera_id,
cros::mojom::TorchModeStatus new_status) final;
+ base::WaitableEvent camera_hal_client_registered_;
+ bool authenticated_;
+
base::WaitableEvent camera_module_has_been_set_;
// Signaled when |num_builtin_cameras_| and |camera_info_| are updated.
@@ -167,15 +181,21 @@ class CAPTURE_EXPORT CameraHalDelegate final
// conditions. For external cameras, the |camera_info_| would be read and
// updated in CameraDeviceStatusChange, which is also protected by
// |camera_info_lock_|.
- size_t num_builtin_cameras_;
base::Lock camera_info_lock_;
- std::unordered_map<int, cros::mojom::CameraInfoPtr> camera_info_;
+ size_t num_builtin_cameras_ GUARDED_BY(camera_info_lock_);
+ std::unordered_map<int, cros::mojom::CameraInfoPtr> camera_info_
+ GUARDED_BY(camera_info_lock_);
// A map from |VideoCaptureDeviceDescriptor.device_id| to camera id, which is
// updated in GetDeviceDescriptors() and queried in
// GetCameraIdFromDeviceId().
base::Lock device_id_to_camera_id_lock_;
- std::map<std::string, int> device_id_to_camera_id_;
+ std::map<std::string, int> device_id_to_camera_id_
+ GUARDED_BY(device_id_to_camera_id_lock_);
+  // Whether the virtual device is enabled for each camera id.
+ base::Lock enable_virtual_device_lock_;
+ base::flat_map<int, bool> enable_virtual_device_
+ GUARDED_BY(enable_virtual_device_lock_);
SEQUENCE_CHECKER(sequence_checker_);
@@ -190,12 +210,17 @@ class CAPTURE_EXPORT CameraHalDelegate final
// The Mojo receiver serving the camera module callbacks. Bound to
// |ipc_task_runner_|.
- mojo::Receiver<cros::mojom::CameraModuleCallbacks> camera_module_callbacks_;
+ mojo::AssociatedReceiver<cros::mojom::CameraModuleCallbacks>
+ camera_module_callbacks_;
// An internal delegate to handle VendorTagOps mojo connection and query
// information of vendor tags. Bound to |ipc_task_runner_|.
VendorTagOpsDelegate vendor_tag_ops_delegate_;
+ // A map from camera id to corresponding delegate instance.
+ base::flat_map<int, std::unique_ptr<VideoCaptureDeviceChromeOSDelegate>>
+ vcd_delegate_map_;
+
DISALLOW_COPY_AND_ASSIGN(CameraHalDelegate);
};
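The GUARDED_BY annotations added above let clang's thread-safety analysis verify that |camera_info_lock_| and friends are held on every access. A minimal standalone sketch of the same pattern, assuming clang (Chromium gets the macro from base/thread_annotations.h, and base::AutoLock plays the role of std::lock_guard here):

```cpp
#include <map>
#include <mutex>

#if defined(__clang__)
#define GUARDED_BY(lock) __attribute__((guarded_by(lock)))
#else
#define GUARDED_BY(lock)
#endif

class CameraInfoCache {
 public:
  void Set(int camera_id, int info) {
    std::lock_guard<std::mutex> lock(lock_);  // must hold |lock_| to touch map
    camera_info_[camera_id] = info;
  }

 private:
  std::mutex lock_;
  std::map<int, int> camera_info_ GUARDED_BY(lock_);
};

int main() {
  CameraInfoCache cache;
  cache.Set(0, 42);
  return 0;
}
```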
diff --git a/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc b/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc
index 89208e5610f..3e1bf9b0d6e 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc
+++ b/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc
@@ -167,8 +167,9 @@ TEST_F(CameraHalDelegateTest, GetBuiltinCameraInfo) {
};
auto set_callbacks_cb =
- [&](mojo::PendingRemote<cros::mojom::CameraModuleCallbacks>& callbacks,
- cros::mojom::CameraModule::SetCallbacksCallback&) {
+ [&](mojo::PendingAssociatedRemote<cros::mojom::CameraModuleCallbacks>&
+ callbacks,
+ cros::mojom::CameraModule::SetCallbacksAssociatedCallback&) {
mock_camera_module_.NotifyCameraDeviceChange(
2, cros::mojom::CameraDeviceStatus::CAMERA_DEVICE_STATUS_PRESENT);
};
@@ -176,10 +177,12 @@ TEST_F(CameraHalDelegateTest, GetBuiltinCameraInfo) {
EXPECT_CALL(mock_camera_module_, DoGetNumberOfCameras(_))
.Times(1)
.WillOnce(Invoke(get_number_of_cameras_cb));
- EXPECT_CALL(mock_camera_module_,
- DoSetCallbacks(
- A<mojo::PendingRemote<cros::mojom::CameraModuleCallbacks>&>(),
- A<cros::mojom::CameraModule::SetCallbacksCallback&>()))
+ EXPECT_CALL(
+ mock_camera_module_,
+ DoSetCallbacksAssociated(
+ A<mojo::PendingAssociatedRemote<
+ cros::mojom::CameraModuleCallbacks>&>(),
+ A<cros::mojom::CameraModule::SetCallbacksAssociatedCallback&>()))
.Times(1)
.WillOnce(Invoke(set_callbacks_cb));
EXPECT_CALL(mock_camera_module_,
@@ -228,7 +231,7 @@ TEST_F(CameraHalDelegateTest, GetBuiltinCameraInfo) {
EXPECT_CALL(mock_gpu_memory_buffer_manager_,
CreateGpuMemoryBuffer(
_, gfx::BufferFormat::YUV_420_BIPLANAR,
- gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE,
+ gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE,
gpu::kNullSurfaceHandle))
.Times(1)
.WillOnce(Invoke(&unittest_internal::MockGpuMemoryBufferManager::
diff --git a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h
deleted file mode 100644
index bf1475aaef8..00000000000
--- a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h
+++ /dev/null
@@ -1,146 +0,0 @@
-// Copyright 2017 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_HAL_DISPATCHER_IMPL_H_
-#define MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_HAL_DISPATCHER_IMPL_H_
-
-#include <memory>
-#include <set>
-
-#include "base/containers/unique_ptr_adapters.h"
-#include "base/files/scoped_file.h"
-#include "base/memory/singleton.h"
-#include "base/threading/thread.h"
-#include "components/chromeos_camera/common/jpeg_encode_accelerator.mojom.h"
-#include "components/chromeos_camera/common/mjpeg_decode_accelerator.mojom.h"
-#include "media/capture/capture_export.h"
-#include "media/capture/video/chromeos/mojom/cros_camera_service.mojom.h"
-#include "media/capture/video/chromeos/video_capture_device_factory_chromeos.h"
-#include "media/capture/video/video_capture_device_factory.h"
-#include "mojo/public/cpp/bindings/pending_receiver.h"
-#include "mojo/public/cpp/bindings/pending_remote.h"
-#include "mojo/public/cpp/bindings/receiver_set.h"
-#include "mojo/public/cpp/bindings/remote.h"
-#include "mojo/public/cpp/platform/platform_channel_server_endpoint.h"
-
-namespace base {
-
-class SingleThreadTaskRunner;
-class WaitableEvent;
-
-} // namespace base
-
-namespace media {
-
-using MojoJpegEncodeAcceleratorFactoryCB = base::RepeatingCallback<void(
- mojo::PendingReceiver<chromeos_camera::mojom::JpegEncodeAccelerator>)>;
-
-class CAPTURE_EXPORT CameraClientObserver {
- public:
- virtual ~CameraClientObserver();
- virtual void OnChannelCreated(
- mojo::PendingRemote<cros::mojom::CameraModule> camera_module) = 0;
-};
-
-// The CameraHalDispatcherImpl hosts and waits on the unix domain socket
-// /var/run/camera3.sock. CameraHalServer and CameraHalClients connect to the
-// unix domain socket to create the initial Mojo connections with the
-// CameraHalDisptcherImpl, and CameraHalDispatcherImpl then creates and
-// dispaches the Mojo channels between CameraHalServer and CameraHalClients to
-// establish direct Mojo connections between the CameraHalServer and the
-// CameraHalClients.
-//
-// For general documentation about the CameraHalDispater Mojo interface see the
-// comments in mojo/cros_camera_service.mojom.
-class CAPTURE_EXPORT CameraHalDispatcherImpl final
- : public cros::mojom::CameraHalDispatcher,
- public base::trace_event::TraceLog::EnabledStateObserver {
- public:
- static CameraHalDispatcherImpl* GetInstance();
-
- bool Start(MojoMjpegDecodeAcceleratorFactoryCB jda_factory,
- MojoJpegEncodeAcceleratorFactoryCB jea_factory);
-
- void AddClientObserver(std::unique_ptr<CameraClientObserver> observer);
-
- bool IsStarted();
-
- // CameraHalDispatcher implementations.
- void RegisterServer(
- mojo::PendingRemote<cros::mojom::CameraHalServer> server) final;
- void RegisterClient(
- mojo::PendingRemote<cros::mojom::CameraHalClient> client) final;
- void GetJpegDecodeAccelerator(
- mojo::PendingReceiver<chromeos_camera::mojom::MjpegDecodeAccelerator>
- jda_receiver) final;
- void GetJpegEncodeAccelerator(
- mojo::PendingReceiver<chromeos_camera::mojom::JpegEncodeAccelerator>
- jea_receiver) final;
-
- // base::trace_event::TraceLog::EnabledStateObserver implementation.
- void OnTraceLogEnabled() final;
- void OnTraceLogDisabled() final;
-
- private:
- friend struct base::DefaultSingletonTraits<CameraHalDispatcherImpl>;
- // Allow the test to construct the class directly.
- friend class CameraHalDispatcherImplTest;
-
- CameraHalDispatcherImpl();
- ~CameraHalDispatcherImpl() final;
-
- bool StartThreads();
-
- // Creates the unix domain socket for the camera client processes and the
- // camera HALv3 adapter process to connect.
- void CreateSocket(base::WaitableEvent* started);
-
- // Waits for incoming connections (from HAL process or from client processes).
- // Runs on |blocking_io_thread_|.
- void StartServiceLoop(base::ScopedFD socket_fd, base::WaitableEvent* started);
-
- void RegisterClientOnProxyThread(
- mojo::PendingRemote<cros::mojom::CameraHalClient> client);
- void AddClientObserverOnProxyThread(
- std::unique_ptr<CameraClientObserver> observer);
-
- void EstablishMojoChannel(CameraClientObserver* client_observer);
-
- // Handler for incoming Mojo connection on the unix domain socket.
- void OnPeerConnected(mojo::ScopedMessagePipeHandle message_pipe);
-
- // Mojo connection error handlers.
- void OnCameraHalServerConnectionError();
- void OnCameraHalClientConnectionError(CameraClientObserver* client);
-
- void StopOnProxyThread();
-
- void OnTraceLogEnabledOnProxyThread();
- void OnTraceLogDisabledOnProxyThread();
-
- base::ScopedFD proxy_fd_;
- base::ScopedFD cancel_pipe_;
-
- base::Thread proxy_thread_;
- base::Thread blocking_io_thread_;
- scoped_refptr<base::SingleThreadTaskRunner> proxy_task_runner_;
- scoped_refptr<base::SingleThreadTaskRunner> blocking_io_task_runner_;
-
- mojo::ReceiverSet<cros::mojom::CameraHalDispatcher> receiver_set_;
-
- mojo::Remote<cros::mojom::CameraHalServer> camera_hal_server_;
-
- std::set<std::unique_ptr<CameraClientObserver>, base::UniquePtrComparator>
- client_observers_;
-
- MojoMjpegDecodeAcceleratorFactoryCB jda_factory_;
-
- MojoJpegEncodeAcceleratorFactoryCB jea_factory_;
-
- DISALLOW_COPY_AND_ASSIGN(CameraHalDispatcherImpl);
-};
-
-} // namespace media
-
-#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_HAL_DISPATCHER_IMPL_H_
diff --git a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl_unittest.cc b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl_unittest.cc
deleted file mode 100644
index 72e98ce2417..00000000000
--- a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl_unittest.cc
+++ /dev/null
@@ -1,227 +0,0 @@
-// Copyright 2017 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/capture/video/chromeos/camera_hal_dispatcher_impl.h"
-
-#include <memory>
-#include <utility>
-
-#include "base/bind.h"
-#include "base/run_loop.h"
-#include "base/single_thread_task_runner.h"
-#include "base/test/task_environment.h"
-#include "media/capture/video/chromeos/mojom/camera_common.mojom.h"
-#include "media/capture/video/chromeos/mojom/cros_camera_service.mojom.h"
-#include "mojo/public/cpp/bindings/pending_receiver.h"
-#include "mojo/public/cpp/bindings/pending_remote.h"
-#include "mojo/public/cpp/bindings/receiver.h"
-#include "testing/gmock/include/gmock/gmock.h"
-#include "testing/gtest/include/gtest/gtest.h"
-
-using testing::_;
-using testing::InvokeWithoutArgs;
-
-namespace media {
-namespace {
-
-class MockCameraHalServer : public cros::mojom::CameraHalServer {
- public:
- MockCameraHalServer() = default;
-
- ~MockCameraHalServer() = default;
-
- void CreateChannel(mojo::PendingReceiver<cros::mojom::CameraModule>
- camera_module_receiver) override {
- DoCreateChannel(std::move(camera_module_receiver));
- }
- MOCK_METHOD1(DoCreateChannel,
- void(mojo::PendingReceiver<cros::mojom::CameraModule>
- camera_module_receiver));
-
- MOCK_METHOD1(SetTracingEnabled, void(bool enabled));
-
- mojo::PendingRemote<cros::mojom::CameraHalServer> GetPendingRemote() {
- return receiver_.BindNewPipeAndPassRemote();
- }
-
- private:
- mojo::Receiver<cros::mojom::CameraHalServer> receiver_{this};
- DISALLOW_COPY_AND_ASSIGN(MockCameraHalServer);
-};
-
-class MockCameraHalClient : public cros::mojom::CameraHalClient {
- public:
- MockCameraHalClient() = default;
-
- ~MockCameraHalClient() = default;
-
- void SetUpChannel(
- mojo::PendingRemote<cros::mojom::CameraModule> camera_module) override {
- DoSetUpChannel(std::move(camera_module));
- }
- MOCK_METHOD1(
- DoSetUpChannel,
- void(mojo::PendingRemote<cros::mojom::CameraModule> camera_module));
-
- mojo::PendingRemote<cros::mojom::CameraHalClient> GetPendingRemote() {
- return receiver_.BindNewPipeAndPassRemote();
- }
-
- private:
- mojo::Receiver<cros::mojom::CameraHalClient> receiver_{this};
- DISALLOW_COPY_AND_ASSIGN(MockCameraHalClient);
-};
-
-} // namespace
-
-class CameraHalDispatcherImplTest : public ::testing::Test {
- public:
- CameraHalDispatcherImplTest() = default;
-
- ~CameraHalDispatcherImplTest() override = default;
-
- void SetUp() override {
- dispatcher_ = new CameraHalDispatcherImpl();
- EXPECT_TRUE(dispatcher_->StartThreads());
- }
-
- void TearDown() override { delete dispatcher_; }
-
- scoped_refptr<base::SingleThreadTaskRunner> GetProxyTaskRunner() {
- return dispatcher_->proxy_task_runner_;
- }
-
- void DoLoop() {
- run_loop_.reset(new base::RunLoop());
- run_loop_->Run();
- }
-
- void QuitRunLoop() {
- if (run_loop_) {
- run_loop_->Quit();
- }
- }
-
- static void RegisterServer(
- CameraHalDispatcherImpl* dispatcher,
- mojo::PendingRemote<cros::mojom::CameraHalServer> server) {
- dispatcher->RegisterServer(std::move(server));
- }
-
- static void RegisterClient(
- CameraHalDispatcherImpl* dispatcher,
- mojo::PendingRemote<cros::mojom::CameraHalClient> client) {
- dispatcher->RegisterClient(std::move(client));
- }
-
- protected:
- // We can't use std::unique_ptr here because the constructor and destructor of
- // CameraHalDispatcherImpl are private.
- CameraHalDispatcherImpl* dispatcher_;
-
- private:
- base::test::TaskEnvironment task_environment_;
- std::unique_ptr<base::RunLoop> run_loop_;
- DISALLOW_COPY_AND_ASSIGN(CameraHalDispatcherImplTest);
-};
-
-// Test that the CameraHalDisptcherImpl correctly re-establishes a Mojo channel
-// for the client when the server crashes.
-TEST_F(CameraHalDispatcherImplTest, ServerConnectionError) {
- // First verify that a the CameraHalDispatcherImpl establishes a Mojo channel
- // between the server and the client.
- auto mock_server = std::make_unique<MockCameraHalServer>();
- auto mock_client = std::make_unique<MockCameraHalClient>();
-
- EXPECT_CALL(*mock_server, DoCreateChannel(_)).Times(1);
- EXPECT_CALL(*mock_client, DoSetUpChannel(_))
- .Times(1)
- .WillOnce(
- InvokeWithoutArgs(this, &CameraHalDispatcherImplTest::QuitRunLoop));
-
- auto server = mock_server->GetPendingRemote();
- GetProxyTaskRunner()->PostTask(
- FROM_HERE,
- base::BindOnce(&CameraHalDispatcherImplTest::RegisterServer,
- base::Unretained(dispatcher_), std::move(server)));
- auto client = mock_client->GetPendingRemote();
- GetProxyTaskRunner()->PostTask(
- FROM_HERE,
- base::BindOnce(&CameraHalDispatcherImplTest::RegisterClient,
- base::Unretained(dispatcher_), std::move(client)));
-
- // Wait until the client gets the established Mojo channel.
- DoLoop();
-
- // Re-create a new server to simulate a server crash.
- mock_server = std::make_unique<MockCameraHalServer>();
-
- // Make sure we creates a new Mojo channel from the new server to the same
- // client.
- EXPECT_CALL(*mock_server, DoCreateChannel(_)).Times(1);
- EXPECT_CALL(*mock_client, DoSetUpChannel(_))
- .Times(1)
- .WillOnce(
- InvokeWithoutArgs(this, &CameraHalDispatcherImplTest::QuitRunLoop));
-
- server = mock_server->GetPendingRemote();
- GetProxyTaskRunner()->PostTask(
- FROM_HERE,
- base::BindOnce(&CameraHalDispatcherImplTest::RegisterServer,
- base::Unretained(dispatcher_), std::move(server)));
-
- // Wait until the clients gets the newly established Mojo channel.
- DoLoop();
-}
-
-// Test that the CameraHalDisptcherImpl correctly re-establishes a Mojo channel
-// for the client when the client reconnects after crash.
-TEST_F(CameraHalDispatcherImplTest, ClientConnectionError) {
- // First verify that a the CameraHalDispatcherImpl establishes a Mojo channel
- // between the server and the client.
- auto mock_server = std::make_unique<MockCameraHalServer>();
- auto mock_client = std::make_unique<MockCameraHalClient>();
-
- EXPECT_CALL(*mock_server, DoCreateChannel(_)).Times(1);
- EXPECT_CALL(*mock_client, DoSetUpChannel(_))
- .Times(1)
- .WillOnce(
- InvokeWithoutArgs(this, &CameraHalDispatcherImplTest::QuitRunLoop));
-
- auto server = mock_server->GetPendingRemote();
- GetProxyTaskRunner()->PostTask(
- FROM_HERE,
- base::BindOnce(&CameraHalDispatcherImplTest::RegisterServer,
- base::Unretained(dispatcher_), std::move(server)));
- auto client = mock_client->GetPendingRemote();
- GetProxyTaskRunner()->PostTask(
- FROM_HERE,
- base::BindOnce(&CameraHalDispatcherImplTest::RegisterClient,
- base::Unretained(dispatcher_), std::move(client)));
-
- // Wait until the client gets the established Mojo channel.
- DoLoop();
-
- // Re-create a new server to simulate a server crash.
- mock_client = std::make_unique<MockCameraHalClient>();
-
- // Make sure we re-create the Mojo channel from the same server to the new
- // client.
- EXPECT_CALL(*mock_server, DoCreateChannel(_)).Times(1);
- EXPECT_CALL(*mock_client, DoSetUpChannel(_))
- .Times(1)
- .WillOnce(
- InvokeWithoutArgs(this, &CameraHalDispatcherImplTest::QuitRunLoop));
-
- client = mock_client->GetPendingRemote();
- GetProxyTaskRunner()->PostTask(
- FROM_HERE,
- base::BindOnce(&CameraHalDispatcherImplTest::RegisterClient,
- base::Unretained(dispatcher_), std::move(client)));
-
- // Wait until the clients gets the newly established Mojo channel.
- DoLoop();
-}
-
-} // namespace media
diff --git a/chromium/media/capture/video/chromeos/camera_metadata_utils.cc b/chromium/media/capture/video/chromeos/camera_metadata_utils.cc
index c3394d55b45..8b309353e85 100644
--- a/chromium/media/capture/video/chromeos/camera_metadata_utils.cc
+++ b/chromium/media/capture/video/chromeos/camera_metadata_utils.cc
@@ -35,6 +35,10 @@ template <>
const cros::mojom::EntryType entry_type_of<double>::value =
cros::mojom::EntryType::TYPE_DOUBLE;
+template <>
+const cros::mojom::EntryType entry_type_of<Rational>::value =
+ cros::mojom::EntryType::TYPE_RATIONAL;
+
// TODO(shik): support TYPE_RATIONAL
cros::mojom::CameraMetadataEntryPtr* GetMetadataEntry(
@@ -54,7 +58,7 @@ cros::mojom::CameraMetadataEntryPtr* GetMetadataEntry(
}
auto* entry_ptr = &(camera_metadata->entries.value()[(*iter)->index]);
- if (!(*entry_ptr)->data.data()) {
+ if ((*entry_ptr)->data.empty()) {
// Metadata tag found with no valid data.
LOG(WARNING) << "Found tag " << static_cast<int>(tag)
<< " but with invalid data";
@@ -123,6 +127,7 @@ void MergeMetadata(cros::mojom::CameraMetadataPtr* to,
}
for (const auto& entry : from->entries.value()) {
if (tags.find(entry->tag) != tags.end()) {
+ (*to)->entry_count -= 1;
LOG(ERROR) << "Found duplicated entries for tag " << entry->tag;
continue;
}
diff --git a/chromium/media/capture/video/chromeos/camera_metadata_utils.h b/chromium/media/capture/video/chromeos/camera_metadata_utils.h
index ed935bd6ff6..9d209ac8514 100644
--- a/chromium/media/capture/video/chromeos/camera_metadata_utils.h
+++ b/chromium/media/capture/video/chromeos/camera_metadata_utils.h
@@ -12,6 +12,11 @@
namespace media {
+struct Rational {
+ int32_t numerator;
+ int32_t denominator;
+};
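The new Rational struct plugs into the entry_type_of trait specialized in camera_metadata_utils.cc, so GetMetadataEntryAsSpan<Rational>() can type-check TYPE_RATIONAL entries. A standalone model of the trait pattern (illustrative enum values, not the mojom ones):

```cpp
#include <cstdint>
#include <cstdio>

enum class EntryType { kInt32, kRational };

struct Rational {
  int32_t numerator;
  int32_t denominator;
};

// Each native type maps to a metadata entry type tag via a trait.
template <typename T>
struct entry_type_of;
template <>
struct entry_type_of<int32_t> {
  static constexpr EntryType value = EntryType::kInt32;
};
template <>
struct entry_type_of<Rational> {
  static constexpr EntryType value = EntryType::kRational;
};

int main() {
  static_assert(entry_type_of<Rational>::value == EntryType::kRational,
                "Rational entries are TYPE_RATIONAL");
  std::printf("ok\n");
  return 0;
}
```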
+
// Helper traits for converting native types to cros::mojom::EntryType.
template <typename T, typename Enable = void>
struct entry_type_of {
diff --git a/chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.cc b/chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.cc
index 4d8fc096b75..3a3f9a02a60 100644
--- a/chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.cc
+++ b/chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.cc
@@ -31,7 +31,8 @@ bool GpuMemoryBufferTracker::Init(const gfx::Size& dimensions,
const gfx::BufferUsage usage =
*gfx_format == gfx::BufferFormat::R_8
? gfx::BufferUsage::CAMERA_AND_CPU_READ_WRITE
- : gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE;
+ : gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE;
+
buffer_ =
buffer_factory_.CreateGpuMemoryBuffer(dimensions, *gfx_format, usage);
if (!buffer_) {
diff --git a/chromium/media/capture/video/chromeos/mock_camera_module.cc b/chromium/media/capture/video/chromeos/mock_camera_module.cc
index c5079eaee5f..120cc6f1613 100644
--- a/chromium/media/capture/video/chromeos/mock_camera_module.cc
+++ b/chromium/media/capture/video/chromeos/mock_camera_module.cc
@@ -42,9 +42,7 @@ void MockCameraModule::GetCameraInfo(int32_t camera_id,
void MockCameraModule::SetCallbacks(
mojo::PendingRemote<cros::mojom::CameraModuleCallbacks> callbacks,
SetCallbacksCallback callback) {
- DoSetCallbacks(callbacks, callback);
- callbacks_.Bind(std::move(callbacks));
- std::move(callback).Run(0);
+  // This method is deprecated and is not expected to be called.
}
void MockCameraModule::Init(InitCallback callback) {
@@ -66,6 +64,13 @@ void MockCameraModule::GetVendorTagOps(
std::move(callback).Run();
}
+void MockCameraModule::SetCallbacksAssociated(
+ mojo::PendingAssociatedRemote<cros::mojom::CameraModuleCallbacks> callbacks,
+ SetCallbacksAssociatedCallback callback) {
+ DoSetCallbacksAssociated(callbacks, callback);
+ callbacks_.Bind(std::move(callbacks));
+ std::move(callback).Run(0);
+}
void MockCameraModule::NotifyCameraDeviceChange(
int camera_id,
cros::mojom::CameraDeviceStatus status) {
diff --git a/chromium/media/capture/video/chromeos/mock_camera_module.h b/chromium/media/capture/video/chromeos/mock_camera_module.h
index 8479c9ab899..12397b61455 100644
--- a/chromium/media/capture/video/chromeos/mock_camera_module.h
+++ b/chromium/media/capture/video/chromeos/mock_camera_module.h
@@ -11,6 +11,8 @@
#include "base/threading/thread.h"
#include "media/capture/video/chromeos/mojom/camera3.mojom.h"
#include "media/capture/video/chromeos/mojom/camera_common.mojom.h"
+#include "mojo/public/cpp/bindings/associated_remote.h"
+#include "mojo/public/cpp/bindings/pending_associated_remote.h"
#include "mojo/public/cpp/bindings/pending_receiver.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
#include "mojo/public/cpp/bindings/receiver.h"
@@ -72,6 +74,14 @@ class MockCameraModule : public cros::mojom::CameraModule {
vendor_tag_ops_receiver,
GetVendorTagOpsCallback& callback));
+ void SetCallbacksAssociated(mojo::PendingAssociatedRemote<
+ cros::mojom::CameraModuleCallbacks> callbacks,
+ SetCallbacksAssociatedCallback callback) override;
+ MOCK_METHOD2(DoSetCallbacksAssociated,
+ void(mojo::PendingAssociatedRemote<
+ cros::mojom::CameraModuleCallbacks>& callbacks,
+ SetCallbacksAssociatedCallback& callback));
+
void NotifyCameraDeviceChange(int camera_id,
cros::mojom::CameraDeviceStatus status);
@@ -89,7 +99,7 @@ class MockCameraModule : public cros::mojom::CameraModule {
base::Thread mock_module_thread_;
mojo::Receiver<cros::mojom::CameraModule> receiver_{this};
- mojo::Remote<cros::mojom::CameraModuleCallbacks> callbacks_;
+ mojo::AssociatedRemote<cros::mojom::CameraModuleCallbacks> callbacks_;
DISALLOW_COPY_AND_ASSIGN(MockCameraModule);
};
diff --git a/chromium/media/capture/video/chromeos/mock_video_capture_client.cc b/chromium/media/capture/video/chromeos/mock_video_capture_client.cc
index 9151ac99363..be1371a2e95 100644
--- a/chromium/media/capture/video/chromeos/mock_video_capture_client.cc
+++ b/chromium/media/capture/video/chromeos/mock_video_capture_client.cc
@@ -68,9 +68,8 @@ void MockVideoCaptureClient::OnIncomingCapturedGfxBuffer(
}
void MockVideoCaptureClient::OnIncomingCapturedExternalBuffer(
- gfx::GpuMemoryBufferHandle handle,
- const VideoCaptureFormat& format,
- const gfx::ColorSpace& color_space,
+ CapturedExternalVideoBuffer buffer,
+ std::vector<CapturedExternalVideoBuffer> scaled_buffers,
base::TimeTicks reference_time,
base::TimeDelta timestamp) {
if (frame_cb_)
diff --git a/chromium/media/capture/video/chromeos/mock_video_capture_client.h b/chromium/media/capture/video/chromeos/mock_video_capture_client.h
index ddb9380aece..3c59b8033fa 100644
--- a/chromium/media/capture/video/chromeos/mock_video_capture_client.h
+++ b/chromium/media/capture/video/chromeos/mock_video_capture_client.h
@@ -55,9 +55,8 @@ class MockVideoCaptureClient : public VideoCaptureDevice::Client {
base::TimeDelta timestamp,
int frame_feedback_id = 0) override;
void OnIncomingCapturedExternalBuffer(
- gfx::GpuMemoryBufferHandle handle,
- const VideoCaptureFormat& format,
- const gfx::ColorSpace& color_space,
+ CapturedExternalVideoBuffer buffer,
+ std::vector<CapturedExternalVideoBuffer> scaled_buffers,
base::TimeTicks reference_time,
base::TimeDelta timestamp) override;
// Trampoline methods to workaround GMOCK problems with std::unique_ptr<>.
diff --git a/chromium/media/capture/video/chromeos/mojom/camera3.mojom b/chromium/media/capture/video/chromeos/mojom/camera3.mojom
index 5e7e6ded75d..dd99ed86dde 100644
--- a/chromium/media/capture/video/chromeos/mojom/camera3.mojom
+++ b/chromium/media/capture/video/chromeos/mojom/camera3.mojom
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-// Next min version: 3
+// Next min version: 4
module cros.mojom;
@@ -28,10 +28,6 @@ const uint32 GRALLOC_USAGE_FORCE_I420 = 0x10000000;
// into a new HAL request for Zero-Shutter Lag (ZSL). See crrev.com/c/1877636
// for the CL that does the aforementioned things.
const uint32 GRALLOC_USAGE_STILL_CAPTURE = 0x20000000;
-// Flag to indicate ZSL is enabled for this session. Returned in the updated
-// stream configuration returned from configure_streams(). Refer to
-// crrev.com/c/2055927 which returns this flag.
-const uint32 GRALLOC_USAGE_ZERO_SHUTTER_LAG_ENABLED = 0x40000000;
[Extensible]
enum HalPixelFormat {
@@ -102,7 +98,8 @@ enum Camera3BufferStatus {
CAMERA3_BUFFER_STATUS_ERROR = 1,
};
-// Structure that contains needed information about a camera buffer.
+// Structure that contains the information needed to map a camera buffer in
+// userspace.
struct CameraBufferHandle {
uint64 buffer_id;
array<handle> fds;
@@ -112,6 +109,7 @@ struct CameraBufferHandle {
uint32 height;
array<uint32> strides;
array<uint32> offsets;
+ [MinVersion=3] array<uint32>? sizes;
};
struct Camera3StreamBuffer {
@@ -253,7 +251,7 @@ interface Camera3CallbackOps {
//
// 7. Close() closes the camera device.
//
-// Next method ID: 8
+// Next method ID: 9
interface Camera3DeviceOps {
// Initialize() is called once after the camera device is opened to register
// the Camera3CallbackOps handle.
@@ -309,4 +307,12 @@ interface Camera3DeviceOps {
// Close() is called to close the camera device.
Close@7() => (int32 result);
+
+  // ConfigureStreamsAndGetAllocatedBuffers() is called every time the client
+  // needs to set up a new set of streams. It also allocates buffers for
+  // clients that do not have the capability to allocate DMA-bufs.
+ [MinVersion=3]
+ ConfigureStreamsAndGetAllocatedBuffers@8(Camera3StreamConfiguration config) =>
+ (int32 result, Camera3StreamConfiguration? updated_config,
+ map<uint64, array<Camera3StreamBuffer>> allocated_buffers);
};
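A hypothetical caller shape for the new method, assuming the usual mojo-generated C++ bindings for this mojom (a sketch, not code from the tree):

```cpp
// The response carries the updated config plus per-stream pre-allocated
// buffers, keyed by stream id, for clients that cannot allocate DMA-bufs.
device_ops_->ConfigureStreamsAndGetAllocatedBuffers(
    std::move(config),
    base::BindOnce(
        [](int32_t result,
           cros::mojom::Camera3StreamConfigurationPtr updated_config,
           base::flat_map<uint64_t,
                          std::vector<cros::mojom::Camera3StreamBufferPtr>>
               allocated_buffers) {
          if (result != 0) {
            LOG(ERROR) << "Stream configuration failed: " << result;
            return;
          }
          // Hand |updated_config| and |allocated_buffers| to the capture
          // pipeline here.
        }));
```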
diff --git a/chromium/media/capture/video/chromeos/mojom/camera_app.mojom b/chromium/media/capture/video/chromeos/mojom/camera_app.mojom
index 7bc0da4e710..59df9f820bd 100644
--- a/chromium/media/capture/video/chromeos/mojom/camera_app.mojom
+++ b/chromium/media/capture/video/chromeos/mojom/camera_app.mojom
@@ -53,6 +53,11 @@ interface CameraAppDeviceProvider {
// and camera app. Currently only devices running camera HAL v3 support this
// feature.
IsSupported() => (bool is_supported);
+
+  // Adds/Removes a virtual device for the recording stream according to
+  // |enabled|. The virtual device has the same config as |device_id| except
+  // for the facing attribute.
+ SetMultipleStreamsEnabled(string device_id, bool enabled) => (bool success);
};
// Inner interface that used to communicate between browser process (Remote) and
@@ -68,6 +73,11 @@ interface CameraAppDeviceBridge {
// and camera app. Currently only devices running camera HAL v3 support this
// feature.
IsSupported() => (bool is_supported);
+
+  // Adds/Removes a virtual device for the recording stream according to
+  // |enabled|. The virtual device has the same config as |device_id| except
+  // for the facing attribute.
+ SetMultipleStreamsEnabled(string device_id, bool enabled) => (bool success);
};
// Interface for communication between Chrome Camera App (Remote) and camera
diff --git a/chromium/media/capture/video/chromeos/mojom/camera_common.mojom b/chromium/media/capture/video/chromeos/mojom/camera_common.mojom
index 7c0847267d6..f26bcd95d1b 100644
--- a/chromium/media/capture/video/chromeos/mojom/camera_common.mojom
+++ b/chromium/media/capture/video/chromeos/mojom/camera_common.mojom
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-// Next min version: 3
+// Next min version: 4
module cros.mojom;
@@ -13,6 +13,9 @@ enum CameraFacing {
CAMERA_FACING_BACK = 0,
CAMERA_FACING_FRONT = 1,
CAMERA_FACING_EXTERNAL = 2,
+ CAMERA_FACING_VIRTUAL_BACK = 3,
+ CAMERA_FACING_VIRTUAL_FRONT = 4,
+ CAMERA_FACING_VIRTUAL_EXTERNAL = 5,
};
struct CameraResourceCost {
@@ -101,6 +104,7 @@ interface CameraModule {
// Gets various info about the camera specified by |camera_id|.
GetCameraInfo@2(int32 camera_id) => (int32 result, CameraInfo? camera_info);
+ // [Deprecated in version 3]
// Registers the CameraModuleCallbacks interface with the camera HAL.
SetCallbacks@3(pending_remote<CameraModuleCallbacks> callbacks)
=> (int32 result);
@@ -122,4 +126,11 @@ interface CameraModule {
[MinVersion=2]
GetVendorTagOps@6(pending_receiver<VendorTagOps> vendor_tag_ops_request)
=> ();
+
+ // Registers the CameraModuleCallbacks associated interface with the camera
+ // HAL. TODO(b/169324225): Migrate all camera HAL clients to use this.
+ [MinVersion=3]
+ SetCallbacksAssociated@7(
+ pending_associated_remote<CameraModuleCallbacks> callbacks)
+ => (int32 result);
};
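A sketch of the client side of SetCallbacksAssociated(), mirroring the CameraHalDelegate change earlier in this patch (Chromium mojo types assumed; |this| implements CameraModuleCallbacks). Because the endpoint is associated, callback messages stay ordered with other CameraModule traffic on the same pipe:

```cpp
// Client-side binding for the associated callbacks interface.
mojo::AssociatedReceiver<cros::mojom::CameraModuleCallbacks>
    camera_module_callbacks_{this};

camera_module_->SetCallbacksAssociated(
    camera_module_callbacks_.BindNewEndpointAndPassRemote(),
    base::BindOnce([](int32_t result) {
      // Callbacks now arrive in order with the other CameraModule messages.
      DCHECK_EQ(0, result);
    }));
```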
diff --git a/chromium/media/capture/video/chromeos/mojom/cros_camera_service.mojom b/chromium/media/capture/video/chromeos/mojom/cros_camera_service.mojom
index 03f5a00fdac..dec0a5017ff 100644
--- a/chromium/media/capture/video/chromeos/mojom/cros_camera_service.mojom
+++ b/chromium/media/capture/video/chromeos/mojom/cros_camera_service.mojom
@@ -2,13 +2,44 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-// Next min version: 4
+// Next min version: 6
module cros.mojom;
import "components/chromeos_camera/common/jpeg_encode_accelerator.mojom";
import "components/chromeos_camera/common/mjpeg_decode_accelerator.mojom";
import "media/capture/video/chromeos/mojom/camera_common.mojom";
+import "mojo/public/mojom/base/unguessable_token.mojom";
+
+// CameraClientType indicates the type of a CameraHalClient.
+// It should be kept in sync with the ChromeOSCameraClientType enum in
+// tools/metrics/histograms/enums.xml
+[Extensible]
+enum CameraClientType {
+ UNKNOWN = 0,
+ TESTING = 1,
+ CHROME = 2,
+ ANDROID = 3,
+ PLUGINVM = 4,
+ ASH_CHROME = 5,
+ LACROS_CHROME = 6,
+};
+
+// CameraPrivacySwitchState indicates the state of the camera privacy switch.
+enum CameraPrivacySwitchState {
+  // For devices which can only read the privacy switch status while the
+  // camera is streaming, the state of the privacy switch may currently be
+  // unknown.
+ UNKNOWN = 0,
+
+  // State when the privacy switch is on, which means black frames will be
+  // delivered when streaming.
+ ON = 1,
+
+  // State when the privacy switch is off, which means the camera should
+  // stream normally.
+ OFF = 2,
+};
// The CrOS camera HAL v3 Mojo dispatcher. The dispatcher acts as a proxy and
// waits for the server and the clients to register. There can only be one
@@ -17,12 +48,14 @@ import "media/capture/video/chromeos/mojom/camera_common.mojom";
// channel to the server and pass the established Mojo channel to the client in
// order to set up a Mojo channel between the client and the server.
//
-// Next method ID: 4
+// Next method ID: 6
interface CameraHalDispatcher {
+ // [Deprecated in version 4]
// A CameraHalServer calls RegisterServer to register itself with the
// dispatcher.
RegisterServer@0(pending_remote<CameraHalServer> server);
+ // [Deprecated in version 4]
// A CameraHalClient calls RegisterClient to register itself with the
// dispatcher.
RegisterClient@1(pending_remote<CameraHalClient> client);
@@ -35,6 +68,25 @@ interface CameraHalDispatcher {
// Get JpegEncodeAccelerator from dispatcher.
[MinVersion=2] GetJpegEncodeAccelerator@3(
pending_receiver<chromeos_camera.mojom.JpegEncodeAccelerator> jea_receiver);
+
+ // A CameraHalServer calls RegisterServerWithToken to register itself with the
+ // dispatcher. CameraHalDispatcher authenticates the server with the
+ // supplied |auth_token|. |callbacks| is used by CameraHalServer to notify
+ // CameraHalDispatcher about CameraHalClient updates, for example when a
+ // CameraHalClient opens or closes a camera device.
+ [MinVersion=4] RegisterServerWithToken@4(
+ pending_remote<CameraHalServer> server,
+ mojo_base.mojom.UnguessableToken auth_token) =>
+ (int32 result, pending_remote<CameraHalServerCallbacks> callbacks);
+
+ // A CameraHalClient calls RegisterClientWithToken to register itself with
+ // the dispatcher. CameraHalDispatcher authenticates the client with the
+ // given |type| and |auth_token|.
+ [MinVersion=4] RegisterClientWithToken@5(
+ pending_remote<CameraHalClient> client,
+ CameraClientType type,
+ mojo_base.mojom.UnguessableToken auth_token) => (int32 result);
+
};
// The CrOS camera HAL v3 Mojo server.
@@ -45,13 +97,34 @@ interface CameraHalServer {
// HAL v3 adapter. Upon successful binding of |camera_module_request|, the
// caller will have an established Mojo channel to the camera HAL v3 adapter
// process.
- CreateChannel@0(pending_receiver<CameraModule> camera_module_request);
+ CreateChannel@0(pending_receiver<CameraModule> camera_module_request,
+ [MinVersion=4] CameraClientType type);
// Enable or disable tracing.
[MinVersion=3]
SetTracingEnabled@1(bool enabled);
};
+// CameraHalServerCallbacks is an interface for CameraHalServer to notify
+// CameraHalDispatcher for any changes on the server side, for example when a
+// CameraHalClient opens or closes a camera device.
+//
+// Next method ID: 2
+interface CameraHalServerCallbacks {
+ // Fired when a CameraHalClient opens or closes a camera device. When a
+ // CameraHalClient loses its Mojo connection to CameraHalServer,
+ // CameraHalServer also uses this to notify that cameras are closed (not
+ // being used).
+ CameraDeviceActivityChange@0(int32 camera_id,
+ bool opened,
+ CameraClientType type);
+
+ // Fired when the camera privacy switch status changes. If the device has
+ // such a switch, this callback is also fired once immediately upon callback
+ // registration to report the current status.
+ [MinVersion=5]
+ CameraPrivacySwitchStateChange@1(CameraPrivacySwitchState state);
+};
+
// The CrOS camera HAL v3 Mojo client.
//
// Next method ID: 1
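The version-4 handshake replaces the tokenless RegisterServer/RegisterClient pair. From the client side it might look roughly like this (a sketch; |dispatcher|, |client_receiver|, and the provisioned |token| are assumed to exist, with token provisioning handled by the TokenManager introduced later in this change):

// |dispatcher| is a bound mojo::Remote<cros::mojom::CameraHalDispatcher>;
// |client_receiver| is a mojo::Receiver<cros::mojom::CameraHalClient>.
dispatcher->RegisterClientWithToken(
    client_receiver.BindNewPipeAndPassRemote(),
    cros::mojom::CameraClientType::CHROME, token,
    base::BindOnce([](int32_t result) {
      LOG_IF(ERROR, result != 0)
          << "RegisterClientWithToken failed: " << result;
    }));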
diff --git a/chromium/media/capture/video/chromeos/request_manager.cc b/chromium/media/capture/video/chromeos/request_manager.cc
index 7dedadf09b3..8944ef12a3a 100644
--- a/chromium/media/capture/video/chromeos/request_manager.cc
+++ b/chromium/media/capture/video/chromeos/request_manager.cc
@@ -17,6 +17,7 @@
#include "base/posix/safe_strerror.h"
#include "base/strings/string_number_conversions.h"
#include "base/trace_event/trace_event.h"
+#include "media/capture/video/chromeos/camera_app_device_bridge_impl.h"
#include "media/capture/video/chromeos/camera_buffer_factory.h"
#include "media/capture/video/chromeos/camera_metadata_utils.h"
#include "media/capture/video/chromeos/video_capture_features_chromeos.h"
@@ -34,6 +35,7 @@ constexpr std::initializer_list<StreamType> kYUVReprocessStreams = {
} // namespace
RequestManager::RequestManager(
+ const std::string& device_id,
mojo::PendingReceiver<cros::mojom::Camera3CallbackOps>
callback_ops_receiver,
std::unique_ptr<StreamCaptureInterface> capture_interface,
@@ -41,10 +43,9 @@ RequestManager::RequestManager(
VideoCaptureBufferType buffer_type,
std::unique_ptr<CameraBufferFactory> camera_buffer_factory,
BlobifyCallback blobify_callback,
- scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner,
- CameraAppDeviceImpl* camera_app_device,
- ClientType client_type)
- : callback_ops_(this, std::move(callback_ops_receiver)),
+ scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner)
+ : device_id_(device_id),
+ callback_ops_(this, std::move(callback_ops_receiver)),
capture_interface_(std::move(capture_interface)),
device_context_(device_context),
video_capture_use_gmb_(buffer_type ==
@@ -52,15 +53,12 @@ RequestManager::RequestManager(
stream_buffer_manager_(
new StreamBufferManager(device_context_,
video_capture_use_gmb_,
- std::move(camera_buffer_factory),
- client_type)),
+ std::move(camera_buffer_factory))),
blobify_callback_(std::move(blobify_callback)),
ipc_task_runner_(std::move(ipc_task_runner)),
capturing_(false),
partial_result_count_(1),
- first_frame_shutter_time_(base::TimeTicks()),
- camera_app_device_(std::move(camera_app_device)),
- client_type_(client_type) {
+ first_frame_shutter_time_(base::TimeTicks()) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
DCHECK(callback_ops_.is_bound());
DCHECK(device_context_);
@@ -80,9 +78,19 @@ RequestManager::RequestManager(
RequestManager::~RequestManager() = default;
void RequestManager::SetUpStreamsAndBuffers(
- VideoCaptureFormat capture_format,
+ base::flat_map<ClientType, VideoCaptureParams> capture_params,
const cros::mojom::CameraMetadataPtr& static_metadata,
std::vector<cros::mojom::Camera3StreamPtr> streams) {
+ auto request_keys = GetMetadataEntryAsSpan<int32_t>(
+ static_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
+ zero_shutter_lag_supported_ = base::Contains(
+ request_keys,
+ static_cast<int32_t>(
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_ENABLE_ZSL));
+ VLOG(1) << "Zero-shutter lag is "
+ << (zero_shutter_lag_supported_ ? "" : "not ") << "supported";
+
// The partial result count metadata is optional; defaults to 1 in case it
// is not set in the static metadata.
const cros::mojom::CameraMetadataEntryPtr* partial_count = GetMetadataEntry(
@@ -107,7 +115,7 @@ void RequestManager::SetUpStreamsAndBuffers(
}
stream_buffer_manager_->SetUpStreamsAndBuffers(
- capture_format, static_metadata, std::move(streams));
+ capture_params, static_metadata, std::move(streams));
}
cros::mojom::Camera3StreamPtr RequestManager::GetStreamConfiguration(
@@ -269,11 +277,13 @@ void RequestManager::PrepareCaptureRequest() {
// 2. Capture (YuvOutput)
// 3. Preview + Capture (YuvOutput)
// 4. Reprocess (YuvInput + BlobOutput)
+ // 5. Preview + Recording (YuvOutput)
//
// For device without reprocess capability:
// 1. Preview
// 2. Capture (BlobOutput)
// 3. Preview + Capture (BlobOutput)
+ // 4. Preview + Recording (YuvOutput)
std::set<StreamType> stream_types;
cros::mojom::CameraMetadataPtr settings;
TakePhotoCallback callback = base::NullCallback();
@@ -283,6 +293,7 @@ void RequestManager::PrepareCaptureRequest() {
bool is_reprocess_request = false;
bool is_preview_request = false;
bool is_oneshot_request = false;
+ bool is_recording_request = false;
// First, check if there are pending reprocess tasks.
is_reprocess_request = TryPrepareReprocessRequest(
@@ -291,16 +302,32 @@ void RequestManager::PrepareCaptureRequest() {
// If there is no pending reprocess task, then check if there are pending
// one-shot requests. And also try to put preview in the request.
if (!is_reprocess_request) {
- is_preview_request = TryPreparePreviewRequest(&stream_types, &settings);
+ if (!zero_shutter_lag_supported_) {
+ is_preview_request = TryPreparePreviewRequest(&stream_types, &settings);
+
+ // Order matters here. If the preview request and the one-shot request are
+ // both added in a single capture request, the settings will be overridden
+ // by the latter.
+ is_oneshot_request =
+ TryPrepareOneShotRequest(&stream_types, &settings, &callback);
+ } else {
+ // Zero-shutter lag could potentially give a frame from the past. Don't
+ // prepare a preview request when a one-shot request has been prepared.
+ is_oneshot_request =
+ TryPrepareOneShotRequest(&stream_types, &settings, &callback);
+
+ if (!is_oneshot_request) {
+ is_preview_request = TryPreparePreviewRequest(&stream_types, &settings);
+ }
+ }
+ }
- // Order matters here. If the preview request and oneshot request are both
- // added in single capture request, the settings will be overridden by the
- // later.
- is_oneshot_request =
- TryPrepareOneShotRequest(&stream_types, &settings, &callback);
+ if (is_preview_request) {
+ is_recording_request = TryPrepareRecordingRequest(&stream_types);
}
- if (!is_reprocess_request && !is_oneshot_request && !is_preview_request) {
+ if (!is_reprocess_request && !is_oneshot_request && !is_preview_request &&
+ !is_recording_request) {
// We have to keep the pipeline full.
if (preview_buffers_queued_ < pipeline_depth_) {
ipc_task_runner_->PostTask(
@@ -450,6 +477,17 @@ bool RequestManager::TryPrepareOneShotRequest(
return true;
}
+bool RequestManager::TryPrepareRecordingRequest(
+ std::set<StreamType>* stream_types) {
+ if (!stream_buffer_manager_->IsRecordingSupported() ||
+ !stream_buffer_manager_->HasFreeBuffers({StreamType::kRecordingOutput})) {
+ return false;
+ }
+
+ stream_types->insert({StreamType::kRecordingOutput});
+ return true;
+}
+
void RequestManager::OnProcessedCaptureRequest(int32_t result) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
@@ -664,8 +702,12 @@ void RequestManager::Notify(cros::mojom::Camera3NotifyMsgPtr message) {
first_frame_shutter_time_ = reference_time;
}
pending_result.timestamp = reference_time - first_frame_shutter_time_;
- if (camera_app_device_ && pending_result.still_capture_callback) {
- camera_app_device_->OnShutterDone();
+
+ auto camera_app_device =
+ CameraAppDeviceBridgeImpl::GetInstance()->GetWeakCameraAppDevice(
+ device_id_);
+ if (camera_app_device && pending_result.still_capture_callback) {
+ camera_app_device->OnShutterDone();
}
TrySubmitPendingBuffers(frame_number);
@@ -759,8 +801,11 @@ void RequestManager::SubmitCaptureResult(
observer->OnResultMetadataAvailable(frame_number, pending_result.metadata);
}
- if (camera_app_device_) {
- camera_app_device_->OnResultMetadataAvailable(
+ auto camera_app_device =
+ CameraAppDeviceBridgeImpl::GetInstance()->GetWeakCameraAppDevice(
+ device_id_);
+ if (camera_app_device) {
+ camera_app_device->OnResultMetadataAvailable(
pending_result.metadata,
static_cast<cros::mojom::StreamType>(stream_type));
}
@@ -790,8 +835,10 @@ void RequestManager::SubmitCaptureResult(
// Deliver the captured data to client.
if (stream_buffer->status ==
cros::mojom::Camera3BufferStatus::CAMERA3_BUFFER_STATUS_OK) {
- if (stream_type == StreamType::kPreviewOutput) {
- SubmitCapturedPreviewBuffer(frame_number, buffer_ipc_id);
+ if (stream_type == StreamType::kPreviewOutput ||
+ stream_type == StreamType::kRecordingOutput) {
+ SubmitCapturedPreviewRecordingBuffer(frame_number, buffer_ipc_id,
+ stream_type);
} else if (stream_type == StreamType::kJpegOutput) {
SubmitCapturedJpegBuffer(frame_number, buffer_ipc_id);
} else if (stream_type == StreamType::kYUVOutput) {
@@ -827,14 +874,17 @@ void RequestManager::SubmitCaptureResult(
PrepareCaptureRequest();
}
-void RequestManager::SubmitCapturedPreviewBuffer(uint32_t frame_number,
- uint64_t buffer_ipc_id) {
+void RequestManager::SubmitCapturedPreviewRecordingBuffer(
+ uint32_t frame_number,
+ uint64_t buffer_ipc_id,
+ StreamType stream_type) {
const CaptureResult& pending_result = pending_results_[frame_number];
+ auto client_type = kStreamClientTypeMap[static_cast<int>(stream_type)];
if (video_capture_use_gmb_) {
VideoCaptureFormat format;
base::Optional<VideoCaptureDevice::Client::Buffer> buffer =
stream_buffer_manager_->AcquireBufferForClientById(
- StreamType::kPreviewOutput, buffer_ipc_id, &format);
+ stream_type, buffer_ipc_id, &format);
CHECK(buffer);
// TODO: Figure out the right color space for the camera frame. We may need
@@ -849,39 +899,38 @@ void RequestManager::SubmitCapturedPreviewBuffer(uint32_t frame_number,
auto translate_rotation = [](const int rotation) -> VideoRotation {
switch (rotation) {
case 0:
- return VideoRotation::VIDEO_ROTATION_0;
+ return VIDEO_ROTATION_0;
case 90:
- return VideoRotation::VIDEO_ROTATION_90;
+ return VIDEO_ROTATION_90;
case 180:
- return VideoRotation::VIDEO_ROTATION_180;
+ return VIDEO_ROTATION_180;
case 270:
- return VideoRotation::VIDEO_ROTATION_270;
+ return VIDEO_ROTATION_270;
}
- return VideoRotation::VIDEO_ROTATION_0;
+ return VIDEO_ROTATION_0;
};
- metadata.rotation =
+ metadata.transformation =
translate_rotation(device_context_->GetRotationForDisplay());
} else {
// All frames are pre-rotated to the display orientation.
- metadata.rotation = VideoRotation::VIDEO_ROTATION_0;
+ metadata.transformation = VIDEO_ROTATION_0;
}
device_context_->SubmitCapturedVideoCaptureBuffer(
- client_type_, std::move(*buffer), format, pending_result.reference_time,
+ client_type, std::move(*buffer), format, pending_result.reference_time,
pending_result.timestamp, metadata);
// |buffer| ownership is transferred to client, so we need to reserve a
// new video buffer.
- stream_buffer_manager_->ReserveBuffer(StreamType::kPreviewOutput);
+ stream_buffer_manager_->ReserveBuffer(stream_type);
} else {
gfx::GpuMemoryBuffer* gmb = stream_buffer_manager_->GetGpuMemoryBufferById(
- StreamType::kPreviewOutput, buffer_ipc_id);
+ stream_type, buffer_ipc_id);
CHECK(gmb);
device_context_->SubmitCapturedGpuMemoryBuffer(
- client_type_, gmb,
- stream_buffer_manager_->GetStreamCaptureFormat(
- StreamType::kPreviewOutput),
+ client_type, gmb,
+ stream_buffer_manager_->GetStreamCaptureFormat(stream_type),
pending_result.reference_time, pending_result.timestamp);
- stream_buffer_manager_->ReleaseBufferFromCaptureResult(
- StreamType::kPreviewOutput, buffer_ipc_id);
+ stream_buffer_manager_->ReleaseBufferFromCaptureResult(stream_type,
+ buffer_ipc_id);
}
}
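The zero-shutter-lag branch above inverts the usual preview-first ordering: when ZSL is supported, a prepared one-shot request suppresses the preview request so that a frame from the past cannot be delivered as preview. The decision order reduces to the following self-contained sketch (a hypothetical helper, not part of the change):

// Mirrors the request-selection order in PrepareCaptureRequest(), assuming
// the Try* helpers have the same success semantics as in RequestManager.
struct Prepared { bool preview = false; bool oneshot = false; };

Prepared SelectRequests(bool zsl_supported,
                        bool can_prepare_oneshot,
                        bool can_prepare_preview) {
  Prepared p;
  if (!zsl_supported) {
    // Preview first; the one-shot settings win if both are added.
    p.preview = can_prepare_preview;
    p.oneshot = can_prepare_oneshot;
  } else {
    // ZSL may return a frame from the past; never pair it with preview.
    p.oneshot = can_prepare_oneshot;
    if (!p.oneshot)
      p.preview = can_prepare_preview;
  }
  return p;
}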
diff --git a/chromium/media/capture/video/chromeos/request_manager.h b/chromium/media/capture/video/chromeos/request_manager.h
index f5d4fdad821..8739ceab41c 100644
--- a/chromium/media/capture/video/chromeos/request_manager.h
+++ b/chromium/media/capture/video/chromeos/request_manager.h
@@ -12,6 +12,7 @@
#include <set>
#include <vector>
+#include "base/containers/flat_map.h"
#include "base/memory/weak_ptr.h"
#include "base/optional.h"
#include "media/capture/mojom/image_capture.mojom.h"
@@ -45,7 +46,6 @@ constexpr int32_t kMinConfiguredStreams = 1;
// Maximum configured streams could contain two optional YUV streams.
constexpr int32_t kMaxConfiguredStreams = 4;
-
// RequestManager is responsible for managing the flow for sending capture
// requests and receiving capture results. Having RequestBuilder to build
// requests and StreamBufferManager to handle stream buffers, it focuses on
@@ -99,22 +99,21 @@ class CAPTURE_EXPORT RequestManager final
int32_t orientation;
};
- RequestManager(mojo::PendingReceiver<cros::mojom::Camera3CallbackOps>
+ RequestManager(const std::string& device_id,
+ mojo::PendingReceiver<cros::mojom::Camera3CallbackOps>
callback_ops_receiver,
std::unique_ptr<StreamCaptureInterface> capture_interface,
CameraDeviceContext* device_context,
VideoCaptureBufferType buffer_type,
std::unique_ptr<CameraBufferFactory> camera_buffer_factory,
BlobifyCallback blobify_callback,
- scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner,
- CameraAppDeviceImpl* camera_app_device,
- ClientType client_type);
+ scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner);
~RequestManager() override;
// Sets up the stream context and allocates buffers according to the
// configuration specified in |streams|.
void SetUpStreamsAndBuffers(
- VideoCaptureFormat capture_format,
+ base::flat_map<ClientType, VideoCaptureParams> capture_params,
const cros::mojom::CameraMetadataPtr& static_metadata,
std::vector<cros::mojom::Camera3StreamPtr> streams);
@@ -227,6 +226,8 @@ class CAPTURE_EXPORT RequestManager final
cros::mojom::CameraMetadataPtr* settings,
TakePhotoCallback* callback);
+ bool TryPrepareRecordingRequest(std::set<StreamType>* stream_types);
+
// Callback for ProcessCaptureRequest().
void OnProcessedCaptureRequest(int32_t result);
@@ -257,20 +258,26 @@ class CAPTURE_EXPORT RequestManager final
void SubmitCaptureResult(uint32_t frame_number,
StreamType stream_type,
cros::mojom::Camera3StreamBufferPtr stream_buffer);
- void SubmitCapturedPreviewBuffer(uint32_t frame_number,
- uint64_t buffer_ipc_id);
+ void SubmitCapturedPreviewRecordingBuffer(uint32_t frame_number,
+ uint64_t buffer_ipc_id,
+ StreamType stream_type);
void SubmitCapturedJpegBuffer(uint32_t frame_number, uint64_t buffer_ipc_id);
// If there are some metadata set by SetCaptureMetadata() or
// SetRepeatingCaptureMetadata(), update them onto |capture_settings|.
void UpdateCaptureSettings(cros::mojom::CameraMetadataPtr* capture_settings);
+ // The unique device id which is retrieved from VideoCaptureDeviceDescriptor.
+ std::string device_id_;
+
mojo::Receiver<cros::mojom::Camera3CallbackOps> callback_ops_;
std::unique_ptr<StreamCaptureInterface> capture_interface_;
CameraDeviceContext* device_context_;
+ bool zero_shutter_lag_supported_;
+
bool video_capture_use_gmb_;
// StreamBufferManager should be declared before RequestBuilder since
@@ -362,9 +369,7 @@ class CAPTURE_EXPORT RequestManager final
// duplicate or out of order of frames.
std::map<StreamType, uint32_t> last_received_frame_number_map_;
- CameraAppDeviceImpl* camera_app_device_; // Weak.
-
- ClientType client_type_;
+ base::WeakPtr<CameraAppDeviceImpl> camera_app_device_;
base::WeakPtrFactory<RequestManager> weak_ptr_factory_{this};
diff --git a/chromium/media/capture/video/chromeos/request_manager_unittest.cc b/chromium/media/capture/video/chromeos/request_manager_unittest.cc
index 6227b6f3e43..90b9e83aafe 100644
--- a/chromium/media/capture/video/chromeos/request_manager_unittest.cc
+++ b/chromium/media/capture/video/chromeos/request_manager_unittest.cc
@@ -87,12 +87,16 @@ class RequestManagerTest : public ::testing::Test {
void SetUp() override {
quit_ = false;
client_type_ = ClientType::kPreviewClient;
+ VideoCaptureParams params;
+ params.requested_format = kDefaultCaptureFormat;
+ capture_params_[client_type_] = params;
device_context_ = std::make_unique<CameraDeviceContext>();
if (device_context_->AddClient(
client_type_,
std::make_unique<unittest_internal::MockVideoCaptureClient>())) {
+ std::string fake_device_id = "0";
request_manager_ = std::make_unique<RequestManager>(
- mock_callback_ops_.BindNewPipeAndPassReceiver(),
+ fake_device_id, mock_callback_ops_.BindNewPipeAndPassReceiver(),
std::make_unique<MockStreamCaptureInterface>(), device_context_.get(),
VideoCaptureBufferType::kSharedMemory,
std::make_unique<FakeCameraBufferFactory>(),
@@ -100,7 +104,7 @@ class RequestManagerTest : public ::testing::Test {
[](const uint8_t* buffer, const uint32_t bytesused,
const VideoCaptureFormat& capture_format,
const int rotation) { return mojom::Blob::New(); }),
- base::ThreadTaskRunnerHandle::Get(), nullptr, client_type_);
+ base::ThreadTaskRunnerHandle::Get());
}
}
@@ -284,6 +288,7 @@ class RequestManagerTest : public ::testing::Test {
mojo::Remote<cros::mojom::Camera3CallbackOps> mock_callback_ops_;
std::unique_ptr<CameraDeviceContext> device_context_;
ClientType client_type_;
+ base::flat_map<ClientType, VideoCaptureParams> capture_params_;
private:
std::unique_ptr<base::RunLoop> run_loop_;
@@ -300,8 +305,7 @@ TEST_F(RequestManagerTest, SimpleCaptureTest) {
.WillRepeatedly(Invoke(this, &RequestManagerTest::ProcessCaptureRequest));
request_manager_->SetUpStreamsAndBuffers(
- kDefaultCaptureFormat,
- GetFakeStaticMetadata(/* partial_result_count */ 1),
+ capture_params_, GetFakeStaticMetadata(/* partial_result_count */ 1),
PrepareCaptureStream(/* max_buffers */ 1));
request_manager_->StartPreview(cros::mojom::CameraMetadata::New());
@@ -343,8 +347,7 @@ TEST_F(RequestManagerTest, PartialResultTest) {
}));
request_manager_->SetUpStreamsAndBuffers(
- kDefaultCaptureFormat,
- GetFakeStaticMetadata(/* partial_result_count */ 3),
+ capture_params_, GetFakeStaticMetadata(/* partial_result_count */ 3),
PrepareCaptureStream(/* max_buffers */ 1));
request_manager_->StartPreview(cros::mojom::CameraMetadata::New());
@@ -375,8 +378,7 @@ TEST_F(RequestManagerTest, DeviceErrorTest) {
}));
request_manager_->SetUpStreamsAndBuffers(
- kDefaultCaptureFormat,
- GetFakeStaticMetadata(/* partial_result_count */ 1),
+ capture_params_, GetFakeStaticMetadata(/* partial_result_count */ 1),
PrepareCaptureStream(/* max_buffers */ 1));
request_manager_->StartPreview(cros::mojom::CameraMetadata::New());
@@ -415,8 +417,7 @@ TEST_F(RequestManagerTest, RequestErrorTest) {
.WillRepeatedly(Invoke(this, &RequestManagerTest::ProcessCaptureRequest));
request_manager_->SetUpStreamsAndBuffers(
- kDefaultCaptureFormat,
- GetFakeStaticMetadata(/* partial_result_count */ 1),
+ capture_params_, GetFakeStaticMetadata(/* partial_result_count */ 1),
PrepareCaptureStream(/* max_buffers */ 1));
request_manager_->StartPreview(cros::mojom::CameraMetadata::New());
@@ -456,8 +457,7 @@ TEST_F(RequestManagerTest, ResultErrorTest) {
.WillRepeatedly(Invoke(this, &RequestManagerTest::ProcessCaptureRequest));
request_manager_->SetUpStreamsAndBuffers(
- kDefaultCaptureFormat,
- GetFakeStaticMetadata(/* partial_result_count */ 2),
+ capture_params_, GetFakeStaticMetadata(/* partial_result_count */ 2),
PrepareCaptureStream(/* max_buffers */ 1));
request_manager_->StartPreview(cros::mojom::CameraMetadata::New());
@@ -499,8 +499,7 @@ TEST_F(RequestManagerTest, BufferErrorTest) {
.WillRepeatedly(Invoke(this, &RequestManagerTest::ProcessCaptureRequest));
request_manager_->SetUpStreamsAndBuffers(
- kDefaultCaptureFormat,
- GetFakeStaticMetadata(/* partial_result_count */ 1),
+ capture_params_, GetFakeStaticMetadata(/* partial_result_count */ 1),
PrepareCaptureStream(/* max_buffers */ 1));
request_manager_->StartPreview(cros::mojom::CameraMetadata::New());
diff --git a/chromium/media/capture/video/chromeos/stream_buffer_manager.cc b/chromium/media/capture/video/chromeos/stream_buffer_manager.cc
index 75bd224507a..4e3f277f890 100644
--- a/chromium/media/capture/video/chromeos/stream_buffer_manager.cc
+++ b/chromium/media/capture/video/chromeos/stream_buffer_manager.cc
@@ -27,12 +27,10 @@ namespace media {
StreamBufferManager::StreamBufferManager(
CameraDeviceContext* device_context,
bool video_capture_use_gmb,
- std::unique_ptr<CameraBufferFactory> camera_buffer_factory,
- ClientType client_type)
+ std::unique_ptr<CameraBufferFactory> camera_buffer_factory)
: device_context_(device_context),
video_capture_use_gmb_(video_capture_use_gmb),
- camera_buffer_factory_(std::move(camera_buffer_factory)),
- client_type_(client_type) {
+ camera_buffer_factory_(std::move(camera_buffer_factory)) {
if (video_capture_use_gmb_) {
gmb_support_ = std::make_unique<gpu::GpuMemoryBufferSupport>();
}
@@ -155,8 +153,9 @@ StreamBufferManager::AcquireBufferForClientById(StreamType stream_type,
} else {
// We have to reserve a new buffer because the size is different.
Buffer rotated_buffer;
+ auto client_type = kStreamClientTypeMap[static_cast<int>(stream_type)];
if (!device_context_->ReserveVideoCaptureBufferFromPool(
- client_type_, format->frame_size, format->pixel_format,
+ client_type, format->frame_size, format->pixel_format,
&rotated_buffer)) {
DLOG(WARNING) << "Failed to reserve video capture buffer";
original_gmb->Unmap();
@@ -221,7 +220,7 @@ bool StreamBufferManager::HasStreamsConfigured(
}
void StreamBufferManager::SetUpStreamsAndBuffers(
- VideoCaptureFormat capture_format,
+ base::flat_map<ClientType, VideoCaptureParams> capture_params,
const cros::mojom::CameraMetadataPtr& static_metadata,
std::vector<cros::mojom::Camera3StreamPtr> streams) {
DestroyCurrentStreamsAndBuffers();
@@ -249,15 +248,18 @@ void StreamBufferManager::SetUpStreamsAndBuffers(
// flags of the stream.
StreamType stream_type = StreamIdToStreamType(stream->id);
auto stream_context = std::make_unique<StreamContext>();
- stream_context->capture_format = capture_format;
+ auto client_type = kStreamClientTypeMap[static_cast<int>(stream_type)];
+ stream_context->capture_format =
+ capture_params[client_type].requested_format;
stream_context->stream = std::move(stream);
switch (stream_type) {
case StreamType::kPreviewOutput:
+ case StreamType::kRecordingOutput:
stream_context->buffer_dimension = gfx::Size(
stream_context->stream->width, stream_context->stream->height);
stream_context->buffer_usage =
- gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE;
+ gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE;
break;
case StreamType::kYUVInput:
case StreamType::kYUVOutput:
@@ -378,6 +380,11 @@ bool StreamBufferManager::IsReprocessSupported() {
return stream_context_.find(StreamType::kYUVOutput) != stream_context_.end();
}
+bool StreamBufferManager::IsRecordingSupported() {
+ return stream_context_.find(StreamType::kRecordingOutput) !=
+ stream_context_.end();
+}
+
// static
uint64_t StreamBufferManager::GetBufferIpcId(StreamType stream_type, int key) {
uint64_t id = 0;
@@ -441,8 +448,9 @@ void StreamBufferManager::ReserveBufferFromPool(StreamType stream_type) {
return;
}
Buffer vcd_buffer;
+ auto client_type = kStreamClientTypeMap[static_cast<int>(stream_type)];
if (!device_context_->ReserveVideoCaptureBufferFromPool(
- client_type_, stream_context->buffer_dimension,
+ client_type, stream_context->buffer_dimension,
stream_context->capture_format.pixel_format, &vcd_buffer)) {
DLOG(WARNING) << "Failed to reserve video capture buffer";
return;
@@ -452,8 +460,9 @@ void StreamBufferManager::ReserveBufferFromPool(StreamType stream_type) {
stream_context->buffer_dimension, *gfx_format,
stream_context->buffer_usage, base::NullCallback());
stream_context->free_buffers.push(vcd_buffer.id);
- stream_context->buffers.insert(std::make_pair(
- vcd_buffer.id, BufferPair(std::move(gmb), std::move(vcd_buffer))));
+ const int id = vcd_buffer.id;
+ stream_context->buffers.insert(
+ std::make_pair(id, BufferPair(std::move(gmb), std::move(vcd_buffer))));
}
void StreamBufferManager::DestroyCurrentStreamsAndBuffers() {
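The per-stream client lookup used throughout this file relies on kStreamClientTypeMap, which is defined elsewhere in the tree and not shown in this diff. A plausible shape, purely for illustration (the enumerator order and the recording-to-video mapping are assumptions):

// Assumed StreamType -> ClientType table; the index must match
// static_cast<int>(StreamType).
constexpr ClientType kStreamClientTypeMap[] = {
    ClientType::kPreviewClient,  // StreamType::kPreviewOutput
    ClientType::kPreviewClient,  // StreamType::kJpegOutput
    ClientType::kPreviewClient,  // StreamType::kYUVInput
    ClientType::kPreviewClient,  // StreamType::kYUVOutput
    ClientType::kVideoClient,    // StreamType::kRecordingOutput (assumed)
};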
diff --git a/chromium/media/capture/video/chromeos/stream_buffer_manager.h b/chromium/media/capture/video/chromeos/stream_buffer_manager.h
index 50ed6a352b1..ba62fbe89f4 100644
--- a/chromium/media/capture/video/chromeos/stream_buffer_manager.h
+++ b/chromium/media/capture/video/chromeos/stream_buffer_manager.h
@@ -14,6 +14,7 @@
#include <unordered_map>
#include <vector>
+#include "base/containers/flat_map.h"
#include "base/containers/queue.h"
#include "base/memory/weak_ptr.h"
#include "base/optional.h"
@@ -51,8 +52,7 @@ class CAPTURE_EXPORT StreamBufferManager final {
StreamBufferManager(
CameraDeviceContext* device_context,
bool video_capture_use_gmb,
- std::unique_ptr<CameraBufferFactory> camera_buffer_factory,
- ClientType client_type);
+ std::unique_ptr<CameraBufferFactory> camera_buffer_factory);
~StreamBufferManager();
void ReserveBuffer(StreamType stream_type);
@@ -84,7 +84,7 @@ class CAPTURE_EXPORT StreamBufferManager final {
// Sets up the stream context and allocates buffers according to the
// configuration specified in |streams|.
void SetUpStreamsAndBuffers(
- VideoCaptureFormat capture_format,
+ base::flat_map<ClientType, VideoCaptureParams> capture_params,
const cros::mojom::CameraMetadataPtr& static_metadata,
std::vector<cros::mojom::Camera3StreamPtr> streams);
@@ -105,6 +105,8 @@ class CAPTURE_EXPORT StreamBufferManager final {
bool IsReprocessSupported();
+ bool IsRecordingSupported();
+
private:
friend class RequestManagerTest;
@@ -164,8 +166,6 @@ class CAPTURE_EXPORT StreamBufferManager final {
std::unique_ptr<CameraBufferFactory> camera_buffer_factory_;
- ClientType client_type_;
-
base::WeakPtrFactory<StreamBufferManager> weak_ptr_factory_{this};
DISALLOW_IMPLICIT_CONSTRUCTORS(StreamBufferManager);
diff --git a/chromium/media/capture/video/chromeos/token_manager.cc b/chromium/media/capture/video/chromeos/token_manager.cc
new file mode 100644
index 00000000000..1f5789c6aaa
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/token_manager.cc
@@ -0,0 +1,157 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/chromeos/token_manager.h"
+
+#include <grp.h>
+#include <sys/types.h>
+#include <unistd.h>
+#include <string>
+
+#include <base/files/file_path.h>
+#include <base/files/file_util.h>
+#include <base/strings/string_number_conversions.h>
+#include <base/strings/string_util.h>
+
+namespace {
+
+gid_t GetArcCameraGid() {
+ auto* group = getgrnam("arc-camera");
+ return group != nullptr ? group->gr_gid : 0;
+}
+
+bool EnsureTokenDirectoryExists(const base::FilePath& token_path) {
+ static const gid_t gid = GetArcCameraGid();
+ if (gid == 0) {
+ LOG(ERROR) << "Failed to query the GID of arc-camera";
+ return false;
+ }
+
+ base::FilePath dir_name = token_path.DirName();
+ if (!base::CreateDirectory(dir_name) ||
+ !base::SetPosixFilePermissions(dir_name, 0770)) {
+ LOG(ERROR) << "Failed to create token directory at "
+ << dir_name.AsUTF8Unsafe();
+ return false;
+ }
+
+ if (chown(dir_name.AsUTF8Unsafe().c_str(), -1, gid) != 0) {
+ LOG(ERROR) << "Failed to chown token directory to arc-camera";
+ return false;
+ }
+ return true;
+}
+
+bool WriteTokenToFile(const base::FilePath& token_path,
+ const base::UnguessableToken& token) {
+ if (!EnsureTokenDirectoryExists(token_path)) {
+ LOG(ERROR) << "Failed to ensure token directory exists";
+ return false;
+ }
+ base::File token_file(
+ token_path, base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE);
+ if (!token_file.IsValid()) {
+ LOG(ERROR) << "Failed to create token file at "
+ << token_path.AsUTF8Unsafe();
+ return false;
+ }
+ std::string token_string = token.ToString();
+ token_file.WriteAtCurrentPos(token_string.c_str(), token_string.length());
+ return true;
+}
+
+} // namespace
+
+namespace media {
+
+constexpr char TokenManager::kServerTokenPath[];
+constexpr char TokenManager::kTestClientTokenPath[];
+constexpr std::array<cros::mojom::CameraClientType, 3>
+ TokenManager::kTrustedClientTypes;
+
+TokenManager::TokenManager() = default;
+TokenManager::~TokenManager() = default;
+
+bool TokenManager::GenerateServerToken() {
+ server_token_ = base::UnguessableToken::Create();
+ return WriteTokenToFile(base::FilePath(kServerTokenPath), server_token_);
+}
+
+bool TokenManager::GenerateTestClientToken() {
+ return WriteTokenToFile(
+ base::FilePath(kTestClientTokenPath),
+ GetTokenForTrustedClient(cros::mojom::CameraClientType::TESTING));
+}
+
+base::UnguessableToken TokenManager::GetTokenForTrustedClient(
+ cros::mojom::CameraClientType type) {
+ base::AutoLock l(client_token_map_lock_);
+ if (std::find(kTrustedClientTypes.begin(), kTrustedClientTypes.end(), type) ==
+ kTrustedClientTypes.end()) {
+ return base::UnguessableToken();
+ }
+ auto& token_set = client_token_map_[type];
+ if (token_set.empty()) {
+ token_set.insert(base::UnguessableToken::Create());
+ }
+ return *token_set.begin();
+}
+
+void TokenManager::RegisterPluginVmToken(const base::UnguessableToken& token) {
+ base::AutoLock l(client_token_map_lock_);
+ auto result =
+ client_token_map_[cros::mojom::CameraClientType::PLUGINVM].insert(token);
+ if (!result.second) {
+ LOG(WARNING) << "The same token is already registered";
+ }
+}
+
+void TokenManager::UnregisterPluginVmToken(
+ const base::UnguessableToken& token) {
+ base::AutoLock l(client_token_map_lock_);
+ auto num_removed =
+ client_token_map_[cros::mojom::CameraClientType::PLUGINVM].erase(token);
+ if (num_removed != 1) {
+ LOG(WARNING) << "The token wasn't registered previously";
+ }
+}
+
+bool TokenManager::AuthenticateServer(const base::UnguessableToken& token) {
+ DCHECK(!server_token_.is_empty());
+ return server_token_ == token;
+}
+
+base::Optional<cros::mojom::CameraClientType> TokenManager::AuthenticateClient(
+ cros::mojom::CameraClientType type,
+ const base::UnguessableToken& token) {
+ base::AutoLock l(client_token_map_lock_);
+ if (type == cros::mojom::CameraClientType::UNKNOWN) {
+ for (const auto& client_token_map_pair : client_token_map_) {
+ const auto& token_set = client_token_map_pair.second;
+ if (token_set.find(token) != token_set.end()) {
+ return client_token_map_pair.first;
+ }
+ }
+ return base::nullopt;
+ }
+ auto& token_set = client_token_map_[type];
+ if (token_set.find(token) == token_set.end()) {
+ return base::nullopt;
+ }
+ return type;
+}
+
+void TokenManager::AssignServerTokenForTesting(
+ const base::UnguessableToken& token) {
+ server_token_ = token;
+}
+
+void TokenManager::AssignClientTokenForTesting(
+ cros::mojom::CameraClientType type,
+ const base::UnguessableToken& token) {
+ base::AutoLock l(client_token_map_lock_);
+ client_token_map_[type].insert(token);
+}
+
+} // namespace media
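End to end, the token flow this class enables might look like the following (a sketch; it assumes both sides share the same TokenManager state, which in production is mediated by the token files written above):

TokenManager manager;
CHECK(manager.GenerateServerToken());  // writes /run/camera_tokens/server/token
base::UnguessableToken chrome_token =
    manager.GetTokenForTrustedClient(cros::mojom::CameraClientType::CHROME);

// Later, when RegisterClientWithToken arrives with (CHROME, chrome_token):
base::Optional<cros::mojom::CameraClientType> type =
    manager.AuthenticateClient(cros::mojom::CameraClientType::CHROME,
                               chrome_token);
CHECK(type.has_value());  // succeeds for a token handed out above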
diff --git a/chromium/media/capture/video/chromeos/token_manager.h b/chromium/media/capture/video/chromeos/token_manager.h
new file mode 100644
index 00000000000..c4a75ad5f9f
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/token_manager.h
@@ -0,0 +1,72 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_TOKEN_MANAGER_H_
+#define MEDIA_CAPTURE_VIDEO_CHROMEOS_TOKEN_MANAGER_H_
+
+#include <array>
+
+#include "base/containers/flat_map.h"
+#include "base/containers/flat_set.h"
+#include "base/optional.h"
+#include "base/thread_annotations.h"
+#include "base/unguessable_token.h"
+#include "media/capture/capture_export.h"
+#include "media/capture/video/chromeos/mojom/cros_camera_service.mojom.h"
+
+namespace media {
+
+class CAPTURE_EXPORT TokenManager {
+ public:
+ static constexpr char kServerTokenPath[] = "/run/camera_tokens/server/token";
+ static constexpr char kTestClientTokenPath[] =
+ "/run/camera_tokens/testing/token";
+ static constexpr std::array<cros::mojom::CameraClientType, 3>
+ kTrustedClientTypes = {cros::mojom::CameraClientType::CHROME,
+ cros::mojom::CameraClientType::ANDROID,
+ cros::mojom::CameraClientType::TESTING};
+
+ TokenManager();
+ ~TokenManager();
+
+ bool GenerateServerToken();
+
+ bool GenerateTestClientToken();
+
+ base::UnguessableToken GetTokenForTrustedClient(
+ cros::mojom::CameraClientType type);
+
+ void RegisterPluginVmToken(const base::UnguessableToken& token);
+ void UnregisterPluginVmToken(const base::UnguessableToken& token);
+
+ bool AuthenticateServer(const base::UnguessableToken& token);
+
+ // Authenticates a client with the given |type| and |token|. When |type| is
+ // cros::mojom::CameraClientType::UNKNOWN, it tries to figure out the actual
+ // client type from the supplied |token|. If authentication succeeds, it returns
+ // the authenticated type of the client. If authentication fails,
+ // base::nullopt is returned.
+ base::Optional<cros::mojom::CameraClientType> AuthenticateClient(
+ cros::mojom::CameraClientType type,
+ const base::UnguessableToken& token);
+
+ private:
+ friend class TokenManagerTest;
+ friend class CameraHalDispatcherImplTest;
+
+ void AssignServerTokenForTesting(const base::UnguessableToken& token);
+ void AssignClientTokenForTesting(cros::mojom::CameraClientType type,
+ const base::UnguessableToken& token);
+
+ base::UnguessableToken server_token_;
+
+ base::Lock client_token_map_lock_;
+ base::flat_map<cros::mojom::CameraClientType,
+ base::flat_set<base::UnguessableToken>>
+ client_token_map_ GUARDED_BY(client_token_map_lock_);
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_TOKEN_MANAGER_H_
diff --git a/chromium/media/capture/video/chromeos/token_manager_unittest.cc b/chromium/media/capture/video/chromeos/token_manager_unittest.cc
new file mode 100644
index 00000000000..1cc351b590d
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/token_manager_unittest.cc
@@ -0,0 +1,97 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/chromeos/token_manager.h"
+
+#include <string>
+
+#include "base/containers/flat_map.h"
+#include "base/files/file_util.h"
+#include "base/unguessable_token.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+
+class TokenManagerTest : public ::testing::Test {
+ public:
+ TokenManagerTest() {
+ // Generate server token.
+ server_token_ = base::UnguessableToken::Create();
+ manager_.AssignServerTokenForTesting(server_token_);
+
+ // Generate tokens from trusted clients.
+ for (const auto& type : TokenManager::kTrustedClientTypes) {
+ auto& token = client_token_map_[type];
+ token = base::UnguessableToken::Create();
+ manager_.AssignClientTokenForTesting(type, token);
+ }
+ }
+
+ ~TokenManagerTest() override = default;
+
+ protected:
+ TokenManager manager_;
+ base::UnguessableToken server_token_;
+ base::flat_map<cros::mojom::CameraClientType, base::UnguessableToken>
+ client_token_map_;
+};
+
+// Test that TokenManager authenticates token for CameraHalServer.
+TEST_F(TokenManagerTest, AuthenticateServer) {
+ EXPECT_TRUE(manager_.AuthenticateServer(server_token_));
+}
+
+// Test that TokenManager authenticates token for CameraHalClient.
+TEST_F(TokenManagerTest, AuthenticateClient) {
+ for (auto type : TokenManager::kTrustedClientTypes) {
+ auto authenticated_type =
+ manager_.AuthenticateClient(type, client_token_map_[type]);
+ ASSERT_TRUE(authenticated_type.has_value());
+ EXPECT_EQ(authenticated_type.value(), type);
+
+ // Verify that an empty token fails authentication.
+ authenticated_type =
+ manager_.AuthenticateClient(type, base::UnguessableToken());
+ EXPECT_FALSE(authenticated_type.has_value());
+ }
+}
+
+// Test that TokenManager authenticates token for pluginvm and the
+// authentication fails when the token is unregistered.
+TEST_F(TokenManagerTest, AuthenticatePluginvm) {
+ // Create a fake token for pluginvm.
+ auto token = base::UnguessableToken::Create();
+
+ manager_.RegisterPluginVmToken(token);
+ auto authenticated_type = manager_.AuthenticateClient(
+ cros::mojom::CameraClientType::UNKNOWN, token);
+ ASSERT_TRUE(authenticated_type.has_value());
+ EXPECT_EQ(authenticated_type.value(),
+ cros::mojom::CameraClientType::PLUGINVM);
+
+ manager_.UnregisterPluginVmToken(token);
+ authenticated_type = manager_.AuthenticateClient(
+ cros::mojom::CameraClientType::UNKNOWN, token);
+ EXPECT_FALSE(authenticated_type.has_value());
+}
+
+// Test that CameraClientType::UNKNOWN with an empty token is rejected.
+TEST_F(TokenManagerTest, AuthenticateUnknown) {
+ auto authenticated_type = manager_.AuthenticateClient(
+ cros::mojom::CameraClientType::UNKNOWN, base::UnguessableToken::Create());
+ EXPECT_FALSE(authenticated_type.has_value());
+}
+
+// Test that TokenManager::GetTokenForTrustedClient returns an empty token for
+// untrusted clients.
+TEST_F(TokenManagerTest, GetTokenForTrustedClientFailForUntrustedClients) {
+ EXPECT_TRUE(
+ manager_.GetTokenForTrustedClient(cros::mojom::CameraClientType::UNKNOWN)
+ .is_empty());
+ EXPECT_TRUE(
+ manager_.GetTokenForTrustedClient(cros::mojom::CameraClientType::PLUGINVM)
+ .is_empty());
+}
+
+} // namespace media
diff --git a/chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.cc b/chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.cc
index 2ce163efb49..013b267fa77 100644
--- a/chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.cc
+++ b/chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.cc
@@ -13,7 +13,7 @@ namespace media {
VendorTagOpsDelegate::VendorTagOpsDelegate(
scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner)
- : ipc_task_runner_(ipc_task_runner) {}
+ : ipc_task_runner_(ipc_task_runner), is_initializing_(false) {}
VendorTagOpsDelegate::~VendorTagOpsDelegate() = default;
@@ -28,17 +28,29 @@ VendorTagOpsDelegate::MakeReceiver() {
void VendorTagOpsDelegate::Initialize() {
DCHECK(ipc_task_runner_->RunsTasksInCurrentSequence());
+
+ base::AutoLock lock(lock_);
+ is_initializing_ = true;
vendor_tag_ops_->GetTagCount(base::BindOnce(
&VendorTagOpsDelegate::OnGotTagCount, base::Unretained(this)));
}
void VendorTagOpsDelegate::Reset() {
DCHECK(ipc_task_runner_->RunsTasksInCurrentSequence());
+
+ base::AutoLock lock(lock_);
vendor_tag_ops_.reset();
pending_info_.clear();
name_map_.clear();
tag_map_.clear();
initialized_.Reset();
+ is_initializing_ = false;
+}
+
+void VendorTagOpsDelegate::StopInitialization() {
+ base::AutoLock lock(lock_);
+ initialized_.Signal();
+ is_initializing_ = false;
}
void VendorTagOpsDelegate::RemovePending(uint32_t tag) {
@@ -47,7 +59,7 @@ void VendorTagOpsDelegate::RemovePending(uint32_t tag) {
DCHECK_EQ(removed, 1u);
if (pending_info_.empty()) {
DVLOG(1) << "VendorTagOpsDelegate initialized";
- initialized_.Signal();
+ StopInitialization();
}
}
@@ -55,13 +67,13 @@ void VendorTagOpsDelegate::OnGotTagCount(int32_t tag_count) {
DCHECK(ipc_task_runner_->RunsTasksInCurrentSequence());
if (tag_count == -1) {
LOG(ERROR) << "Failed to get tag count";
- initialized_.Signal();
+ StopInitialization();
return;
}
if (tag_count == 0) {
// There is no vendor tag, we are done here.
- initialized_.Signal();
+ StopInitialization();
return;
}
@@ -134,6 +146,13 @@ void VendorTagOpsDelegate::OnGotTagType(uint32_t tag, int32_t type) {
const VendorTagInfo* VendorTagOpsDelegate::GetInfoByName(
const std::string& full_name) {
+ {
+ base::AutoLock lock(lock_);
+ if (!is_initializing_ && !initialized_.IsSignaled()) {
+ LOG(WARNING) << "VendorTagOps is accessed before calling Initialize()";
+ return nullptr;
+ }
+ }
initialized_.Wait();
auto it = name_map_.find(full_name);
if (it == name_map_.end()) {
@@ -144,6 +163,13 @@ const VendorTagInfo* VendorTagOpsDelegate::GetInfoByName(
const VendorTagInfo* VendorTagOpsDelegate::GetInfoByTag(
cros::mojom::CameraMetadataTag tag) {
+ {
+ base::AutoLock lock(lock_);
+ if (!is_initializing_ && !initialized_.IsSignaled()) {
+ LOG(WARNING) << "VendorTagOps is accessed before calling Initialize()";
+ return nullptr;
+ }
+ }
initialized_.Wait();
auto it = tag_map_.find(tag);
if (it == tag_map_.end()) {
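The new |lock_|/|is_initializing_| pair lets GetInfoByName()/GetInfoByTag() fail fast instead of blocking forever on |initialized_| when Initialize() was never called. The intended calling pattern is roughly as follows (a sketch; the receiver wiring to the camera HAL is elided):

VendorTagOpsDelegate delegate(ipc_task_runner);
auto pending_receiver = delegate.MakeReceiver();  // pass to the camera HAL
delegate.Initialize();  // kicks off the async tag enumeration
// Possibly from another thread, once initialization has started:
const VendorTagInfo* info = delegate.GetInfoByName("com.vendor.some_tag");
// Returns nullptr (with a warning) if Initialize() was never called;
// otherwise blocks until enumeration completes.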
diff --git a/chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.h b/chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.h
index 206394f3817..cd963e0cdd8 100644
--- a/chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.h
+++ b/chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.h
@@ -9,6 +9,7 @@
#include <string>
#include <vector>
+#include "base/synchronization/lock.h"
#include "media/capture/video/chromeos/mojom/camera_common.mojom.h"
#include "mojo/public/cpp/bindings/pending_receiver.h"
#include "mojo/public/cpp/bindings/remote.h"
@@ -41,6 +42,7 @@ class VendorTagOpsDelegate {
const VendorTagInfo* GetInfoByTag(cros::mojom::CameraMetadataTag tag);
private:
+ void StopInitialization();
void RemovePending(uint32_t tag);
void OnGotTagCount(int32_t tag_count);
@@ -63,6 +65,9 @@ class VendorTagOpsDelegate {
std::map<cros::mojom::CameraMetadataTag, VendorTagInfo> tag_map_;
base::WaitableEvent initialized_;
+
+ base::Lock lock_;
+ bool is_initializing_ GUARDED_BY(lock_);
};
} // namespace media
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_delegate.cc b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_delegate.cc
new file mode 100644
index 00000000000..498542efce2
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_delegate.cc
@@ -0,0 +1,282 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/chromeos/video_capture_device_chromeos_delegate.h"
+
+#include <memory>
+#include <string>
+#include <utility>
+
+#include "base/bind.h"
+#include "base/callback_helpers.h"
+#include "base/location.h"
+#include "base/synchronization/waitable_event.h"
+#include "base/threading/platform_thread.h"
+#include "base/trace_event/trace_event.h"
+#include "media/base/bind_to_current_loop.h"
+#include "media/capture/video/chromeos/camera_app_device_bridge_impl.h"
+#include "media/capture/video/chromeos/camera_device_delegate.h"
+#include "media/capture/video/chromeos/camera_hal_delegate.h"
+#include "ui/display/display.h"
+#include "ui/display/display_observer.h"
+#include "ui/display/screen.h"
+
+namespace media {
+
+VideoCaptureDeviceChromeOSDelegate::VideoCaptureDeviceChromeOSDelegate(
+ scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
+ const VideoCaptureDeviceDescriptor& device_descriptor,
+ scoped_refptr<CameraHalDelegate> camera_hal_delegate,
+ base::OnceClosure cleanup_callback)
+ : device_descriptor_(device_descriptor),
+ camera_hal_delegate_(std::move(camera_hal_delegate)),
+ capture_task_runner_(base::ThreadTaskRunnerHandle::Get()),
+ camera_device_ipc_thread_(std::string("CameraDeviceIpcThread") +
+ device_descriptor.device_id),
+ screen_observer_delegate_(
+ ScreenObserverDelegate::Create(this, ui_task_runner)),
+ lens_facing_(device_descriptor.facing),
+ // External cameras have lens_facing as MEDIA_VIDEO_FACING_NONE.
+ // We don't want to rotate the frame even if the device rotates.
+ rotates_with_device_(lens_facing_ !=
+ VideoFacingMode::MEDIA_VIDEO_FACING_NONE),
+ rotation_(0),
+ cleanup_callback_(std::move(cleanup_callback)),
+ device_closed_(base::WaitableEvent::ResetPolicy::MANUAL,
+ base::WaitableEvent::InitialState::NOT_SIGNALED) {
+ // TODO(b/175168296): Hook power manager client on LaCrOS.
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ power_manager_client_proxy_ = base::MakeRefCounted<PowerManagerClientProxy>();
+ power_manager_client_proxy_->Init(
+ weak_ptr_factory_.GetWeakPtr(), "VideoCaptureDeviceChromeOSDelegate",
+ capture_task_runner_, std::move(ui_task_runner));
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+}
+
+VideoCaptureDeviceChromeOSDelegate::~VideoCaptureDeviceChromeOSDelegate() {}
+
+void VideoCaptureDeviceChromeOSDelegate::Shutdown() {
+ DCHECK(capture_task_runner_->BelongsToCurrentThread());
+ if (!HasDeviceClient()) {
+ DCHECK(!camera_device_ipc_thread_.IsRunning());
+ screen_observer_delegate_->RemoveObserver();
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ power_manager_client_proxy_->Shutdown();
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+ std::move(cleanup_callback_).Run();
+ }
+}
+
+bool VideoCaptureDeviceChromeOSDelegate::HasDeviceClient() {
+ return device_context_ && device_context_->HasClient();
+}
+
+void VideoCaptureDeviceChromeOSDelegate::AllocateAndStart(
+ const VideoCaptureParams& params,
+ std::unique_ptr<VideoCaptureDevice::Client> client,
+ ClientType client_type) {
+ DCHECK(capture_task_runner_->BelongsToCurrentThread());
+ DCHECK(!camera_device_delegate_);
+ if (!HasDeviceClient()) {
+ TRACE_EVENT0("camera", "Start Device");
+ if (!camera_device_ipc_thread_.Start()) {
+ std::string error_msg = "Failed to start device thread";
+ LOG(ERROR) << error_msg;
+ client->OnError(
+ media::VideoCaptureError::kCrosHalV3FailedToStartDeviceThread,
+ FROM_HERE, error_msg);
+ return;
+ }
+
+ device_context_ = std::make_unique<CameraDeviceContext>();
+ if (device_context_->AddClient(client_type, std::move(client))) {
+ capture_params_[client_type] = params;
+ camera_device_delegate_ = std::make_unique<CameraDeviceDelegate>(
+ device_descriptor_, camera_hal_delegate_,
+ camera_device_ipc_thread_.task_runner());
+ OpenDevice();
+ }
+ CameraAppDeviceBridgeImpl::GetInstance()->OnVideoCaptureDeviceCreated(
+ device_descriptor_.device_id, camera_device_ipc_thread_.task_runner());
+ } else {
+ if (device_context_->AddClient(client_type, std::move(client))) {
+ capture_params_[client_type] = params;
+ ReconfigureStreams();
+ }
+ }
+}
+
+void VideoCaptureDeviceChromeOSDelegate::StopAndDeAllocate(
+ ClientType client_type) {
+ DCHECK(capture_task_runner_->BelongsToCurrentThread());
+ DCHECK(camera_device_delegate_);
+ device_context_->RemoveClient(client_type);
+ if (!HasDeviceClient()) {
+ CloseDevice();
+ CameraAppDeviceBridgeImpl::GetInstance()->OnVideoCaptureDeviceClosing(
+ device_descriptor_.device_id);
+ camera_device_ipc_thread_.Stop();
+ camera_device_delegate_.reset();
+ device_context_.reset();
+ }
+}
+
+void VideoCaptureDeviceChromeOSDelegate::TakePhoto(
+ VideoCaptureDevice::TakePhotoCallback callback) {
+ DCHECK(capture_task_runner_->BelongsToCurrentThread());
+ DCHECK(camera_device_delegate_);
+ camera_device_ipc_thread_.task_runner()->PostTask(
+ FROM_HERE, base::BindOnce(&CameraDeviceDelegate::TakePhoto,
+ camera_device_delegate_->GetWeakPtr(),
+ std::move(callback)));
+}
+
+void VideoCaptureDeviceChromeOSDelegate::GetPhotoState(
+ VideoCaptureDevice::GetPhotoStateCallback callback) {
+ DCHECK(capture_task_runner_->BelongsToCurrentThread());
+ camera_device_ipc_thread_.task_runner()->PostTask(
+ FROM_HERE, base::BindOnce(&CameraDeviceDelegate::GetPhotoState,
+ camera_device_delegate_->GetWeakPtr(),
+ std::move(callback)));
+}
+
+void VideoCaptureDeviceChromeOSDelegate::SetPhotoOptions(
+ mojom::PhotoSettingsPtr settings,
+ VideoCaptureDevice::SetPhotoOptionsCallback callback) {
+ DCHECK(capture_task_runner_->BelongsToCurrentThread());
+ camera_device_ipc_thread_.task_runner()->PostTask(
+ FROM_HERE, base::BindOnce(&CameraDeviceDelegate::SetPhotoOptions,
+ camera_device_delegate_->GetWeakPtr(),
+ std::move(settings), std::move(callback)));
+}
+
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+
+void VideoCaptureDeviceChromeOSDelegate::SuspendDone() {
+ OpenDevice();
+}
+
+void VideoCaptureDeviceChromeOSDelegate::SuspendImminent() {
+ CloseDevice();
+}
+
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+
+void VideoCaptureDeviceChromeOSDelegate::OpenDevice() {
+ DCHECK(capture_task_runner_->BelongsToCurrentThread());
+
+ if (!camera_device_delegate_) {
+ return;
+ }
+ // It's safe to pass unretained |device_context_| here since
+ // VideoCaptureDeviceChromeOSDelegate owns |camera_device_delegate_| and makes
+ // sure |device_context_| outlives |camera_device_delegate_|.
+ camera_device_ipc_thread_.task_runner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(&CameraDeviceDelegate::AllocateAndStart,
+ camera_device_delegate_->GetWeakPtr(), capture_params_,
+ base::Unretained(device_context_.get())));
+ camera_device_ipc_thread_.task_runner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(&CameraDeviceDelegate::SetRotation,
+ camera_device_delegate_->GetWeakPtr(), rotation_));
+}
+
+void VideoCaptureDeviceChromeOSDelegate::CloseDevice() {
+ DCHECK(capture_task_runner_->BelongsToCurrentThread());
+
+ if (!camera_device_delegate_) {
+ return;
+ }
+ // We do our best to let the camera HAL cleanly shut down the device. In
+ // general we don't trust the camera HAL, so if the device does not close in
+ // time we simply terminate the Mojo channel by resetting
+ // |camera_device_delegate_|.
+ //
+ // VideoCaptureDeviceChromeOSDelegate owns both |camera_device_delegate_| and
+ // |device_closed_| and it stops |camera_device_ipc_thread_| in
+ // StopAndDeAllocate, so it's safe to pass |device_closed_| as unretained in
+ // the callback.
+ device_closed_.Reset();
+ camera_device_ipc_thread_.task_runner()->PostTask(
+ FROM_HERE, base::BindOnce(&CameraDeviceDelegate::StopAndDeAllocate,
+ camera_device_delegate_->GetWeakPtr(),
+ base::BindOnce(
+ [](base::WaitableEvent* device_closed) {
+ device_closed->Signal();
+ },
+ base::Unretained(&device_closed_))));
+ const base::TimeDelta kWaitTimeout = base::TimeDelta::FromSeconds(3);
+ device_closed_.TimedWait(kWaitTimeout);
+}
+
+void VideoCaptureDeviceChromeOSDelegate::ReconfigureStreams() {
+ DCHECK(capture_task_runner_->BelongsToCurrentThread());
+ DCHECK(camera_device_delegate_);
+
+ camera_device_ipc_thread_.task_runner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(&CameraDeviceDelegate::ReconfigureStreams,
+ camera_device_delegate_->GetWeakPtr(), capture_params_));
+ camera_device_ipc_thread_.task_runner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(&CameraDeviceDelegate::SetRotation,
+ camera_device_delegate_->GetWeakPtr(), rotation_));
+}
+
+void VideoCaptureDeviceChromeOSDelegate::SetDisplayRotation(
+ const display::Display& display) {
+ DCHECK(capture_task_runner_->BelongsToCurrentThread());
+ if (display.IsInternal())
+ SetRotation(display.rotation() * 90);
+}
+
+void VideoCaptureDeviceChromeOSDelegate::SetRotation(int rotation) {
+ DCHECK(capture_task_runner_->BelongsToCurrentThread());
+ if (!rotates_with_device_) {
+ rotation = 0;
+ } else if (lens_facing_ == VideoFacingMode::MEDIA_VIDEO_FACING_ENVIRONMENT) {
+ // Original frame when |rotation| = 0
+ // -----------------------
+ // | * |
+ // | * * |
+ // | * * |
+ // | ******* |
+ // | * * |
+ // | * * |
+ // -----------------------
+ //
+ // |rotation| = 90, this is what back camera sees
+ // -----------------------
+ // | ******** |
+ // | * **** |
+ // | * *** |
+ // | * *** |
+ // | * **** |
+ // | ******** |
+ // -----------------------
+ //
+ // |rotation| = 90, this is what front camera sees
+ // -----------------------
+ // | ******** |
+ // | **** * |
+ // | *** * |
+ // | *** * |
+ // | **** * |
+ // | ******** |
+ // -----------------------
+ //
+ // Therefore, for back camera, we need to rotate (360 - |rotation|).
+ rotation = (360 - rotation) % 360;
+ }
+ rotation_ = rotation;
+ if (camera_device_ipc_thread_.IsRunning()) {
+ camera_device_ipc_thread_.task_runner()->PostTask(
+ FROM_HERE,
+ base::BindOnce(&CameraDeviceDelegate::SetRotation,
+ camera_device_delegate_->GetWeakPtr(), rotation_));
+ }
+}
+
+} // namespace media
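As a quick sanity check of the environment-facing branch in SetRotation(): the frame rotation is the display rotation mirrored modulo 360, so 0 maps to 0, 90 to 270, 180 to 180, and 270 to 90. As a pure function (illustration only):

int BackCameraRotation(int display_rotation) {
  return (360 - display_rotation) % 360;  // 0->0, 90->270, 180->180, 270->90
}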
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_delegate.h b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_delegate.h
new file mode 100644
index 00000000000..0e4a6a31b5f
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_delegate.h
@@ -0,0 +1,133 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_VIDEO_CAPTURE_DEVICE_CHROMEOS_DELEGATE_H_
+#define MEDIA_CAPTURE_VIDEO_CHROMEOS_VIDEO_CAPTURE_DEVICE_CHROMEOS_DELEGATE_H_
+
+#include <memory>
+
+#include "base/containers/flat_map.h"
+#include "base/macros.h"
+#include "base/memory/weak_ptr.h"
+#include "base/single_thread_task_runner.h"
+#include "base/synchronization/lock.h"
+#include "base/threading/thread.h"
+#include "media/capture/video/chromeos/camera_device_context.h"
+#include "media/capture/video/chromeos/display_rotation_observer.h"
+#include "media/capture/video/video_capture_device.h"
+#include "media/capture/video/video_capture_device_descriptor.h"
+#include "media/capture/video_capture_types.h"
+
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+#include "media/capture/video/chromeos/ash/power_manager_client_proxy.h"
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+
+namespace display {
+
+class Display;
+
+} // namespace display
+
+namespace media {
+
+class CameraHalDelegate;
+class CameraDeviceDelegate;
+
+// Implementation of delegate for ChromeOS with CrOS camera HALv3.
+class CAPTURE_EXPORT VideoCaptureDeviceChromeOSDelegate final
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ : public DisplayRotationObserver,
+ public PowerManagerClientProxy::Observer {
+#else
+ : public DisplayRotationObserver {
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+ public:
+ VideoCaptureDeviceChromeOSDelegate(
+ scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
+ const VideoCaptureDeviceDescriptor& device_descriptor,
+ scoped_refptr<CameraHalDelegate> camera_hal_delegate,
+ base::OnceClosure cleanup_callback);
+
+ ~VideoCaptureDeviceChromeOSDelegate();
+ void Shutdown();
+ bool HasDeviceClient();
+
+ void AllocateAndStart(const VideoCaptureParams& params,
+ std::unique_ptr<VideoCaptureDevice::Client> client,
+ ClientType client_type);
+ void StopAndDeAllocate(ClientType client_type);
+ void TakePhoto(VideoCaptureDevice::TakePhotoCallback callback);
+ void GetPhotoState(VideoCaptureDevice::GetPhotoStateCallback callback);
+ void SetPhotoOptions(mojom::PhotoSettingsPtr settings,
+ VideoCaptureDevice::SetPhotoOptionsCallback callback);
+
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ // Implementation of PowerManagerClientProxy::Observer.
+ void SuspendDone() final;
+ void SuspendImminent() final;
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+
+ void OpenDevice();
+ void CloseDevice();
+
+ private:
+ void ReconfigureStreams();
+
+ // DisplayRotationObserver implementation.
+ void SetDisplayRotation(const display::Display& display) final;
+ void SetRotation(int rotation);
+
+ const VideoCaptureDeviceDescriptor device_descriptor_;
+
+ // A reference to the CameraHalDelegate instance in the VCD factory. This is
+ // used by AllocateAndStart to query camera info and create the camera device.
+ const scoped_refptr<CameraHalDelegate> camera_hal_delegate_;
+
+  // The task runner of the thread on which all the VideoCaptureDevice
+  // interface methods are expected to be called.
+ const scoped_refptr<base::SingleThreadTaskRunner> capture_task_runner_;
+
+  // The thread on which all the Mojo operations of |camera_device_delegate_|
+  // take place. Started in AllocateAndStart and stopped in StopAndDeAllocate;
+  // access to the base::Thread methods is sequenced on |capture_task_runner_|.
+ base::Thread camera_device_ipc_thread_;
+
+ // Map client type to VideoCaptureParams.
+ base::flat_map<ClientType, VideoCaptureParams> capture_params_;
+
+ // |device_context_| is created and owned by
+ // VideoCaptureDeviceChromeOSDelegate and is only accessed by
+ // |camera_device_delegate_|.
+ std::unique_ptr<CameraDeviceContext> device_context_;
+
+ // Internal delegate doing the actual capture setting, buffer allocation and
+ // circulation with the camera HAL. Created in AllocateAndStart and deleted in
+ // StopAndDeAllocate on |capture_task_runner_|. All methods of
+ // |camera_device_delegate_| operate on |camera_device_ipc_thread_|.
+ std::unique_ptr<CameraDeviceDelegate> camera_device_delegate_;
+
+ scoped_refptr<ScreenObserverDelegate> screen_observer_delegate_;
+ const VideoFacingMode lens_facing_;
+ // Whether the incoming frames should rotate when the device rotates.
+ const bool rotates_with_device_;
+ int rotation_;
+
+ base::OnceClosure cleanup_callback_;
+
+ base::WaitableEvent device_closed_;
+
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ scoped_refptr<PowerManagerClientProxy> power_manager_client_proxy_;
+
+ base::WeakPtrFactory<PowerManagerClientProxy::Observer> weak_ptr_factory_{
+ this};
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+
+ DISALLOW_IMPLICIT_CONSTRUCTORS(VideoCaptureDeviceChromeOSDelegate);
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_VIDEO_CAPTURE_DEVICE_CHROMEOS_DELEGATE_H_
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc
index 07f2996154d..d7dc818b102 100644
--- a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc
+++ b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc
@@ -1,309 +1,53 @@
-// Copyright 2017 The Chromium Authors. All rights reserved.
+// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/capture/video/chromeos/video_capture_device_chromeos_halv3.h"
-#include <memory>
-#include <string>
-#include <utility>
-
-#include "base/bind.h"
-#include "base/callback_helpers.h"
-#include "base/location.h"
-#include "base/synchronization/waitable_event.h"
-#include "base/threading/platform_thread.h"
-#include "base/trace_event/trace_event.h"
-#include "chromeos/dbus/power/power_manager_client.h"
-#include "media/base/bind_to_current_loop.h"
-#include "media/capture/video/chromeos/camera_device_context.h"
-#include "media/capture/video/chromeos/camera_device_delegate.h"
-#include "media/capture/video/chromeos/camera_hal_delegate.h"
-#include "ui/display/display.h"
-#include "ui/display/display_observer.h"
-#include "ui/display/screen.h"
+#include "base/strings/string_util.h"
+#include "media/capture/video/chromeos/video_capture_device_chromeos_delegate.h"
namespace media {
-class VideoCaptureDeviceChromeOSHalv3::PowerManagerClientProxy
- : public base::RefCountedThreadSafe<PowerManagerClientProxy>,
- public chromeos::PowerManagerClient::Observer {
- public:
- PowerManagerClientProxy() = default;
-
- void Init(base::WeakPtr<VideoCaptureDeviceChromeOSHalv3> device,
- scoped_refptr<base::SingleThreadTaskRunner> device_task_runner,
- scoped_refptr<base::SingleThreadTaskRunner> dbus_task_runner) {
- device_ = std::move(device);
- device_task_runner_ = std::move(device_task_runner);
- dbus_task_runner_ = std::move(dbus_task_runner);
-
- dbus_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&PowerManagerClientProxy::InitOnDBusThread, this));
- }
-
- void Shutdown() {
- dbus_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&PowerManagerClientProxy::ShutdownOnDBusThread, this));
- }
-
- void UnblockSuspend(const base::UnguessableToken& unblock_suspend_token) {
- dbus_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&PowerManagerClientProxy::UnblockSuspendOnDBusThread,
- this, unblock_suspend_token));
- }
-
- private:
- friend class base::RefCountedThreadSafe<PowerManagerClientProxy>;
-
- ~PowerManagerClientProxy() override = default;
-
- void InitOnDBusThread() {
- DCHECK(dbus_task_runner_->RunsTasksInCurrentSequence());
- chromeos::PowerManagerClient::Get()->AddObserver(this);
- }
-
- void ShutdownOnDBusThread() {
- DCHECK(dbus_task_runner_->RunsTasksInCurrentSequence());
- chromeos::PowerManagerClient::Get()->RemoveObserver(this);
- }
-
- void UnblockSuspendOnDBusThread(
- const base::UnguessableToken& unblock_suspend_token) {
- DCHECK(dbus_task_runner_->RunsTasksInCurrentSequence());
- chromeos::PowerManagerClient::Get()->UnblockSuspend(unblock_suspend_token);
- }
-
- // chromeos::PowerManagerClient::Observer:
- void SuspendImminent(power_manager::SuspendImminent::Reason reason) final {
- auto token = base::UnguessableToken::Create();
- chromeos::PowerManagerClient::Get()->BlockSuspend(
- token, "VideoCaptureDeviceChromeOSHalv3");
- device_task_runner_->PostTask(
- FROM_HERE, base::BindOnce(&VideoCaptureDeviceChromeOSHalv3::CloseDevice,
- device_, token));
- }
-
- void SuspendDone(const base::TimeDelta& sleep_duration) final {
- device_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&VideoCaptureDeviceChromeOSHalv3::OpenDevice, device_));
- }
-
- base::WeakPtr<VideoCaptureDeviceChromeOSHalv3> device_;
- scoped_refptr<base::SingleThreadTaskRunner> device_task_runner_;
- scoped_refptr<base::SingleThreadTaskRunner> dbus_task_runner_;
-
- DISALLOW_COPY_AND_ASSIGN(PowerManagerClientProxy);
-};
+constexpr char kVirtualPrefix[] = "VIRTUAL_";
VideoCaptureDeviceChromeOSHalv3::VideoCaptureDeviceChromeOSHalv3(
- scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
- const VideoCaptureDeviceDescriptor& device_descriptor,
- scoped_refptr<CameraHalDelegate> camera_hal_delegate,
- CameraAppDeviceImpl* camera_app_device,
- base::OnceClosure cleanup_callback)
- : device_descriptor_(device_descriptor),
- camera_hal_delegate_(std::move(camera_hal_delegate)),
- capture_task_runner_(base::ThreadTaskRunnerHandle::Get()),
- camera_device_ipc_thread_(std::string("CameraDeviceIpcThread") +
- device_descriptor.device_id),
- screen_observer_delegate_(
- ScreenObserverDelegate::Create(this, ui_task_runner)),
- lens_facing_(device_descriptor.facing),
- // External cameras have lens_facing as MEDIA_VIDEO_FACING_NONE.
- // We don't want to rotate the frame even if the device rotates.
- rotates_with_device_(lens_facing_ !=
- VideoFacingMode::MEDIA_VIDEO_FACING_NONE),
- rotation_(0),
- camera_app_device_(camera_app_device),
- cleanup_callback_(std::move(cleanup_callback)),
- power_manager_client_proxy_(
- base::MakeRefCounted<PowerManagerClientProxy>()),
- client_type_(ClientType::kPreviewClient) {
- power_manager_client_proxy_->Init(weak_ptr_factory_.GetWeakPtr(),
- capture_task_runner_,
- std::move(ui_task_runner));
+ VideoCaptureDeviceChromeOSDelegate* delegate,
+ const VideoCaptureDeviceDescriptor& vcd_descriptor)
+ : vcd_delegate_(delegate) {
+ client_type_ = base::StartsWith(vcd_descriptor.device_id, kVirtualPrefix)
+ ? ClientType::kVideoClient
+ : ClientType::kPreviewClient;
}
VideoCaptureDeviceChromeOSHalv3::~VideoCaptureDeviceChromeOSHalv3() {
- DCHECK(capture_task_runner_->BelongsToCurrentThread());
- DCHECK(!camera_device_ipc_thread_.IsRunning());
- screen_observer_delegate_->RemoveObserver();
- power_manager_client_proxy_->Shutdown();
- std::move(cleanup_callback_).Run();
+ vcd_delegate_->Shutdown();
}
// VideoCaptureDevice implementation.
void VideoCaptureDeviceChromeOSHalv3::AllocateAndStart(
const VideoCaptureParams& params,
std::unique_ptr<Client> client) {
- DCHECK(capture_task_runner_->BelongsToCurrentThread());
- DCHECK(!camera_device_delegate_);
- TRACE_EVENT0("camera", "Start Device");
- if (!camera_device_ipc_thread_.Start()) {
- std::string error_msg = "Failed to start device thread";
- LOG(ERROR) << error_msg;
- client->OnError(
- media::VideoCaptureError::kCrosHalV3FailedToStartDeviceThread,
- FROM_HERE, error_msg);
- return;
- }
- capture_params_ = params;
- device_context_ = std::make_unique<CameraDeviceContext>();
- if (device_context_->AddClient(client_type_, std::move(client))) {
- camera_device_delegate_ = std::make_unique<CameraDeviceDelegate>(
- device_descriptor_, camera_hal_delegate_,
- camera_device_ipc_thread_.task_runner(), camera_app_device_,
- client_type_);
- OpenDevice();
- }
+ vcd_delegate_->AllocateAndStart(params, std::move(client), client_type_);
}
void VideoCaptureDeviceChromeOSHalv3::StopAndDeAllocate() {
- DCHECK(capture_task_runner_->BelongsToCurrentThread());
-
- if (!camera_device_delegate_) {
- return;
- }
- CloseDevice(base::UnguessableToken());
- camera_device_ipc_thread_.Stop();
- camera_device_delegate_.reset();
- device_context_->RemoveClient(client_type_);
- device_context_.reset();
+ vcd_delegate_->StopAndDeAllocate(client_type_);
}
void VideoCaptureDeviceChromeOSHalv3::TakePhoto(TakePhotoCallback callback) {
- DCHECK(capture_task_runner_->BelongsToCurrentThread());
- DCHECK(camera_device_delegate_);
- camera_device_ipc_thread_.task_runner()->PostTask(
- FROM_HERE, base::BindOnce(&CameraDeviceDelegate::TakePhoto,
- camera_device_delegate_->GetWeakPtr(),
- std::move(callback)));
+ vcd_delegate_->TakePhoto(std::move(callback));
}
void VideoCaptureDeviceChromeOSHalv3::GetPhotoState(
GetPhotoStateCallback callback) {
- DCHECK(capture_task_runner_->BelongsToCurrentThread());
- camera_device_ipc_thread_.task_runner()->PostTask(
- FROM_HERE, base::BindOnce(&CameraDeviceDelegate::GetPhotoState,
- camera_device_delegate_->GetWeakPtr(),
- std::move(callback)));
+ vcd_delegate_->GetPhotoState(std::move(callback));
}
void VideoCaptureDeviceChromeOSHalv3::SetPhotoOptions(
mojom::PhotoSettingsPtr settings,
SetPhotoOptionsCallback callback) {
- DCHECK(capture_task_runner_->BelongsToCurrentThread());
- camera_device_ipc_thread_.task_runner()->PostTask(
- FROM_HERE, base::BindOnce(&CameraDeviceDelegate::SetPhotoOptions,
- camera_device_delegate_->GetWeakPtr(),
- std::move(settings), std::move(callback)));
-}
-
-void VideoCaptureDeviceChromeOSHalv3::OpenDevice() {
- DCHECK(capture_task_runner_->BelongsToCurrentThread());
-
- if (!camera_device_delegate_) {
- return;
- }
- // It's safe to pass unretained |device_context_| here since
- // VideoCaptureDeviceChromeOSHalv3 owns |camera_device_delegate_| and makes
- // sure |device_context_| outlives |camera_device_delegate_|.
- camera_device_ipc_thread_.task_runner()->PostTask(
- FROM_HERE,
- base::BindOnce(&CameraDeviceDelegate::AllocateAndStart,
- camera_device_delegate_->GetWeakPtr(), capture_params_,
- base::Unretained(device_context_.get())));
- camera_device_ipc_thread_.task_runner()->PostTask(
- FROM_HERE,
- base::BindOnce(&CameraDeviceDelegate::SetRotation,
- camera_device_delegate_->GetWeakPtr(), rotation_));
-}
-
-void VideoCaptureDeviceChromeOSHalv3::CloseDevice(
- base::UnguessableToken unblock_suspend_token) {
- DCHECK(capture_task_runner_->BelongsToCurrentThread());
-
- if (!camera_device_delegate_) {
- return;
- }
- // We do our best to allow the camera HAL cleanly shut down the device. In
- // general we don't trust the camera HAL so if the device does not close in
- // time we simply terminate the Mojo channel by resetting
- // |camera_device_delegate_|.
- base::WaitableEvent device_closed(
- base::WaitableEvent::ResetPolicy::MANUAL,
- base::WaitableEvent::InitialState::NOT_SIGNALED);
- camera_device_ipc_thread_.task_runner()->PostTask(
- FROM_HERE, base::BindOnce(&CameraDeviceDelegate::StopAndDeAllocate,
- camera_device_delegate_->GetWeakPtr(),
- base::BindOnce(
- [](base::WaitableEvent* device_closed) {
- device_closed->Signal();
- },
- base::Unretained(&device_closed))));
- base::TimeDelta kWaitTimeoutSecs = base::TimeDelta::FromSeconds(3);
- device_closed.TimedWait(kWaitTimeoutSecs);
- if (!unblock_suspend_token.is_empty())
- power_manager_client_proxy_->UnblockSuspend(unblock_suspend_token);
-}
-
-void VideoCaptureDeviceChromeOSHalv3::SetDisplayRotation(
- const display::Display& display) {
- DCHECK(capture_task_runner_->BelongsToCurrentThread());
- if (display.IsInternal())
- SetRotation(display.rotation() * 90);
-}
-
-void VideoCaptureDeviceChromeOSHalv3::SetRotation(int rotation) {
- DCHECK(capture_task_runner_->BelongsToCurrentThread());
- if (!rotates_with_device_) {
- rotation = 0;
- } else if (lens_facing_ == VideoFacingMode::MEDIA_VIDEO_FACING_ENVIRONMENT) {
- // Original frame when |rotation| = 0
- // -----------------------
- // | * |
- // | * * |
- // | * * |
- // | ******* |
- // | * * |
- // | * * |
- // -----------------------
- //
- // |rotation| = 90, this is what back camera sees
- // -----------------------
- // | ******** |
- // | * **** |
- // | * *** |
- // | * *** |
- // | * **** |
- // | ******** |
- // -----------------------
- //
- // |rotation| = 90, this is what front camera sees
- // -----------------------
- // | ******** |
- // | **** * |
- // | *** * |
- // | *** * |
- // | **** * |
- // | ******** |
- // -----------------------
- //
- // Therefore, for back camera, we need to rotate (360 - |rotation|).
- rotation = (360 - rotation) % 360;
- }
- rotation_ = rotation;
- if (camera_device_ipc_thread_.IsRunning()) {
- camera_device_ipc_thread_.task_runner()->PostTask(
- FROM_HERE,
- base::BindOnce(&CameraDeviceDelegate::SetRotation,
- camera_device_delegate_->GetWeakPtr(), rotation_));
- }
+ vcd_delegate_->SetPhotoOptions(std::move(settings), std::move(callback));
}
} // namespace media
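
The client-type selection added above keys off a device-id prefix. A minimal sketch of the same idea (plain C++ with std::string in place of base::StartsWith; the enum is reduced to the two values used here):

    #include <cassert>
    #include <string>

    // Device IDs created for the multi-stream "virtual" camera carry a
    // "VIRTUAL_" prefix; such devices are treated as video (recording)
    // clients rather than preview clients.
    enum class ClientType { kPreviewClient, kVideoClient };

    ClientType ClientTypeForDeviceId(const std::string& device_id) {
      constexpr char kVirtualPrefix[] = "VIRTUAL_";
      return device_id.rfind(kVirtualPrefix, 0) == 0
                 ? ClientType::kVideoClient
                 : ClientType::kPreviewClient;
    }

    int main() {
      assert(ClientTypeForDeviceId("VIRTUAL_0") == ClientType::kVideoClient);
      assert(ClientTypeForDeviceId("0") == ClientType::kPreviewClient);
      return 0;
    }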
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h
index 9f5a03ef70a..5c5188a82e5 100644
--- a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h
+++ b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h
@@ -1,4 +1,4 @@
-// Copyright 2017 The Chromium Authors. All rights reserved.
+// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
@@ -7,39 +7,21 @@
#include <memory>
-#include "base/macros.h"
-#include "base/memory/weak_ptr.h"
-#include "base/single_thread_task_runner.h"
-#include "base/threading/thread.h"
#include "media/capture/video/chromeos/camera_device_context.h"
-#include "media/capture/video/chromeos/display_rotation_observer.h"
#include "media/capture/video/video_capture_device.h"
#include "media/capture/video/video_capture_device_descriptor.h"
-#include "media/capture/video_capture_types.h"
-
-namespace display {
-
-class Display;
-
-} // namespace display
namespace media {
-class CameraAppDeviceImpl;
-class CameraHalDelegate;
-class CameraDeviceDelegate;
+class VideoCaptureDeviceChromeOSDelegate;
// Implementation of VideoCaptureDevice for ChromeOS with CrOS camera HALv3.
class CAPTURE_EXPORT VideoCaptureDeviceChromeOSHalv3 final
- : public VideoCaptureDevice,
- public DisplayRotationObserver {
+ : public VideoCaptureDevice {
public:
VideoCaptureDeviceChromeOSHalv3(
- scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
- const VideoCaptureDeviceDescriptor& device_descriptor,
- scoped_refptr<CameraHalDelegate> camera_hal_delegate,
- CameraAppDeviceImpl* camera_app_device,
- base::OnceClosure cleanup_callback);
+ VideoCaptureDeviceChromeOSDelegate* delegate,
+ const VideoCaptureDeviceDescriptor& vcd_descriptor);
~VideoCaptureDeviceChromeOSHalv3() final;
@@ -53,60 +35,10 @@ class CAPTURE_EXPORT VideoCaptureDeviceChromeOSHalv3 final
SetPhotoOptionsCallback callback) final;
private:
- // Helper to interact with PowerManagerClient on DBus original thread.
- class PowerManagerClientProxy;
-
- void OpenDevice();
- void CloseDevice(base::UnguessableToken unblock_suspend_token);
-
- // DisplayRotationDelegate implementation.
- void SetDisplayRotation(const display::Display& display) final;
- void SetRotation(int rotation);
-
- const VideoCaptureDeviceDescriptor device_descriptor_;
-
- // A reference to the CameraHalDelegate instance in the VCD factory. This is
- // used by AllocateAndStart to query camera info and create the camera device.
- const scoped_refptr<CameraHalDelegate> camera_hal_delegate_;
-
- // A reference to the thread that all the VideoCaptureDevice interface methods
- // are expected to be called on.
- const scoped_refptr<base::SingleThreadTaskRunner> capture_task_runner_;
+ VideoCaptureDeviceChromeOSDelegate* vcd_delegate_;
- // The thread that all the Mojo operations of |camera_device_delegate_| take
- // place. Started in AllocateAndStart and stopped in StopAndDeAllocate, where
- // the access to the base::Thread methods are sequenced on
- // |capture_task_runner_|.
- base::Thread camera_device_ipc_thread_;
-
- VideoCaptureParams capture_params_;
- // |device_context_| is created and owned by VideoCaptureDeviceChromeOSHalv3
- // and is only accessed by |camera_device_delegate_|.
- std::unique_ptr<CameraDeviceContext> device_context_;
-
- // Internal delegate doing the actual capture setting, buffer allocation and
- // circulation with the camera HAL. Created in AllocateAndStart and deleted in
- // StopAndDeAllocate on |capture_task_runner_|. All methods of
- // |camera_device_delegate_| operate on |camera_device_ipc_thread_|.
- std::unique_ptr<CameraDeviceDelegate> camera_device_delegate_;
-
- scoped_refptr<ScreenObserverDelegate> screen_observer_delegate_;
- const VideoFacingMode lens_facing_;
- // Whether the incoming frames should rotate when the device rotates.
- const bool rotates_with_device_;
- int rotation_;
-
- CameraAppDeviceImpl* camera_app_device_; // Weak.
-
- base::OnceClosure cleanup_callback_;
-
- scoped_refptr<PowerManagerClientProxy> power_manager_client_proxy_;
-
- // The client type in CameraDeviceContext.
ClientType client_type_;
- base::WeakPtrFactory<VideoCaptureDeviceChromeOSHalv3> weak_ptr_factory_{this};
-
DISALLOW_IMPLICIT_CONSTRUCTORS(VideoCaptureDeviceChromeOSHalv3);
};
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.cc b/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.cc
index 6c0862e3cea..687bed919fe 100644
--- a/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.cc
+++ b/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.cc
@@ -9,7 +9,10 @@
#include "base/memory/ptr_util.h"
#include "media/base/bind_to_current_loop.h"
#include "media/capture/video/chromeos/camera_app_device_bridge_impl.h"
-#include "media/capture/video/chromeos/camera_hal_dispatcher_impl.h"
+
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+#include "media/capture/video/chromeos/ash/camera_hal_dispatcher_impl.h"
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
namespace media {
@@ -20,17 +23,18 @@ gpu::GpuMemoryBufferManager* g_gpu_buffer_manager = nullptr;
} // namespace
VideoCaptureDeviceFactoryChromeOS::VideoCaptureDeviceFactoryChromeOS(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner_for_screen_observer,
- CameraAppDeviceBridgeImpl* camera_app_device_bridge)
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner_for_screen_observer)
: task_runner_for_screen_observer_(task_runner_for_screen_observer),
camera_hal_ipc_thread_("CameraHalIpcThread"),
- camera_app_device_bridge_(camera_app_device_bridge),
initialized_(Init()) {}
VideoCaptureDeviceFactoryChromeOS::~VideoCaptureDeviceFactoryChromeOS() {
- if (camera_app_device_bridge_) {
- camera_app_device_bridge_->UnsetCameraInfoGetter();
- }
+  auto* camera_app_device_bridge = CameraAppDeviceBridgeImpl::GetInstance();
+  camera_app_device_bridge->UnsetCameraInfoGetter();
+  camera_app_device_bridge->UnsetVirtualDeviceController();
+
camera_hal_delegate_->Reset();
camera_hal_ipc_thread_.Stop();
}
@@ -43,8 +47,7 @@ VideoCaptureDeviceFactoryChromeOS::CreateDevice(
return std::unique_ptr<VideoCaptureDevice>();
}
return camera_hal_delegate_->CreateDevice(task_runner_for_screen_observer_,
- device_descriptor,
- camera_app_device_bridge_);
+ device_descriptor);
}
void VideoCaptureDeviceFactoryChromeOS::GetDevicesInfo(
@@ -76,27 +79,30 @@ bool VideoCaptureDeviceFactoryChromeOS::Init() {
return false;
}
+#if BUILDFLAG(IS_CHROMEOS_ASH)
if (!CameraHalDispatcherImpl::GetInstance()->IsStarted()) {
LOG(ERROR) << "CameraHalDispatcherImpl is not started";
return false;
}
-
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
camera_hal_delegate_ =
new CameraHalDelegate(camera_hal_ipc_thread_.task_runner());
- camera_hal_delegate_->RegisterCameraClient();
-
- // Since the |camera_hal_delegate_| is initialized on the constructor of this
- // object and is destroyed after |camera_app_device_bridge_| unsetting its
- // reference, it is safe to use base::Unretained() here.
- if (camera_app_device_bridge_) {
- camera_app_device_bridge_->SetCameraInfoGetter(
- base::BindRepeating(&CameraHalDelegate::GetCameraInfoFromDeviceId,
- base::Unretained(camera_hal_delegate_.get())));
+
+ if (!camera_hal_delegate_->RegisterCameraClient()) {
+ LOG(ERROR) << "Failed to register camera client";
+ return false;
}
- return true;
-}
-bool VideoCaptureDeviceFactoryChromeOS::IsSupportedCameraAppDeviceBridge() {
+  // Since we unset the camera info getter and the virtual device controller
+  // before invalidating |camera_hal_delegate_| in the destructor, it should
+  // be safe to use base::Unretained() here.
+ auto* camera_app_device_bridge = CameraAppDeviceBridgeImpl::GetInstance();
+ camera_app_device_bridge->SetCameraInfoGetter(
+ base::BindRepeating(&CameraHalDelegate::GetCameraInfoFromDeviceId,
+ base::Unretained(camera_hal_delegate_.get())));
+ camera_app_device_bridge->SetVirtualDeviceController(
+ base::BindRepeating(&CameraHalDelegate::EnableVirtualDevice,
+ base::Unretained(camera_hal_delegate_.get())));
return true;
}
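
The base::Unretained() comment above relies on a strict ordering: the singleton bridge must drop its raw-pointer callbacks before the factory tears down the delegate they point into. A self-contained sketch of that ordering (std::function standing in for base::RepeatingCallback; names are illustrative, not the Chromium API):

    #include <functional>
    #include <memory>

    struct Delegate {
      int GetCameraInfo(int camera_id) { return camera_id * 2; }
    };

    // Stand-in for the singleton CameraAppDeviceBridgeImpl.
    struct Bridge {
      std::function<int(int)> camera_info_getter;
      void SetCameraInfoGetter(std::function<int(int)> cb) {
        camera_info_getter = std::move(cb);
      }
      void UnsetCameraInfoGetter() { camera_info_getter = nullptr; }
    };

    int main() {
      Bridge bridge;
      auto delegate = std::make_unique<Delegate>();
      // Equivalent of base::BindRepeating(..., base::Unretained(ptr)).
      bridge.SetCameraInfoGetter(
          [raw = delegate.get()](int id) { return raw->GetCameraInfo(id); });
      // Teardown order matters: unset first, then free the delegate, so the
      // bridge can never invoke a dangling pointer.
      bridge.UnsetCameraInfoGetter();
      delegate.reset();
      return 0;
    }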
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h b/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h
index efa7c10bdf4..970c199e4d2 100644
--- a/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h
+++ b/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h
@@ -16,8 +16,6 @@
namespace media {
-class CameraAppDeviceBridgeImpl;
-
using MojoMjpegDecodeAcceleratorFactoryCB = base::RepeatingCallback<void(
mojo::PendingReceiver<chromeos_camera::mojom::MjpegDecodeAccelerator>)>;
@@ -26,8 +24,7 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryChromeOS final
public:
explicit VideoCaptureDeviceFactoryChromeOS(
scoped_refptr<base::SingleThreadTaskRunner>
- task_runner_for_screen_observer,
- CameraAppDeviceBridgeImpl* camera_app_device_bridge);
+ task_runner_for_screen_observer);
~VideoCaptureDeviceFactoryChromeOS() override;
@@ -36,8 +33,6 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryChromeOS final
const VideoCaptureDeviceDescriptor& device_descriptor) final;
void GetDevicesInfo(GetDevicesInfoCallback callback) override;
- bool IsSupportedCameraAppDeviceBridge() override;
-
static gpu::GpuMemoryBufferManager* GetBufferManager();
static void SetGpuBufferManager(gpu::GpuMemoryBufferManager* buffer_manager);
@@ -59,8 +54,6 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryChromeOS final
// |camera_hal_ipc_thread_|.
scoped_refptr<CameraHalDelegate> camera_hal_delegate_;
- CameraAppDeviceBridgeImpl* camera_app_device_bridge_; // Weak.
-
bool initialized_;
base::WeakPtrFactory<VideoCaptureDeviceFactoryChromeOS> weak_ptr_factory_{
diff --git a/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder.h b/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder.h
index 34ce0dba2cc..59850884926 100644
--- a/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder.h
+++ b/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder.h
@@ -26,12 +26,9 @@ class CAPTURE_EXPORT VideoCaptureJpegDecoder {
// decode error.
};
- using DecodeDoneCB = base::RepeatingCallback<void(
- int buffer_id,
- int frame_feedback_id,
- std::unique_ptr<VideoCaptureDevice::Client::Buffer::
- ScopedAccessPermission> buffer_read_permission,
- mojom::VideoFrameInfoPtr frame_info)>;
+ using DecodeDoneCB =
+ base::RepeatingCallback<void(ReadyFrameInBuffer,
+ std::vector<ReadyFrameInBuffer>)>;
virtual ~VideoCaptureJpegDecoder() {}
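
A compact sketch of the new callback shape (std::function standing in for base::RepeatingCallback; ReadyFrameInBuffer is reduced to its two ids for illustration):

    #include <functional>
    #include <vector>

    // Instead of passing buffer id, feedback id, access permission and frame
    // info as four separate arguments, the decoder now hands back one ready
    // frame plus a (currently empty) vector of scaled variants.
    struct ReadyFrameInBuffer {
      int buffer_id = 0;
      int frame_feedback_id = 0;
    };

    using DecodeDoneCB =
        std::function<void(ReadyFrameInBuffer, std::vector<ReadyFrameInBuffer>)>;

    int main() {
      DecodeDoneCB cb = [](ReadyFrameInBuffer frame,
                           std::vector<ReadyFrameInBuffer> scaled) {
        // Consume the decoded frame; |scaled| is empty for the JPEG decoder.
        (void)frame;
        (void)scaled;
      };
      cb(ReadyFrameInBuffer{1, 7}, {});
      return 0;
    }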
diff --git a/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.cc b/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.cc
index c272d15e92b..6b16b2cc2fb 100644
--- a/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.cc
+++ b/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.cc
@@ -25,15 +25,16 @@ VideoCaptureJpegDecoderImpl::VideoCaptureJpegDecoderImpl(
decode_done_cb_(std::move(decode_done_cb)),
send_log_message_cb_(std::move(send_log_message_cb)),
has_received_decoded_frame_(false),
+ decoder_status_(INIT_PENDING),
next_task_id_(0),
- task_id_(chromeos_camera::MjpegDecodeAccelerator::kInvalidTaskId),
- decoder_status_(INIT_PENDING) {}
+ task_id_(chromeos_camera::MjpegDecodeAccelerator::kInvalidTaskId) {}
VideoCaptureJpegDecoderImpl::~VideoCaptureJpegDecoderImpl() {
DCHECK(decoder_task_runner_->RunsTasksInCurrentSequence());
}
void VideoCaptureJpegDecoderImpl::Initialize() {
+ base::AutoLock lock(lock_);
if (!IsVideoCaptureAcceleratedJpegDecodingEnabled()) {
decoder_status_ = FAILED;
RecordInitDecodeUMA_Locked();
@@ -130,8 +131,8 @@ void VideoCaptureJpegDecoderImpl::DecodeCapturedData(
out_frame->BackWithOwnedSharedMemory(std::move(out_region),
std::move(out_mapping));
- out_frame->metadata()->frame_rate = frame_format.frame_rate;
- out_frame->metadata()->reference_time = reference_time;
+ out_frame->metadata().frame_rate = frame_format.frame_rate;
+ out_frame->metadata().reference_time = reference_time;
media::mojom::VideoFrameInfoPtr out_frame_info =
media::mojom::VideoFrameInfo::New();
@@ -139,14 +140,17 @@ void VideoCaptureJpegDecoderImpl::DecodeCapturedData(
out_frame_info->pixel_format = media::PIXEL_FORMAT_I420;
out_frame_info->coded_size = dimensions;
out_frame_info->visible_rect = gfx::Rect(dimensions);
- out_frame_info->metadata = *(out_frame->metadata());
+ out_frame_info->metadata = out_frame->metadata();
out_frame_info->color_space = out_frame->ColorSpace();
{
base::AutoLock lock(lock_);
decode_done_closure_ = base::BindOnce(
- decode_done_cb_, out_buffer.id, out_buffer.frame_feedback_id,
- std::move(out_buffer.access_permission), std::move(out_frame_info));
+ decode_done_cb_,
+ ReadyFrameInBuffer(out_buffer.id, out_buffer.frame_feedback_id,
+ std::move(out_buffer.access_permission),
+ std::move(out_frame_info)),
+ std::vector<ReadyFrameInBuffer>());
}
// base::Unretained is safe because |decoder_| is deleted on
@@ -236,6 +240,7 @@ bool VideoCaptureJpegDecoderImpl::IsDecoding_Locked() const {
}
void VideoCaptureJpegDecoderImpl::RecordInitDecodeUMA_Locked() {
+ lock_.AssertAcquired();
UMA_HISTOGRAM_BOOLEAN("Media.VideoCaptureGpuJpegDecoder.InitDecodeSuccess",
decoder_status_ == INIT_PASSED);
}
diff --git a/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.h b/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.h
index 7bb0296b25a..fbc3a41d95e 100644
--- a/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.h
+++ b/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.h
@@ -87,11 +87,10 @@ class CAPTURE_EXPORT VideoCaptureJpegDecoderImpl
const base::RepeatingCallback<void(const std::string&)> send_log_message_cb_;
bool has_received_decoded_frame_;
- // Guards |decode_done_closure_| and |decoder_status_|.
- mutable base::Lock lock_;
-
-  // The closure of |decode_done_cb_| with bound parameters.
-  base::OnceClosure decode_done_closure_;
+  mutable base::Lock lock_;
+  STATUS decoder_status_ GUARDED_BY(lock_);
+
+  // The closure of |decode_done_cb_| with bound parameters.
+  base::OnceClosure decode_done_closure_ GUARDED_BY(lock_);
// Next id for input BitstreamBuffer.
int32_t next_task_id_;
@@ -104,8 +103,6 @@ class CAPTURE_EXPORT VideoCaptureJpegDecoderImpl
base::UnsafeSharedMemoryRegion in_shared_region_;
base::WritableSharedMemoryMapping in_shared_mapping_;
- STATUS decoder_status_;
-
SEQUENCE_CHECKER(sequence_checker_);
base::WeakPtrFactory<VideoCaptureJpegDecoderImpl> weak_ptr_factory_{this};
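
The GUARDED_BY annotations above let Clang's -Wthread-safety analysis verify at compile time that the lock is held when the guarded members are touched. A minimal sketch of the pattern (simplified types; the attribute is Clang-only and a no-op elsewhere; with libc++, the analysis only checks std::mutex when built with -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS):

    #include <mutex>

    #if defined(__clang__)
    #define GUARDED_BY(x) __attribute__((guarded_by(x)))
    #else
    #define GUARDED_BY(x)
    #endif

    class Decoder {
     public:
      void SetFailed() {
        std::lock_guard<std::mutex> lock(lock_);
        status_ = kFailed;  // OK: |lock_| is held.
      }

     private:
      enum Status { kInitPending, kFailed };
      // Keeping the lock next to the members it guards makes unlocked access
      // a compile-time diagnostic rather than a code-review catch.
      mutable std::mutex lock_;
      Status status_ GUARDED_BY(lock_) = kInitPending;
    };

    int main() {
      Decoder d;
      d.SetFailed();
      return 0;
    }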
diff --git a/chromium/media/capture/video/create_video_capture_device_factory.cc b/chromium/media/capture/video/create_video_capture_device_factory.cc
index bab72603630..435ede8bd3b 100644
--- a/chromium/media/capture/video/create_video_capture_device_factory.cc
+++ b/chromium/media/capture/video/create_video_capture_device_factory.cc
@@ -11,10 +11,9 @@
#include "media/capture/video/fake_video_capture_device_factory.h"
#include "media/capture/video/file_video_capture_device_factory.h"
-#if defined(OS_LINUX) || BUILDFLAG(IS_LACROS)
+#if defined(OS_LINUX) || BUILDFLAG(IS_CHROMEOS_LACROS)
#include "media/capture/video/linux/video_capture_device_factory_linux.h"
-#elif BUILDFLAG(IS_ASH)
-#include "media/capture/video/chromeos/camera_app_device_bridge_impl.h"
+#elif BUILDFLAG(IS_CHROMEOS_ASH)
#include "media/capture/video/chromeos/public/cros_features.h"
#include "media/capture/video/chromeos/video_capture_device_factory_chromeos.h"
#include "media/capture/video/linux/video_capture_device_factory_linux.h"
@@ -57,11 +56,10 @@ CreateFakeVideoCaptureDeviceFactory() {
}
}
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
std::unique_ptr<VideoCaptureDeviceFactory>
CreateChromeOSVideoCaptureDeviceFactory(
- scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
- media::CameraAppDeviceBridgeImpl* camera_app_device_bridge) {
+ scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner) {
// On Chrome OS we have to support two use cases:
//
// 1. For devices that have the camera HAL v3 service running on Chrome OS,
@@ -72,21 +70,20 @@ CreateChromeOSVideoCaptureDeviceFactory(
// some special devices that may never be able to implement a camera HAL
// v3.
if (ShouldUseCrosCameraService()) {
- return std::make_unique<VideoCaptureDeviceFactoryChromeOS>(
- ui_task_runner, camera_app_device_bridge);
+ return std::make_unique<VideoCaptureDeviceFactoryChromeOS>(ui_task_runner);
} else {
return std::make_unique<VideoCaptureDeviceFactoryLinux>(ui_task_runner);
}
}
-#endif // BUILDFLAG(IS_ASH)
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
std::unique_ptr<VideoCaptureDeviceFactory>
CreatePlatformSpecificVideoCaptureDeviceFactory(
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner) {
-#if defined(OS_LINUX) || BUILDFLAG(IS_LACROS)
+#if defined(OS_LINUX) || BUILDFLAG(IS_CHROMEOS_LACROS)
return std::make_unique<VideoCaptureDeviceFactoryLinux>(ui_task_runner);
-#elif BUILDFLAG(IS_ASH)
- return CreateChromeOSVideoCaptureDeviceFactory(ui_task_runner, {});
+#elif BUILDFLAG(IS_CHROMEOS_ASH)
+ return CreateChromeOSVideoCaptureDeviceFactory(ui_task_runner);
#elif defined(OS_WIN)
return std::make_unique<VideoCaptureDeviceFactoryWin>();
#elif defined(OS_MAC)
@@ -115,20 +112,4 @@ std::unique_ptr<VideoCaptureDeviceFactory> CreateVideoCaptureDeviceFactory(
}
}
-#if BUILDFLAG(IS_ASH)
-std::unique_ptr<VideoCaptureDeviceFactory> CreateVideoCaptureDeviceFactory(
- scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
- media::CameraAppDeviceBridgeImpl* camera_app_device_bridge) {
- auto fake_device_factory = CreateFakeVideoCaptureDeviceFactory();
- if (fake_device_factory) {
- return fake_device_factory;
- } else {
- // |ui_task_runner| is needed for the Linux ChromeOS factory to retrieve
- // screen rotations.
- return CreateChromeOSVideoCaptureDeviceFactory(ui_task_runner,
- camera_app_device_bridge);
- }
-}
-#endif // BUILDFLAG(IS_ASH)
-
} // namespace media
diff --git a/chromium/media/capture/video/create_video_capture_device_factory.h b/chromium/media/capture/video/create_video_capture_device_factory.h
index 9a9c85d756c..5e8a77090c8 100644
--- a/chromium/media/capture/video/create_video_capture_device_factory.h
+++ b/chromium/media/capture/video/create_video_capture_device_factory.h
@@ -14,19 +14,10 @@
namespace media {
-class CameraAppDeviceBridgeImpl;
-
std::unique_ptr<VideoCaptureDeviceFactory> CAPTURE_EXPORT
CreateVideoCaptureDeviceFactory(
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner);
-#if BUILDFLAG(IS_ASH)
-std::unique_ptr<VideoCaptureDeviceFactory> CAPTURE_EXPORT
-CreateVideoCaptureDeviceFactory(
- scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
- media::CameraAppDeviceBridgeImpl* camera_app_device_bridge);
-#endif // BUILDFLAG(IS_ASH)
-
} // namespace media
#endif // MEDIA_CAPTURE_VIDEO_CREATE_VIDEO_CAPTURE_DEVICE_FACTORY_H_
diff --git a/chromium/media/capture/video/fake_video_capture_device.cc b/chromium/media/capture/video/fake_video_capture_device.cc
index aef12478e12..b3892136940 100644
--- a/chromium/media/capture/video/fake_video_capture_device.cc
+++ b/chromium/media/capture/video/fake_video_capture_device.cc
@@ -162,6 +162,7 @@ gfx::ColorSpace GetDefaultColorSpace(VideoPixelFormat format) {
case PIXEL_FORMAT_XR30:
case PIXEL_FORMAT_XB30:
case PIXEL_FORMAT_BGRA:
+ case PIXEL_FORMAT_RGBAF16:
return gfx::ColorSpace::CreateSRGB();
case PIXEL_FORMAT_UNKNOWN:
return gfx::ColorSpace();
diff --git a/chromium/media/capture/video/file_video_capture_device_factory.cc b/chromium/media/capture/video/file_video_capture_device_factory.cc
index d899c509444..29fd7c77a0e 100644
--- a/chromium/media/capture/video/file_video_capture_device_factory.cc
+++ b/chromium/media/capture/video/file_video_capture_device_factory.cc
@@ -8,6 +8,7 @@
#include "base/files/file_path.h"
#include "base/strings/sys_string_conversions.h"
#include "base/threading/scoped_blocking_call.h"
+#include "base/threading/thread_restrictions.h"
#include "build/build_config.h"
#include "media/base/media_switches.h"
#include "media/capture/video/file_video_capture_device.h"
diff --git a/chromium/media/capture/video/fuchsia/video_capture_device_factory_fuchsia.cc b/chromium/media/capture/video/fuchsia/video_capture_device_factory_fuchsia.cc
index 538f01d2a1e..1c42b270512 100644
--- a/chromium/media/capture/video/fuchsia/video_capture_device_factory_fuchsia.cc
+++ b/chromium/media/capture/video/fuchsia/video_capture_device_factory_fuchsia.cc
@@ -208,8 +208,11 @@ void VideoCaptureDeviceFactoryFuchsia::OnWatchDevicesResult(
}
if (it->second->is_pending()) {
// If the device info request was still pending then consider it
- // complete now.
- OnDeviceInfoFetched();
+      // complete now. If this was the only device in the pending state, all
+      // callbacks will be resolved in
+      // MaybeResolvePendingDeviceInfoCallbacks(), called below.
+ DCHECK_GT(num_pending_device_info_requests_, 0U);
+ num_pending_device_info_requests_--;
}
devices_->erase(it);
continue;
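
A small sketch of the fixed bookkeeping (plain C++; names are simplified stand-ins for the factory's members): removing a device while its info request is pending only decrements the counter, and the waiters are resolved once, later, when the count reaches zero.

    #include <cassert>
    #include <cstddef>
    #include <functional>
    #include <vector>

    struct Watcher {
      size_t pending = 0;
      std::vector<std::function<void()>> waiters;

      void OnDeviceRemovedWhilePending() {
        assert(pending > 0);
        --pending;  // Do not resolve waiters here.
      }

      void MaybeResolve() {
        if (pending == 0) {
          for (auto& cb : waiters) cb();
          waiters.clear();
        }
      }
    };

    int main() {
      Watcher w;
      bool resolved = false;
      w.pending = 1;
      w.waiters.push_back([&] { resolved = true; });
      w.OnDeviceRemovedWhilePending();  // was the only pending device
      w.MaybeResolve();                 // runs later in the real code
      assert(resolved);
      return 0;
    }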
diff --git a/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia.cc b/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia.cc
index 2f892b7fdbc..2692ac2a3ca 100644
--- a/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia.cc
+++ b/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia.cc
@@ -113,7 +113,8 @@ bool VideoCaptureDeviceFuchsia::IsSupportedPixelFormat(
}
VideoCaptureDeviceFuchsia::VideoCaptureDeviceFuchsia(
- fidl::InterfaceHandle<fuchsia::camera3::Device> device) {
+ fidl::InterfaceHandle<fuchsia::camera3::Device> device)
+ : sysmem_allocator_("CrVideoCaptureDeviceFuchsia") {
device_.Bind(std::move(device));
device_.set_error_handler(
fit::bind_member(this, &VideoCaptureDeviceFuchsia::OnDeviceError));
@@ -263,6 +264,11 @@ void VideoCaptureDeviceFuchsia::InitializeBufferCollection(
SysmemBufferReader::GetRecommendedConstraints(
kMaxUsedOutputFrames,
/*min_buffer_size=*/base::nullopt);
+  // This is not an actual device driver, so the priority should be > 1. It's
+  // also not a high-level system, so the priority should be < 100.
+ constexpr uint32_t kNamePriority = 10;
+ buffer_collection_creator_->SetName(kNamePriority,
+ "CrVideoCaptureDeviceFuchsia");
buffer_collection_creator_->Create(
std::move(constraints),
base::BindOnce(&VideoCaptureDeviceFuchsia::OnBufferCollectionCreated,
diff --git a/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia_test.cc b/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia_test.cc
index 849fa5f8086..61ae93f0ad6 100644
--- a/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia_test.cc
+++ b/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia_test.cc
@@ -163,9 +163,8 @@ class TestVideoCaptureClient : public VideoCaptureDevice::Client {
NOTREACHED();
}
void OnIncomingCapturedExternalBuffer(
- gfx::GpuMemoryBufferHandle handle,
- const VideoCaptureFormat& format,
- const gfx::ColorSpace& color_space,
+ CapturedExternalVideoBuffer buffer,
+ std::vector<CapturedExternalVideoBuffer> scaled_buffers,
base::TimeTicks reference_time,
base::TimeDelta timestamp) override {
NOTREACHED();
diff --git a/chromium/media/capture/video/gpu_memory_buffer_utils.cc b/chromium/media/capture/video/gpu_memory_buffer_utils.cc
index 2d1117a8501..2cc0655ff76 100644
--- a/chromium/media/capture/video/gpu_memory_buffer_utils.cc
+++ b/chromium/media/capture/video/gpu_memory_buffer_utils.cc
@@ -62,7 +62,7 @@ VideoCaptureDevice::Client::ReserveResult AllocateNV12GpuMemoryBuffer(
*out_gpu_memory_buffer = gmb_support->CreateGpuMemoryBufferImplFromHandle(
out_capture_buffer->handle_provider->GetGpuMemoryBufferHandle(),
buffer_size, kOpaqueGfxFormat,
- gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE,
+ gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE,
base::NullCallback());
return reserve_result;
}
diff --git a/chromium/media/capture/video/linux/v4l2_capture_delegate.cc b/chromium/media/capture/video/linux/v4l2_capture_delegate.cc
index 242aef391fd..185ca779fd4 100644
--- a/chromium/media/capture/video/linux/v4l2_capture_delegate.cc
+++ b/chromium/media/capture/video/linux/v4l2_capture_delegate.cc
@@ -66,6 +66,7 @@ struct {
size_t num_planes;
} constexpr kSupportedFormatsAndPlanarity[] = {
{V4L2_PIX_FMT_YUV420, PIXEL_FORMAT_I420, 1},
+ {V4L2_PIX_FMT_NV12, PIXEL_FORMAT_NV12, 1},
{V4L2_PIX_FMT_Y16, PIXEL_FORMAT_Y16, 1},
{V4L2_PIX_FMT_Z16, PIXEL_FORMAT_Y16, 1},
{V4L2_PIX_FMT_INVZ, PIXEL_FORMAT_Y16, 1},
@@ -264,10 +265,18 @@ void V4L2CaptureDelegate::AllocateAndStart(
ResetUserAndCameraControlsToDefault();
+  // In theory, checking for CAPTURE/OUTPUT in caps.capabilities should only
+  // be done if V4L2_CAP_DEVICE_CAPS is not set. However, this was not done
+  // in the past, and it is unclear whether changing it would break existing
+  // devices. If a device is accepted incorrectly, it will not have any
+  // usable formats and is skipped anyway.
v4l2_capability cap = {};
if (!(DoIoctl(VIDIOC_QUERYCAP, &cap) == 0 &&
- ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) &&
- !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT)))) {
+ (((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) &&
+ !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT)) ||
+ ((cap.capabilities & V4L2_CAP_DEVICE_CAPS) &&
+ (cap.device_caps & V4L2_CAP_VIDEO_CAPTURE) &&
+ !(cap.device_caps & V4L2_CAP_VIDEO_OUTPUT))))) {
device_fd_.reset();
SetErrorState(VideoCaptureError::kV4L2ThisIsNotAV4L2VideoCaptureDevice,
FROM_HERE, "This is not a V4L2 video capture device");
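
A standalone sketch of the widened capability test above (flag values copied from <linux/videodev2.h>; the struct keeps only the two fields consulted):

    #include <cassert>
    #include <cstdint>

    constexpr uint32_t V4L2_CAP_VIDEO_CAPTURE = 0x00000001;
    constexpr uint32_t V4L2_CAP_VIDEO_OUTPUT = 0x00000002;
    constexpr uint32_t V4L2_CAP_DEVICE_CAPS = 0x80000000;

    struct Caps {
      uint32_t capabilities;
      uint32_t device_caps;
    };

    // Accept a device if either the legacy capabilities field or, when
    // V4L2_CAP_DEVICE_CAPS is set, the per-node device_caps field reports
    // "capture but not output" (capture+output devices are memory-to-memory).
    bool IsCaptureDevice(const Caps& cap) {
      return ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) &&
              !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT)) ||
             ((cap.capabilities & V4L2_CAP_DEVICE_CAPS) &&
              (cap.device_caps & V4L2_CAP_VIDEO_CAPTURE) &&
              !(cap.device_caps & V4L2_CAP_VIDEO_OUTPUT))
    ;
    }

    int main() {
      // A device whose legacy field sets both CAPTURE and OUTPUT but whose
      // device_caps says capture-only is now accepted.
      Caps per_node{V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_OUTPUT |
                        V4L2_CAP_DEVICE_CAPS,
                    V4L2_CAP_VIDEO_CAPTURE};
      assert(IsCaptureDevice(per_node));
      // A plain memory-to-memory device is still rejected.
      Caps m2m{V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_OUTPUT, 0};
      assert(!IsCaptureDevice(m2m));
      return 0;
    }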
diff --git a/chromium/media/capture/video/linux/video_capture_device_factory_linux.cc b/chromium/media/capture/video/linux/video_capture_device_factory_linux.cc
index 48bcb6e168f..677d733460e 100644
--- a/chromium/media/capture/video/linux/video_capture_device_factory_linux.cc
+++ b/chromium/media/capture/video/linux/video_capture_device_factory_linux.cc
@@ -28,7 +28,7 @@
#include <linux/videodev2.h>
#endif
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
#include "media/capture/video/linux/camera_config_chromeos.h"
#include "media/capture/video/linux/video_capture_device_chromeos.h"
#endif
@@ -53,7 +53,7 @@ const char kPidPathTemplate[] = "/sys/class/video4linux/%s/device/../idProduct";
const char kInterfacePathTemplate[] =
"/sys/class/video4linux/%s/device/interface";
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
static CameraConfigChromeOS* GetCameraConfig() {
static CameraConfigChromeOS* config = new CameraConfigChromeOS();
return config;
@@ -125,7 +125,7 @@ class DevVideoFilePathsDeviceProvider
VideoFacingMode GetCameraFacing(const std::string& device_id,
const std::string& model_id) override {
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
return GetCameraConfig()->GetCameraFacing(device_id, model_id);
#else
NOTREACHED();
@@ -135,7 +135,7 @@ class DevVideoFilePathsDeviceProvider
int GetOrientation(const std::string& device_id,
const std::string& model_id) override {
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
return GetCameraConfig()->GetOrientation(device_id, model_id);
#else
NOTREACHED();
@@ -166,7 +166,7 @@ std::unique_ptr<VideoCaptureDevice>
VideoCaptureDeviceFactoryLinux::CreateDevice(
const VideoCaptureDeviceDescriptor& device_descriptor) {
DCHECK(thread_checker_.CalledOnValidThread());
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
ChromeOSDeviceCameraConfig camera_config(
device_provider_->GetCameraFacing(device_descriptor.device_id,
device_descriptor.model_id),
@@ -210,10 +210,18 @@ void VideoCaptureDeviceFactoryLinux::GetDevicesInfo(
// one supported capture format. Devices that have capture and output
// capabilities at the same time are memory-to-memory and are skipped, see
// http://crbug.com/139356.
+  // In theory, checking for CAPTURE/OUTPUT in caps.capabilities should only
+  // be done if V4L2_CAP_DEVICE_CAPS is not set. However, this was not done
+  // in the past, and it is unclear whether changing it would break existing
+  // devices. If a device is accepted incorrectly, it will not have any
+  // usable formats and is skipped anyway.
v4l2_capability cap;
if ((DoIoctl(fd.get(), VIDIOC_QUERYCAP, &cap) == 0) &&
- (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE &&
- !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT)) &&
+ ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE &&
+ !(cap.capabilities & V4L2_CAP_VIDEO_OUTPUT)) ||
+ (cap.capabilities & V4L2_CAP_DEVICE_CAPS &&
+ cap.device_caps & V4L2_CAP_VIDEO_CAPTURE &&
+ !(cap.device_caps & V4L2_CAP_VIDEO_OUTPUT))) &&
HasUsableFormats(fd.get(), cap.capabilities)) {
const std::string model_id =
device_provider_->GetDeviceModelId(unique_id);
@@ -223,19 +231,25 @@ void VideoCaptureDeviceFactoryLinux::GetDevicesInfo(
display_name = reinterpret_cast<char*>(cap.card);
VideoFacingMode facing_mode =
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
device_provider_->GetCameraFacing(unique_id, model_id);
#else
VideoFacingMode::MEDIA_VIDEO_FACING_NONE;
#endif
+ VideoCaptureFormats supported_formats;
+ GetSupportedFormatsForV4L2BufferType(fd.get(), &supported_formats);
+ if (supported_formats.empty()) {
+ DVLOG(1) << "No supported formats: " << unique_id;
+ continue;
+ }
+
devices_info.emplace_back(VideoCaptureDeviceDescriptor(
display_name, unique_id, model_id,
VideoCaptureApi::LINUX_V4L2_SINGLE_PLANE, GetControlSupport(fd.get()),
VideoCaptureTransportType::OTHER_TRANSPORT, facing_mode));
- GetSupportedFormatsForV4L2BufferType(
- fd.get(), &devices_info.back().supported_formats);
+ devices_info.back().supported_formats = std::move(supported_formats);
}
}
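
A minimal sketch of the reordered flow above: probe the supported formats first and skip the device entirely when none are usable, instead of registering it and filling formats in afterwards (plain C++, stand-in types):

    #include <iostream>
    #include <string>
    #include <utility>
    #include <vector>

    struct DeviceInfo {
      std::string id;
      std::vector<std::string> supported_formats;
    };

    // Stand-in for GetSupportedFormatsForV4L2BufferType().
    std::vector<std::string> ProbeFormats(const std::string& id) {
      return id == "/dev/video0" ? std::vector<std::string>{"YUYV"}
                                 : std::vector<std::string>{};
    }

    int main() {
      std::vector<DeviceInfo> devices_info;
      for (const std::string& id : {"/dev/video0", "/dev/video1"}) {
        std::vector<std::string> formats = ProbeFormats(id);
        if (formats.empty()) {
          std::cout << "No supported formats: " << id << "\n";
          continue;  // Device is never surfaced to clients.
        }
        devices_info.push_back({id, std::move(formats)});
      }
      return devices_info.size() == 1 ? 0 : 1;
    }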
diff --git a/chromium/media/capture/video/mac/DEPS b/chromium/media/capture/video/mac/DEPS
index 577e795b73b..b17486db0f9 100644
--- a/chromium/media/capture/video/mac/DEPS
+++ b/chromium/media/capture/video/mac/DEPS
@@ -2,3 +2,9 @@ include_rules = [
"+third_party/decklink",
"+services/video_capture/public/uma",
]
+
+specific_include_rules = {
+  "video_capture_metrics_mac_unittest.mm": [
+    "+third_party/ocmock",
+  ],
+}
diff --git a/chromium/media/capture/video/mac/pixel_buffer_pool_mac_unittest.mm b/chromium/media/capture/video/mac/pixel_buffer_pool_mac_unittest.mm
index 55befbd1574..bf0b561b96c 100644
--- a/chromium/media/capture/video/mac/pixel_buffer_pool_mac_unittest.mm
+++ b/chromium/media/capture/video/mac/pixel_buffer_pool_mac_unittest.mm
@@ -137,6 +137,36 @@ TEST(PixelBufferPoolTest,
IOSurfaceGetID(second_buffer_io_surface));
}
+TEST(PixelBufferPoolTest, RecreatePoolAndObserveRecycledIOSurfaceID) {
+ constexpr size_t kPoolMaxBuffers = 1;
+ std::unique_ptr<PixelBufferPool> pool = PixelBufferPool::Create(
+ kPixelFormatNv12, kVgaWidth, kVgaHeight, kPoolMaxBuffers);
+ base::ScopedCFTypeRef<CVPixelBufferRef> first_buffer = pool->CreateBuffer();
+ EXPECT_TRUE(first_buffer);
+ IOSurfaceID first_buffer_id =
+ IOSurfaceGetID(CVPixelBufferGetIOSurface(first_buffer));
+
+ // Free references and recreate the pool. There is nothing preventing the
+ // IOSurfaceID from being recycled, even by a different CVPixelBufferPool with
+ // a different resolution!
+ first_buffer.reset();
+ pool = PixelBufferPool::Create(kPixelFormatNv12, kVgaWidth / 2,
+ kVgaHeight / 2, kPoolMaxBuffers);
+
+ base::ScopedCFTypeRef<CVPixelBufferRef> second_buffer = pool->CreateBuffer();
+ EXPECT_TRUE(second_buffer);
+ IOSurfaceID second_buffer_id =
+ IOSurfaceGetID(CVPixelBufferGetIOSurface(second_buffer));
+
+ // The new pool is allowed to recycle the old IOSurface ID.
+ //
+ // This test documents "foot gun" behavior that is not documented by Apple
+ // anywhere. If the test starts failing, it may be because this behavior is
+ // specific to version or hardware. In such cases, feel free to disable the
+ // test.
+ EXPECT_EQ(first_buffer_id, second_buffer_id);
+}
+
TEST(PixelBufferPoolTest, BuffersCanOutliveThePool) {
std::unique_ptr<PixelBufferPool> pool =
PixelBufferPool::Create(kPixelFormatNv12, kVgaWidth, kVgaHeight, 1);
diff --git a/chromium/media/capture/video/mac/pixel_buffer_transferer_mac_unittest.mm b/chromium/media/capture/video/mac/pixel_buffer_transferer_mac_unittest.mm
index fa00dfca33d..ea1334d2506 100644
--- a/chromium/media/capture/video/mac/pixel_buffer_transferer_mac_unittest.mm
+++ b/chromium/media/capture/video/mac/pixel_buffer_transferer_mac_unittest.mm
@@ -8,6 +8,7 @@
#include <vector>
#include "base/logging.h"
+#include "build/build_config.h"
#include "media/capture/video/mac/pixel_buffer_pool_mac.h"
#include "media/capture/video/mac/test/pixel_buffer_test_utils_mac.h"
#include "media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h"
@@ -59,7 +60,14 @@ TEST(PixelBufferTransfererTest, CanCopyYuvsAndVerifyColor) {
kColorR, kColorG, kColorB));
}
-TEST(PixelBufferTransfererTest, CanScaleYuvsAndVerifyColor) {
+#if defined(ARCH_CPU_ARM64)
+// Bulk-disabled as part of arm64 bot stabilization: https://crbug.com/1154345
+#define MAYBE_CanScaleYuvsAndVerifyColor DISABLED_CanScaleYuvsAndVerifyColor
+#else
+#define MAYBE_CanScaleYuvsAndVerifyColor CanScaleYuvsAndVerifyColor
+#endif
+
+TEST(PixelBufferTransfererTest, MAYBE_CanScaleYuvsAndVerifyColor) {
constexpr OSType kPixelFormat = kPixelFormatYuvs;
constexpr int kSourceWidth = 32;
constexpr int kSourceHeight = 32;
@@ -115,7 +123,16 @@ TEST(PixelBufferTransfererTest, CanScaleYuvsAndVerifyCheckerPattern) {
EXPECT_EQ(num_tiles_across_y, kSourceNumTilesAcross);
}
-TEST(PixelBufferTransfererTest, CanStretchYuvsAndVerifyCheckerPattern) {
+#if defined(ARCH_CPU_ARM64)
+// Bulk-disabled as part of arm64 bot stabilization: https://crbug.com/1154345
+#define MAYBE_CanStretchYuvsAndVerifyCheckerPattern \
+ DISABLED_CanStretchYuvsAndVerifyCheckerPattern
+#else
+#define MAYBE_CanStretchYuvsAndVerifyCheckerPattern \
+ CanStretchYuvsAndVerifyCheckerPattern
+#endif
+
+TEST(PixelBufferTransfererTest, MAYBE_CanStretchYuvsAndVerifyCheckerPattern) {
  // Note: The ARGB -> YUVS -> ARGB conversions result in a small loss of
// information, so for the checker pattern to be intact the buffer can't be
// tiny (e.g. 4x4).
@@ -149,7 +166,14 @@ TEST(PixelBufferTransfererTest, CanStretchYuvsAndVerifyCheckerPattern) {
EXPECT_EQ(num_tiles_across_y, kSourceNumTilesAcross);
}
-TEST(PixelBufferTransfererTest, CanStretchYuvsAndVerifyColor) {
+#if defined(ARCH_CPU_ARM64)
+// Bulk-disabled as part of arm64 bot stabilization: https://crbug.com/1154345
+#define MAYBE_CanStretchYuvsAndVerifyColor DISABLED_CanStretchYuvsAndVerifyColor
+#else
+#define MAYBE_CanStretchYuvsAndVerifyColor CanStretchYuvsAndVerifyColor
+#endif
+
+TEST(PixelBufferTransfererTest, MAYBE_CanStretchYuvsAndVerifyColor) {
constexpr OSType kPixelFormat = kPixelFormatYuvs;
constexpr int kSourceWidth = 32;
constexpr int kSourceHeight = 32;
diff --git a/chromium/media/capture/video/mac/sample_buffer_transformer_mac.cc b/chromium/media/capture/video/mac/sample_buffer_transformer_mac.cc
index daa96cb01b0..043381c1b6f 100644
--- a/chromium/media/capture/video/mac/sample_buffer_transformer_mac.cc
+++ b/chromium/media/capture/video/mac/sample_buffer_transformer_mac.cc
@@ -18,17 +18,8 @@ namespace media {
const base::Feature kInCaptureConvertToNv12{"InCaptureConvertToNv12",
base::FEATURE_ENABLED_BY_DEFAULT};
-const base::Feature kInCaptureConvertToNv12WithPixelTransfer{
- "InCaptureConvertToNv12WithPixelTransfer",
- base::FEATURE_DISABLED_BY_DEFAULT};
-
-const base::Feature kInCaptureConvertToNv12WithLibyuv{
- "InCaptureConvertToNv12WithLibyuv", base::FEATURE_DISABLED_BY_DEFAULT};
-
namespace {
-constexpr size_t kDefaultBufferPoolSize = 10;
-
// NV12 a.k.a. 420v
constexpr OSType kPixelFormatNv12 =
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
@@ -139,6 +130,24 @@ struct NV12Planes {
size_t uv_plane_stride;
};
+// TODO(eshr): Move this to libyuv.
+void CopyNV12(const uint8_t* src_y,
+ int src_y_stride,
+ const uint8_t* src_uv,
+ int src_uv_stride,
+ uint8_t* dst_y,
+ int dst_y_stride,
+ uint8_t* dst_uv,
+ int dst_uv_stride,
+ int width,
+ int height) {
+ libyuv::CopyPlane(src_y, src_y_stride, dst_y, dst_y_stride, width, height);
+ size_t half_width = (width + 1) >> 1;
+ size_t half_height = (height + 1) >> 1;
+ libyuv::CopyPlane(src_uv, src_uv_stride, dst_uv, dst_uv_stride,
+ half_width * 2, half_height);
+}
+
size_t GetContiguousNV12BufferSize(size_t width, size_t height) {
gfx::Size dimensions(width, height);
return VideoFrame::PlaneSize(PIXEL_FORMAT_NV12, VideoFrame::kYPlane,
@@ -204,44 +213,34 @@ bool ConvertFromMjpegToI420(uint8_t* source_buffer_base_address,
return result == 0;
}
-// Returns true on success. Converting uncompressed pixel formats should never
-// fail, however MJPEG frames produces by some webcams have been observed to be
-// invalid in special circumstances (see https://crbug.com/1147867). To support
-// a graceful failure path in this case, this function may return false.
-bool ConvertFromAnyToI420(CVPixelBufferRef source_pixel_buffer,
- const I420Planes& destination) {
+void ConvertFromAnyToNV12(CVPixelBufferRef source_pixel_buffer,
+ const NV12Planes& destination) {
auto pixel_format = CVPixelBufferGetPixelFormatType(source_pixel_buffer);
+ int ret;
switch (pixel_format) {
// UYVY a.k.a. 2vuy
case kCVPixelFormatType_422YpCbCr8: {
const uint8_t* src_uyvy = static_cast<const uint8_t*>(
CVPixelBufferGetBaseAddress(source_pixel_buffer));
size_t src_stride_uyvy = CVPixelBufferGetBytesPerRow(source_pixel_buffer);
- return libyuv::UYVYToI420(
- src_uyvy, src_stride_uyvy, destination.y_plane_data,
- destination.y_plane_stride, destination.u_plane_data,
- destination.u_plane_stride, destination.v_plane_data,
- destination.v_plane_stride, destination.width,
- destination.height) == 0;
+ ret = libyuv::UYVYToNV12(
+ src_uyvy, src_stride_uyvy, destination.y_plane_data,
+ destination.y_plane_stride, destination.uv_plane_data,
+ destination.uv_plane_stride, destination.width, destination.height);
+ DCHECK_EQ(ret, 0);
+ return;
}
// YUY2 a.k.a. yuvs
case kCMPixelFormat_422YpCbCr8_yuvs: {
const uint8_t* src_yuy2 = static_cast<const uint8_t*>(
CVPixelBufferGetBaseAddress(source_pixel_buffer));
size_t src_stride_yuy2 = CVPixelBufferGetBytesPerRow(source_pixel_buffer);
- return libyuv::YUY2ToI420(
- src_yuy2, src_stride_yuy2, destination.y_plane_data,
- destination.y_plane_stride, destination.u_plane_data,
- destination.u_plane_stride, destination.v_plane_data,
- destination.v_plane_stride, destination.width,
- destination.height) == 0;
- }
- // MJPEG a.k.a. dmb1
- case kCMVideoCodecType_JPEG_OpenDML: {
- uint8_t* src_jpg = static_cast<uint8_t*>(
- CVPixelBufferGetBaseAddress(source_pixel_buffer));
- size_t src_jpg_size = CVPixelBufferGetDataSize(source_pixel_buffer);
- return ConvertFromMjpegToI420(src_jpg, src_jpg_size, destination);
+ ret = libyuv::YUY2ToNV12(
+ src_yuy2, src_stride_yuy2, destination.y_plane_data,
+ destination.y_plane_stride, destination.uv_plane_data,
+ destination.uv_plane_stride, destination.width, destination.height);
+ DCHECK_EQ(ret, 0);
+ return;
}
// NV12 a.k.a. 420v
case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: {
@@ -255,12 +254,11 @@ bool ConvertFromAnyToI420(CVPixelBufferRef source_pixel_buffer,
CVPixelBufferGetBaseAddressOfPlane(source_pixel_buffer, 1));
size_t src_stride_uv =
CVPixelBufferGetBytesPerRowOfPlane(source_pixel_buffer, 1);
- return libyuv::NV12ToI420(
- src_y, src_stride_y, src_uv, src_stride_uv,
- destination.y_plane_data, destination.y_plane_stride,
- destination.u_plane_data, destination.u_plane_stride,
- destination.v_plane_data, destination.v_plane_stride,
- destination.width, destination.height) == 0;
+ CopyNV12(src_y, src_stride_y, src_uv, src_stride_uv,
+ destination.y_plane_data, destination.y_plane_stride,
+ destination.uv_plane_data, destination.uv_plane_stride,
+ destination.width, destination.height);
+ return;
}
// I420 a.k.a. y420
case kCVPixelFormatType_420YpCbCr8Planar: {
@@ -278,32 +276,98 @@ bool ConvertFromAnyToI420(CVPixelBufferRef source_pixel_buffer,
CVPixelBufferGetBaseAddressOfPlane(source_pixel_buffer, 2));
size_t src_stride_v =
CVPixelBufferGetBytesPerRowOfPlane(source_pixel_buffer, 2);
- return libyuv::I420Copy(
- src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
- destination.y_plane_data, destination.y_plane_stride,
- destination.u_plane_data, destination.u_plane_stride,
- destination.v_plane_data, destination.v_plane_stride,
- destination.width, destination.height) == 0;
+ ret = libyuv::I420ToNV12(
+ src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
+ destination.y_plane_data, destination.y_plane_stride,
+ destination.uv_plane_data, destination.uv_plane_stride,
+ destination.width, destination.height);
+ DCHECK_EQ(ret, 0);
+ return;
}
default:
NOTREACHED() << "Pixel format " << pixel_format << " not supported.";
}
- return false;
}
-void ConvertFromI420ToNV12(const I420Planes& source,
- const NV12Planes& destination) {
- DCHECK_EQ(source.width, destination.width);
- DCHECK_EQ(source.height, destination.height);
- int result = libyuv::I420ToNV12(
- source.y_plane_data, source.y_plane_stride, source.u_plane_data,
- source.u_plane_stride, source.v_plane_data, source.v_plane_stride,
- destination.y_plane_data, destination.y_plane_stride,
- destination.uv_plane_data, destination.uv_plane_stride, source.width,
- source.height);
- // A webcam has never been observed to produce invalid uncompressed pixel
- // buffer, so we do not support a graceful failure path in this case.
- DCHECK_EQ(result, 0);
+void ConvertFromAnyToI420(CVPixelBufferRef source_pixel_buffer,
+ const I420Planes& destination) {
+ auto pixel_format = CVPixelBufferGetPixelFormatType(source_pixel_buffer);
+ int ret;
+ switch (pixel_format) {
+ // UYVY a.k.a. 2vuy
+ case kCVPixelFormatType_422YpCbCr8: {
+ const uint8_t* src_uyvy = static_cast<const uint8_t*>(
+ CVPixelBufferGetBaseAddress(source_pixel_buffer));
+ size_t src_stride_uyvy = CVPixelBufferGetBytesPerRow(source_pixel_buffer);
+ ret = libyuv::UYVYToI420(
+ src_uyvy, src_stride_uyvy, destination.y_plane_data,
+ destination.y_plane_stride, destination.u_plane_data,
+ destination.u_plane_stride, destination.v_plane_data,
+ destination.v_plane_stride, destination.width, destination.height);
+ DCHECK_EQ(ret, 0);
+ return;
+ }
+ // YUY2 a.k.a. yuvs
+ case kCMPixelFormat_422YpCbCr8_yuvs: {
+ const uint8_t* src_yuy2 = static_cast<const uint8_t*>(
+ CVPixelBufferGetBaseAddress(source_pixel_buffer));
+ size_t src_stride_yuy2 = CVPixelBufferGetBytesPerRow(source_pixel_buffer);
+ ret = libyuv::YUY2ToI420(
+ src_yuy2, src_stride_yuy2, destination.y_plane_data,
+ destination.y_plane_stride, destination.u_plane_data,
+ destination.u_plane_stride, destination.v_plane_data,
+ destination.v_plane_stride, destination.width, destination.height);
+ DCHECK_EQ(ret, 0);
+ return;
+ }
+ // NV12 a.k.a. 420v
+ case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: {
+ DCHECK(CVPixelBufferIsPlanar(source_pixel_buffer));
+ DCHECK_EQ(2u, CVPixelBufferGetPlaneCount(source_pixel_buffer));
+ const uint8_t* src_y = static_cast<const uint8_t*>(
+ CVPixelBufferGetBaseAddressOfPlane(source_pixel_buffer, 0));
+ size_t src_stride_y =
+ CVPixelBufferGetBytesPerRowOfPlane(source_pixel_buffer, 0);
+ const uint8_t* src_uv = static_cast<const uint8_t*>(
+ CVPixelBufferGetBaseAddressOfPlane(source_pixel_buffer, 1));
+ size_t src_stride_uv =
+ CVPixelBufferGetBytesPerRowOfPlane(source_pixel_buffer, 1);
+ ret = libyuv::NV12ToI420(
+ src_y, src_stride_y, src_uv, src_stride_uv, destination.y_plane_data,
+ destination.y_plane_stride, destination.u_plane_data,
+ destination.u_plane_stride, destination.v_plane_data,
+ destination.v_plane_stride, destination.width, destination.height);
+ DCHECK_EQ(ret, 0);
+ return;
+ }
+ // I420 a.k.a. y420
+ case kCVPixelFormatType_420YpCbCr8Planar: {
+ DCHECK(CVPixelBufferIsPlanar(source_pixel_buffer));
+ DCHECK_EQ(3u, CVPixelBufferGetPlaneCount(source_pixel_buffer));
+ const uint8_t* src_y = static_cast<const uint8_t*>(
+ CVPixelBufferGetBaseAddressOfPlane(source_pixel_buffer, 0));
+ size_t src_stride_y =
+ CVPixelBufferGetBytesPerRowOfPlane(source_pixel_buffer, 0);
+ const uint8_t* src_u = static_cast<const uint8_t*>(
+ CVPixelBufferGetBaseAddressOfPlane(source_pixel_buffer, 1));
+ size_t src_stride_u =
+ CVPixelBufferGetBytesPerRowOfPlane(source_pixel_buffer, 1);
+ const uint8_t* src_v = static_cast<const uint8_t*>(
+ CVPixelBufferGetBaseAddressOfPlane(source_pixel_buffer, 2));
+ size_t src_stride_v =
+ CVPixelBufferGetBytesPerRowOfPlane(source_pixel_buffer, 2);
+ ret = libyuv::I420Copy(
+ src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
+ destination.y_plane_data, destination.y_plane_stride,
+ destination.u_plane_data, destination.u_plane_stride,
+ destination.v_plane_data, destination.v_plane_stride,
+ destination.width, destination.height);
+ DCHECK_EQ(ret, 0);
+ return;
+ }
+ default:
+ NOTREACHED() << "Pixel format " << pixel_format << " not supported.";
+ }
}
// Returns true on success. MJPEG frames produced by some webcams have been
@@ -361,44 +425,40 @@ void ScaleNV12(const NV12Planes& source, const NV12Planes& destination) {
void CopyNV12(const NV12Planes& source, const NV12Planes& destination) {
DCHECK_EQ(source.width, destination.width);
DCHECK_EQ(source.height, destination.height);
- libyuv::CopyPlane(source.y_plane_data, source.y_plane_stride,
- destination.y_plane_data, destination.y_plane_stride,
- destination.width, destination.height);
- size_t half_width = (destination.width + 1) >> 1;
- size_t half_height = (destination.height + 1) >> 1;
- libyuv::CopyPlane(source.uv_plane_data, source.uv_plane_stride,
- destination.uv_plane_data, destination.uv_plane_stride,
- half_width * 2, half_height);
+ CopyNV12(source.y_plane_data, source.y_plane_stride, source.uv_plane_data,
+ source.uv_plane_stride, destination.y_plane_data,
+ destination.y_plane_stride, destination.uv_plane_data,
+ destination.uv_plane_stride, source.width, source.height);
}
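// For reference (a sketch, not part of this patch): the raw-pointer
// CopyNV12() overload called above is assumed to wrap two libyuv plane
// copies, mirroring the code it replaces:
//
//   void CopyNV12(const uint8_t* src_y, size_t src_stride_y,
//                 const uint8_t* src_uv, size_t src_stride_uv,
//                 uint8_t* dst_y, size_t dst_stride_y,
//                 uint8_t* dst_uv, size_t dst_stride_uv,
//                 size_t width, size_t height) {
//     libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width,
//                       height);
//     // NV12's interleaved UV plane is half height, and each row carries
//     // 2 * ((width + 1) / 2) bytes.
//     libyuv::CopyPlane(src_uv, src_stride_uv, dst_uv, dst_stride_uv,
//                       2 * ((width + 1) / 2), (height + 1) / 2);
//   }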
} // namespace
// static
-std::unique_ptr<SampleBufferTransformer>
-SampleBufferTransformer::CreateIfAutoReconfigureEnabled() {
- return IsAutoReconfigureEnabled()
- ? std::make_unique<SampleBufferTransformer>()
- : nullptr;
-}
+const SampleBufferTransformer::Transformer
+ SampleBufferTransformer::kBestTransformerForPixelBufferToNv12Output =
+ SampleBufferTransformer::Transformer::kPixelBufferTransfer;
// static
-std::unique_ptr<SampleBufferTransformer> SampleBufferTransformer::Create() {
- return std::make_unique<SampleBufferTransformer>();
+SampleBufferTransformer::Transformer
+SampleBufferTransformer::GetBestTransformerForNv12Output(
+ CMSampleBufferRef sample_buffer) {
+ if (CVPixelBufferRef pixel_buffer =
+ CMSampleBufferGetImageBuffer(sample_buffer)) {
+ return kBestTransformerForPixelBufferToNv12Output;
+ }
+ // When we don't have a pixel buffer (e.g. it's MJPEG or we get a SW-backed
+  // byte buffer), only libyuv is able to perform the transform.
+ return Transformer::kLibyuv;
}
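// Typical call site (a sketch based on the unit tests in this change, which
// use media::GetSampleBufferSize() to obtain the buffer's dimensions):
//
//   transformer->Reconfigure(
//       SampleBufferTransformer::GetBestTransformerForNv12Output(sample),
//       kPixelFormatNv12, media::GetSampleBufferSize(sample));
//   base::ScopedCFTypeRef<CVPixelBufferRef> output =
//       transformer->Transform(sample);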
// static
-bool SampleBufferTransformer::IsAutoReconfigureEnabled() {
- return base::FeatureList::IsEnabled(kInCaptureConvertToNv12) ||
- base::FeatureList::IsEnabled(
- kInCaptureConvertToNv12WithPixelTransfer) ||
- base::FeatureList::IsEnabled(kInCaptureConvertToNv12WithLibyuv);
+std::unique_ptr<SampleBufferTransformer> SampleBufferTransformer::Create() {
+ return std::make_unique<SampleBufferTransformer>();
}
SampleBufferTransformer::SampleBufferTransformer()
: transformer_(Transformer::kNotConfigured),
- destination_pixel_format_(0x0),
- destination_width_(0),
- destination_height_(0) {}
+ destination_pixel_format_(0x0) {}
SampleBufferTransformer::~SampleBufferTransformer() {}
@@ -411,26 +471,14 @@ OSType SampleBufferTransformer::destination_pixel_format() const {
return destination_pixel_format_;
}
-size_t SampleBufferTransformer::destination_width() const {
- return destination_width_;
-}
-
-size_t SampleBufferTransformer::destination_height() const {
- return destination_height_;
-}
-
-base::ScopedCFTypeRef<CVPixelBufferRef>
-SampleBufferTransformer::AutoReconfigureAndTransform(
- CMSampleBufferRef sample_buffer) {
- AutoReconfigureBasedOnInputAndFeatureFlags(sample_buffer);
- return Transform(sample_buffer);
+const gfx::Size& SampleBufferTransformer::destination_size() const {
+ return destination_size_;
}
void SampleBufferTransformer::Reconfigure(
Transformer transformer,
OSType destination_pixel_format,
- size_t destination_width,
- size_t destination_height,
+ const gfx::Size& destination_size,
base::Optional<size_t> buffer_pool_size) {
DCHECK(transformer != Transformer::kLibyuv ||
destination_pixel_format == kPixelFormatI420 ||
@@ -438,18 +486,16 @@ void SampleBufferTransformer::Reconfigure(
<< "Destination format is unsupported when running libyuv";
if (transformer_ == transformer &&
destination_pixel_format_ == destination_pixel_format &&
- destination_width_ == destination_width &&
- destination_height_ == destination_height) {
+ destination_size_ == destination_size) {
// Already configured as desired, abort.
return;
}
transformer_ = transformer;
destination_pixel_format_ = destination_pixel_format;
- destination_width_ = destination_width;
- destination_height_ = destination_height;
- destination_pixel_buffer_pool_ =
- PixelBufferPool::Create(destination_pixel_format_, destination_width_,
- destination_height_, buffer_pool_size);
+ destination_size_ = destination_size;
+ destination_pixel_buffer_pool_ = PixelBufferPool::Create(
+ destination_pixel_format_, destination_size_.width(),
+ destination_size_.height(), buffer_pool_size);
if (transformer == Transformer::kPixelBufferTransfer) {
pixel_buffer_transferer_ = std::make_unique<PixelBufferTransferer>();
} else {
@@ -459,57 +505,21 @@ void SampleBufferTransformer::Reconfigure(
intermediate_nv12_buffer_.resize(0);
}
-void SampleBufferTransformer::AutoReconfigureBasedOnInputAndFeatureFlags(
- CMSampleBufferRef sample_buffer) {
- DCHECK(IsAutoReconfigureEnabled());
- Transformer desired_transformer = Transformer::kNotConfigured;
- size_t desired_width;
- size_t desired_height;
- if (CVPixelBufferRef pixel_buffer =
- CMSampleBufferGetImageBuffer(sample_buffer)) {
- // We have a pixel buffer.
- if (base::FeatureList::IsEnabled(kInCaptureConvertToNv12)) {
- // Pixel transfers are believed to be more efficient for X -> NV12.
- desired_transformer = Transformer::kPixelBufferTransfer;
- }
- desired_width = CVPixelBufferGetWidth(pixel_buffer);
- desired_height = CVPixelBufferGetHeight(pixel_buffer);
- } else {
- // We don't have a pixel buffer. Reconfigure to be prepared for MJPEG.
- if (base::FeatureList::IsEnabled(kInCaptureConvertToNv12)) {
- // Only libyuv supports MJPEG -> NV12.
- desired_transformer = Transformer::kLibyuv;
- }
- CMFormatDescriptionRef format_description =
- CMSampleBufferGetFormatDescription(sample_buffer);
- CMVideoDimensions dimensions =
- CMVideoFormatDescriptionGetDimensions(format_description);
- desired_width = dimensions.width;
- desired_height = dimensions.height;
- }
- if (base::FeatureList::IsEnabled(kInCaptureConvertToNv12WithPixelTransfer)) {
- desired_transformer = Transformer::kPixelBufferTransfer;
- } else if (base::FeatureList::IsEnabled(kInCaptureConvertToNv12WithLibyuv)) {
- desired_transformer = Transformer::kLibyuv;
- }
- Reconfigure(desired_transformer, kPixelFormatNv12, desired_width,
- desired_height, kDefaultBufferPoolSize);
-}
-
base::ScopedCFTypeRef<CVPixelBufferRef> SampleBufferTransformer::Transform(
- CMSampleBufferRef sample_buffer) {
+ CVPixelBufferRef pixel_buffer) {
DCHECK(transformer_ != Transformer::kNotConfigured);
- CVPixelBufferRef source_pixel_buffer =
- CMSampleBufferGetImageBuffer(sample_buffer);
+ DCHECK(pixel_buffer);
// Fast path: If source and destination formats are identical, return the
// source pixel buffer.
- if (source_pixel_buffer &&
- destination_width_ == CVPixelBufferGetWidth(source_pixel_buffer) &&
- destination_height_ == CVPixelBufferGetHeight(source_pixel_buffer) &&
+ if (pixel_buffer &&
+ static_cast<size_t>(destination_size_.width()) ==
+ CVPixelBufferGetWidth(pixel_buffer) &&
+ static_cast<size_t>(destination_size_.height()) ==
+ CVPixelBufferGetHeight(pixel_buffer) &&
destination_pixel_format_ ==
- CVPixelBufferGetPixelFormatType(source_pixel_buffer) &&
- CVPixelBufferGetIOSurface(source_pixel_buffer)) {
- return base::ScopedCFTypeRef<CVPixelBufferRef>(source_pixel_buffer,
+ CVPixelBufferGetPixelFormatType(pixel_buffer) &&
+ CVPixelBufferGetIOSurface(pixel_buffer)) {
+ return base::ScopedCFTypeRef<CVPixelBufferRef>(pixel_buffer,
base::scoped_policy::RETAIN);
}
// Create destination buffer from pool.
@@ -521,10 +531,28 @@ base::ScopedCFTypeRef<CVPixelBufferRef> SampleBufferTransformer::Transform(
LOG(ERROR) << "Maximum destination buffers exceeded";
return base::ScopedCFTypeRef<CVPixelBufferRef>();
}
- if (source_pixel_buffer) {
- // Pixel buffer path. Do pixel transfer or libyuv conversion + rescale.
- TransformPixelBuffer(source_pixel_buffer, destination_pixel_buffer);
- return destination_pixel_buffer;
+ // Do pixel transfer or libyuv conversion + rescale.
+ TransformPixelBuffer(pixel_buffer, destination_pixel_buffer);
+ return destination_pixel_buffer;
+}
+
+base::ScopedCFTypeRef<CVPixelBufferRef> SampleBufferTransformer::Transform(
+ CMSampleBufferRef sample_buffer) {
+ DCHECK(transformer_ != Transformer::kNotConfigured);
+ DCHECK(sample_buffer);
+ // If the sample buffer has a pixel buffer, run the pixel buffer path instead.
+ if (CVPixelBufferRef pixel_buffer =
+ CMSampleBufferGetImageBuffer(sample_buffer)) {
+ return Transform(pixel_buffer);
+ }
+ // Create destination buffer from pool.
+ base::ScopedCFTypeRef<CVPixelBufferRef> destination_pixel_buffer =
+ destination_pixel_buffer_pool_->CreateBuffer();
+ if (!destination_pixel_buffer) {
+ // Maximum destination buffers exceeded. Old buffers are not being released
+ // (and thus not returned to the pool) in time.
+ LOG(ERROR) << "Maximum destination buffers exceeded";
+ return base::ScopedCFTypeRef<CVPixelBufferRef>();
}
// Sample buffer path - it's MJPEG. Do libyuv conversion + rescale.
if (!TransformSampleBuffer(sample_buffer, destination_pixel_buffer)) {
@@ -602,8 +630,9 @@ void SampleBufferTransformer::TransformPixelBufferWithLibyuvFromAnyToI420(
CVPixelBufferGetPixelFormatType(source_pixel_buffer);
// Rescaling has to be done in a separate step.
- const bool rescale_needed = destination_width_ != source_width ||
- destination_height_ != source_height;
+ const bool rescale_needed =
+ static_cast<size_t>(destination_size_.width()) != source_width ||
+ static_cast<size_t>(destination_size_.height()) != source_height;
// Step 1: Convert to I420.
I420Planes i420_fullscale_buffer;
@@ -628,11 +657,7 @@ void SampleBufferTransformer::TransformPixelBufferWithLibyuvFromAnyToI420(
i420_fullscale_buffer = EnsureI420BufferSizeAndGetPlanes(
source_width, source_height, &intermediate_i420_buffer_);
}
- if (!ConvertFromAnyToI420(source_pixel_buffer, i420_fullscale_buffer)) {
- // Only MJPEG conversions are known to be able to fail. Because X is an
- // uncompressed pixel format, this conversion should never fail.
- NOTREACHED();
- }
+ ConvertFromAnyToI420(source_pixel_buffer, i420_fullscale_buffer);
}
// Step 2: Rescale I420.
@@ -653,8 +678,9 @@ void SampleBufferTransformer::TransformPixelBufferWithLibyuvFromAnyToNV12(
CVPixelBufferGetPixelFormatType(source_pixel_buffer);
// Rescaling has to be done in a separate step.
- const bool rescale_needed = destination_width_ != source_width ||
- destination_height_ != source_height;
+ const bool rescale_needed =
+ static_cast<size_t>(destination_size_.width()) != source_width ||
+ static_cast<size_t>(destination_size_.height()) != source_height;
// Step 1: Convert to NV12.
NV12Planes nv12_fullscale_buffer;
@@ -671,21 +697,6 @@ void SampleBufferTransformer::TransformPixelBufferWithLibyuvFromAnyToNV12(
return;
}
} else {
- // Convert X -> I420 -> NV12. (We don't know how to do X -> NV12.)
- // TODO(https://crbug.com/1154273): Convert to NV12 directly.
- I420Planes i420_fullscale_buffer;
- if (source_pixel_format == kPixelFormatI420) {
- // We are already at I420.
- i420_fullscale_buffer = GetI420PlanesFromPixelBuffer(source_pixel_buffer);
- } else {
- // Convert X -> I420.
- i420_fullscale_buffer = EnsureI420BufferSizeAndGetPlanes(
- source_width, source_height, &intermediate_i420_buffer_);
- if (!ConvertFromAnyToI420(source_pixel_buffer, i420_fullscale_buffer)) {
- NOTREACHED();
- }
- }
- // Convert I420 -> NV12.
if (!rescale_needed) {
nv12_fullscale_buffer =
GetNV12PlanesFromPixelBuffer(destination_pixel_buffer);
@@ -693,7 +704,7 @@ void SampleBufferTransformer::TransformPixelBufferWithLibyuvFromAnyToNV12(
nv12_fullscale_buffer = EnsureNV12BufferSizeAndGetPlanes(
source_width, source_height, &intermediate_nv12_buffer_);
}
- ConvertFromI420ToNV12(i420_fullscale_buffer, nv12_fullscale_buffer);
+ ConvertFromAnyToNV12(source_pixel_buffer, nv12_fullscale_buffer);
}
// Step 2: Rescale NV12.
@@ -713,7 +724,7 @@ bool SampleBufferTransformer::TransformSampleBuffer(
CMSampleBufferGetFormatDescription(source_sample_buffer);
FourCharCode source_pixel_format =
CMFormatDescriptionGetMediaSubType(source_format_description);
- DCHECK(source_pixel_format == kPixelFormatMjpeg);
+ CHECK_EQ(source_pixel_format, kPixelFormatMjpeg);
CMVideoDimensions source_dimensions =
CMVideoFormatDescriptionGetDimensions(source_format_description);
@@ -759,8 +770,9 @@ bool SampleBufferTransformer::TransformSampleBufferFromMjpegToI420(
CVPixelBufferRef destination_pixel_buffer) {
DCHECK(destination_pixel_format_ == kPixelFormatI420);
// Rescaling has to be done in a separate step.
- const bool rescale_needed = destination_width_ != source_width ||
- destination_height_ != source_height;
+ const bool rescale_needed =
+ static_cast<size_t>(destination_size_.width()) != source_width ||
+ static_cast<size_t>(destination_size_.height()) != source_height;
// Step 1: Convert MJPEG -> I420.
I420Planes i420_fullscale_buffer;
@@ -793,8 +805,9 @@ bool SampleBufferTransformer::TransformSampleBufferFromMjpegToNV12(
CVPixelBufferRef destination_pixel_buffer) {
DCHECK(destination_pixel_format_ == kPixelFormatNv12);
// Rescaling has to be done in a separate step.
- const bool rescale_needed = destination_width_ != source_width ||
- destination_height_ != source_height;
+ const bool rescale_needed =
+ static_cast<size_t>(destination_size_.width()) != source_width ||
+ static_cast<size_t>(destination_size_.height()) != source_height;
// Step 1: Convert MJPEG -> NV12.
NV12Planes nv12_fullscale_buffer;
diff --git a/chromium/media/capture/video/mac/sample_buffer_transformer_mac.h b/chromium/media/capture/video/mac/sample_buffer_transformer_mac.h
index 4f9dc45bbe3..24d42cff78d 100644
--- a/chromium/media/capture/video/mac/sample_buffer_transformer_mac.h
+++ b/chromium/media/capture/video/mac/sample_buffer_transformer_mac.h
@@ -14,28 +14,15 @@
#include "media/capture/capture_export.h"
#include "media/capture/video/mac/pixel_buffer_pool_mac.h"
#include "media/capture/video/mac/pixel_buffer_transferer_mac.h"
+#include "ui/gfx/geometry/size.h"
namespace media {
-// When enabled, AutoReconfigureAndTransform() configures the
-// SampleBufferTransformer to use the conversion path (pixel transfer or libyuv)
-// that is believed to be most efficient for the input sample buffer.
+// This flag decides whether the SampleBufferTransformer is used to convert
+// captured images to NV12; see video_capture_device_avfoundation_mac.mm.
CAPTURE_EXPORT extern const base::Feature kInCaptureConvertToNv12;
-// Feature flag used for performance measurements. This will not be shipped.
-//
-// When enabled, AutoReconfigureAndTransform() configures the
-// SampleBufferTransformer to use the pixel transfer path. Transforming an MJPEG
-// sample buffer with this configuration will DCHECK crash.
-CAPTURE_EXPORT extern const base::Feature
- kInCaptureConvertToNv12WithPixelTransfer;
-
-// Feature flag used for performance measurements. This will not be shipped.
-//
-// When enabled, AutoReconfigureAndTransform() configures the
-// SampleBufferTransformer to use the libyuv path.
-CAPTURE_EXPORT extern const base::Feature kInCaptureConvertToNv12WithLibyuv;
-
// Capable of converting from any supported capture format (NV12, YUY2, UYVY and
// MJPEG) to NV12 or I420 and doing rescaling. This class can be configured to
// use VTPixelTransferSession (sometimes HW-accelerated) or third_party/libyuv
@@ -51,37 +38,35 @@ class CAPTURE_EXPORT SampleBufferTransformer {
kLibyuv,
};
- // Only construct a sample transformer if one of the "InCaptureConvertToNv12"
- // flags are enabled and AutoReconfigureAndTransform() is supported. See
- // IsAutoReconfigureEnabled().
- static std::unique_ptr<SampleBufferTransformer>
- CreateIfAutoReconfigureEnabled();
- static std::unique_ptr<SampleBufferTransformer> Create();
+  // TODO(https://crbug.com/1175763): Make choosing the optimal Transformer an
+  // implementation detail decided at Transform()-time, so that Reconfigure()
+  // only cares about destination resolution and pixel format. Then allow
+  // overriding this decision explicitly, but only for testing and measurement
+  // purposes, not in the default capturer integration.
+ static const Transformer kBestTransformerForPixelBufferToNv12Output;
+ static Transformer GetBestTransformerForNv12Output(
+ CMSampleBufferRef sample_buffer);
+ static std::unique_ptr<SampleBufferTransformer> Create();
~SampleBufferTransformer();
Transformer transformer() const;
OSType destination_pixel_format() const;
- size_t destination_width() const;
- size_t destination_height() const;
-
- // Automatically reconfigures based on |sample_buffer| and base::Feature flags
- // if needed before performing a Transform().
- base::ScopedCFTypeRef<CVPixelBufferRef> AutoReconfigureAndTransform(
- CMSampleBufferRef sample_buffer);
+ const gfx::Size& destination_size() const;
// Future calls to Transform() will output pixel buffers according to this
- // configuration.
+  // configuration. Changing the configuration will allocate a new buffer
+  // pool, but calling Reconfigure() multiple times with the same parameters
+  // is a no-op.
void Reconfigure(Transformer transformer,
OSType destination_pixel_format,
- size_t destination_width,
- size_t destination_height,
- base::Optional<size_t> buffer_pool_size);
+ const gfx::Size& destination_size,
+ base::Optional<size_t> buffer_pool_size = base::nullopt);
- // Converts the sample buffer to an IOSurface-backed pixel buffer according to
+ // Converts the input buffer to an IOSurface-backed pixel buffer according to
// current configurations. If no transformation is needed (input format is the
- // same as the configured output format), the sample buffer's pixel buffer is
- // returned.
+ // same as the configured output format), the input pixel buffer is returned.
+ base::ScopedCFTypeRef<CVPixelBufferRef> Transform(
+ CVPixelBufferRef pixel_buffer);
base::ScopedCFTypeRef<CVPixelBufferRef> Transform(
CMSampleBufferRef sample_buffer);
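// Example (illustrative sketch; names follow the unit tests in this change):
// after configuring for NV12 output, a bare pixel buffer can be transformed
// directly, without wrapping it in a sample buffer.
//
//   transformer->Reconfigure(
//       SampleBufferTransformer::kBestTransformerForPixelBufferToNv12Output,
//       kPixelFormatNv12, media::GetPixelBufferSize(pixel_buffer));
//   base::ScopedCFTypeRef<CVPixelBufferRef> nv12 =
//       transformer->Transform(pixel_buffer);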
@@ -89,13 +74,8 @@ class CAPTURE_EXPORT SampleBufferTransformer {
friend std::unique_ptr<SampleBufferTransformer>
std::make_unique<SampleBufferTransformer>();
- static bool IsAutoReconfigureEnabled();
-
SampleBufferTransformer();
- void AutoReconfigureBasedOnInputAndFeatureFlags(
- CMSampleBufferRef sample_buffer);
-
// Sample buffers from the camera contain pixel buffers when an uncompressed
// pixel format is used (i.e. it's not MJPEG).
void TransformPixelBuffer(CVPixelBufferRef source_pixel_buffer,
@@ -132,8 +112,7 @@ class CAPTURE_EXPORT SampleBufferTransformer {
Transformer transformer_;
OSType destination_pixel_format_;
- size_t destination_width_;
- size_t destination_height_;
+ gfx::Size destination_size_;
std::unique_ptr<PixelBufferPool> destination_pixel_buffer_pool_;
// For kPixelBufferTransfer.
std::unique_ptr<PixelBufferTransferer> pixel_buffer_transferer_;
diff --git a/chromium/media/capture/video/mac/sample_buffer_transformer_mac_unittest.mm b/chromium/media/capture/video/mac/sample_buffer_transformer_mac_unittest.mm
index dde2a940139..3c83dd24d4c 100644
--- a/chromium/media/capture/video/mac/sample_buffer_transformer_mac_unittest.mm
+++ b/chromium/media/capture/video/mac/sample_buffer_transformer_mac_unittest.mm
@@ -7,7 +7,8 @@
#include <tuple>
#include "base/logging.h"
-#include "base/test/scoped_feature_list.h"
+#include "base/mac/scoped_cftyperef.h"
+#include "build/build_config.h"
#include "media/capture/video/mac/test/pixel_buffer_test_utils_mac.h"
#include "media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h"
#include "testing/gmock/include/gmock/gmock.h"
@@ -147,6 +148,116 @@ base::ScopedCFTypeRef<CVPixelBufferRef> CreatePixelBuffer(OSType pixel_format,
return pixel_buffer;
}
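+
+// Controls whether the pixel buffer created by CreateSampleBuffer() below is
+// backed by an IOSurface or has the IOSurface deliberately stripped.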
+enum class PixelBufferType {
+ kIoSurfaceBacked,
+ kIoSurfaceMissing,
+};
+
+void NonPlanarCvPixelBufferReleaseCallback(void* releaseRef, const void* data) {
+ free(const_cast<void*>(data));
+}
+
+void PlanarCvPixelBufferReleaseCallback(void* releaseRef,
+ const void* data,
+ size_t size,
+ size_t num_planes,
+ const void* planes[]) {
+ free(const_cast<void*>(data));
+ for (size_t plane = 0; plane < num_planes; ++plane)
+ free(const_cast<void*>(planes[plane]));
+}
+
+std::pair<uint8_t*, size_t> GetDataAndStride(CVPixelBufferRef pixel_buffer,
+ size_t plane) {
+ if (CVPixelBufferIsPlanar(pixel_buffer)) {
+ return {static_cast<uint8_t*>(
+ CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, plane)),
+ CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, plane)};
+ } else {
+ DCHECK_EQ(plane, 0u) << "Non-planar pixel buffers only have 1 plane.";
+ return {static_cast<uint8_t*>(CVPixelBufferGetBaseAddress(pixel_buffer)),
+ CVPixelBufferGetBytesPerRow(pixel_buffer)};
+ }
+}
+
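+// Returns a copy of |pixel_buffer| (not IOSurface-backed) whose plane strides
+// are |padding| bytes wider than the originals, letting tests verify that
+// conversion and scaling honor strides larger than the visible row width.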
+base::ScopedCFTypeRef<CVPixelBufferRef> AddPadding(
+ CVPixelBufferRef pixel_buffer,
+ OSType pixel_format,
+ int width,
+ int height,
+ int padding) {
+ size_t num_planes = CVPixelBufferGetPlaneCount(pixel_buffer);
+ size_t padded_size = 0;
+ std::vector<size_t> plane_widths;
+ std::vector<size_t> plane_heights;
+ std::vector<size_t> plane_strides;
+ if (CVPixelBufferIsPlanar(pixel_buffer)) {
+ for (size_t plane = 0; plane < num_planes; ++plane) {
+ size_t plane_stride =
+ CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, plane);
+ size_t padded_stride = plane_stride + padding;
+ size_t h = CVPixelBufferGetHeightOfPlane(pixel_buffer, plane);
+ size_t w = CVPixelBufferGetWidthOfPlane(pixel_buffer, plane);
+ plane_heights.push_back(h);
+ plane_widths.push_back(w);
+ plane_strides.push_back(padded_stride);
+ padded_size += h * padded_stride;
+ }
+ } else {
+ // CVPixelBufferGetPlaneCount returns 0 for non-planar buffers.
+ num_planes = 1;
+ size_t plane_stride = CVPixelBufferGetBytesPerRow(pixel_buffer);
+ size_t padded_stride = plane_stride + padding;
+ size_t h = CVPixelBufferGetHeight(pixel_buffer);
+ padded_size += h * padded_stride;
+ plane_heights.push_back(h);
+ plane_strides.push_back(padded_stride);
+ }
+ std::vector<void*> plane_address;
+ CHECK_EQ(
+ CVPixelBufferLockBaseAddress(pixel_buffer, kCVPixelBufferLock_ReadOnly),
+ kCVReturnSuccess);
+ // Allocate and copy each plane.
+ for (size_t plane = 0; plane < num_planes; ++plane) {
+ plane_address.push_back(
+ calloc(1, plane_strides[plane] * plane_heights[plane]));
+ uint8_t* dst_ptr = static_cast<uint8_t*>(plane_address[plane]);
+ uint8_t* src_ptr;
+ size_t plane_stride;
+ std::tie(src_ptr, plane_stride) = GetDataAndStride(pixel_buffer, plane);
+ CHECK(dst_ptr);
+ CHECK(src_ptr);
+ for (size_t r = 0; r < plane_heights[plane]; ++r) {
+ memcpy(dst_ptr, src_ptr, plane_stride);
+ src_ptr += plane_stride;
+ dst_ptr += plane_strides[plane];
+ }
+ }
+ CHECK_EQ(
+ CVPixelBufferUnlockBaseAddress(pixel_buffer, kCVPixelBufferLock_ReadOnly),
+ kCVReturnSuccess);
+
+ base::ScopedCFTypeRef<CVPixelBufferRef> padded_pixel_buffer;
+ CVReturn create_buffer_result;
+ if (CVPixelBufferIsPlanar(pixel_buffer)) {
+ // Without some memory block the callback won't be called and we leak the
+ // planar data.
+ void* descriptor = calloc(1, sizeof(CVPlanarPixelBufferInfo_YCbCrPlanar));
+ create_buffer_result = CVPixelBufferCreateWithPlanarBytes(
+ nullptr, width, height, pixel_format, descriptor, 0, num_planes,
+ plane_address.data(), plane_widths.data(), plane_heights.data(),
+ plane_strides.data(), &PlanarCvPixelBufferReleaseCallback,
+ plane_strides.data(), nullptr, padded_pixel_buffer.InitializeInto());
+ } else {
+ create_buffer_result = CVPixelBufferCreateWithBytes(
+ nullptr, width, height, pixel_format, plane_address[0],
+ plane_strides[0], &NonPlanarCvPixelBufferReleaseCallback, nullptr,
+ nullptr, padded_pixel_buffer.InitializeInto());
+ }
+ DCHECK_EQ(create_buffer_result, kCVReturnSuccess);
+ return padded_pixel_buffer;
+}
+
base::ScopedCFTypeRef<CMSampleBufferRef> CreateSampleBuffer(
OSType pixel_format,
int width,
@@ -154,19 +265,40 @@ base::ScopedCFTypeRef<CMSampleBufferRef> CreateSampleBuffer(
uint8_t r,
uint8_t g,
uint8_t b,
- bool iosurface_backed = true) {
- base::ScopedCFTypeRef<CVPixelBufferRef> pixel_buffer;
- if (iosurface_backed) {
- pixel_buffer = CreatePixelBuffer(pixel_format, width, height, r, g, b);
- } else {
- CVPixelBufferCreate(nullptr, width, height, pixel_format, nullptr,
- pixel_buffer.InitializeInto());
+ PixelBufferType pixel_buffer_type,
+ size_t padding = 0) {
+ base::ScopedCFTypeRef<CVPixelBufferRef> pixel_buffer =
+ CreatePixelBuffer(pixel_format, width, height, r, g, b);
+ if (padding != 0) {
+ CHECK_EQ(pixel_buffer_type, PixelBufferType::kIoSurfaceMissing)
+ << "Padding does not work with IOSurfaces.";
+ }
+ if (pixel_buffer_type == PixelBufferType::kIoSurfaceMissing) {
+ // Our pixel buffer currently has an IOSurface. To get rid of it, we perform
+ // a pixel buffer transfer to a destination pixel buffer that is not backed
+ // by an IOSurface. The resulting pixel buffer will have the desired color.
+ base::ScopedCFTypeRef<CVPixelBufferRef> iosurfaceless_pixel_buffer;
+ CVReturn create_buffer_result =
+ CVPixelBufferCreate(nullptr, width, height, pixel_format, nullptr,
+ iosurfaceless_pixel_buffer.InitializeInto());
+ DCHECK_EQ(create_buffer_result, kCVReturnSuccess);
+ PixelBufferTransferer transferer;
+ bool success =
+ transferer.TransferImage(pixel_buffer, iosurfaceless_pixel_buffer);
+ DCHECK(success);
+ DCHECK(!CVPixelBufferGetIOSurface(iosurfaceless_pixel_buffer));
+ pixel_buffer = iosurfaceless_pixel_buffer;
+
+ if (padding > 0) {
+ pixel_buffer =
+ AddPadding(pixel_buffer, pixel_format, width, height, padding);
+ }
}
// Wrap the pixel buffer in a sample buffer.
- CMFormatDescriptionRef format_description;
+ base::ScopedCFTypeRef<CMFormatDescriptionRef> format_description;
OSStatus status = CMVideoFormatDescriptionCreateForImageBuffer(
- nil, pixel_buffer, &format_description);
+ nil, pixel_buffer, format_description.InitializeInto());
DCHECK(status == noErr);
// Dummy information to make CMSampleBufferCreateForImageBuffer() happy.
@@ -189,16 +321,24 @@ base::ScopedCFTypeRef<CMSampleBufferRef> CreateMjpegSampleBuffer(
size_t mjpeg_data_size,
size_t width,
size_t height) {
- CMBlockBufferRef data_buffer;
+ CMBlockBufferCustomBlockSource source = {0};
+ source.FreeBlock = [](void* refcon, void* doomedMemoryBlock,
+ size_t sizeInBytes) {
+ // Do nothing. The data to be released is not dynamically allocated in this
+ // test code.
+ };
+
+ base::ScopedCFTypeRef<CMBlockBufferRef> data_buffer;
OSStatus status = CMBlockBufferCreateWithMemoryBlock(
nil, const_cast<void*>(static_cast<const void*>(mjpeg_data)),
- mjpeg_data_size, nil, nil, 0, mjpeg_data_size, 0, &data_buffer);
+ mjpeg_data_size, nil, &source, 0, mjpeg_data_size, 0,
+ data_buffer.InitializeInto());
DCHECK(status == noErr);
- CMFormatDescriptionRef format_description;
- status =
- CMVideoFormatDescriptionCreate(nil, kCMVideoCodecType_JPEG_OpenDML, width,
- height, nil, &format_description);
+ base::ScopedCFTypeRef<CMFormatDescriptionRef> format_description;
+ status = CMVideoFormatDescriptionCreate(nil, kCMVideoCodecType_JPEG_OpenDML,
+ width, height, nil,
+ format_description.InitializeInto());
DCHECK(status == noErr);
// Dummy information to make CMSampleBufferCreateReady() happy.
@@ -247,38 +387,130 @@ TEST_P(SampleBufferTransformerPixelTransferTest, CanConvertFullScale) {
base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
- kFullResolutionHeight, kColorR, kColorG, kColorB);
+ kFullResolutionHeight, kColorR, kColorG, kColorB,
+ PixelBufferType::kIoSurfaceBacked);
std::unique_ptr<SampleBufferTransformer> transformer =
SampleBufferTransformer::Create();
transformer->Reconfigure(
SampleBufferTransformer::Transformer::kPixelBufferTransfer,
- output_pixel_format, kFullResolutionWidth, kFullResolutionHeight, 1);
+ output_pixel_format,
+ gfx::Size(kFullResolutionWidth, kFullResolutionHeight), 1);
base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
transformer->Transform(input_sample_buffer);
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_pixel_buffer));
EXPECT_EQ(kFullResolutionWidth, CVPixelBufferGetWidth(output_pixel_buffer));
EXPECT_EQ(kFullResolutionHeight, CVPixelBufferGetHeight(output_pixel_buffer));
EXPECT_TRUE(
PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
}
-TEST_P(SampleBufferTransformerPixelTransferTest, CanConvertAndScaleDown) {
+#if defined(ARCH_CPU_ARM64)
+// Bulk-disabled for arm64 bot stabilization: https://crbug.com/1154345
+#define MAYBE_CanConvertAndScaleDown DISABLED_CanConvertAndScaleDown
+#else
+#define MAYBE_CanConvertAndScaleDown CanConvertAndScaleDown
+#endif
+
+TEST_P(SampleBufferTransformerPixelTransferTest, MAYBE_CanConvertAndScaleDown) {
OSType input_pixel_format;
OSType output_pixel_format;
std::tie(input_pixel_format, output_pixel_format) = GetParam();
base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
- kFullResolutionHeight, kColorR, kColorG, kColorB);
+ kFullResolutionHeight, kColorR, kColorG, kColorB,
+ PixelBufferType::kIoSurfaceBacked);
std::unique_ptr<SampleBufferTransformer> transformer =
SampleBufferTransformer::Create();
transformer->Reconfigure(
SampleBufferTransformer::Transformer::kPixelBufferTransfer,
- output_pixel_format, kScaledDownResolutionWidth,
- kScaledDownResolutionHeight, 1);
+ output_pixel_format,
+ gfx::Size(kScaledDownResolutionWidth, kScaledDownResolutionHeight), 1);
base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
transformer->Transform(input_sample_buffer);
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_pixel_buffer));
+ EXPECT_EQ(kScaledDownResolutionWidth,
+ CVPixelBufferGetWidth(output_pixel_buffer));
+ EXPECT_EQ(kScaledDownResolutionHeight,
+ CVPixelBufferGetHeight(output_pixel_buffer));
+ EXPECT_TRUE(
+ PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
+}
+
+TEST_P(SampleBufferTransformerPixelTransferTest,
+ CanConvertAndScaleDownWhenIoSurfaceIsMissing) {
+ OSType input_pixel_format;
+ OSType output_pixel_format;
+ std::tie(input_pixel_format, output_pixel_format) = GetParam();
+
+ base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
+ CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
+ kFullResolutionHeight, kColorR, kColorG, kColorB,
+ PixelBufferType::kIoSurfaceMissing);
+ std::unique_ptr<SampleBufferTransformer> transformer =
+ SampleBufferTransformer::Create();
+ transformer->Reconfigure(
+ SampleBufferTransformer::Transformer::kPixelBufferTransfer,
+ output_pixel_format,
+ gfx::Size(kScaledDownResolutionWidth, kScaledDownResolutionHeight), 1);
+ base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
+ transformer->Transform(input_sample_buffer);
+
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_pixel_buffer));
+ EXPECT_EQ(kScaledDownResolutionWidth,
+ CVPixelBufferGetWidth(output_pixel_buffer));
+ EXPECT_EQ(kScaledDownResolutionHeight,
+ CVPixelBufferGetHeight(output_pixel_buffer));
+ EXPECT_TRUE(
+ PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
+}
+
+TEST_P(SampleBufferTransformerPixelTransferTest,
+ CanConvertWithPaddingFullScale) {
+ OSType input_pixel_format;
+ OSType output_pixel_format;
+ std::tie(input_pixel_format, output_pixel_format) = GetParam();
+ base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
+ CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
+ kFullResolutionHeight, kColorR, kColorG, kColorB,
+                         PixelBufferType::kIoSurfaceMissing, /*padding=*/100);
+ std::unique_ptr<SampleBufferTransformer> transformer =
+ SampleBufferTransformer::Create();
+ transformer->Reconfigure(
+ SampleBufferTransformer::Transformer::kPixelBufferTransfer,
+ output_pixel_format,
+ gfx::Size(kFullResolutionWidth, kFullResolutionHeight), 1);
+ base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
+ transformer->Transform(input_sample_buffer);
+
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_pixel_buffer));
+ EXPECT_EQ(kFullResolutionWidth, CVPixelBufferGetWidth(output_pixel_buffer));
+ EXPECT_EQ(kFullResolutionHeight, CVPixelBufferGetHeight(output_pixel_buffer));
+ EXPECT_TRUE(
+ PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
+}
+
+TEST_P(SampleBufferTransformerPixelTransferTest,
+ CanConvertAndScaleWithPadding) {
+ OSType input_pixel_format;
+ OSType output_pixel_format;
+ std::tie(input_pixel_format, output_pixel_format) = GetParam();
+ base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
+ CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
+ kFullResolutionHeight, kColorR, kColorG, kColorB,
+                         PixelBufferType::kIoSurfaceMissing, /*padding=*/100);
+ std::unique_ptr<SampleBufferTransformer> transformer =
+ SampleBufferTransformer::Create();
+ transformer->Reconfigure(
+ SampleBufferTransformer::Transformer::kPixelBufferTransfer,
+ output_pixel_format,
+ gfx::Size(kScaledDownResolutionWidth, kScaledDownResolutionHeight), 1);
+ base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
+ transformer->Transform(input_sample_buffer);
+
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_pixel_buffer));
EXPECT_EQ(kScaledDownResolutionWidth,
CVPixelBufferGetWidth(output_pixel_buffer));
EXPECT_EQ(kScaledDownResolutionHeight,
@@ -304,37 +536,116 @@ TEST_P(SampleBufferTransformerLibyuvTest, CanConvertFullScale) {
base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
- kFullResolutionHeight, kColorR, kColorG, kColorB);
+ kFullResolutionHeight, kColorR, kColorG, kColorB,
+ PixelBufferType::kIoSurfaceBacked);
std::unique_ptr<SampleBufferTransformer> transformer =
SampleBufferTransformer::Create();
- transformer->Reconfigure(SampleBufferTransformer::Transformer::kLibyuv,
- output_pixel_format, kFullResolutionWidth,
- kFullResolutionHeight, 1);
+ transformer->Reconfigure(
+ SampleBufferTransformer::Transformer::kLibyuv, output_pixel_format,
+ gfx::Size(kFullResolutionWidth, kFullResolutionHeight), 1);
base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
transformer->Transform(input_sample_buffer);
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_pixel_buffer));
EXPECT_EQ(kFullResolutionWidth, CVPixelBufferGetWidth(output_pixel_buffer));
EXPECT_EQ(kFullResolutionHeight, CVPixelBufferGetHeight(output_pixel_buffer));
EXPECT_TRUE(
PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
}
-TEST_P(SampleBufferTransformerLibyuvTest, CanConvertAndScaleDown) {
+TEST_P(SampleBufferTransformerLibyuvTest, MAYBE_CanConvertAndScaleDown) {
OSType input_pixel_format;
OSType output_pixel_format;
std::tie(input_pixel_format, output_pixel_format) = GetParam();
base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
- kFullResolutionHeight, kColorR, kColorG, kColorB);
+ kFullResolutionHeight, kColorR, kColorG, kColorB,
+ PixelBufferType::kIoSurfaceBacked);
std::unique_ptr<SampleBufferTransformer> transformer =
SampleBufferTransformer::Create();
- transformer->Reconfigure(SampleBufferTransformer::Transformer::kLibyuv,
- output_pixel_format, kScaledDownResolutionWidth,
- kScaledDownResolutionHeight, 1);
+ transformer->Reconfigure(
+ SampleBufferTransformer::Transformer::kLibyuv, output_pixel_format,
+ gfx::Size(kScaledDownResolutionWidth, kScaledDownResolutionHeight), 1);
base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
transformer->Transform(input_sample_buffer);
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_pixel_buffer));
+ EXPECT_EQ(kScaledDownResolutionWidth,
+ CVPixelBufferGetWidth(output_pixel_buffer));
+ EXPECT_EQ(kScaledDownResolutionHeight,
+ CVPixelBufferGetHeight(output_pixel_buffer));
+ EXPECT_TRUE(
+ PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
+}
+
+TEST_P(SampleBufferTransformerLibyuvTest, CanConvertWithPaddingFullScale) {
+ OSType input_pixel_format;
+ OSType output_pixel_format;
+ std::tie(input_pixel_format, output_pixel_format) = GetParam();
+ base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
+ CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
+ kFullResolutionHeight, kColorR, kColorG, kColorB,
+                         PixelBufferType::kIoSurfaceMissing, /*padding=*/100);
+ std::unique_ptr<SampleBufferTransformer> transformer =
+ SampleBufferTransformer::Create();
+ transformer->Reconfigure(
+ SampleBufferTransformer::Transformer::kLibyuv, output_pixel_format,
+ gfx::Size(kFullResolutionWidth, kFullResolutionHeight), 1);
+ base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
+ transformer->Transform(input_sample_buffer);
+
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_pixel_buffer));
+ EXPECT_EQ(kFullResolutionWidth, CVPixelBufferGetWidth(output_pixel_buffer));
+ EXPECT_EQ(kFullResolutionHeight, CVPixelBufferGetHeight(output_pixel_buffer));
+ EXPECT_TRUE(
+ PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
+}
+
+TEST_P(SampleBufferTransformerLibyuvTest, CanConvertAndScaleWithPadding) {
+ OSType input_pixel_format;
+ OSType output_pixel_format;
+ std::tie(input_pixel_format, output_pixel_format) = GetParam();
+ base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
+ CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
+ kFullResolutionHeight, kColorR, kColorG, kColorB,
+                         PixelBufferType::kIoSurfaceMissing, /*padding=*/100);
+ std::unique_ptr<SampleBufferTransformer> transformer =
+ SampleBufferTransformer::Create();
+ transformer->Reconfigure(
+ SampleBufferTransformer::Transformer::kLibyuv, output_pixel_format,
+ gfx::Size(kScaledDownResolutionWidth, kScaledDownResolutionHeight), 1);
+ base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
+ transformer->Transform(input_sample_buffer);
+
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_pixel_buffer));
+ EXPECT_EQ(kScaledDownResolutionWidth,
+ CVPixelBufferGetWidth(output_pixel_buffer));
+ EXPECT_EQ(kScaledDownResolutionHeight,
+ CVPixelBufferGetHeight(output_pixel_buffer));
+ EXPECT_TRUE(
+ PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
+}
+
+TEST_P(SampleBufferTransformerLibyuvTest,
+ CanConvertAndScaleDownWhenIoSurfaceIsMissing) {
+ OSType input_pixel_format;
+ OSType output_pixel_format;
+ std::tie(input_pixel_format, output_pixel_format) = GetParam();
+
+ base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
+ CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
+ kFullResolutionHeight, kColorR, kColorG, kColorB,
+ PixelBufferType::kIoSurfaceMissing);
+ std::unique_ptr<SampleBufferTransformer> transformer =
+ SampleBufferTransformer::Create();
+ transformer->Reconfigure(
+ SampleBufferTransformer::Transformer::kLibyuv, output_pixel_format,
+ gfx::Size(kScaledDownResolutionWidth, kScaledDownResolutionHeight), 1);
+ base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
+ transformer->Transform(input_sample_buffer);
+
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_pixel_buffer));
EXPECT_EQ(kScaledDownResolutionWidth,
CVPixelBufferGetWidth(output_pixel_buffer));
EXPECT_EQ(kScaledDownResolutionHeight,
@@ -361,8 +672,8 @@ TEST_P(SampleBufferTransformerMjpegTest, CanConvertFullScale) {
std::unique_ptr<SampleBufferTransformer> transformer =
SampleBufferTransformer::Create();
transformer->Reconfigure(SampleBufferTransformer::Transformer::kLibyuv,
- output_pixel_format, kExampleJpegWidth,
- kExampleJpegHeight, 1);
+ output_pixel_format,
+ gfx::Size(kExampleJpegWidth, kExampleJpegHeight), 1);
base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
transformer->Transform(input_sample_buffer);
@@ -372,16 +683,16 @@ TEST_P(SampleBufferTransformerMjpegTest, CanConvertFullScale) {
PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
}
-TEST_P(SampleBufferTransformerMjpegTest, CanConvertAndScaleDown) {
+TEST_P(SampleBufferTransformerMjpegTest, MAYBE_CanConvertAndScaleDown) {
OSType output_pixel_format = GetParam();
base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
CreateExampleMjpegSampleBuffer();
std::unique_ptr<SampleBufferTransformer> transformer =
SampleBufferTransformer::Create();
- transformer->Reconfigure(SampleBufferTransformer::Transformer::kLibyuv,
- output_pixel_format, kExampleJpegScaledDownWidth,
- kExampleJpegScaledDownHeight, 1);
+ transformer->Reconfigure(
+ SampleBufferTransformer::Transformer::kLibyuv, output_pixel_format,
+ gfx::Size(kExampleJpegScaledDownWidth, kExampleJpegScaledDownHeight), 1);
base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
transformer->Transform(input_sample_buffer);
@@ -402,8 +713,8 @@ TEST_P(SampleBufferTransformerMjpegTest,
std::unique_ptr<SampleBufferTransformer> transformer =
SampleBufferTransformer::Create();
transformer->Reconfigure(SampleBufferTransformer::Transformer::kLibyuv,
- output_pixel_format, kExampleJpegWidth,
- kExampleJpegHeight, 1);
+ output_pixel_format,
+ gfx::Size(kExampleJpegWidth, kExampleJpegHeight), 1);
base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
transformer->Transform(input_sample_buffer);
EXPECT_FALSE(output_pixel_buffer);
@@ -414,28 +725,23 @@ INSTANTIATE_TEST_SUITE_P(SampleBufferTransformerTest,
SupportedOutputFormats(),
TestParametersOSTypeToString);
-TEST(SampleBufferTransformerAutoReconfigureTest,
- AutoReconfigureIsEnabledByDefault) {
- EXPECT_TRUE(SampleBufferTransformer::CreateIfAutoReconfigureEnabled());
-}
-
-TEST(SampleBufferTransformerAutoReconfigureTest,
- SourceAndDestinationResolutionMatches) {
- base::test::ScopedFeatureList scoped_feature_list;
- scoped_feature_list.InitAndEnableFeature(kInCaptureConvertToNv12);
+TEST(SampleBufferTransformerBestTransformerForNv12OutputTest,
+ SourceAndDestinationResolutionMatches_InputSampleBuffer) {
std::unique_ptr<SampleBufferTransformer> transformer =
- SampleBufferTransformer::CreateIfAutoReconfigureEnabled();
- ASSERT_TRUE(transformer);
+ SampleBufferTransformer::Create();
base::ScopedCFTypeRef<CMSampleBufferRef> sample0 = CreateSampleBuffer(
kPixelFormatNv12, kFullResolutionWidth, kFullResolutionHeight, kColorR,
- kColorG, kColorB, /*iosurface_backed=*/false);
+ kColorG, kColorB, PixelBufferType::kIoSurfaceMissing);
+ transformer->Reconfigure(
+ SampleBufferTransformer::GetBestTransformerForNv12Output(sample0),
+ kPixelFormatNv12, media::GetSampleBufferSize(sample0));
base::ScopedCFTypeRef<CVPixelBufferRef> output_buffer =
- transformer->AutoReconfigureAndTransform(sample0);
+ transformer->Transform(sample0);
- EXPECT_EQ(kFullResolutionWidth, transformer->destination_width());
- EXPECT_EQ(kFullResolutionHeight, transformer->destination_height());
+ EXPECT_EQ(gfx::Size(kFullResolutionWidth, kFullResolutionHeight),
+ transformer->destination_size());
EXPECT_EQ(kFullResolutionWidth, CVPixelBufferGetWidth(output_buffer));
EXPECT_EQ(kFullResolutionHeight, CVPixelBufferGetHeight(output_buffer));
EXPECT_TRUE(CVPixelBufferGetIOSurface(output_buffer));
@@ -445,12 +751,15 @@ TEST(SampleBufferTransformerAutoReconfigureTest,
base::ScopedCFTypeRef<CMSampleBufferRef> sample1 = CreateSampleBuffer(
kPixelFormatNv12, kScaledDownResolutionWidth, kScaledDownResolutionHeight,
- kColorR, kColorG, kColorB);
+ kColorR, kColorG, kColorB, PixelBufferType::kIoSurfaceBacked);
- output_buffer = transformer->AutoReconfigureAndTransform(sample1);
+ transformer->Reconfigure(
+ SampleBufferTransformer::GetBestTransformerForNv12Output(sample1),
+ kPixelFormatNv12, media::GetSampleBufferSize(sample1));
+ output_buffer = transformer->Transform(sample1);
- EXPECT_EQ(kScaledDownResolutionWidth, transformer->destination_width());
- EXPECT_EQ(kScaledDownResolutionHeight, transformer->destination_height());
+ EXPECT_EQ(gfx::Size(kScaledDownResolutionWidth, kScaledDownResolutionHeight),
+ transformer->destination_size());
EXPECT_EQ(kScaledDownResolutionWidth, CVPixelBufferGetWidth(output_buffer));
EXPECT_EQ(kScaledDownResolutionHeight, CVPixelBufferGetHeight(output_buffer));
EXPECT_TRUE(CVPixelBufferGetIOSurface(output_buffer));
@@ -458,88 +767,194 @@ TEST(SampleBufferTransformerAutoReconfigureTest,
EXPECT_EQ(output_buffer.get(), CMSampleBufferGetImageBuffer(sample1.get()));
}
-TEST(SampleBufferTransformerAutoReconfigureTest,
+// Same test as above, verifying that Transform() methods work on pixel buffers
+// directly (so that there's no need to have a sample buffer).
+TEST(SampleBufferTransformerBestTransformerForNv12OutputTest,
+ SourceAndDestinationResolutionMatches_InputPixelBuffer) {
+ std::unique_ptr<SampleBufferTransformer> transformer =
+ SampleBufferTransformer::Create();
+
+ base::ScopedCFTypeRef<CMSampleBufferRef> sample0 = CreateSampleBuffer(
+ kPixelFormatNv12, kFullResolutionWidth, kFullResolutionHeight, kColorR,
+ kColorG, kColorB, PixelBufferType::kIoSurfaceMissing);
+ CVPixelBufferRef pixel0 = CMSampleBufferGetImageBuffer(sample0);
+ ASSERT_TRUE(pixel0);
+
+ transformer->Reconfigure(
+ SampleBufferTransformer::kBestTransformerForPixelBufferToNv12Output,
+ kPixelFormatNv12, media::GetPixelBufferSize(pixel0));
+ base::ScopedCFTypeRef<CVPixelBufferRef> output_buffer =
+ transformer->Transform(pixel0);
+
+ EXPECT_EQ(gfx::Size(kFullResolutionWidth, kFullResolutionHeight),
+ transformer->destination_size());
+ EXPECT_EQ(kFullResolutionWidth, CVPixelBufferGetWidth(output_buffer));
+ EXPECT_EQ(kFullResolutionHeight, CVPixelBufferGetHeight(output_buffer));
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_buffer));
+ // Because pixel0 has no underlying IOSurface, it should not be returned from
+ // the transformer.
+ EXPECT_NE(output_buffer.get(), pixel0);
+
+ base::ScopedCFTypeRef<CMSampleBufferRef> sample1 = CreateSampleBuffer(
+ kPixelFormatNv12, kScaledDownResolutionWidth, kScaledDownResolutionHeight,
+ kColorR, kColorG, kColorB, PixelBufferType::kIoSurfaceBacked);
+ CVPixelBufferRef pixel1 = CMSampleBufferGetImageBuffer(sample1);
+ ASSERT_TRUE(pixel1);
+
+ transformer->Reconfigure(
+ SampleBufferTransformer::kBestTransformerForPixelBufferToNv12Output,
+ kPixelFormatNv12, media::GetPixelBufferSize(pixel1));
+ output_buffer = transformer->Transform(pixel1);
+
+ EXPECT_EQ(gfx::Size(kScaledDownResolutionWidth, kScaledDownResolutionHeight),
+ transformer->destination_size());
+ EXPECT_EQ(kScaledDownResolutionWidth, CVPixelBufferGetWidth(output_buffer));
+ EXPECT_EQ(kScaledDownResolutionHeight, CVPixelBufferGetHeight(output_buffer));
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_buffer));
+ // Because pixel1 does have an IOSurface, it can be returned directly.
+ EXPECT_EQ(output_buffer.get(), pixel1);
+}
+
+TEST(SampleBufferTransformerBestTransformerForNv12OutputTest,
+ CanConvertAndScaleDown_InputPixelBuffer) {
+ std::unique_ptr<SampleBufferTransformer> transformer =
+ SampleBufferTransformer::Create();
+
+ base::ScopedCFTypeRef<CMSampleBufferRef> sample_buffer = CreateSampleBuffer(
+ kPixelFormatNv12, kFullResolutionWidth, kFullResolutionHeight, kColorR,
+ kColorG, kColorB, PixelBufferType::kIoSurfaceBacked);
+ CVPixelBufferRef pixel_buffer = CMSampleBufferGetImageBuffer(sample_buffer);
+ ASSERT_TRUE(pixel_buffer);
+
+ transformer->Reconfigure(
+ SampleBufferTransformer::kBestTransformerForPixelBufferToNv12Output,
+ kPixelFormatNv12,
+ gfx::Size(kScaledDownResolutionWidth, kScaledDownResolutionHeight));
+ base::ScopedCFTypeRef<CVPixelBufferRef> output_buffer =
+ transformer->Transform(pixel_buffer);
+
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_buffer));
+ EXPECT_EQ(kScaledDownResolutionWidth, CVPixelBufferGetWidth(output_buffer));
+ EXPECT_EQ(kScaledDownResolutionHeight, CVPixelBufferGetHeight(output_buffer));
+ EXPECT_TRUE(
+ PixelBufferIsSingleColor(output_buffer, kColorR, kColorG, kColorB));
+}
+
+TEST(SampleBufferTransformerBestTransformerForNv12OutputTest,
DestinationPixelFormatIsAlwaysNv12) {
- base::test::ScopedFeatureList scoped_feature_list;
- scoped_feature_list.InitAndEnableFeature(kInCaptureConvertToNv12);
std::unique_ptr<SampleBufferTransformer> transformer =
- SampleBufferTransformer::CreateIfAutoReconfigureEnabled();
- ASSERT_TRUE(transformer);
+ SampleBufferTransformer::Create();
+ base::ScopedCFTypeRef<CMSampleBufferRef> sample = CreateSampleBuffer(
+ kPixelFormatNv12, kScaledDownResolutionWidth, kScaledDownResolutionHeight,
+ kColorR, kColorG, kColorB, PixelBufferType::kIoSurfaceBacked);
+ transformer->Reconfigure(
+ SampleBufferTransformer::GetBestTransformerForNv12Output(sample),
+ kPixelFormatNv12, media::GetSampleBufferSize(sample));
base::ScopedCFTypeRef<CVPixelBufferRef> output_buffer =
- transformer->AutoReconfigureAndTransform(CreateSampleBuffer(
- kPixelFormatNv12, kScaledDownResolutionWidth,
- kScaledDownResolutionHeight, kColorR, kColorG, kColorB));
+ transformer->Transform(sample);
EXPECT_EQ(kPixelFormatNv12, transformer->destination_pixel_format());
EXPECT_EQ(kPixelFormatNv12,
IOSurfaceGetPixelFormat(CVPixelBufferGetIOSurface(output_buffer)));
- output_buffer = transformer->AutoReconfigureAndTransform(CreateSampleBuffer(
- kPixelFormatUyvy, kScaledDownResolutionWidth, kScaledDownResolutionHeight,
- kColorR, kColorG, kColorB));
+ sample = CreateSampleBuffer(kPixelFormatUyvy, kScaledDownResolutionWidth,
+ kScaledDownResolutionHeight, kColorR, kColorG,
+ kColorB, PixelBufferType::kIoSurfaceBacked);
+ transformer->Reconfigure(
+ SampleBufferTransformer::GetBestTransformerForNv12Output(sample),
+ kPixelFormatNv12, media::GetSampleBufferSize(sample));
+ output_buffer = transformer->Transform(sample);
EXPECT_EQ(kPixelFormatNv12, transformer->destination_pixel_format());
EXPECT_EQ(kPixelFormatNv12,
IOSurfaceGetPixelFormat(CVPixelBufferGetIOSurface(output_buffer)));
- output_buffer = transformer->AutoReconfigureAndTransform(CreateSampleBuffer(
- kPixelFormatYuy2, kScaledDownResolutionWidth, kScaledDownResolutionHeight,
- kColorR, kColorG, kColorB));
+ sample = CreateSampleBuffer(kPixelFormatYuy2, kScaledDownResolutionWidth,
+ kScaledDownResolutionHeight, kColorR, kColorG,
+ kColorB, PixelBufferType::kIoSurfaceBacked);
+ transformer->Reconfigure(
+ SampleBufferTransformer::GetBestTransformerForNv12Output(sample),
+ kPixelFormatNv12, media::GetSampleBufferSize(sample));
+ output_buffer = transformer->Transform(sample);
EXPECT_EQ(kPixelFormatNv12, transformer->destination_pixel_format());
EXPECT_EQ(kPixelFormatNv12,
IOSurfaceGetPixelFormat(CVPixelBufferGetIOSurface(output_buffer)));
- output_buffer = transformer->AutoReconfigureAndTransform(CreateSampleBuffer(
- kPixelFormatI420, kScaledDownResolutionWidth, kScaledDownResolutionHeight,
- kColorR, kColorG, kColorB));
+ sample = CreateSampleBuffer(kPixelFormatI420, kScaledDownResolutionWidth,
+ kScaledDownResolutionHeight, kColorR, kColorG,
+ kColorB, PixelBufferType::kIoSurfaceBacked);
+ transformer->Reconfigure(
+ SampleBufferTransformer::GetBestTransformerForNv12Output(sample),
+ kPixelFormatNv12, media::GetSampleBufferSize(sample));
+ output_buffer = transformer->Transform(sample);
EXPECT_EQ(kPixelFormatNv12, transformer->destination_pixel_format());
EXPECT_EQ(kPixelFormatNv12,
IOSurfaceGetPixelFormat(CVPixelBufferGetIOSurface(output_buffer)));
- output_buffer = transformer->AutoReconfigureAndTransform(
- CreateExampleMjpegSampleBuffer());
+ sample = CreateExampleMjpegSampleBuffer();
+ transformer->Reconfigure(
+ SampleBufferTransformer::GetBestTransformerForNv12Output(sample),
+ kPixelFormatNv12, media::GetSampleBufferSize(sample));
+ output_buffer = transformer->Transform(sample);
EXPECT_EQ(kPixelFormatNv12, transformer->destination_pixel_format());
EXPECT_EQ(kPixelFormatNv12,
IOSurfaceGetPixelFormat(CVPixelBufferGetIOSurface(output_buffer)));
}
-TEST(SampleBufferTransformerAutoReconfigureTest, UsesBestTransformerPaths) {
- base::test::ScopedFeatureList scoped_feature_list;
- scoped_feature_list.InitAndEnableFeature(kInCaptureConvertToNv12);
+TEST(SampleBufferTransformerBestTransformerForNv12OutputTest,
+ UsesBestTransformerPaths) {
std::unique_ptr<SampleBufferTransformer> transformer =
- SampleBufferTransformer::CreateIfAutoReconfigureEnabled();
- ASSERT_TRUE(transformer);
+ SampleBufferTransformer::Create();
+ base::ScopedCFTypeRef<CMSampleBufferRef> sample = CreateSampleBuffer(
+ kPixelFormatNv12, kScaledDownResolutionWidth, kScaledDownResolutionHeight,
+ kColorR, kColorG, kColorB, PixelBufferType::kIoSurfaceBacked);
+ transformer->Reconfigure(
+ SampleBufferTransformer::GetBestTransformerForNv12Output(sample),
+ kPixelFormatNv12, media::GetSampleBufferSize(sample));
base::ScopedCFTypeRef<CVPixelBufferRef> output_buffer =
- transformer->AutoReconfigureAndTransform(CreateSampleBuffer(
- kPixelFormatNv12, kScaledDownResolutionWidth,
- kScaledDownResolutionHeight, kColorR, kColorG, kColorB));
+ transformer->Transform(sample);
EXPECT_EQ(SampleBufferTransformer::Transformer::kPixelBufferTransfer,
transformer->transformer());
EXPECT_TRUE(CVPixelBufferGetIOSurface(output_buffer));
- output_buffer = transformer->AutoReconfigureAndTransform(CreateSampleBuffer(
- kPixelFormatUyvy, kScaledDownResolutionWidth, kScaledDownResolutionHeight,
- kColorR, kColorG, kColorB));
+ sample = CreateSampleBuffer(kPixelFormatUyvy, kScaledDownResolutionWidth,
+ kScaledDownResolutionHeight, kColorR, kColorG,
+ kColorB, PixelBufferType::kIoSurfaceBacked);
+ transformer->Reconfigure(
+ SampleBufferTransformer::GetBestTransformerForNv12Output(sample),
+ kPixelFormatNv12, media::GetSampleBufferSize(sample));
+ output_buffer = transformer->Transform(sample);
EXPECT_EQ(SampleBufferTransformer::Transformer::kPixelBufferTransfer,
transformer->transformer());
EXPECT_TRUE(CVPixelBufferGetIOSurface(output_buffer));
- output_buffer = transformer->AutoReconfigureAndTransform(CreateSampleBuffer(
- kPixelFormatYuy2, kScaledDownResolutionWidth, kScaledDownResolutionHeight,
- kColorR, kColorG, kColorB));
+ sample = CreateSampleBuffer(kPixelFormatYuy2, kScaledDownResolutionWidth,
+ kScaledDownResolutionHeight, kColorR, kColorG,
+ kColorB, PixelBufferType::kIoSurfaceBacked);
+ transformer->Reconfigure(
+ SampleBufferTransformer::GetBestTransformerForNv12Output(sample),
+ kPixelFormatNv12, media::GetSampleBufferSize(sample));
+ output_buffer = transformer->Transform(sample);
EXPECT_EQ(SampleBufferTransformer::Transformer::kPixelBufferTransfer,
transformer->transformer());
EXPECT_TRUE(CVPixelBufferGetIOSurface(output_buffer));
- output_buffer = transformer->AutoReconfigureAndTransform(CreateSampleBuffer(
- kPixelFormatI420, kScaledDownResolutionWidth, kScaledDownResolutionHeight,
- kColorR, kColorG, kColorB));
+ sample = CreateSampleBuffer(kPixelFormatI420, kScaledDownResolutionWidth,
+ kScaledDownResolutionHeight, kColorR, kColorG,
+ kColorB, PixelBufferType::kIoSurfaceBacked);
+ transformer->Reconfigure(
+ SampleBufferTransformer::GetBestTransformerForNv12Output(sample),
+ kPixelFormatNv12, media::GetSampleBufferSize(sample));
+ output_buffer = transformer->Transform(sample);
EXPECT_EQ(SampleBufferTransformer::Transformer::kPixelBufferTransfer,
transformer->transformer());
EXPECT_TRUE(CVPixelBufferGetIOSurface(output_buffer));
- output_buffer = transformer->AutoReconfigureAndTransform(
- CreateExampleMjpegSampleBuffer());
+ sample = CreateExampleMjpegSampleBuffer();
+ transformer->Reconfigure(
+ SampleBufferTransformer::GetBestTransformerForNv12Output(sample),
+ kPixelFormatNv12, media::GetSampleBufferSize(sample));
+ output_buffer = transformer->Transform(sample);
EXPECT_EQ(SampleBufferTransformer::Transformer::kLibyuv,
transformer->transformer());
EXPECT_TRUE(CVPixelBufferGetIOSurface(output_buffer));
diff --git a/chromium/media/capture/video/mac/video_capture_device_avfoundation_legacy_mac.mm b/chromium/media/capture/video/mac/video_capture_device_avfoundation_legacy_mac.mm
index c8a62dff46b..fe80ca2db2f 100644
--- a/chromium/media/capture/video/mac/video_capture_device_avfoundation_legacy_mac.mm
+++ b/chromium/media/capture/video/mac/video_capture_device_avfoundation_legacy_mac.mm
@@ -405,6 +405,10 @@ void ExtractBaseAddressAndLength(char** base_address,
return YES;
}
+- (void)setScaledResolutions:(std::vector<gfx::Size>)resolutions {
+ // The legacy capturer does not implement in-capturer scaling.
+}
+
- (BOOL)startCapture {
DCHECK(_main_thread_checker.CalledOnValidThread());
if (!_captureSession) {
diff --git a/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.h b/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.h
index 120f8c656ed..885f631591e 100644
--- a/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.h
+++ b/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.h
@@ -7,6 +7,7 @@
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
+#include "base/callback_forward.h"
#include "base/mac/scoped_dispatch_object.h"
#include "base/mac/scoped_nsobject.h"
@@ -19,6 +20,14 @@
namespace media {
+// When this feature is enabled, the capturer can be configured using
+// setScaledResolutions to output scaled versions of the captured frame (in
+// addition to the original frame), whenever NV12 IOSurfaces are available to
+// the capturer. These are available either when the camera supports it and
+// kAVFoundationCaptureV2ZeroCopy is enabled or when kInCaptureConvertToNv12 is
+// used to convert frames to NV12.
+CAPTURE_EXPORT extern const base::Feature kInCapturerScaling;
+
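For orientation, a minimal sketch of how a client opts in to in-capturer scaling, assuming a captureDevice configured as in the unit tests later in this patch (all names are from this patch; the scaled stream arrives alongside the full-resolution frame):

    base::test::ScopedFeatureList scoped_feature_list;
    scoped_feature_list.InitWithFeatures(
        {media::kInCaptureConvertToNv12, media::kInCapturerScaling}, {});
    // Request a downscaled 320x240 NV12 stream in addition to the full frame.
    std::vector<gfx::Size> scaled_resolutions;
    scaled_resolutions.emplace_back(320, 240);
    [captureDevice setScaledResolutions:scaled_resolutions];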
// Find the best capture format from |formats| for the specified dimensions and
// frame rate. Returns an element of |formats|, or nil.
AVCaptureDeviceFormat* CAPTURE_EXPORT
@@ -53,6 +62,13 @@ CAPTURE_EXPORT
base::Lock _lock;
media::VideoCaptureDeviceAVFoundationFrameReceiver* _frameReceiver
GUARDED_BY(_lock); // weak.
+ bool _capturedFirstFrame GUARDED_BY(_lock);
+ bool _capturedFrameSinceLastStallCheck GUARDED_BY(_lock);
+ std::unique_ptr<base::WeakPtrFactory<VideoCaptureDeviceAVFoundation>>
+ _weakPtrFactoryForStallCheck;
+
+ // Used to rate-limit crash reports for https://crbug.com/1168112.
+ bool _hasDumpedForFrameSizeMismatch;
base::scoped_nsobject<AVCaptureSession> _captureSession;
@@ -64,6 +80,12 @@ CAPTURE_EXPORT
// When enabled, converts captured frames to NV12.
std::unique_ptr<media::SampleBufferTransformer> _sampleBufferTransformer;
+ // Transformers used to create downscaled versions of the captured image.
+ // Enabled when setScaledResolutions is called (i.e. media::VideoFrameFeedback
+ // asks for scaled frames on behalf of a consumer in the Renderer process),
+ // NV12 output is enabled and the kInCapturerScaling feature is on.
+ std::vector<std::unique_ptr<media::SampleBufferTransformer>>
+ _scaledFrameTransformers;
// An AVDataOutput specialized for taking pictures out of |captureSession_|.
base::scoped_nsobject<AVCaptureStillImageOutput> _stillImageOutput;
@@ -87,6 +109,20 @@ CAPTURE_EXPORT
- (void)setOnStillImageOutputStoppedForTesting:
(base::RepeatingCallback<void()>)onStillImageOutputStopped;
+// Use the below only for testing.
+- (void)callLocked:(base::OnceClosure)lambda;
+
+- (void)processPixelBufferNV12IOSurface:(CVPixelBufferRef)pixelBuffer
+ captureFormat:
+ (const media::VideoCaptureFormat&)captureFormat
+ colorSpace:(const gfx::ColorSpace&)colorSpace
+ timestamp:(const base::TimeDelta)timestamp;
+
+- (BOOL)processPixelBufferPlanes:(CVImageBufferRef)pixelBuffer
+ captureFormat:(const media::VideoCaptureFormat&)captureFormat
+ colorSpace:(const gfx::ColorSpace&)colorSpace
+ timestamp:(const base::TimeDelta)timestamp;
+
@end
#endif // MEDIA_CAPTURE_VIDEO_MAC_VIDEO_CAPTURE_DEVICE_AVFOUNDATION_MAC_H_
diff --git a/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm b/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm
index 31d5516bf2e..6dd77ee0eb9 100644
--- a/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm
+++ b/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm
@@ -10,13 +10,15 @@
#include <stddef.h>
#include <stdint.h>
+#include "base/debug/dump_without_crashing.h"
#include "base/location.h"
#include "base/mac/foundation_util.h"
-#include "base/mac/mac_util.h"
#include "base/metrics/histogram_macros.h"
#include "base/sequenced_task_runner.h"
#include "base/strings/string_util.h"
+#include "base/strings/stringprintf.h"
#include "base/strings/sys_string_conversions.h"
+#include "components/crash/core/common/crash_key.h"
#include "media/base/mac/color_space_util_mac.h"
#include "media/base/media_switches.h"
#include "media/base/timestamp_constants.h"
@@ -24,6 +26,7 @@
#import "media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h"
#include "media/capture/video/mac/video_capture_device_factory_mac.h"
#include "media/capture/video/mac/video_capture_device_mac.h"
+#import "media/capture/video/mac/video_capture_metrics_mac.h"
#include "media/capture/video_capture_types.h"
#include "services/video_capture/public/uma/video_capture_service_event.h"
#include "ui/gfx/geometry/size.h"
@@ -54,10 +57,15 @@ base::TimeDelta GetCMSampleBufferTimestamp(CMSampleBufferRef sampleBuffer) {
return timestamp;
}
+constexpr size_t kPixelBufferPoolSize = 10;
+
} // anonymous namespace
namespace media {
+const base::Feature kInCapturerScaling{"InCapturerScaling",
+ base::FEATURE_DISABLED_BY_DEFAULT};
+
AVCaptureDeviceFormat* FindBestCaptureFormat(
NSArray<AVCaptureDeviceFormat*>* formats,
int width,
@@ -163,14 +171,14 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
DISPATCH_QUEUE_SERIAL),
base::scoped_policy::ASSUME);
DCHECK(frameReceiver);
+ _capturedFirstFrame = false;
_weakPtrFactoryForTakePhoto =
std::make_unique<base::WeakPtrFactory<VideoCaptureDeviceAVFoundation>>(
self);
[self setFrameReceiver:frameReceiver];
_captureSession.reset([[AVCaptureSession alloc] init]);
- _sampleBufferTransformer =
- media::SampleBufferTransformer::CreateIfAutoReconfigureEnabled();
- if (_sampleBufferTransformer) {
+ if (base::FeatureList::IsEnabled(media::kInCaptureConvertToNv12)) {
+ _sampleBufferTransformer = media::SampleBufferTransformer::Create();
VLOG(1) << "Capturing with SampleBufferTransformer enabled";
}
}
@@ -268,7 +276,6 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
media::FindBestCaptureFormat([_captureDevice formats], width, height,
frameRate),
base::scoped_policy::RETAIN);
- // Default to NV12, a pixel format commonly supported by web cameras.
FourCharCode best_fourcc = kDefaultFourCCPixelFormat;
if (_bestCaptureFormat) {
best_fourcc = CMFormatDescriptionGetMediaSubType(
@@ -328,6 +335,39 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
return YES;
}
+- (void)setScaledResolutions:(std::vector<gfx::Size>)resolutions {
+ if (!base::FeatureList::IsEnabled(media::kInCapturerScaling)) {
+ return;
+ }
+ // The lock is needed for |_scaledFrameTransformers|.
+ base::AutoLock lock(_lock);
+ bool reconfigureScaledFrameTransformers = false;
+ if (resolutions.size() != _scaledFrameTransformers.size()) {
+ reconfigureScaledFrameTransformers = true;
+ } else {
+ for (size_t i = 0; i < resolutions.size(); ++i) {
+ if (resolutions[i] != _scaledFrameTransformers[i]->destination_size()) {
+ reconfigureScaledFrameTransformers = true;
+ break;
+ }
+ }
+ }
+ if (!reconfigureScaledFrameTransformers)
+ return;
+ _scaledFrameTransformers.clear();
+ for (const auto& resolution : resolutions) {
+ // Configure the transformer to and from NV12 pixel buffers - we only want
+ // to pay scaling costs, not conversion costs.
+ auto scaledFrameTransformer = media::SampleBufferTransformer::Create();
+ scaledFrameTransformer->Reconfigure(
+ media::SampleBufferTransformer::
+ kBestTransformerForPixelBufferToNv12Output,
+ kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, resolution,
+ kPixelBufferPoolSize);
+ _scaledFrameTransformers.push_back(std::move(scaledFrameTransformer));
+ }
+}
+
- (BOOL)startCapture {
DCHECK(_mainThreadTaskRunner->BelongsToCurrentThread());
if (!_captureSession) {
@@ -351,11 +391,18 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
}
}
+ {
+ base::AutoLock lock(_lock);
+ _capturedFirstFrame = false;
+ _capturedFrameSinceLastStallCheck = NO;
+ }
+ [self doStallCheck:0];
return YES;
}
- (void)stopCapture {
DCHECK(_mainThreadTaskRunner->BelongsToCurrentThread());
+ _weakPtrFactoryForStallCheck.reset();
[self stopStillImageOutput];
if ([_captureSession isRunning])
[_captureSession stopRunning]; // Synchronous.
@@ -557,10 +604,10 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
timestamp);
}
-- (BOOL)processPixelBuffer:(CVImageBufferRef)pixelBuffer
- captureFormat:(const media::VideoCaptureFormat&)captureFormat
- colorSpace:(const gfx::ColorSpace&)colorSpace
- timestamp:(const base::TimeDelta)timestamp {
+- (BOOL)processPixelBufferPlanes:(CVImageBufferRef)pixelBuffer
+ captureFormat:(const media::VideoCaptureFormat&)captureFormat
+ colorSpace:(const gfx::ColorSpace&)colorSpace
+ timestamp:(const base::TimeDelta)timestamp {
VLOG(3) << __func__;
if (CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly) !=
kCVReturnSuccess) {
@@ -620,20 +667,40 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
packedBufferSize += bytesPerRow * height;
}
+ // If media::VideoFrame::PlaneSize differs from the CVPixelBuffer's size then
+ // generate a crash report to show the difference.
+ // https://crbug.com/1168112
+ CHECK_EQ(pixelBufferHeights.size(), packedHeights.size());
+ for (size_t plane = 0; plane < pixelBufferHeights.size(); ++plane) {
+ if (pixelBufferHeights[plane] != packedHeights[plane] &&
+ !_hasDumpedForFrameSizeMismatch) {
+ static crash_reporter::CrashKeyString<64> planeInfoKey(
+ "core-video-plane-info");
+ planeInfoKey.Set(
+ base::StringPrintf("plane:%zu cv_height:%zu packed_height:%zu", plane,
+ pixelBufferHeights[plane], packedHeights[plane]));
+ base::debug::DumpWithoutCrashing();
+ _hasDumpedForFrameSizeMismatch = true;
+ }
+ }
+
// If |pixelBuffer| is not tightly packed, then copy it to |packedBufferCopy|,
// because ReceiveFrame() below assumes tight packing.
// https://crbug.com/1151936
bool needsCopyToPackedBuffer = pixelBufferBytesPerRows != packedBytesPerRows;
- CHECK(pixelBufferHeights == packedHeights);
std::vector<uint8_t> packedBufferCopy;
if (needsCopyToPackedBuffer) {
- CHECK(pixelBufferHeights == packedHeights);
- packedBufferCopy.resize(packedBufferSize);
+ packedBufferCopy.resize(packedBufferSize, 0);
uint8_t* dstAddr = packedBufferCopy.data();
for (size_t plane = 0; plane < numPlanes; ++plane) {
uint8_t* srcAddr = pixelBufferAddresses[plane];
- for (size_t row = 0; row < packedHeights[plane]; ++row) {
- memcpy(dstAddr, srcAddr, packedBytesPerRows[plane]);
+ size_t row = 0;
+ for (row = 0;
+ row < std::min(packedHeights[plane], pixelBufferHeights[plane]);
+ ++row) {
+ memcpy(dstAddr, srcAddr,
+ std::min(packedBytesPerRows[plane],
+ pixelBufferBytesPerRows[plane]));
dstAddr += packedBytesPerRows[plane];
srcAddr += pixelBufferBytesPerRows[plane];
}
@@ -649,12 +716,68 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
return YES;
}
-- (void)processNV12IOSurface:(IOSurfaceRef)ioSurface
- captureFormat:(const media::VideoCaptureFormat&)captureFormat
- colorSpace:(const gfx::ColorSpace&)colorSpace
- timestamp:(const base::TimeDelta)timestamp {
+- (void)processPixelBufferNV12IOSurface:(CVPixelBufferRef)pixelBuffer
+ captureFormat:
+ (const media::VideoCaptureFormat&)captureFormat
+ colorSpace:(const gfx::ColorSpace&)colorSpace
+ timestamp:(const base::TimeDelta)timestamp {
VLOG(3) << __func__;
DCHECK_EQ(captureFormat.pixel_format, media::PIXEL_FORMAT_NV12);
+
+ IOSurfaceRef ioSurface = CVPixelBufferGetIOSurface(pixelBuffer);
+ DCHECK(ioSurface);
+ media::CapturedExternalVideoBuffer externalBuffer =
+ [self capturedExternalVideoBufferFromNV12IOSurface:ioSurface
+ captureFormat:captureFormat
+ colorSpace:colorSpace];
+
+ // The lock is needed for |_scaledFrameTransformers| and |_frameReceiver|.
+ _lock.AssertAcquired();
+ std::vector<media::CapturedExternalVideoBuffer> scaledExternalBuffers;
+ scaledExternalBuffers.reserve(_scaledFrameTransformers.size());
+ for (auto& scaledFrameTransformer : _scaledFrameTransformers) {
+ gfx::Size scaledFrameSize = scaledFrameTransformer->destination_size();
+ // Only proceed if this results in downscaling in one or both dimensions.
+ //
+ // It is not clear that we want to continue to allow changing the aspect
+ // ratio like this since this causes visible stretching in the image if the
+ // stretch is significantly large.
+ // TODO(https://crbug.com/1157072): When we know what to do about aspect
+ // ratios, consider adding a DCHECK here or otherwise ignore wrong aspect
+ // ratios (within some fault tolerance).
+ if (scaledFrameSize.width() > captureFormat.frame_size.width() ||
+ scaledFrameSize.height() > captureFormat.frame_size.height() ||
+ scaledFrameSize == captureFormat.frame_size) {
+ continue;
+ }
+ base::ScopedCFTypeRef<CVPixelBufferRef> scaledPixelBuffer =
+ scaledFrameTransformer->Transform(pixelBuffer);
+ if (!scaledPixelBuffer) {
+ LOG(ERROR) << "Failed to downscale frame, skipping resolution "
+ << scaledFrameSize.ToString();
+ continue;
+ }
+ IOSurfaceRef scaledIoSurface = CVPixelBufferGetIOSurface(scaledPixelBuffer);
+ media::VideoCaptureFormat scaledCaptureFormat = captureFormat;
+ scaledCaptureFormat.frame_size = scaledFrameSize;
+ scaledExternalBuffers.push_back([self
+ capturedExternalVideoBufferFromNV12IOSurface:scaledIoSurface
+ captureFormat:scaledCaptureFormat
+ colorSpace:colorSpace]);
+ }
+
+ _frameReceiver->ReceiveExternalGpuMemoryBufferFrame(
+ std::move(externalBuffer), std::move(scaledExternalBuffers), timestamp);
+}
+
+- (media::CapturedExternalVideoBuffer)
+ capturedExternalVideoBufferFromNV12IOSurface:(IOSurfaceRef)ioSurface
+ captureFormat:
+ (const media::VideoCaptureFormat&)
+ captureFormat
+ colorSpace:
+ (const gfx::ColorSpace&)colorSpace {
+ DCHECK(ioSurface);
gfx::GpuMemoryBufferHandle handle;
handle.id.id = -1;
handle.type = gfx::GpuMemoryBufferType::IO_SURFACE_BUFFER;
@@ -673,9 +796,63 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
kCGColorSpaceSRGB);
}
- _lock.AssertAcquired();
- _frameReceiver->ReceiveExternalGpuMemoryBufferFrame(
- std::move(handle), captureFormat, overriddenColorSpace, timestamp);
+ return media::CapturedExternalVideoBuffer(std::move(handle), captureFormat,
+ overriddenColorSpace);
+}
+
+// Sometimes (especially when the camera is accessed by another process, e.g.,
+// Photo Booth), the AVCaptureSession will stop producing new frames. This
+// happens with no errors or notifications being produced. To recover from
+// this, check once per second to see if a new frame has been captured. If 5 of
+// these checks fail consecutively, restart the capture session.
+// https://crbug.com/1176568
+- (void)doStallCheck:(int)failedCheckCount {
+ DCHECK(_mainThreadTaskRunner->BelongsToCurrentThread());
+
+ int nextFailedCheckCount = failedCheckCount + 1;
+ {
+ base::AutoLock lock(_lock);
+ // This is to detect when a capture was working but has stopped submitting
+ // new frames. If we haven't received any frames yet, don't do anything.
+ if (!_capturedFirstFrame)
+ nextFailedCheckCount = 0;
+
+ // If we captured a frame since last check, then we aren't stalled.
+ if (_capturedFrameSinceLastStallCheck)
+ nextFailedCheckCount = 0;
+ _capturedFrameSinceLastStallCheck = NO;
+ }
+
+ constexpr int kMaxFailedCheckCount = 5;
+ if (nextFailedCheckCount < kMaxFailedCheckCount) {
+ // Post a task to check for progress in 1 second. Create the weak factory
+ // for the posted task, if needed.
+ if (!_weakPtrFactoryForStallCheck) {
+ _weakPtrFactoryForStallCheck = std::make_unique<
+ base::WeakPtrFactory<VideoCaptureDeviceAVFoundation>>(self);
+ }
+ constexpr base::TimeDelta kStallCheckInterval =
+ base::TimeDelta::FromSeconds(1);
+ auto callback_lambda =
+ [](base::WeakPtr<VideoCaptureDeviceAVFoundation> weakSelf,
+ int failedCheckCount) {
+ VideoCaptureDeviceAVFoundation* strongSelf = weakSelf.get();
+ if (!strongSelf)
+ return;
+ [strongSelf doStallCheck:failedCheckCount];
+ };
+ _mainThreadTaskRunner->PostDelayedTask(
+ FROM_HERE,
+ base::BindOnce(callback_lambda,
+ _weakPtrFactoryForStallCheck->GetWeakPtr(),
+ nextFailedCheckCount),
+ kStallCheckInterval);
+ } else {
+ // Capture appears to be stalled. Restart it.
+ LOG(ERROR) << "Capture appears to have stalled, restarting.";
+ [self stopCapture];
+ [self startCapture];
+ }
}
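Distilled to its essentials, the recovery logic above is a self-rescheduling watchdog. A minimal standalone C++ sketch of the same pattern (StallWatchdog and its callbacks are hypothetical names, not part of this patch):

    #include <functional>

    // Reset the failure counter whenever a frame arrives; otherwise count the
    // miss, and restart capture after five consecutive missed checks.
    class StallWatchdog {
     public:
      StallWatchdog(std::function<void()> restart,
                    std::function<void()> schedule_check)
          : restart_(std::move(restart)),
            schedule_check_(std::move(schedule_check)) {}

      // Called from the frame-delivery path.
      void OnFrame() { captured_since_last_check_ = true; }

      // Called once per second, e.g. from a delayed task.
      void Check() {
        failed_checks_ = captured_since_last_check_ ? 0 : failed_checks_ + 1;
        captured_since_last_check_ = false;
        if (failed_checks_ >= 5) {
          failed_checks_ = 0;
          restart_();  // stopCapture + startCapture in the real code.
        }
        schedule_check_();  // PostDelayedTask(..., 1 second) in the real code.
      }

     private:
      std::function<void()> restart_;
      std::function<void()> schedule_check_;
      bool captured_since_last_check_ = false;
      int failed_checks_ = 0;
    };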
// |captureOutput| is called by the capture device to deliver a new frame.
@@ -689,10 +866,15 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
// Concurrent calls into |_frameReceiver| are not supported, so take |_lock|
// before any of the subsequent paths.
base::AutoLock lock(_lock);
+ _capturedFrameSinceLastStallCheck = YES;
if (!_frameReceiver)
return;
const base::TimeDelta timestamp = GetCMSampleBufferTimestamp(sampleBuffer);
+ bool logUma = !std::exchange(_capturedFirstFrame, true);
+ if (logUma) {
+ media::LogFirstCapturedVideoFrame(_bestCaptureFormat, sampleBuffer);
+ }
// The SampleBufferTransformer CHECK-crashes if the sample buffer is not MJPEG
// and does not have a pixel buffer (https://crbug.com/1160647) so we fall
@@ -700,26 +882,30 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
// TODO(https://crbug.com/1160315): When the SampleBufferTransformer is
// patched to support non-MJPEG-and-non-pixel-buffer sample buffers, remove
// this workaround.
- bool sampleBufferLacksPixelBufferAndIsNotMjpeg =
- !CMSampleBufferGetImageBuffer(sampleBuffer) &&
+ bool sampleHasPixelBufferOrIsMjpeg =
+ CMSampleBufferGetImageBuffer(sampleBuffer) ||
CMFormatDescriptionGetMediaSubType(CMSampleBufferGetFormatDescription(
- sampleBuffer)) != kCMVideoCodecType_JPEG_OpenDML;
+ sampleBuffer)) == kCMVideoCodecType_JPEG_OpenDML;
// If the SampleBufferTransformer is enabled, convert all possible capture
// formats to an IOSurface-backed NV12 pixel buffer.
- // TODO(hbos): If |_sampleBufferTransformer| gets shipped 100%, delete the
+ // TODO(https://crbug.com/1175142): Update this code path so that it is
+ // possible to turn on/off the kAVFoundationCaptureV2ZeroCopy feature and the
+ // kInCaptureConvertToNv12 feature separately.
+ // TODO(hbos): When |_sampleBufferTransformer| gets shipped 100%, delete the
// other code paths.
- if (_sampleBufferTransformer && !sampleBufferLacksPixelBufferAndIsNotMjpeg) {
+ if (_sampleBufferTransformer && sampleHasPixelBufferOrIsMjpeg) {
+ _sampleBufferTransformer->Reconfigure(
+ media::SampleBufferTransformer::GetBestTransformerForNv12Output(
+ sampleBuffer),
+ kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
+ media::GetSampleBufferSize(sampleBuffer), kPixelBufferPoolSize);
base::ScopedCFTypeRef<CVPixelBufferRef> pixelBuffer =
- _sampleBufferTransformer->AutoReconfigureAndTransform(sampleBuffer);
+ _sampleBufferTransformer->Transform(sampleBuffer);
if (!pixelBuffer) {
LOG(ERROR) << "Failed to transform captured frame. Dropping frame.";
return;
}
- IOSurfaceRef ioSurface = CVPixelBufferGetIOSurface(pixelBuffer);
- CHECK(ioSurface);
- CHECK_EQ(CVPixelBufferGetPixelFormatType(pixelBuffer),
- kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange); // NV12
const media::VideoCaptureFormat captureFormat(
gfx::Size(CVPixelBufferGetWidth(pixelBuffer),
CVPixelBufferGetHeight(pixelBuffer)),
@@ -730,14 +916,14 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
// results in log spam and a default color space format is returned. To
// avoid this, we pretend the color space is kColorSpaceRec709Apple which
// triggers a path that avoids color space parsing inside of
- // processNV12IOSurface.
+ // processPixelBufferNV12IOSurface.
// TODO(hbos): Investigate how to successfully parse and/or configure the
// color space correctly. The implications of this hack is not fully
// understood.
- [self processNV12IOSurface:ioSurface
- captureFormat:captureFormat
- colorSpace:kColorSpaceRec709Apple
- timestamp:timestamp];
+ [self processPixelBufferNV12IOSurface:pixelBuffer
+ captureFormat:captureFormat
+ colorSpace:kColorSpaceRec709Apple
+ timestamp:timestamp];
return;
}
@@ -773,29 +959,28 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
static const bool kEnableGpuMemoryBuffers =
base::FeatureList::IsEnabled(media::kAVFoundationCaptureV2ZeroCopy);
if (kEnableGpuMemoryBuffers) {
- IOSurfaceRef ioSurface = CVPixelBufferGetIOSurface(pixelBuffer);
- if (ioSurface && videoPixelFormat == media::PIXEL_FORMAT_NV12) {
- [self processNV12IOSurface:ioSurface
- captureFormat:captureFormat
- colorSpace:colorSpace
- timestamp:timestamp];
+ if (CVPixelBufferGetIOSurface(pixelBuffer) &&
+ videoPixelFormat == media::PIXEL_FORMAT_NV12) {
+ [self processPixelBufferNV12IOSurface:pixelBuffer
+ captureFormat:captureFormat
+ colorSpace:colorSpace
+ timestamp:timestamp];
return;
}
}
- // Second preference is to read the CVPixelBuffer.
- if ([self processPixelBuffer:pixelBuffer
- captureFormat:captureFormat
- colorSpace:colorSpace
- timestamp:timestamp]) {
+ // Second preference is to read the CVPixelBuffer's planes.
+ if ([self processPixelBufferPlanes:pixelBuffer
+ captureFormat:captureFormat
+ colorSpace:colorSpace
+ timestamp:timestamp]) {
return;
}
}
// Last preference is to read the CMSampleBuffer.
- gfx::ColorSpace colorSpace;
- if (@available(macOS 10.11, *))
- colorSpace = media::GetFormatDescriptionColorSpace(formatDescription);
+ gfx::ColorSpace colorSpace =
+ media::GetFormatDescriptionColorSpace(formatDescription);
[self processSample:sampleBuffer
captureFormat:captureFormat
colorSpace:colorSpace
@@ -821,4 +1006,9 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
FROM_HERE, base::SysNSStringToUTF8(error));
}
+- (void)callLocked:(base::OnceClosure)lambda {
+ base::AutoLock lock(_lock);
+ std::move(lambda).Run();
+}
+
@end
diff --git a/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac_unittest.mm b/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac_unittest.mm
index 63e3cf84b1e..cd55822cea3 100644
--- a/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac_unittest.mm
+++ b/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac_unittest.mm
@@ -9,16 +9,411 @@
#include "base/bind.h"
#include "base/mac/scoped_nsobject.h"
#include "base/run_loop.h"
+#include "base/test/bind.h"
#include "base/test/gmock_callback_support.h"
-#import "media/capture/video/mac/test/mock_video_capture_device_avfoundation_frame_receiver_mac.h"
-#import "media/capture/video/mac/test/video_capture_test_utils_mac.h"
+#include "base/test/scoped_feature_list.h"
+#include "base/time/time.h"
+#include "media/base/video_types.h"
+#include "media/capture/video/mac/sample_buffer_transformer_mac.h"
+#include "media/capture/video/mac/test/mock_video_capture_device_avfoundation_frame_receiver_mac.h"
+#include "media/capture/video/mac/test/pixel_buffer_test_utils_mac.h"
+#include "media/capture/video/mac/test/video_capture_test_utils_mac.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "ui/gfx/color_space.h"
using testing::_;
namespace media {
+TEST(VideoCaptureDeviceAVFoundationMacTest,
+ OutputsNv12WithoutScalingByDefault) {
+ base::test::ScopedFeatureList scoped_feature_list;
+ scoped_feature_list.InitWithFeatures(
+ {kInCaptureConvertToNv12, kInCapturerScaling}, {});
+
+ RunTestCase(base::BindOnce([] {
+ NSString* deviceId = GetFirstDeviceId();
+ if (!deviceId) {
+ LOG(ERROR) << "No camera available. Exiting test.";
+ return;
+ }
+
+ testing::NiceMock<MockVideoCaptureDeviceAVFoundationFrameReceiver>
+ frame_receiver;
+ base::scoped_nsobject<VideoCaptureDeviceAVFoundation> captureDevice(
+ [[VideoCaptureDeviceAVFoundation alloc]
+ initWithFrameReceiver:&frame_receiver]);
+
+ NSString* errorMessage = nil;
+ ASSERT_TRUE([captureDevice setCaptureDevice:deviceId
+ errorMessage:&errorMessage]);
+ ASSERT_TRUE([captureDevice startCapture]);
+
+ bool has_received_first_frame = false;
+ base::RunLoop first_frame_received(
+ base::RunLoop::Type::kNestableTasksAllowed);
+ EXPECT_CALL(frame_receiver, ReceiveExternalGpuMemoryBufferFrame)
+ .WillRepeatedly(testing::Invoke(
+ [&](CapturedExternalVideoBuffer frame,
+ std::vector<CapturedExternalVideoBuffer> scaled_frames,
+ base::TimeDelta timestamp) {
+ if (has_received_first_frame) {
+ // Ignore subsequent frames.
+ return;
+ }
+ EXPECT_EQ(frame.format.pixel_format, PIXEL_FORMAT_NV12);
+ EXPECT_TRUE(scaled_frames.empty());
+ has_received_first_frame = true;
+ first_frame_received.Quit();
+ }));
+ first_frame_received.Run();
+
+ [captureDevice stopCapture];
+ }));
+}
+
+TEST(VideoCaptureDeviceAVFoundationMacTest,
+ SpecifiedScalingIsIgnoredWhenInCapturerScalingIsNotEnabled) {
+ base::test::ScopedFeatureList scoped_feature_list;
+ scoped_feature_list.InitAndEnableFeature(kInCaptureConvertToNv12);
+ // By default, kInCapturerScaling is false.
+ EXPECT_FALSE(base::FeatureList::IsEnabled(kInCapturerScaling));
+
+ RunTestCase(base::BindOnce([] {
+ NSString* deviceId = GetFirstDeviceId();
+ if (!deviceId) {
+ LOG(ERROR) << "No camera available. Exiting test.";
+ return;
+ }
+
+ testing::NiceMock<MockVideoCaptureDeviceAVFoundationFrameReceiver>
+ frame_receiver;
+ base::scoped_nsobject<VideoCaptureDeviceAVFoundation> captureDevice(
+ [[VideoCaptureDeviceAVFoundation alloc]
+ initWithFrameReceiver:&frame_receiver]);
+
+ NSString* errorMessage = nil;
+ ASSERT_TRUE([captureDevice setCaptureDevice:deviceId
+ errorMessage:&errorMessage]);
+
+ std::vector<gfx::Size> scaled_resolutions;
+ scaled_resolutions.emplace_back(320, 240);
+ [captureDevice setScaledResolutions:scaled_resolutions];
+
+ ASSERT_TRUE([captureDevice startCapture]);
+
+ bool has_received_first_frame = false;
+ base::RunLoop first_frame_received(
+ base::RunLoop::Type::kNestableTasksAllowed);
+ EXPECT_CALL(frame_receiver, ReceiveExternalGpuMemoryBufferFrame)
+ .WillRepeatedly(testing::Invoke(
+ [&](CapturedExternalVideoBuffer frame,
+ std::vector<CapturedExternalVideoBuffer> scaled_frames,
+ base::TimeDelta timestamp) {
+ if (has_received_first_frame) {
+ // Ignore subsequent frames.
+ return;
+ }
+ EXPECT_TRUE(scaled_frames.empty());
+ has_received_first_frame = true;
+ first_frame_received.Quit();
+ }));
+ first_frame_received.Run();
+
+ [captureDevice stopCapture];
+ }));
+}
+
+TEST(VideoCaptureDeviceAVFoundationMacTest, SpecifiedScalingOutputsNv12) {
+ base::test::ScopedFeatureList scoped_feature_list;
+ scoped_feature_list.InitWithFeatures(
+ {kInCaptureConvertToNv12, kInCapturerScaling}, {});
+
+ RunTestCase(base::BindOnce([] {
+ NSString* deviceId = GetFirstDeviceId();
+ if (!deviceId) {
+ LOG(ERROR) << "No camera available. Exiting test.";
+ return;
+ }
+
+ testing::NiceMock<MockVideoCaptureDeviceAVFoundationFrameReceiver>
+ frame_receiver;
+ base::scoped_nsobject<VideoCaptureDeviceAVFoundation> captureDevice(
+ [[VideoCaptureDeviceAVFoundation alloc]
+ initWithFrameReceiver:&frame_receiver]);
+
+ NSString* errorMessage = nil;
+ ASSERT_TRUE([captureDevice setCaptureDevice:deviceId
+ errorMessage:&errorMessage]);
+
+ std::vector<gfx::Size> scaled_resolutions;
+ scaled_resolutions.emplace_back(320, 240);
+ [captureDevice setScaledResolutions:scaled_resolutions];
+
+ ASSERT_TRUE([captureDevice startCapture]);
+
+ bool has_received_first_frame = false;
+ base::RunLoop first_frame_received(
+ base::RunLoop::Type::kNestableTasksAllowed);
+ EXPECT_CALL(frame_receiver, ReceiveExternalGpuMemoryBufferFrame)
+ .WillRepeatedly(testing::Invoke(
+ [&](CapturedExternalVideoBuffer frame,
+ std::vector<CapturedExternalVideoBuffer> scaled_frames,
+ base::TimeDelta timestamp) {
+ if (has_received_first_frame) {
+ // Ignore subsequent frames.
+ return;
+ }
+ EXPECT_EQ(frame.format.pixel_format, PIXEL_FORMAT_NV12);
+ ASSERT_EQ(scaled_frames.size(), 1u);
+ EXPECT_EQ(scaled_frames[0].format.frame_size,
+ scaled_resolutions[0]);
+ EXPECT_EQ(scaled_frames[0].format.pixel_format,
+ PIXEL_FORMAT_NV12);
+ has_received_first_frame = true;
+ first_frame_received.Quit();
+ }));
+ first_frame_received.Run();
+
+ [captureDevice stopCapture];
+ }));
+}
+
+TEST(VideoCaptureDeviceAVFoundationMacTest,
+ SpecifiedScalingCanChangeDuringCapture) {
+ base::test::ScopedFeatureList scoped_feature_list;
+ scoped_feature_list.InitWithFeatures(
+ {kInCaptureConvertToNv12, kInCapturerScaling}, {});
+
+ RunTestCase(base::BindOnce([] {
+ NSString* deviceId = GetFirstDeviceId();
+ if (!deviceId) {
+ LOG(ERROR) << "No camera available. Exiting test.";
+ return;
+ }
+
+ testing::NiceMock<MockVideoCaptureDeviceAVFoundationFrameReceiver>
+ frame_receiver;
+ base::scoped_nsobject<VideoCaptureDeviceAVFoundation> captureDevice(
+ [[VideoCaptureDeviceAVFoundation alloc]
+ initWithFrameReceiver:&frame_receiver]);
+
+ NSString* errorMessage = nil;
+ ASSERT_TRUE([captureDevice setCaptureDevice:deviceId
+ errorMessage:&errorMessage]);
+
+ // Start capture without scaling and wait until frames are flowing.
+ [captureDevice setScaledResolutions:{}];
+ ASSERT_TRUE([captureDevice startCapture]);
+
+ bool has_received_first_frame = false;
+ base::RunLoop first_frame_received(
+ base::RunLoop::Type::kNestableTasksAllowed);
+ EXPECT_CALL(frame_receiver, ReceiveExternalGpuMemoryBufferFrame)
+ .WillRepeatedly(testing::Invoke(
+ [&](CapturedExternalVideoBuffer frame,
+ std::vector<CapturedExternalVideoBuffer> scaled_frames,
+ base::TimeDelta timestamp) {
+ if (has_received_first_frame) {
+ // Ignore subsequent frames.
+ return;
+ }
+ EXPECT_TRUE(scaled_frames.empty());
+ has_received_first_frame = true;
+ first_frame_received.Quit();
+ }));
+ first_frame_received.Run();
+
+ // Specify scaling and wait for scaled frames to arrive.
+ std::vector<gfx::Size> scaled_resolutions;
+ scaled_resolutions.emplace_back(320, 240);
+ [captureDevice setScaledResolutions:scaled_resolutions];
+
+ bool has_received_scaled_frame = false;
+ base::RunLoop scaled_frame_received(
+ base::RunLoop::Type::kNestableTasksAllowed);
+ EXPECT_CALL(frame_receiver, ReceiveExternalGpuMemoryBufferFrame)
+ .WillRepeatedly(testing::Invoke(
+ [&](CapturedExternalVideoBuffer frame,
+ std::vector<CapturedExternalVideoBuffer> scaled_frames,
+ base::TimeDelta timestamp) {
+ if (has_received_scaled_frame || scaled_frames.empty()) {
+ // Ignore subsequent frames.
+ return;
+ }
+ has_received_scaled_frame = true;
+ scaled_frame_received.Quit();
+ }));
+ scaled_frame_received.Run();
+
+ [captureDevice stopCapture];
+ }));
+}
+
+TEST(VideoCaptureDeviceAVFoundationMacTest,
+ SpecifiedScalingUsesGoodSizesButNotBadSizes) {
+ base::test::ScopedFeatureList scoped_feature_list;
+ scoped_feature_list.InitWithFeatures(
+ {kInCaptureConvertToNv12, kInCapturerScaling}, {});
+
+ RunTestCase(base::BindOnce([] {
+ VideoCaptureDeviceFactoryMac video_capture_device_factory;
+ std::vector<VideoCaptureDeviceInfo> device_infos =
+ GetDevicesInfo(&video_capture_device_factory);
+ if (device_infos.empty()) {
+ LOG(ERROR) << "No camera available. Exiting test.";
+ return;
+ }
+ const auto& device_info = device_infos.front();
+ NSString* deviceId = [NSString
+ stringWithUTF8String:device_info.descriptor.device_id.c_str()];
+ VideoCaptureFormat camera_format = device_info.supported_formats.front();
+
+ testing::NiceMock<MockVideoCaptureDeviceAVFoundationFrameReceiver>
+ frame_receiver;
+ base::scoped_nsobject<VideoCaptureDeviceAVFoundation> captureDevice(
+ [[VideoCaptureDeviceAVFoundation alloc]
+ initWithFrameReceiver:&frame_receiver]);
+
+ NSString* errorMessage = nil;
+ ASSERT_TRUE([captureDevice setCaptureDevice:deviceId
+ errorMessage:&errorMessage]);
+
+ // Capture at a resolution lower than some of the scaled resolutions
+ // requested below.
+ ASSERT_TRUE([captureDevice
+ setCaptureHeight:camera_format.frame_size.height()
+ width:camera_format.frame_size.width()
+ frameRate:camera_format.frame_rate]);
+ std::vector<gfx::Size> scaled_resolutions;
+ // Bad resolution because it causes upscale.
+ scaled_resolutions.emplace_back(camera_format.frame_size.width() * 2,
+ camera_format.frame_size.height() * 2);
+ // Bad resolution because it is the same as the captured resolution.
+ scaled_resolutions.push_back(camera_format.frame_size);
+ // Good resolution because it causes downscale in both dimensions.
+ scaled_resolutions.emplace_back(camera_format.frame_size.width() / 2,
+ camera_format.frame_size.height() / 2);
+ // Good resolution because it causes downscale in both dimensions.
+ scaled_resolutions.emplace_back(camera_format.frame_size.width() / 4,
+ camera_format.frame_size.height() / 4);
+ // Good resolution because it causes downscale in one dimension (stretch).
+ scaled_resolutions.emplace_back(camera_format.frame_size.width() / 2,
+ camera_format.frame_size.height());
+ [captureDevice setScaledResolutions:scaled_resolutions];
+
+ ASSERT_TRUE([captureDevice startCapture]);
+
+ bool has_received_first_frame = false;
+ base::RunLoop first_frame_received(
+ base::RunLoop::Type::kNestableTasksAllowed);
+ EXPECT_CALL(frame_receiver, ReceiveExternalGpuMemoryBufferFrame)
+ .WillRepeatedly(testing::Invoke(
+ [&](CapturedExternalVideoBuffer frame,
+ std::vector<CapturedExternalVideoBuffer> scaled_frames,
+ base::TimeDelta timestamp) {
+ if (has_received_first_frame) {
+ // Normally we have time to stop capturing before multiple
+ // frames are received, but for the test to be able to run
+ // on slow bots we are prepared for this method to be
+ // invoked any number of times. Frames subsequent to the
+ // first one are ignored.
+ return;
+ }
+
+ EXPECT_EQ(scaled_frames.size(), 3u);
+ // The bad resolutions are ignored and the good resolutions
+ // are output in the requested order.
+ EXPECT_EQ(scaled_frames[0].format.frame_size,
+ scaled_resolutions[2]);
+ EXPECT_EQ(scaled_frames[1].format.frame_size,
+ scaled_resolutions[3]);
+ EXPECT_EQ(scaled_frames[2].format.frame_size,
+ scaled_resolutions[4]);
+
+ has_received_first_frame = true;
+ first_frame_received.Quit();
+ }));
+ first_frame_received.Run();
+
+ [captureDevice stopCapture];
+ }));
+}
+
+// This is approximately the same test as the one above, except it does not
+// rely on having a camera. Instead we mock-invoke
+// processPixelBufferNV12IOSurface from the test as if a camera had produced a
+// frame.
+TEST(VideoCaptureDeviceAVFoundationMacTest,
+ ProcessPixelBufferNV12IOSurfaceWithGoodAndBadScaling) {
+ base::test::ScopedFeatureList scoped_feature_list;
+ scoped_feature_list.InitAndEnableFeature(kInCapturerScaling);
+
+ RunTestCase(base::BindOnce([] {
+ testing::NiceMock<MockVideoCaptureDeviceAVFoundationFrameReceiver>
+ frame_receiver;
+ base::scoped_nsobject<VideoCaptureDeviceAVFoundation> captureDevice(
+ [[VideoCaptureDeviceAVFoundation alloc]
+ initWithFrameReceiver:&frame_receiver]);
+
+ // Use a capture resolution lower than some of the scaled resolutions
+ // requested below.
+ gfx::Size capture_resolution(640, 360);
+ std::vector<gfx::Size> scaled_resolutions;
+ // Bad resolution because it causes upscale.
+ scaled_resolutions.emplace_back(capture_resolution.width() * 2,
+ capture_resolution.height() * 2);
+ // Bad resolution because it is the same as the captured resolution.
+ scaled_resolutions.push_back(capture_resolution);
+ // Good resolution because it causes downscale in both dimensions.
+ scaled_resolutions.emplace_back(capture_resolution.width() / 2,
+ capture_resolution.height() / 2);
+ // Good resolution because it causes downscale in both dimensions.
+ scaled_resolutions.emplace_back(capture_resolution.width() / 4,
+ capture_resolution.height() / 4);
+ // Good resolution because it causes downscale in one dimension (stretch).
+ scaled_resolutions.emplace_back(capture_resolution.width() / 2,
+ capture_resolution.height());
+ [captureDevice setScaledResolutions:scaled_resolutions];
+
+ // Create a blank NV12 pixel buffer that we pretend was captured.
+ VideoCaptureFormat capture_format(capture_resolution, 30,
+ PIXEL_FORMAT_NV12);
+ std::unique_ptr<ByteArrayPixelBuffer> yuvs_buffer =
+ CreateYuvsPixelBufferFromSingleRgbColor(
+ capture_resolution.width(), capture_resolution.height(), 0, 0, 0);
+ base::ScopedCFTypeRef<CVPixelBufferRef> pixelBuffer =
+ PixelBufferPool::Create(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
+ capture_resolution.width(),
+ capture_resolution.height(), 1)
+ ->CreateBuffer();
+ DCHECK(PixelBufferTransferer().TransferImage(yuvs_buffer->pixel_buffer,
+ pixelBuffer));
+
+ [captureDevice
+ callLocked:base::BindLambdaForTesting([&] {
+ EXPECT_CALL(frame_receiver, ReceiveExternalGpuMemoryBufferFrame)
+ .WillOnce(testing::Invoke(
+ [&](CapturedExternalVideoBuffer frame,
+ std::vector<CapturedExternalVideoBuffer> scaled_frames,
+ base::TimeDelta timestamp) {
+ EXPECT_EQ(scaled_frames.size(), 3u);
+ // The bad resolutions are ignored and the good
+ // resolutions are output in the requested order.
+ EXPECT_EQ(scaled_frames[0].format.frame_size,
+ scaled_resolutions[2]);
+ EXPECT_EQ(scaled_frames[1].format.frame_size,
+ scaled_resolutions[3]);
+ EXPECT_EQ(scaled_frames[2].format.frame_size,
+ scaled_resolutions[4]);
+ }));
+ [captureDevice
+ processPixelBufferNV12IOSurface:pixelBuffer
+ captureFormat:capture_format
+ colorSpace:gfx::ColorSpace::CreateSRGB()
+ timestamp:base::TimeDelta()];
+ })];
+ }));
+}
+
TEST(VideoCaptureDeviceAVFoundationMacTest, TakePhoto) {
RunTestCase(base::BindOnce([] {
NSString* deviceId = GetFirstDeviceId();
@@ -213,4 +608,29 @@ TEST(VideoCaptureDeviceAVFoundationMacTest,
}));
}
+TEST(VideoCaptureDeviceAVFoundationMacTest, ForwardsOddPixelBufferResolution) {
+ // See crbug/1168112.
+ RunTestCase(base::BindOnce([] {
+ testing::NiceMock<MockVideoCaptureDeviceAVFoundationFrameReceiver>
+ frame_receiver;
+ base::scoped_nsobject<VideoCaptureDeviceAVFoundation> captureDevice(
+ [[VideoCaptureDeviceAVFoundation alloc]
+ initWithFrameReceiver:&frame_receiver]);
+
+ gfx::Size size(1280, 719);
+ VideoCaptureFormat format(size, 30, PIXEL_FORMAT_YUY2);
+ std::unique_ptr<ByteArrayPixelBuffer> buffer =
+ CreateYuvsPixelBufferFromSingleRgbColor(size.width(), size.height(), 0,
+ 0, 0);
+ [captureDevice
+ callLocked:base::BindLambdaForTesting([&] {
+ EXPECT_CALL(frame_receiver, ReceiveFrame(_, _, format, _, _, _, _));
+ [captureDevice processPixelBufferPlanes:buffer->pixel_buffer
+ captureFormat:format
+ colorSpace:gfx::ColorSpace::CreateSRGB()
+ timestamp:base::TimeDelta()];
+ })];
+ }));
+}
+
} // namespace media
diff --git a/chromium/media/capture/video/mac/video_capture_device_avfoundation_protocol_mac.h b/chromium/media/capture/video/mac/video_capture_device_avfoundation_protocol_mac.h
index f121dd8fe2e..63ccc6ddb1d 100644
--- a/chromium/media/capture/video/mac/video_capture_device_avfoundation_protocol_mac.h
+++ b/chromium/media/capture/video/mac/video_capture_device_avfoundation_protocol_mac.h
@@ -8,11 +8,14 @@
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
+#include <vector>
+
#import "base/mac/scoped_nsobject.h"
#include "base/synchronization/lock.h"
#include "base/threading/thread_checker.h"
#include "media/capture/video/video_capture_device.h"
#include "media/capture/video_capture_types.h"
+#include "ui/gfx/geometry/size.h"
namespace media {
class VideoCaptureDeviceMac;
@@ -35,9 +38,8 @@ class CAPTURE_EXPORT VideoCaptureDeviceAVFoundationFrameReceiver {
// function may be called from any thread, including those controlled by
// AVFoundation.
virtual void ReceiveExternalGpuMemoryBufferFrame(
- gfx::GpuMemoryBufferHandle handle,
- const VideoCaptureFormat& frame_format,
- const gfx::ColorSpace color_space,
+ CapturedExternalVideoBuffer frame,
+ std::vector<CapturedExternalVideoBuffer> scaled_frames,
base::TimeDelta timestamp) = 0;
// Callbacks with the result of a still image capture, or in case of error,
@@ -94,6 +96,13 @@ class CAPTURE_EXPORT VideoCaptureDeviceAVFoundationFrameReceiver {
width:(int)width
frameRate:(float)frameRate;
+// If an efficient path is available, the capturer will perform scaling and
+// deliver scaled frames to the |frameReceiver| as specified by |resolutions|.
+// The scaled frames are delivered in addition to the original captured frame.
+// Resolutions that match the captured frame or that would result in upscaling
+// are ignored.
+- (void)setScaledResolutions:(std::vector<gfx::Size>)resolutions;
+
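As a hedged illustration of the filtering rule above (the 640x360 capture size is an assumption for the example):

    // Assuming the device is capturing at 640x360:
    std::vector<gfx::Size> resolutions = {
        gfx::Size(1280, 720),   // ignored: would upscale
        gfx::Size(640, 360),    // ignored: matches the captured frame
        gfx::Size(320, 180)};   // delivered as an additional scaled frame
    [captureDevice setScaledResolutions:resolutions];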
// Starts video capturing and registers notification listeners. Must be
// called after setCaptureDevice:, and, eventually, also after
// setCaptureHeight:width:frameRate:.
diff --git a/chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h b/chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h
index 82956f85a20..b5fa0a7178c 100644
--- a/chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h
+++ b/chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h
@@ -39,6 +39,9 @@ void ExtractBaseAddressAndLength(char** base_address,
// on if |kMacNextGenerationCapturer| is enabled or disabled.
Class GetVideoCaptureDeviceAVFoundationImplementationClass();
+gfx::Size CAPTURE_EXPORT GetPixelBufferSize(CVPixelBufferRef pixel_buffer);
+gfx::Size CAPTURE_EXPORT GetSampleBufferSize(CMSampleBufferRef sample_buffer);
+
} // namespace media
#endif // MEDIA_CAPTURE_VIDEO_MAC_VIDEO_CAPTURE_DEVICE_AVFOUNDATION_UTILS_MAC_H_
diff --git a/chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.mm b/chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.mm
index 164e850a526..ca211aeb10d 100644
--- a/chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.mm
+++ b/chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.mm
@@ -233,4 +233,21 @@ Class GetVideoCaptureDeviceAVFoundationImplementationClass() {
return [VideoCaptureDeviceAVFoundationLegacy class];
}
+gfx::Size GetPixelBufferSize(CVPixelBufferRef pixel_buffer) {
+ return gfx::Size(CVPixelBufferGetWidth(pixel_buffer),
+ CVPixelBufferGetHeight(pixel_buffer));
+}
+
+gfx::Size GetSampleBufferSize(CMSampleBufferRef sample_buffer) {
+ if (CVPixelBufferRef pixel_buffer =
+ CMSampleBufferGetImageBuffer(sample_buffer)) {
+ return GetPixelBufferSize(pixel_buffer);
+ }
+ CMFormatDescriptionRef format_description =
+ CMSampleBufferGetFormatDescription(sample_buffer);
+ CMVideoDimensions dimensions =
+ CMVideoFormatDescriptionGetDimensions(format_description);
+ return gfx::Size(dimensions.width, dimensions.height);
+}
+
} // namespace media
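For context, a condensed sketch of how these helpers feed the transformer path in captureOutput earlier in this patch (ReconfigureForSample is a hypothetical wrapper; the calls inside it are from this patch):

    void ReconfigureForSample(media::SampleBufferTransformer* transformer,
                              CMSampleBufferRef sample_buffer) {
      // MJPEG samples carry no pixel buffer, so GetSampleBufferSize() falls
      // back to the CMFormatDescription dimensions in that case.
      transformer->Reconfigure(
          media::SampleBufferTransformer::GetBestTransformerForNv12Output(
              sample_buffer),
          kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
          media::GetSampleBufferSize(sample_buffer));
    }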
diff --git a/chromium/media/capture/video/mac/video_capture_device_mac.h b/chromium/media/capture/video/mac/video_capture_device_mac.h
index 1090327d09b..8178526d54e 100644
--- a/chromium/media/capture/video/mac/video_capture_device_mac.h
+++ b/chromium/media/capture/video/mac/video_capture_device_mac.h
@@ -81,9 +81,8 @@ class VideoCaptureDeviceMac
int aspect_denominator,
base::TimeDelta timestamp) override;
void ReceiveExternalGpuMemoryBufferFrame(
- gfx::GpuMemoryBufferHandle handle,
- const VideoCaptureFormat& frame_format,
- const gfx::ColorSpace color_space,
+ CapturedExternalVideoBuffer frame,
+ std::vector<CapturedExternalVideoBuffer> scaled_frames,
base::TimeDelta timestamp) override;
void OnPhotoTaken(const uint8_t* image_data,
size_t image_length,
diff --git a/chromium/media/capture/video/mac/video_capture_device_mac.mm b/chromium/media/capture/video/mac/video_capture_device_mac.mm
index 3a0c2e11974..d5691a42324 100644
--- a/chromium/media/capture/video/mac/video_capture_device_mac.mm
+++ b/chromium/media/capture/video/mac/video_capture_device_mac.mm
@@ -803,20 +803,19 @@ void VideoCaptureDeviceMac::ReceiveFrame(const uint8_t* video_frame,
}
void VideoCaptureDeviceMac::ReceiveExternalGpuMemoryBufferFrame(
- gfx::GpuMemoryBufferHandle handle,
- const VideoCaptureFormat& format,
- const gfx::ColorSpace color_space,
+ CapturedExternalVideoBuffer frame,
+ std::vector<CapturedExternalVideoBuffer> scaled_frames,
base::TimeDelta timestamp) {
- if (capture_format_.frame_size != format.frame_size) {
+ if (capture_format_.frame_size != frame.format.frame_size) {
ReceiveError(VideoCaptureError::kMacReceivedFrameWithUnexpectedResolution,
FROM_HERE,
- "Captured resolution " + format.frame_size.ToString() +
+ "Captured resolution " + frame.format.frame_size.ToString() +
", and expected " + capture_format_.frame_size.ToString());
return;
}
- client_->OnIncomingCapturedExternalBuffer(std::move(handle), format,
- color_space, base::TimeTicks::Now(),
- timestamp);
+ client_->OnIncomingCapturedExternalBuffer(std::move(frame),
+ std::move(scaled_frames),
+ base::TimeTicks::Now(), timestamp);
}
void VideoCaptureDeviceMac::OnPhotoTaken(const uint8_t* image_data,
diff --git a/chromium/media/capture/video/mac/video_capture_metrics_mac.h b/chromium/media/capture/video/mac/video_capture_metrics_mac.h
new file mode 100644
index 00000000000..c56789a5b22
--- /dev/null
+++ b/chromium/media/capture/video/mac/video_capture_metrics_mac.h
@@ -0,0 +1,23 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_MAC_VIDEO_CAPTURE_METRICS_MAC_H_
+#define MEDIA_CAPTURE_VIDEO_MAC_VIDEO_CAPTURE_METRICS_MAC_H_
+
+#import <AVFoundation/AVFoundation.h>
+#include <CoreMedia/CoreMedia.h>
+#import <Foundation/Foundation.h>
+
+#include "media/capture/capture_export.h"
+#include "ui/gfx/geometry/size.h"
+
+namespace media {
+
+CAPTURE_EXPORT
+void LogFirstCapturedVideoFrame(const AVCaptureDeviceFormat* bestCaptureFormat,
+ const CMSampleBufferRef buffer);
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_MAC_VIDEO_CAPTURE_METRICS_MAC_H_
\ No newline at end of file
diff --git a/chromium/media/capture/video/mac/video_capture_metrics_mac.mm b/chromium/media/capture/video/mac/video_capture_metrics_mac.mm
new file mode 100644
index 00000000000..9c6a64dd643
--- /dev/null
+++ b/chromium/media/capture/video/mac/video_capture_metrics_mac.mm
@@ -0,0 +1,88 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "media/capture/video/mac/video_capture_metrics_mac.h"
+
+#include "base/metrics/histogram_functions.h"
+#import "media/capture/video/mac/video_capture_device_avfoundation_mac.h"
+#include "media/capture/video/video_capture_device_info.h"
+
+namespace media {
+
+namespace {
+
+enum class ResolutionComparison {
+ kWidthGtHeightEq = 0,
+ kWidthLtHeightEq = 1,
+ kWidthEqHeightGt = 2,
+ kWidthEqHeightLt = 3,
+ kEq = 4,
+ kWidthGtHeightGt = 5,
+ kWidthLtHeightGt = 6,
+ kWidthGtHeightLt = 7,
+ kWidthLtHeightLt = 8,
+ kMaxValue = kWidthLtHeightLt,
+};
+
+ResolutionComparison CompareDimensions(const CMVideoDimensions& requested,
+ const CMVideoDimensions& captured) {
+ if (requested.width > captured.width) {
+ if (requested.height > captured.height)
+ return ResolutionComparison::kWidthGtHeightGt;
+ if (requested.height < captured.height)
+ return ResolutionComparison::kWidthGtHeightLt;
+ return ResolutionComparison::kWidthGtHeightEq;
+ } else if (requested.width < captured.width) {
+ if (requested.height > captured.height)
+ return ResolutionComparison::kWidthLtHeightGt;
+ if (requested.height < captured.height)
+ return ResolutionComparison::kWidthLtHeightLt;
+ return ResolutionComparison::kWidthLtHeightEq;
+ } else {
+ if (requested.height > captured.height)
+ return ResolutionComparison::kWidthEqHeightGt;
+ if (requested.height < captured.height)
+ return ResolutionComparison::kWidthEqHeightLt;
+ return ResolutionComparison::kEq;
+ }
+}
+
+} // namespace
+
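To connect these enum values to the bucket numbers asserted in the unit tests for this file, a hypothetical sketch (CompareDimensions is file-local, so this is illustrative rather than a test that could be added verbatim):

    // kEq is enum value 4, which is the bucket the exact-match test expects.
    EXPECT_EQ(ResolutionComparison::kEq,
              CompareDimensions({320, 180}, {320, 180}));
    // A request smaller than the capture in both dimensions is bucket 8.
    EXPECT_EQ(ResolutionComparison::kWidthLtHeightLt,
              CompareDimensions({320, 180}, {640, 360}));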
+void LogFirstCapturedVideoFrame(const AVCaptureDeviceFormat* bestCaptureFormat,
+ const CMSampleBufferRef buffer) {
+ if (bestCaptureFormat) {
+ const CMFormatDescriptionRef requestedFormat =
+ [bestCaptureFormat formatDescription];
+ base::UmaHistogramEnumeration(
+ "Media.VideoCapture.Mac.Device.RequestedPixelFormat",
+ [VideoCaptureDeviceAVFoundation
+ FourCCToChromiumPixelFormat:CMFormatDescriptionGetMediaSubType(
+ requestedFormat)],
+ media::VideoPixelFormat::PIXEL_FORMAT_MAX);
+
+ if (buffer) {
+ const CMFormatDescriptionRef capturedFormat =
+ CMSampleBufferGetFormatDescription(buffer);
+ base::UmaHistogramBoolean(
+ "Media.VideoCapture.Mac.Device.CapturedWithRequestedPixelFormat",
+ CMFormatDescriptionGetMediaSubType(capturedFormat) ==
+ CMFormatDescriptionGetMediaSubType(requestedFormat));
+ base::UmaHistogramEnumeration(
+ "Media.VideoCapture.Mac.Device.CapturedWithRequestedResolution",
+ CompareDimensions(
+ CMVideoFormatDescriptionGetDimensions(requestedFormat),
+ CMVideoFormatDescriptionGetDimensions(capturedFormat)));
+
+ const CVPixelBufferRef pixelBufferRef =
+ CMSampleBufferGetImageBuffer(buffer);
+ bool is_io_surface =
+ pixelBufferRef && CVPixelBufferGetIOSurface(pixelBufferRef);
+ base::UmaHistogramBoolean(
+ "Media.VideoCapture.Mac.Device.CapturedIOSurface", is_io_surface);
+ }
+ }
+}
+
+} // namespace media
\ No newline at end of file
diff --git a/chromium/media/capture/video/mac/video_capture_metrics_mac_unittest.mm b/chromium/media/capture/video/mac/video_capture_metrics_mac_unittest.mm
new file mode 100644
index 00000000000..205b20ec9bc
--- /dev/null
+++ b/chromium/media/capture/video/mac/video_capture_metrics_mac_unittest.mm
@@ -0,0 +1,87 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "media/capture/video/mac/video_capture_metrics_mac.h"
+
+#import <AVFoundation/AVFoundation.h>
+#include <CoreMedia/CoreMedia.h>
+#import <Foundation/Foundation.h>
+
+#include "base/mac/scoped_cftyperef.h"
+#include "base/test/metrics/histogram_tester.h"
+#include "media/base/video_types.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#import "third_party/ocmock/OCMock/OCMock.h"
+#include "third_party/ocmock/gtest_support.h"
+
+namespace media {
+
+namespace {} // namespace
+
+TEST(VideoCaptureMetricsMacTest, NoMetricsLoggedIfNullRequestedCaptureFormat) {
+ base::HistogramTester histogram_tester;
+ LogFirstCapturedVideoFrame(nullptr, nullptr);
+ EXPECT_THAT(histogram_tester.GetTotalCountsForPrefix("Media."),
+ testing::IsEmpty());
+}
+
+TEST(VideoCaptureMetricsMacTest, LogRequestedPixelFormat) {
+ base::HistogramTester histogram_tester;
+
+ base::ScopedCFTypeRef<CMFormatDescriptionRef> requested_format;
+ OSStatus status = CMVideoFormatDescriptionCreate(
+ kCFAllocatorDefault,
+ kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange /*NV12*/, 320, 180,
+ nullptr, requested_format.InitializeInto());
+ ASSERT_EQ(0, status);
+ id capture_format = OCMClassMock([AVCaptureDeviceFormat class]);
+ OCMStub([capture_format formatDescription]).andReturn(requested_format.get());
+
+ LogFirstCapturedVideoFrame(capture_format, nullptr);
+ EXPECT_THAT(histogram_tester.GetAllSamples(
+ "Media.VideoCapture.Mac.Device.RequestedPixelFormat"),
+ testing::UnorderedElementsAre(
+ base::Bucket(VideoPixelFormat::PIXEL_FORMAT_NV12, 1)));
+}
+
+TEST(VideoCaptureMetricsMacTest, LogFirstFrameWhenAsRequested) {
+ base::HistogramTester histogram_tester;
+
+ base::ScopedCFTypeRef<CMFormatDescriptionRef> requested_format;
+ OSStatus status = CMVideoFormatDescriptionCreate(
+ kCFAllocatorDefault,
+ kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange /*NV12*/, 320, 180,
+ nullptr, requested_format.InitializeInto());
+ ASSERT_EQ(0, status);
+ id capture_format = OCMClassMock([AVCaptureDeviceFormat class]);
+ OCMStub([capture_format formatDescription]).andReturn(requested_format.get());
+
+ // First frame equal.
+ base::ScopedCFTypeRef<CMSampleBufferRef> first_frame;
+ status = CMSampleBufferCreate(kCFAllocatorDefault, nullptr, false, nullptr,
+ nullptr, requested_format, 0, 0, nullptr, 0,
+ nullptr, first_frame.InitializeInto());
+ ASSERT_EQ(0, status);
+
+ LogFirstCapturedVideoFrame(capture_format, first_frame);
+
+ EXPECT_THAT(histogram_tester.GetAllSamples(
+ "Media.VideoCapture.Mac.Device.RequestedPixelFormat"),
+ testing::UnorderedElementsAre(
+ base::Bucket(VideoPixelFormat::PIXEL_FORMAT_NV12, 1)));
+ EXPECT_THAT(
+ histogram_tester.GetAllSamples(
+ "Media.VideoCapture.Mac.Device.CapturedWithRequestedPixelFormat"),
+ testing::UnorderedElementsAre(base::Bucket(1, 1)));
+ EXPECT_THAT(
+ histogram_tester.GetAllSamples(
+ "Media.VideoCapture.Mac.Device.CapturedWithRequestedResolution"),
+ testing::UnorderedElementsAre(base::Bucket(4, 1)));
+ EXPECT_THAT(histogram_tester.GetAllSamples(
+ "Media.VideoCapture.Mac.Device.CapturedIOSurface"),
+ testing::UnorderedElementsAre(base::Bucket(0, 1)));
+}
+
+} // namespace media
diff --git a/chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc b/chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc
index adac8b74a2b..0246f6cbe99 100644
--- a/chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc
+++ b/chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc
@@ -7,7 +7,7 @@
#include "media/video/fake_gpu_memory_buffer.h"
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
#include "media/capture/video/chromeos/request_manager.h"
#endif
@@ -26,7 +26,7 @@ MockGpuMemoryBufferManager::CreateFakeGpuMemoryBuffer(
gfx::BufferUsage usage,
gpu::SurfaceHandle surface_handle) {
auto gmb = std::make_unique<FakeGpuMemoryBuffer>(size, format);
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
// For faking a valid JPEG blob buffer.
if (base::checked_cast<size_t>(size.width()) >= sizeof(Camera3JpegBlob)) {
Camera3JpegBlob* header = reinterpret_cast<Camera3JpegBlob*>(
diff --git a/chromium/media/capture/video/mock_gpu_memory_buffer_manager.h b/chromium/media/capture/video/mock_gpu_memory_buffer_manager.h
index 20bbf07e4f3..614cf8b8ac6 100644
--- a/chromium/media/capture/video/mock_gpu_memory_buffer_manager.h
+++ b/chromium/media/capture/video/mock_gpu_memory_buffer_manager.h
@@ -30,6 +30,15 @@ class MockGpuMemoryBufferManager : public gpu::GpuMemoryBufferManager {
void(gfx::GpuMemoryBuffer* buffer,
const gpu::SyncToken& sync_token));
+ MOCK_METHOD3(CopyGpuMemoryBufferAsync,
+ void(gfx::GpuMemoryBufferHandle buffer_handle,
+ base::UnsafeSharedMemoryRegion memory_region,
+ base::OnceCallback<void(bool)> callback));
+
+ MOCK_METHOD2(CopyGpuMemoryBufferSync,
+ bool(gfx::GpuMemoryBufferHandle buffer_handle,
+ base::UnsafeSharedMemoryRegion memory_region));
+
static std::unique_ptr<gfx::GpuMemoryBuffer> CreateFakeGpuMemoryBuffer(
const gfx::Size& size,
gfx::BufferFormat format,
diff --git a/chromium/media/capture/video/mock_video_capture_device_client.h b/chromium/media/capture/video/mock_video_capture_device_client.h
index 1adfc805885..fd0ed66b802 100644
--- a/chromium/media/capture/video/mock_video_capture_device_client.h
+++ b/chromium/media/capture/video/mock_video_capture_device_client.h
@@ -35,10 +35,9 @@ class MockVideoCaptureDeviceClient : public VideoCaptureDevice::Client {
base::TimeTicks reference_time,
base::TimeDelta timestamp,
int frame_feedback_id));
- MOCK_METHOD5(OnIncomingCapturedExternalBuffer,
- void(gfx::GpuMemoryBufferHandle handle,
- const VideoCaptureFormat& format,
- const gfx::ColorSpace& color_space,
+ MOCK_METHOD4(OnIncomingCapturedExternalBuffer,
+ void(CapturedExternalVideoBuffer buffer,
+ std::vector<CapturedExternalVideoBuffer> scaled_buffers,
base::TimeTicks reference_time,
base::TimeDelta timestamp));
MOCK_METHOD4(ReserveOutputBuffer,
diff --git a/chromium/media/capture/video/mock_video_frame_receiver.h b/chromium/media/capture/video/mock_video_frame_receiver.h
index cee00ad229a..01b05db9b2c 100644
--- a/chromium/media/capture/video/mock_video_frame_receiver.h
+++ b/chromium/media/capture/video/mock_video_frame_receiver.h
@@ -37,14 +37,10 @@ class MockVideoFrameReceiver : public VideoFrameReceiver {
}
void OnFrameReadyInBuffer(
- int32_t buffer_id,
- int frame_feedback_id,
- std::unique_ptr<
- VideoCaptureDevice::Client::Buffer::ScopedAccessPermission>
- buffer_read_permission,
- media::mojom::VideoFrameInfoPtr frame_info) override {
- MockOnFrameReadyInBuffer(buffer_id, &buffer_read_permission,
- frame_info->coded_size);
+ ReadyFrameInBuffer frame,
+ std::vector<ReadyFrameInBuffer> scaled_frames) override {
+ MockOnFrameReadyInBuffer(frame.buffer_id, &frame.buffer_read_permission,
+ frame.frame_info->coded_size);
}
};
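With the aggregated signature, a test drives the mock by constructing ReadyFrameInBuffer directly. A sketch with arbitrary buffer id and size; the MOCK_METHOD declaration for MockOnFrameReadyInBuffer sits above the hunk shown here:

MockVideoFrameReceiver receiver;
EXPECT_CALL(receiver,
            MockOnFrameReadyInBuffer(0, testing::_, gfx::Size(1280, 720)));

mojom::VideoFrameInfoPtr info = mojom::VideoFrameInfo::New();
info->coded_size = gfx::Size(1280, 720);
receiver.OnFrameReadyInBuffer(
    ReadyFrameInBuffer(/*buffer_id=*/0, /*frame_feedback_id=*/0,
                       /*buffer_read_permission=*/nullptr, std::move(info)),
    /*scaled_frames=*/{});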
diff --git a/chromium/media/capture/video/video_capture_buffer_pool_impl.cc b/chromium/media/capture/video/video_capture_buffer_pool_impl.cc
index 6913bfed465..acc9a427b1b 100644
--- a/chromium/media/capture/video/video_capture_buffer_pool_impl.cc
+++ b/chromium/media/capture/video/video_capture_buffer_pool_impl.cc
@@ -15,6 +15,10 @@
#include "media/capture/video/video_capture_buffer_tracker_factory_impl.h"
#include "ui/gfx/buffer_format_util.h"
+#if defined(OS_WIN)
+#include "media/capture/video/win/video_capture_buffer_tracker_factory_win.h"
+#endif // defined(OS_WIN)
+
namespace media {
VideoCaptureBufferPoolImpl::VideoCaptureBufferPoolImpl(
@@ -22,8 +26,14 @@ VideoCaptureBufferPoolImpl::VideoCaptureBufferPoolImpl(
int count)
: buffer_type_(buffer_type),
count_(count),
+#if defined(OS_WIN)
+ buffer_tracker_factory_(
+ std::make_unique<media::VideoCaptureBufferTrackerFactoryWin>())
+#else
buffer_tracker_factory_(
- std::make_unique<media::VideoCaptureBufferTrackerFactoryImpl>()) {
+ std::make_unique<media::VideoCaptureBufferTrackerFactoryImpl>())
+#endif
+{
DCHECK_GT(count, 0);
}
diff --git a/chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.cc b/chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.cc
index 92c2a96d7b1..0fb7b918cdd 100644
--- a/chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.cc
+++ b/chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.cc
@@ -10,7 +10,7 @@
#include "build/build_config.h"
#include "media/capture/video/shared_memory_buffer_tracker.h"
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
#include "media/capture/video/chromeos/gpu_memory_buffer_tracker.h"
#endif
@@ -25,7 +25,7 @@ VideoCaptureBufferTrackerFactoryImpl::CreateTracker(
VideoCaptureBufferType buffer_type) {
switch (buffer_type) {
case VideoCaptureBufferType::kGpuMemoryBuffer:
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
return std::make_unique<GpuMemoryBufferTracker>();
#elif defined(OS_MAC)
return std::make_unique<GpuMemoryBufferTrackerMac>();
diff --git a/chromium/media/capture/video/video_capture_device.cc b/chromium/media/capture/video/video_capture_device.cc
index 4085a0c505f..648ca2fb57c 100644
--- a/chromium/media/capture/video/video_capture_device.cc
+++ b/chromium/media/capture/video/video_capture_device.cc
@@ -13,6 +13,30 @@
namespace media {
+CapturedExternalVideoBuffer::CapturedExternalVideoBuffer(
+ gfx::GpuMemoryBufferHandle handle,
+ VideoCaptureFormat format,
+ gfx::ColorSpace color_space)
+ : handle(std::move(handle)),
+ format(std::move(format)),
+ color_space(std::move(color_space)) {}
+
+CapturedExternalVideoBuffer::CapturedExternalVideoBuffer(
+ CapturedExternalVideoBuffer&& other)
+ : handle(std::move(other.handle)),
+ format(std::move(other.format)),
+ color_space(std::move(other.color_space)) {}
+
+CapturedExternalVideoBuffer::~CapturedExternalVideoBuffer() = default;
+
+CapturedExternalVideoBuffer& CapturedExternalVideoBuffer::operator=(
+ CapturedExternalVideoBuffer&& other) {
+ handle = std::move(other.handle);
+ format = std::move(other.format);
+ color_space = std::move(other.color_space);
+ return *this;
+}
+
VideoCaptureDevice::Client::Buffer::Buffer() : id(0), frame_feedback_id(0) {}
VideoCaptureDevice::Client::Buffer::Buffer(
diff --git a/chromium/media/capture/video/video_capture_device.h b/chromium/media/capture/video/video_capture_device.h
index 8fee0be95e5..b0c4c0466a0 100644
--- a/chromium/media/capture/video/video_capture_device.h
+++ b/chromium/media/capture/video/video_capture_device.h
@@ -67,6 +67,20 @@ class CAPTURE_EXPORT VideoFrameConsumerFeedbackObserver {
media::VideoFrameFeedback feedback) {}
};
+struct CAPTURE_EXPORT CapturedExternalVideoBuffer {
+ CapturedExternalVideoBuffer(gfx::GpuMemoryBufferHandle handle,
+ VideoCaptureFormat format,
+ gfx::ColorSpace color_space);
+ CapturedExternalVideoBuffer(CapturedExternalVideoBuffer&& other);
+ ~CapturedExternalVideoBuffer();
+
+ CapturedExternalVideoBuffer& operator=(CapturedExternalVideoBuffer&& other);
+
+ gfx::GpuMemoryBufferHandle handle;
+ VideoCaptureFormat format;
+ gfx::ColorSpace color_space;
+};
+
class CAPTURE_EXPORT VideoCaptureDevice
: public VideoFrameConsumerFeedbackObserver {
public:
@@ -187,9 +201,8 @@ class CAPTURE_EXPORT VideoCaptureDevice
// gfx::ScopedInUseIOSurface is used to prevent reuse of buffers until all
// consumers have consumed them.
virtual void OnIncomingCapturedExternalBuffer(
- gfx::GpuMemoryBufferHandle handle,
- const VideoCaptureFormat& format,
- const gfx::ColorSpace& color_space,
+ CapturedExternalVideoBuffer buffer,
+ std::vector<CapturedExternalVideoBuffer> scaled_buffers,
base::TimeTicks reference_time,
base::TimeDelta timestamp) = 0;
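A producer-side sketch of the new call shape. The locals handle, scaled_handle, format, scaled_format, color_space, reference_time, and timestamp are assumptions for illustration; since the struct is move-only, everything is handed over with std::move:

// Hypothetical producer code; all locals are assumed, not from the diff.
CapturedExternalVideoBuffer primary(std::move(handle), format, color_space);
std::vector<CapturedExternalVideoBuffer> scaled;
scaled.emplace_back(std::move(scaled_handle), scaled_format, color_space);
client->OnIncomingCapturedExternalBuffer(std::move(primary), std::move(scaled),
                                         reference_time, timestamp);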
diff --git a/chromium/media/capture/video/video_capture_device_client.cc b/chromium/media/capture/video/video_capture_device_client.cc
index d11eece6e42..cbe37e1b753 100644
--- a/chromium/media/capture/video/video_capture_device_client.cc
+++ b/chromium/media/capture/video/video_capture_device_client.cc
@@ -24,9 +24,9 @@
#include "media/capture/video_capture_types.h"
#include "third_party/libyuv/include/libyuv.h"
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
#include "media/capture/video/chromeos/video_capture_jpeg_decoder.h"
-#endif // BUILDFLAG(IS_ASH)
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
namespace {
@@ -163,7 +163,7 @@ class BufferPoolBufferHandleProvider
const int buffer_id_;
};
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
VideoCaptureDeviceClient::VideoCaptureDeviceClient(
VideoCaptureBufferType target_buffer_type,
std::unique_ptr<VideoFrameReceiver> receiver,
@@ -188,7 +188,7 @@ VideoCaptureDeviceClient::VideoCaptureDeviceClient(
receiver_(std::move(receiver)),
buffer_pool_(std::move(buffer_pool)),
last_captured_pixel_format_(PIXEL_FORMAT_UNKNOWN) {}
-#endif // BUILDFLAG(IS_ASH)
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
VideoCaptureDeviceClient::~VideoCaptureDeviceClient() {
for (int buffer_id : buffer_ids_known_by_receiver_)
@@ -226,7 +226,7 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
OnLog("Pixel format: " + VideoPixelFormatToString(format.pixel_format));
last_captured_pixel_format_ = format.pixel_format;
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
if (format.pixel_format == PIXEL_FORMAT_MJPEG &&
optional_jpeg_decoder_factory_callback_) {
external_jpeg_decoder_ =
@@ -234,7 +234,7 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
DCHECK(external_jpeg_decoder_);
external_jpeg_decoder_->Initialize();
}
-#endif // BUILDFLAG(IS_ASH)
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
}
if (!format.IsValid()) {
@@ -353,7 +353,7 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
// paddings and/or alignments, but it cannot be smaller.
DCHECK_GE(static_cast<size_t>(length), format.ImageAllocationSize());
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
if (external_jpeg_decoder_) {
const VideoCaptureJpegDecoder::STATUS status =
external_jpeg_decoder_->GetStatus();
@@ -369,7 +369,7 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
return;
}
}
-#endif // BUILDFLAG(IS_ASH)
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
// libyuv::ConvertToI420 uses Rec601 to convert RGB to YUV.
if (libyuv::ConvertToI420(
@@ -465,16 +465,31 @@ void VideoCaptureDeviceClient::OnIncomingCapturedGfxBuffer(
}
void VideoCaptureDeviceClient::OnIncomingCapturedExternalBuffer(
- gfx::GpuMemoryBufferHandle handle,
- const VideoCaptureFormat& format,
- const gfx::ColorSpace& color_space,
+ CapturedExternalVideoBuffer buffer,
+ std::vector<CapturedExternalVideoBuffer> scaled_buffers,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp) {
+ auto ready_frame = CreateReadyFrameFromExternalBuffer(
+ std::move(buffer), reference_time, timestamp);
+ std::vector<ReadyFrameInBuffer> scaled_ready_frames;
+ scaled_ready_frames.reserve(scaled_buffers.size());
+ for (auto& scaled_buffer : scaled_buffers) {
+ scaled_ready_frames.push_back(CreateReadyFrameFromExternalBuffer(
+ std::move(scaled_buffer), reference_time, timestamp));
+ }
+ receiver_->OnFrameReadyInBuffer(std::move(ready_frame),
+ std::move(scaled_ready_frames));
+}
+
+ReadyFrameInBuffer VideoCaptureDeviceClient::CreateReadyFrameFromExternalBuffer(
+ CapturedExternalVideoBuffer buffer,
base::TimeTicks reference_time,
base::TimeDelta timestamp) {
// Reserve an ID for this buffer that will not conflict with any of the IDs
// used by |buffer_pool_|.
int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId;
- int buffer_id =
- buffer_pool_->ReserveIdForExternalBuffer(handle, &buffer_id_to_drop);
+ int buffer_id = buffer_pool_->ReserveIdForExternalBuffer(buffer.handle,
+ &buffer_id_to_drop);
// If a buffer to retire was specified, retire one.
if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) {
@@ -491,30 +506,30 @@ void VideoCaptureDeviceClient::OnIncomingCapturedExternalBuffer(
if (!base::Contains(buffer_ids_known_by_receiver_, buffer_id)) {
media::mojom::VideoBufferHandlePtr buffer_handle =
media::mojom::VideoBufferHandle::New();
- buffer_handle->set_gpu_memory_buffer_handle(std::move(handle));
+ buffer_handle->set_gpu_memory_buffer_handle(std::move(buffer.handle));
receiver_->OnNewBuffer(buffer_id, std::move(buffer_handle));
buffer_ids_known_by_receiver_.push_back(buffer_id);
}
- // Tell |receiver_| that the frame has been received.
- {
- mojom::VideoFrameInfoPtr info = mojom::VideoFrameInfo::New();
- info->timestamp = timestamp;
- info->pixel_format = format.pixel_format;
- info->color_space = color_space;
- info->coded_size = format.frame_size;
- info->visible_rect = gfx::Rect(format.frame_size);
- info->metadata.frame_rate = format.frame_rate;
- info->metadata.reference_time = reference_time;
-
- buffer_pool_->HoldForConsumers(buffer_id, 1);
- buffer_pool_->RelinquishProducerReservation(buffer_id);
- receiver_->OnFrameReadyInBuffer(
- buffer_id, 0 /* frame_feedback_id */,
- std::make_unique<ScopedBufferPoolReservation<ConsumerReleaseTraits>>(
- buffer_pool_, buffer_id),
- std::move(info));
- }
+ // Construct the ready frame, to be passed on to the |receiver_| by the caller
+ // of this method.
+ mojom::VideoFrameInfoPtr info = mojom::VideoFrameInfo::New();
+ info->timestamp = timestamp;
+ info->pixel_format = buffer.format.pixel_format;
+ info->color_space = buffer.color_space;
+ info->coded_size = buffer.format.frame_size;
+ info->visible_rect = gfx::Rect(buffer.format.frame_size);
+ info->metadata.frame_rate = buffer.format.frame_rate;
+ info->metadata.reference_time = reference_time;
+
+ buffer_pool_->HoldForConsumers(buffer_id, 1);
+ buffer_pool_->RelinquishProducerReservation(buffer_id);
+
+ return ReadyFrameInBuffer(
+ buffer_id, 0 /* frame_feedback_id */,
+ std::make_unique<ScopedBufferPoolReservation<ConsumerReleaseTraits>>(
+ buffer_pool_, buffer_id),
+ std::move(info));
}
VideoCaptureDevice::Client::ReserveResult
@@ -614,10 +629,12 @@ void VideoCaptureDeviceClient::OnIncomingCapturedBufferExt(
buffer_pool_->HoldForConsumers(buffer.id, 1);
receiver_->OnFrameReadyInBuffer(
- buffer.id, buffer.frame_feedback_id,
- std::make_unique<ScopedBufferPoolReservation<ConsumerReleaseTraits>>(
- buffer_pool_, buffer.id),
- std::move(info));
+ ReadyFrameInBuffer(
+ buffer.id, buffer.frame_feedback_id,
+ std::make_unique<ScopedBufferPoolReservation<ConsumerReleaseTraits>>(
+ buffer_pool_, buffer.id),
+ std::move(info)),
+ {});
}
void VideoCaptureDeviceClient::OnError(VideoCaptureError error,
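The refactored helper preserves the buffer-pool reservation protocol; reduced to its essentials below (method names from this file, flow inferred from the code above, not an authoritative contract):

// 1. Reserve an id that will not collide with pool-managed buffers.
int buffer_id = buffer_pool_->ReserveIdForExternalBuffer(buffer.handle,
                                                         &buffer_id_to_drop);
// 2. Announce the buffer to the receiver once (OnNewBuffer), then:
buffer_pool_->HoldForConsumers(buffer_id, /*num_clients=*/1);
buffer_pool_->RelinquishProducerReservation(buffer_id);
// 3. The consumer hold is released when the receiver destroys the
//    ScopedBufferPoolReservation<ConsumerReleaseTraits> carried inside the
//    returned ReadyFrameInBuffer.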
diff --git a/chromium/media/capture/video/video_capture_device_client.h b/chromium/media/capture/video/video_capture_device_client.h
index 7e9d6eb82ff..07d39c57cb5 100644
--- a/chromium/media/capture/video/video_capture_device_client.h
+++ b/chromium/media/capture/video/video_capture_device_client.h
@@ -18,6 +18,7 @@
#include "media/capture/capture_export.h"
#include "media/capture/mojom/video_capture_types.mojom.h"
#include "media/capture/video/video_capture_device.h"
+#include "media/capture/video/video_frame_receiver.h"
namespace media {
class VideoCaptureBufferPool;
@@ -44,7 +45,7 @@ using VideoCaptureJpegDecoderFactoryCB =
class CAPTURE_EXPORT VideoCaptureDeviceClient
: public VideoCaptureDevice::Client {
public:
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
VideoCaptureDeviceClient(
VideoCaptureBufferType target_buffer_type,
std::unique_ptr<VideoFrameReceiver> receiver,
@@ -54,7 +55,7 @@ class CAPTURE_EXPORT VideoCaptureDeviceClient
VideoCaptureDeviceClient(VideoCaptureBufferType target_buffer_type,
std::unique_ptr<VideoFrameReceiver> receiver,
scoped_refptr<VideoCaptureBufferPool> buffer_pool);
-#endif // BUILDFLAG(IS_ASH)
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
~VideoCaptureDeviceClient() override;
static Buffer MakeBufferStruct(
@@ -81,9 +82,8 @@ class CAPTURE_EXPORT VideoCaptureDeviceClient
base::TimeDelta timestamp,
int frame_feedback_id = 0) override;
void OnIncomingCapturedExternalBuffer(
- gfx::GpuMemoryBufferHandle handle,
- const VideoCaptureFormat& format,
- const gfx::ColorSpace& color_space,
+ CapturedExternalVideoBuffer buffer,
+ std::vector<CapturedExternalVideoBuffer> scaled_buffers,
base::TimeTicks reference_time,
base::TimeDelta timestamp) override;
ReserveResult ReserveOutputBuffer(const gfx::Size& dimensions,
@@ -111,6 +111,11 @@ class CAPTURE_EXPORT VideoCaptureDeviceClient
double GetBufferPoolUtilization() const override;
private:
+ ReadyFrameInBuffer CreateReadyFrameFromExternalBuffer(
+ CapturedExternalVideoBuffer buffer,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp);
+
// A branch of OnIncomingCapturedData for Y16 frame_format.pixel_format.
void OnIncomingCapturedY16Data(const uint8_t* data,
int length,
@@ -125,11 +130,11 @@ class CAPTURE_EXPORT VideoCaptureDeviceClient
const std::unique_ptr<VideoFrameReceiver> receiver_;
std::vector<int> buffer_ids_known_by_receiver_;
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
VideoCaptureJpegDecoderFactoryCB optional_jpeg_decoder_factory_callback_;
std::unique_ptr<VideoCaptureJpegDecoder> external_jpeg_decoder_;
base::OnceClosure on_started_using_gpu_cb_;
-#endif // BUILDFLAG(IS_ASH)
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
// The pool of shared-memory buffers used for capturing.
const scoped_refptr<VideoCaptureBufferPool> buffer_pool_;
diff --git a/chromium/media/capture/video/video_capture_device_client_unittest.cc b/chromium/media/capture/video/video_capture_device_client_unittest.cc
index e23ddf03e51..7933122fc55 100644
--- a/chromium/media/capture/video/video_capture_device_client_unittest.cc
+++ b/chromium/media/capture/video/video_capture_device_client_unittest.cc
@@ -22,9 +22,9 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
#include "media/capture/video/chromeos/video_capture_jpeg_decoder.h"
-#endif // BUILDFLAG(IS_ASH)
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
using ::testing::_;
using ::testing::AtLeast;
@@ -38,11 +38,11 @@ namespace media {
namespace {
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
std::unique_ptr<VideoCaptureJpegDecoder> ReturnNullPtrAsJpecDecoder() {
return nullptr;
}
-#endif // BUILDFLAG(IS_ASH)
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
} // namespace
@@ -61,7 +61,7 @@ class VideoCaptureDeviceClientTest : public ::testing::Test {
receiver_ = controller.get();
gpu_memory_buffer_manager_ =
std::make_unique<unittest_internal::MockGpuMemoryBufferManager>();
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
device_client_ = std::make_unique<VideoCaptureDeviceClient>(
VideoCaptureBufferType::kSharedMemory, std::move(controller),
buffer_pool, base::BindRepeating(&ReturnNullPtrAsJpecDecoder));
@@ -69,7 +69,7 @@ class VideoCaptureDeviceClientTest : public ::testing::Test {
device_client_ = std::make_unique<VideoCaptureDeviceClient>(
VideoCaptureBufferType::kSharedMemory, std::move(controller),
buffer_pool);
-#endif // BUILDFLAG(IS_ASH)
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
}
~VideoCaptureDeviceClientTest() override = default;
@@ -110,7 +110,7 @@ TEST_F(VideoCaptureDeviceClientTest, Minimal) {
std::unique_ptr<gfx::GpuMemoryBuffer> buffer =
gpu_memory_buffer_manager_->CreateFakeGpuMemoryBuffer(
kBufferDimensions, gfx::BufferFormat::YUV_420_BIPLANAR,
- gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE,
+ gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE,
gpu::kNullSurfaceHandle);
{
InSequence s;
diff --git a/chromium/media/capture/video/video_capture_device_factory.cc b/chromium/media/capture/video/video_capture_device_factory.cc
index 7bcdbb65e3f..dc668051cf7 100644
--- a/chromium/media/capture/video/video_capture_device_factory.cc
+++ b/chromium/media/capture/video/video_capture_device_factory.cc
@@ -21,10 +21,4 @@ VideoCaptureDeviceFactory::VideoCaptureDeviceFactory() {
VideoCaptureDeviceFactory::~VideoCaptureDeviceFactory() = default;
-#if BUILDFLAG(IS_ASH)
-bool VideoCaptureDeviceFactory::IsSupportedCameraAppDeviceBridge() {
- return false;
-}
-#endif // BUILDFLAG(IS_ASH)
-
} // namespace media
diff --git a/chromium/media/capture/video/video_capture_device_factory.h b/chromium/media/capture/video/video_capture_device_factory.h
index 8b653ef4750..565343b975e 100644
--- a/chromium/media/capture/video/video_capture_device_factory.h
+++ b/chromium/media/capture/video/video_capture_device_factory.h
@@ -44,10 +44,6 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactory {
std::vector<VideoCaptureDeviceInfo> devices_info)>;
virtual void GetDevicesInfo(GetDevicesInfoCallback callback) = 0;
-#if BUILDFLAG(IS_ASH)
- virtual bool IsSupportedCameraAppDeviceBridge();
-#endif // BUILDFLAG(IS_ASH)
-
protected:
base::ThreadChecker thread_checker_;
diff --git a/chromium/media/capture/video/video_capture_device_unittest.cc b/chromium/media/capture/video/video_capture_device_unittest.cc
index 563f687e126..ec301cbf976 100644
--- a/chromium/media/capture/video/video_capture_device_unittest.cc
+++ b/chromium/media/capture/video/video_capture_device_unittest.cc
@@ -49,10 +49,10 @@
#include "media/capture/video/android/video_capture_device_factory_android.h"
#endif
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
#include "chromeos/dbus/power/power_manager_client.h"
+#include "media/capture/video/chromeos/ash/camera_hal_dispatcher_impl.h"
#include "media/capture/video/chromeos/camera_buffer_factory.h"
-#include "media/capture/video/chromeos/camera_hal_dispatcher_impl.h"
#include "media/capture/video/chromeos/public/cros_features.h"
#include "media/capture/video/chromeos/video_capture_device_chromeos_halv3.h"
#include "media/capture/video/chromeos/video_capture_device_factory_chromeos.h"
@@ -66,9 +66,19 @@
DISABLED_UsingRealWebcam_AllocateBadSize
// We will always get YUYV from the Mac AVFoundation implementations.
#define MAYBE_UsingRealWebcam_CaptureMjpeg DISABLED_UsingRealWebcam_CaptureMjpeg
-#define MAYBE_UsingRealWebcam_TakePhoto UsingRealWebcam_TakePhoto
-#define MAYBE_UsingRealWebcam_GetPhotoState UsingRealWebcam_GetPhotoState
-#define MAYBE_UsingRealWebcam_CaptureWithSize UsingRealWebcam_CaptureWithSize
+
+// TODO(crbug.com/1128470): Re-enable as soon as issues with resource access
+// are fixed.
+#define MAYBE_UsingRealWebcam_TakePhoto DISABLED_UsingRealWebcam_TakePhoto
+// TODO(crbug.com/1128470): Re-enable as soon as issues with resource access
+// are fixed.
+#define MAYBE_UsingRealWebcam_GetPhotoState \
+ DISABLED_UsingRealWebcam_GetPhotoState
+// TODO(crbug.com/1128470): Re-enable as soon as issues with resource access
+// are fixed.
+#define MAYBE_UsingRealWebcam_CaptureWithSize \
+ DISABLED_UsingRealWebcam_CaptureWithSize
+
#define MAYBE_UsingRealWebcam_CheckPhotoCallbackRelease \
UsingRealWebcam_CheckPhotoCallbackRelease
#elif defined(OS_WIN) || defined(OS_FUCHSIA)
@@ -94,7 +104,7 @@
#define MAYBE_UsingRealWebcam_CaptureWithSize UsingRealWebcam_CaptureWithSize
#define MAYBE_UsingRealWebcam_CheckPhotoCallbackRelease \
UsingRealWebcam_CheckPhotoCallbackRelease
-#elif BUILDFLAG(IS_ASH)
+#elif BUILDFLAG(IS_CHROMEOS_ASH)
#define MAYBE_UsingRealWebcam_AllocateBadSize \
DISABLED_UsingRealWebcam_AllocateBadSize
#define MAYBE_UsingRealWebcam_CaptureMjpeg UsingRealWebcam_CaptureMjpeg
@@ -107,7 +117,7 @@
DISABLED_UsingRealWebcam_CaptureWithSize
#define MAYBE_UsingRealWebcam_CheckPhotoCallbackRelease \
UsingRealWebcam_CheckPhotoCallbackRelease
-#elif defined(OS_LINUX)
+#elif defined(OS_LINUX) || BUILDFLAG(IS_CHROMEOS_LACROS)
// UsingRealWebcam_AllocateBadSize will hang when a real camera is attached and
// if more than one test is trying to use the camera (even across processes). Do
// NOT re-enable this test without fixing the many bugs associated with it:
@@ -270,7 +280,7 @@ class VideoCaptureDeviceTest
main_thread_task_runner_(base::ThreadTaskRunnerHandle::Get()),
video_capture_client_(CreateDeviceClient()),
image_capture_client_(new MockImageCaptureClient()) {
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
local_gpu_memory_buffer_manager_ =
std::make_unique<LocalGpuMemoryBufferManager>();
VideoCaptureDeviceFactoryChromeOS::SetGpuBufferManager(
@@ -289,7 +299,7 @@ class VideoCaptureDeviceTest
}
void SetUp() override {
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
chromeos::PowerManagerClient::InitializeFake();
#endif
#if defined(OS_ANDROID)
@@ -304,7 +314,7 @@ class VideoCaptureDeviceTest
}
void TearDown() override {
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
chromeos::PowerManagerClient::Shutdown();
#endif
}
@@ -459,7 +469,7 @@ class VideoCaptureDeviceTest
std::unique_ptr<MockVideoCaptureDeviceClient> video_capture_client_;
const scoped_refptr<MockImageCaptureClient> image_capture_client_;
VideoCaptureFormat last_format_;
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
std::unique_ptr<LocalGpuMemoryBufferManager> local_gpu_memory_buffer_manager_;
#endif
std::unique_ptr<VideoCaptureDeviceFactory> video_capture_device_factory_;
@@ -467,7 +477,7 @@ class VideoCaptureDeviceTest
// Causes a flaky crash on Chrome OS. https://crbug.com/1069608
// Cause hangs on Windows Debug. http://crbug.com/417824
-#if BUILDFLAG(IS_ASH) || (defined(OS_WIN) && !defined(NDEBUG))
+#if BUILDFLAG(IS_CHROMEOS_ASH) || (defined(OS_WIN) && !defined(NDEBUG))
#define MAYBE_OpenInvalidDevice DISABLED_OpenInvalidDevice
#else
#define MAYBE_OpenInvalidDevice OpenInvalidDevice
@@ -654,7 +664,7 @@ WRAPPED_TEST_P(VideoCaptureDeviceTest, MAYBE_UsingRealWebcam_CaptureMjpeg) {
base::Unretained(this)));
}
void VideoCaptureDeviceTest::RunCaptureMjpegTestCase() {
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
if (media::ShouldUseCrosCameraService()) {
VLOG(1)
<< "Skipped on Chrome OS device where HAL v3 camera service is used";
@@ -695,7 +705,7 @@ void VideoCaptureDeviceTest::RunCaptureMjpegTestCase() {
}
// Flaky on ChromeOS. See https://crbug.com/1096082
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
#define MAYBE_NoCameraSupportsPixelFormatMax \
DISABLED_NoCameraSupportsPixelFormatMax
#else
diff --git a/chromium/media/capture/video/video_capture_metrics.cc b/chromium/media/capture/video/video_capture_metrics.cc
new file mode 100644
index 00000000000..23e222bca44
--- /dev/null
+++ b/chromium/media/capture/video/video_capture_metrics.cc
@@ -0,0 +1,180 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/video_capture_metrics.h"
+
+#include "base/containers/fixed_flat_map.h"
+#include "base/containers/flat_set.h"
+#include "base/containers/span.h"
+#include "base/logging.h"
+#include "base/metrics/histogram_functions.h"
+#include "base/metrics/histogram_macros.h"
+#include "ui/gfx/geometry/size.h"
+
+namespace {
+
+// These resolutions are ones supported on a test webcam. Names given
+// where possible, from https://en.wikipedia.org/wiki/List_of_common_resolutions
+enum class VideoResolutionDesignation {
+ kUnknown = 0, // Catch-all for resolutions not understood.
+ // Video Graphics Array resolutions
+ kQQVGA = 1, // 160x120
+ kHQVGA = 2, // 240x160
+ kQVGA = 3, // 320x240
+ kWQVGA = 4, // 432x240
+ kHVGA = 5, // 480x320
+ kVGA = 6, // 640x480
+ kWVGA = 7, // 720x480
+ kWSVGA = 8, // 1024x576
+ kSVGA = 9, // 800x600
+
+ // Extended Graphics Array resolutions
+ kSXGA_MINUS = 10, // 1280x960
+ kUXGA = 11, // 1600x1200
+ kQXGA = 12, // 2048x1536
+
+ // Common Intermediate Format resolutions
+ kQCIF = 13, // 176x144
+ kCIF = 14, // 352x288
+
+ // High-definition resolutions.
+ kNHD = 15, // 640x360
+ kQHD = 16, // 960x540
+ kHD_FULLSCREEN = 17, // 960x720
+ kHD = 18, // 1280x720
+ kHD_PLUS = 19, // 1600x900
+ kFHD = 20, // 1920x1080
+ kWQHD = 21, // 2560x1440
+ kQHD_PLUS = 22, // 3200x1800
+ k4K_UHD = 23, // 3840x2160
+ kDCI_4K = 24, // 4096x2160
+ k5K = 25, // 5120x2880
+ k8K_UHD = 26, // 7680x4320
+
+ // Odd resolutions with no name
+ k160x90 = 27,
+ k320x176 = 28,
+ k320x180 = 29,
+ k480x270 = 30,
+ k544x288 = 31,
+ k752x416 = 32,
+ k864x480 = 33,
+ k800x448 = 34,
+ k960x544 = 35,
+ k1184x656 = 36,
+ k1392x768 = 37,
+ k1504x832 = 38,
+ k1600x896 = 39,
+ k1712x960 = 40,
+ k1792x1008 = 41,
+ k2592x1944 = 42,
+
+ kMaxValue = k2592x1944,
+};
+
+struct FrameSizeCompare {
+ // Return true iff lhs < rhs.
+ constexpr bool operator()(const gfx::Size& lhs, const gfx::Size& rhs) const {
+ return (lhs.height() < rhs.height() ||
+ (lhs.height() == rhs.height() && lhs.width() < rhs.width()));
+ }
+};
+
+constexpr auto kResolutions =
+ base::MakeFixedFlatMap<gfx::Size, VideoResolutionDesignation>(
+ {
+ {{160, 120}, VideoResolutionDesignation::kQQVGA},
+ {{240, 160}, VideoResolutionDesignation::kHQVGA},
+ {{320, 240}, VideoResolutionDesignation::kQVGA},
+ {{432, 240}, VideoResolutionDesignation::kWQVGA},
+ {{480, 320}, VideoResolutionDesignation::kHVGA},
+ {{640, 480}, VideoResolutionDesignation::kVGA},
+ {{720, 480}, VideoResolutionDesignation::kWVGA},
+ {{1024, 576}, VideoResolutionDesignation::kWSVGA},
+ {{800, 600}, VideoResolutionDesignation::kSVGA},
+ {{1280, 960}, VideoResolutionDesignation::kSXGA_MINUS},
+ {{1600, 1200}, VideoResolutionDesignation::kUXGA},
+ {{2048, 1536}, VideoResolutionDesignation::kQXGA},
+ {{176, 144}, VideoResolutionDesignation::kQCIF},
+ {{352, 288}, VideoResolutionDesignation::kCIF},
+ {{640, 360}, VideoResolutionDesignation::kNHD},
+ {{960, 540}, VideoResolutionDesignation::kQHD},
+ {{960, 720}, VideoResolutionDesignation::kHD_FULLSCREEN},
+ {{1280, 720}, VideoResolutionDesignation::kHD},
+ {{1600, 900}, VideoResolutionDesignation::kHD_PLUS},
+ {{1920, 1080}, VideoResolutionDesignation::kFHD},
+ {{2560, 1440}, VideoResolutionDesignation::kWQHD},
+ {{3200, 1800}, VideoResolutionDesignation::kQHD_PLUS},
+ {{3840, 2160}, VideoResolutionDesignation::k4K_UHD},
+ {{4096, 2160}, VideoResolutionDesignation::kDCI_4K},
+ {{5120, 2880}, VideoResolutionDesignation::k5K},
+ {{7680, 4320}, VideoResolutionDesignation::k8K_UHD},
+ {{160, 90}, VideoResolutionDesignation::k160x90},
+ {{320, 176}, VideoResolutionDesignation::k320x176},
+ {{320, 180}, VideoResolutionDesignation::k320x180},
+ {{480, 270}, VideoResolutionDesignation::k480x270},
+ {{544, 288}, VideoResolutionDesignation::k544x288},
+ {{752, 416}, VideoResolutionDesignation::k752x416},
+ {{864, 480}, VideoResolutionDesignation::k864x480},
+ {{800, 448}, VideoResolutionDesignation::k800x448},
+ {{960, 544}, VideoResolutionDesignation::k960x544},
+ {{1184, 656}, VideoResolutionDesignation::k1184x656},
+ {{1392, 768}, VideoResolutionDesignation::k1392x768},
+ {{1504, 832}, VideoResolutionDesignation::k1504x832},
+ {{1600, 896}, VideoResolutionDesignation::k1600x896},
+ {{1712, 960}, VideoResolutionDesignation::k1712x960},
+ {{1792, 1008}, VideoResolutionDesignation::k1792x1008},
+ {{2592, 1944}, VideoResolutionDesignation::k2592x1944},
+ },
+ FrameSizeCompare());
+
+static_assert(kResolutions.size() ==
+ static_cast<size_t>(VideoResolutionDesignation::kMaxValue),
+ "Each resolution must have one entry in kResolutions.");
+
+VideoResolutionDesignation ResolutionNameFromSize(gfx::Size frame_size) {
+ // Rotate such that we are always in landscape.
+ if (frame_size.width() < frame_size.height()) {
+ int tmp = frame_size.width();
+ frame_size.set_width(frame_size.height());
+    frame_size.set_height(tmp);
+ }
+ auto* it = kResolutions.find(frame_size);
+ return it != kResolutions.end() ? it->second
+ : VideoResolutionDesignation::kUnknown;
+}
+
+} // namespace
+
+namespace media {
+
+void LogCaptureDeviceMetrics(
+ base::span<const media::VideoCaptureDeviceInfo> devices_info) {
+ for (const auto& device : devices_info) {
+ base::flat_set<media::VideoPixelFormat> supported_pixel_formats;
+ base::flat_set<gfx::Size, FrameSizeCompare> resolutions;
+ for (const auto& format : device.supported_formats) {
+ VLOG(2) << "Device supports "
+ << media::VideoPixelFormatToString(format.pixel_format) << " at "
+ << format.frame_size.ToString() << " ("
+ << static_cast<int>(ResolutionNameFromSize(format.frame_size))
+ << ")";
+ media::VideoPixelFormat pixel_format = format.pixel_format;
+ bool inserted = supported_pixel_formats.insert(pixel_format).second;
+ if (inserted) {
+ base::UmaHistogramEnumeration(
+ "Media.VideoCapture.Device.SupportedPixelFormat", pixel_format,
+ media::VideoPixelFormat::PIXEL_FORMAT_MAX);
+ }
+ if (!resolutions.contains(format.frame_size)) {
+ resolutions.insert(format.frame_size);
+ base::UmaHistogramEnumeration(
+ "Media.VideoCapture.Device.SupportedResolution",
+ ResolutionNameFromSize(format.frame_size));
+ }
+ }
+ }
+}
+
+} // namespace media
\ No newline at end of file
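Illustrative expectations for the internal ResolutionNameFromSize helper (with the width/height swap corrected above, portrait sizes normalize to their landscape bucket). These checks are a sketch, not part of the file:

DCHECK(ResolutionNameFromSize(gfx::Size(160, 120)) ==
       VideoResolutionDesignation::kQQVGA);
// Portrait input is rotated to landscape before the lookup.
DCHECK(ResolutionNameFromSize(gfx::Size(120, 160)) ==
       VideoResolutionDesignation::kQQVGA);
// Anything not in kResolutions falls back to kUnknown.
DCHECK(ResolutionNameFromSize(gfx::Size(123, 45)) ==
       VideoResolutionDesignation::kUnknown);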
diff --git a/chromium/media/capture/video/video_capture_metrics.h b/chromium/media/capture/video/video_capture_metrics.h
new file mode 100644
index 00000000000..f9a963c5a65
--- /dev/null
+++ b/chromium/media/capture/video/video_capture_metrics.h
@@ -0,0 +1,19 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_VIDEO_CAPTURE_METRICS_H_
+#define MEDIA_CAPTURE_VIDEO_VIDEO_CAPTURE_METRICS_H_
+
+#include "base/containers/span.h"
+#include "media/capture/video/video_capture_device_info.h"
+
+namespace media {
+
+CAPTURE_EXPORT
+void LogCaptureDeviceMetrics(
+ base::span<const media::VideoCaptureDeviceInfo> devices_info);
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_VIDEO_CAPTURE_METRICS_H_
\ No newline at end of file
diff --git a/chromium/media/capture/video/video_capture_metrics_unittest.cc b/chromium/media/capture/video/video_capture_metrics_unittest.cc
new file mode 100644
index 00000000000..d259f2098f2
--- /dev/null
+++ b/chromium/media/capture/video/video_capture_metrics_unittest.cc
@@ -0,0 +1,59 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/video_capture_metrics.h"
+
+#include "base/test/metrics/histogram_tester.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+namespace test {
+
+TEST(VideoCaptureMetricsTest, TestLogCaptureDeviceMetrics) {
+ base::HistogramTester histogram_tester;
+ std::vector<media::VideoCaptureDeviceInfo> devices_info;
+ // First device
+ VideoCaptureDeviceInfo first_device;
+ first_device.supported_formats = {
+ // NV12 QQVGA at 30fps, 15fps
+ {{160, 120}, 30.0, media::PIXEL_FORMAT_NV12},
+ {{160, 120}, 15.0, media::PIXEL_FORMAT_NV12},
+ // NV12 VGA
+ {{640, 480}, 30.0, media::PIXEL_FORMAT_NV12},
+ // UYVY VGA
+ {{640, 480}, 30.0, media::PIXEL_FORMAT_UYVY},
+ // MJPEG 4K
+ {{3840, 2160}, 30.0, media::PIXEL_FORMAT_MJPEG},
+ // Odd resolution
+ {{844, 400}, 30.0, media::PIXEL_FORMAT_NV12},
+ // HD at unknown pixel format
+ {{1280, 720}, 30.0, media::PIXEL_FORMAT_UNKNOWN}};
+ devices_info.push_back(first_device);
+ VideoCaptureDeviceInfo second_device;
+ second_device.supported_formats = {
+ // UYVY VGA to test that we get 2 UYVY and 2 VGA in metrics.
+ {{640, 480}, 30.0, media::PIXEL_FORMAT_UYVY}};
+ devices_info.push_back(second_device);
+
+ LogCaptureDeviceMetrics(devices_info);
+
+ EXPECT_THAT(histogram_tester.GetAllSamples(
+ "Media.VideoCapture.Device.SupportedPixelFormat"),
+ testing::UnorderedElementsAre(
+ base::Bucket(media::PIXEL_FORMAT_NV12, 1),
+ base::Bucket(media::PIXEL_FORMAT_UYVY, 2),
+ base::Bucket(media::PIXEL_FORMAT_MJPEG, 1),
+ base::Bucket(media::PIXEL_FORMAT_UNKNOWN, 1)));
+
+ EXPECT_THAT(histogram_tester.GetAllSamples(
+ "Media.VideoCapture.Device.SupportedResolution"),
+ testing::UnorderedElementsAre(
+ base::Bucket(0 /*other*/, 1), base::Bucket(1 /*qqvga*/, 1),
+ base::Bucket(6 /*vga*/, 2), base::Bucket(23 /*4k_UHD*/, 1),
+ base::Bucket(18 /*hd*/, 1)));
+}
+
+} // namespace test
+} // namespace media
\ No newline at end of file
diff --git a/chromium/media/capture/video/video_capture_system_impl.cc b/chromium/media/capture/video/video_capture_system_impl.cc
index 6f13af9ff0e..5ff37450c12 100644
--- a/chromium/media/capture/video/video_capture_system_impl.cc
+++ b/chromium/media/capture/video/video_capture_system_impl.cc
@@ -10,6 +10,7 @@
#include "base/callback_helpers.h"
#include "build/build_config.h"
#include "media/base/bind_to_current_loop.h"
+#include "media/capture/video/video_capture_metrics.h"
namespace {
@@ -111,6 +112,11 @@ void VideoCaptureSystemImpl::DevicesInfoReady(
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(!device_enum_request_queue_.empty());
+ // Only save metrics the first time device infos are populated.
+ if (devices_info_cache_.empty()) {
+ LogCaptureDeviceMetrics(devices_info);
+ }
+
for (auto& device_info : devices_info) {
ConsolidateCaptureFormats(&device_info.supported_formats);
}
diff --git a/chromium/media/capture/video/video_frame_receiver.cc b/chromium/media/capture/video/video_frame_receiver.cc
new file mode 100644
index 00000000000..627143e4487
--- /dev/null
+++ b/chromium/media/capture/video/video_frame_receiver.cc
@@ -0,0 +1,36 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/video_frame_receiver.h"
+
+namespace media {
+
+ReadyFrameInBuffer::ReadyFrameInBuffer(
+ int buffer_id,
+ int frame_feedback_id,
+ std::unique_ptr<VideoCaptureDevice::Client::Buffer::ScopedAccessPermission>
+ buffer_read_permission,
+ mojom::VideoFrameInfoPtr frame_info)
+ : buffer_id(buffer_id),
+ frame_feedback_id(frame_feedback_id),
+ buffer_read_permission(std::move(buffer_read_permission)),
+ frame_info(std::move(frame_info)) {}
+
+ReadyFrameInBuffer::ReadyFrameInBuffer(ReadyFrameInBuffer&& other)
+ : buffer_id(other.buffer_id),
+ frame_feedback_id(other.frame_feedback_id),
+ buffer_read_permission(std::move(other.buffer_read_permission)),
+ frame_info(std::move(other.frame_info)) {}
+
+ReadyFrameInBuffer::~ReadyFrameInBuffer() = default;
+
+ReadyFrameInBuffer& ReadyFrameInBuffer::operator=(ReadyFrameInBuffer&& other) {
+ buffer_id = other.buffer_id;
+ frame_feedback_id = other.frame_feedback_id;
+ buffer_read_permission = std::move(other.buffer_read_permission);
+ frame_info = std::move(other.frame_info);
+ return *this;
+}
+
+} // namespace media
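Because ReadyFrameInBuffer declares move operations only (it owns the scoped read permission), instances transfer rather than copy. A sketch with a hypothetical frame local:

std::vector<ReadyFrameInBuffer> scaled_frames;
scaled_frames.push_back(std::move(frame));  // OK: ownership moves.
// scaled_frames.push_back(frame);          // Would not compile: the copy
//                                          // constructor is implicitly deleted.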
diff --git a/chromium/media/capture/video/video_frame_receiver.h b/chromium/media/capture/video/video_frame_receiver.h
index e153aa99885..05794e7e715 100644
--- a/chromium/media/capture/video/video_frame_receiver.h
+++ b/chromium/media/capture/video/video_frame_receiver.h
@@ -12,6 +12,26 @@
namespace media {
+struct CAPTURE_EXPORT ReadyFrameInBuffer {
+ ReadyFrameInBuffer(
+ int buffer_id,
+ int frame_feedback_id,
+ std::unique_ptr<
+ VideoCaptureDevice::Client::Buffer::ScopedAccessPermission>
+ buffer_read_permission,
+ mojom::VideoFrameInfoPtr frame_info);
+ ReadyFrameInBuffer(ReadyFrameInBuffer&& other);
+ ~ReadyFrameInBuffer();
+
+ ReadyFrameInBuffer& operator=(ReadyFrameInBuffer&& other);
+
+ int buffer_id;
+ int frame_feedback_id;
+ std::unique_ptr<VideoCaptureDevice::Client::Buffer::ScopedAccessPermission>
+ buffer_read_permission;
+ mojom::VideoFrameInfoPtr frame_info;
+};
+
// Callback interface for VideoCaptureDeviceClient to communicate with its
// clients. On some platforms, VideoCaptureDeviceClient calls these methods from
// OS or capture driver provided threads which do not have a task runner and
@@ -40,12 +60,8 @@ class CAPTURE_EXPORT VideoFrameReceiver {
// alive and unchanged until VideoFrameReceiver releases the given
// |buffer_read_permission|.
virtual void OnFrameReadyInBuffer(
- int buffer_id,
- int frame_feedback_id,
- std::unique_ptr<
- VideoCaptureDevice::Client::Buffer::ScopedAccessPermission>
- buffer_read_permission,
- mojom::VideoFrameInfoPtr frame_info) = 0;
+ ReadyFrameInBuffer frame,
+ std::vector<ReadyFrameInBuffer> scaled_frames) = 0;
// Tells the VideoFrameReceiver that the producer is no longer going to use
// the buffer with id |buffer_id| for frame delivery. This may be called even
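A minimal sketch of implementing the new signature; the class name is hypothetical and the remaining pure-virtual VideoFrameReceiver methods are elided:

class LoggingReceiver : public VideoFrameReceiver {
 public:
  void OnFrameReadyInBuffer(
      ReadyFrameInBuffer frame,
      std::vector<ReadyFrameInBuffer> scaled_frames) override {
    VLOG(1) << "Frame in buffer " << frame.buffer_id << " with "
            << scaled_frames.size() << " scaled variant(s)";
    // The buffer stays valid until |frame.buffer_read_permission| (and the
    // permissions inside |scaled_frames|) are destroyed.
  }
  // ... remaining VideoFrameReceiver overrides elided ...
};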
diff --git a/chromium/media/capture/video/video_frame_receiver_on_task_runner.cc b/chromium/media/capture/video/video_frame_receiver_on_task_runner.cc
index d563ed5e2ac..db8990cf1c7 100644
--- a/chromium/media/capture/video/video_frame_receiver_on_task_runner.cc
+++ b/chromium/media/capture/video/video_frame_receiver_on_task_runner.cc
@@ -27,16 +27,12 @@ void VideoFrameReceiverOnTaskRunner::OnNewBuffer(
}
void VideoFrameReceiverOnTaskRunner::OnFrameReadyInBuffer(
- int buffer_id,
- int frame_feedback_id,
- std::unique_ptr<VideoCaptureDevice::Client::Buffer::ScopedAccessPermission>
- buffer_read_permission,
- mojom::VideoFrameInfoPtr frame_info) {
+ ReadyFrameInBuffer frame,
+ std::vector<ReadyFrameInBuffer> scaled_frames) {
task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&VideoFrameReceiver::OnFrameReadyInBuffer, receiver_,
- buffer_id, frame_feedback_id,
- std::move(buffer_read_permission), std::move(frame_info)));
+ std::move(frame), std::move(scaled_frames)));
}
void VideoFrameReceiverOnTaskRunner::OnBufferRetired(int buffer_id) {
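The trampoline above relies on base::BindOnce accepting move-only bound arguments, which is what lets ReadyFrameInBuffer and the vector hop threads. A reduced sketch of that pattern:

task_runner_->PostTask(
    FROM_HERE,
    base::BindOnce(
        [](ReadyFrameInBuffer frame,
           std::vector<ReadyFrameInBuffer> scaled_frames) {
          // Runs on |task_runner_|; both arguments were moved in.
        },
        std::move(frame), std::move(scaled_frames)));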
diff --git a/chromium/media/capture/video/video_frame_receiver_on_task_runner.h b/chromium/media/capture/video/video_frame_receiver_on_task_runner.h
index cb472218258..ff2d8113d3a 100644
--- a/chromium/media/capture/video/video_frame_receiver_on_task_runner.h
+++ b/chromium/media/capture/video/video_frame_receiver_on_task_runner.h
@@ -26,12 +26,8 @@ class CAPTURE_EXPORT VideoFrameReceiverOnTaskRunner
void OnNewBuffer(int32_t buffer_id,
media::mojom::VideoBufferHandlePtr buffer_handle) override;
void OnFrameReadyInBuffer(
- int buffer_id,
- int frame_feedback_id,
- std::unique_ptr<
- VideoCaptureDevice::Client::Buffer::ScopedAccessPermission>
- buffer_read_permission,
- mojom::VideoFrameInfoPtr frame_info) override;
+ ReadyFrameInBuffer frame,
+ std::vector<ReadyFrameInBuffer> scaled_frames) override;
void OnBufferRetired(int buffer_id) override;
void OnError(VideoCaptureError error) override;
void OnFrameDropped(VideoCaptureFrameDropReason reason) override;
diff --git a/chromium/media/capture/video/win/OWNERS b/chromium/media/capture/video/win/OWNERS
new file mode 100644
index 00000000000..1829bc129d7
--- /dev/null
+++ b/chromium/media/capture/video/win/OWNERS
@@ -0,0 +1,6 @@
+ilnik@chromium.org
+
+# Original (legacy) owners.
+chfremer@chromium.org
+emircan@chromium.org
+mcasas@chromium.org
diff --git a/chromium/media/capture/video/win/d3d_capture_test_utils.cc b/chromium/media/capture/video/win/d3d_capture_test_utils.cc
new file mode 100644
index 00000000000..6f82581cc79
--- /dev/null
+++ b/chromium/media/capture/video/win/d3d_capture_test_utils.cc
@@ -0,0 +1,918 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/win/d3d_capture_test_utils.h"
+
+namespace media {
+
+MockD3D11DeviceContext::MockD3D11DeviceContext() = default;
+MockD3D11DeviceContext::~MockD3D11DeviceContext() = default;
+
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::VSSetConstantBuffers(
+ UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* constant_buffers_out) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::PSSetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView* const* shader_resource_views_out) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::PSSetShader(ID3D11PixelShader* pixel_shader,
+ ID3D11ClassInstance* const* class_instances,
+ UINT num_class_instances) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::PSSetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState* const* samplers_out) {
+}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::VSSetShader(ID3D11VertexShader* vertex_shader,
+ ID3D11ClassInstance* const* class_instances,
+ UINT num_class_instances) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DrawIndexed(UINT index_count,
+ UINT start_index_location,
+ INT base_vertex_location) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::Draw(UINT vertex_count, UINT start_vertex_location) {}
+IFACEMETHODIMP MockD3D11DeviceContext::Map(
+ ID3D11Resource* resource,
+ UINT subresource,
+ D3D11_MAP MapType,
+ UINT MapFlags,
+ D3D11_MAPPED_SUBRESOURCE* mapped_resource) {
+ return E_NOTIMPL;
+}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::Unmap(ID3D11Resource* resource, UINT subresource) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::PSSetConstantBuffers(
+ UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* constant_buffers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::IASetInputLayout(ID3D11InputLayout* input_layout) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::IASetVertexBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* vertex_buffers,
+ const UINT* strides,
+ const UINT* offsets) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::IASetIndexBuffer(ID3D11Buffer* index_buffer,
+ DXGI_FORMAT format,
+ UINT offset) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DrawIndexedInstanced(UINT index_count_per_instance,
+ UINT instance_count,
+ UINT start_index_location,
+ INT base_vertex_location,
+ UINT start_instance_location) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DrawInstanced(UINT vertex_count_per_instance,
+ UINT instance_count,
+ UINT start_vertex_location,
+ UINT start_instance_location) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::GSSetConstantBuffers(
+ UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* constant_buffers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::GSSetShader(ID3D11GeometryShader* shader,
+ ID3D11ClassInstance* const* class_instances,
+ UINT num_class_instances) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::IASetPrimitiveTopology(
+ D3D11_PRIMITIVE_TOPOLOGY topology) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::VSSetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView* const* shader_resource_views) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::VSSetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState* const* samplers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::Begin(ID3D11Asynchronous* async) {}
+IFACEMETHODIMP_(void) MockD3D11DeviceContext::End(ID3D11Asynchronous* async) {}
+IFACEMETHODIMP MockD3D11DeviceContext::GetData(ID3D11Asynchronous* async,
+ void* data,
+ UINT data_size,
+ UINT get_data_flags) {
+ return E_NOTIMPL;
+}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::SetPredication(ID3D11Predicate* pPredicate,
+ BOOL PredicateValue) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::GSSetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView* const* shader_resource_views) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::GSSetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState* const* samplers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::OMSetRenderTargets(
+ UINT num_views,
+ ID3D11RenderTargetView* const* render_target_views,
+ ID3D11DepthStencilView* depth_stencil_view) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::OMSetRenderTargetsAndUnorderedAccessViews(
+ UINT num_rtvs,
+ ID3D11RenderTargetView* const* render_target_views,
+ ID3D11DepthStencilView* depth_stencil_view,
+ UINT uav_start_slot,
+ UINT num_uavs,
+ ID3D11UnorderedAccessView* const* unordered_access_views,
+ const UINT* uav_initial_counts) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::OMSetBlendState(ID3D11BlendState* blend_state,
+ const FLOAT blend_factor[4],
+ UINT sample_mask) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::OMSetDepthStencilState(
+ ID3D11DepthStencilState* depth_stencil_state,
+ UINT stencil_ref) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::SOSetTargets(UINT num_buffers,
+ ID3D11Buffer* const* so_targets,
+ const UINT* offsets) {}
+IFACEMETHODIMP_(void) MockD3D11DeviceContext::DrawAuto() {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DrawIndexedInstancedIndirect(
+ ID3D11Buffer* buffer_for_args,
+ UINT aligned_byte_offset_for_args) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DrawInstancedIndirect(
+ ID3D11Buffer* buffer_for_args,
+ UINT aligned_byte_offset_for_args) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::Dispatch(UINT thread_group_count_x,
+ UINT thread_group_count_y,
+ UINT thread_group_count_z) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DispatchIndirect(ID3D11Buffer* buffer_for_args,
+ UINT aligned_byte_offset_for_args) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::RSSetState(ID3D11RasterizerState* rasterizer_state) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::RSSetViewports(UINT num_viewports,
+ const D3D11_VIEWPORT* viewports) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::RSSetScissorRects(UINT num_rects,
+ const D3D11_RECT* rects) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::CopySubresourceRegion(ID3D11Resource* dest_resource,
+ UINT dest_subresource,
+ UINT dest_x,
+ UINT dest_y,
+ UINT dest_z,
+ ID3D11Resource* source_resource,
+ UINT source_subresource,
+ const D3D11_BOX* source_box) {
+ OnCopySubresourceRegion(dest_resource, dest_subresource, dest_x, dest_y,
+ dest_z, source_resource, source_subresource,
+ source_box);
+}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::CopyResource(ID3D11Resource* dest_resource,
+ ID3D11Resource* source_resource) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::UpdateSubresource(ID3D11Resource* dest_resource,
+ UINT dest_subresource,
+ const D3D11_BOX* dest_box,
+ const void* source_data,
+ UINT source_row_pitch,
+ UINT source_depth_pitch) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::CopyStructureCount(
+ ID3D11Buffer* dest_buffer,
+ UINT dest_aligned_byte_offset,
+ ID3D11UnorderedAccessView* source_view) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::ClearRenderTargetView(
+ ID3D11RenderTargetView* render_target_view,
+ const FLOAT color_rgba[4]) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::ClearUnorderedAccessViewUint(
+ ID3D11UnorderedAccessView* unordered_access_view,
+ const UINT values[4]) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::ClearUnorderedAccessViewFloat(
+ ID3D11UnorderedAccessView* unordered_access_view,
+ const FLOAT values[4]) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::ClearDepthStencilView(
+ ID3D11DepthStencilView* depth_stencil_view,
+ UINT clear_flags,
+ FLOAT depth,
+ UINT8 stencil) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::GenerateMips(
+ ID3D11ShaderResourceView* shader_resource_view) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::SetResourceMinLOD(ID3D11Resource* resource,
+ FLOAT min_lod) {}
+IFACEMETHODIMP_(FLOAT)
+MockD3D11DeviceContext::GetResourceMinLOD(ID3D11Resource* resource) {
+ return 0;
+}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::ResolveSubresource(ID3D11Resource* dest_resource,
+ UINT dest_subresource,
+ ID3D11Resource* source_resource,
+ UINT source_subresource,
+ DXGI_FORMAT format) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::ExecuteCommandList(ID3D11CommandList* command_list,
+ BOOL restore_context_state) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::HSSetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView* const* shader_resource_views) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::HSSetShader(ID3D11HullShader* hull_shader,
+ ID3D11ClassInstance* const* class_instances,
+ UINT num_class_instances) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::HSSetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState* const* samplers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::HSSetConstantBuffers(
+ UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* constant_buffers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DSSetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView* const* shader_resource_views) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DSSetShader(ID3D11DomainShader* domain_shader,
+ ID3D11ClassInstance* const* class_instances,
+ UINT num_class_instances) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DSSetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState* const* samplers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DSSetConstantBuffers(
+ UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* constant_buffers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::CSSetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView* const* shader_resource_views) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::CSSetUnorderedAccessViews(
+ UINT start_slot,
+ UINT num_uavs,
+ ID3D11UnorderedAccessView* const* unordered_access_views,
+ const UINT* uav_initial_counts) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::CSSetShader(ID3D11ComputeShader* computer_shader,
+ ID3D11ClassInstance* const* class_instances,
+ UINT num_class_instances) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::CSSetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState* const* samplers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::CSSetConstantBuffers(
+ UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* constant_buffers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::VSGetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** constant_buffers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::PSGetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView** shader_resource_views) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::PSGetShader(ID3D11PixelShader** pixel_shader,
+ ID3D11ClassInstance** class_instances,
+ UINT* num_class_instances) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::PSGetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState** samplers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::VSGetShader(ID3D11VertexShader** vertex_shader,
+ ID3D11ClassInstance** class_instances,
+ UINT* num_class_instances) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::PSGetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** constant_buffers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::IAGetInputLayout(ID3D11InputLayout** input_layout) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::IAGetVertexBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** vertex_buffers,
+ UINT* strides,
+ UINT* offsets) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::IAGetIndexBuffer(ID3D11Buffer** index_buffer,
+ DXGI_FORMAT* format,
+ UINT* offset) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::GSGetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** constant_buffers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::GSGetShader(ID3D11GeometryShader** geometry_shader,
+ ID3D11ClassInstance** class_instances,
+ UINT* num_class_instances) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::IAGetPrimitiveTopology(
+ D3D11_PRIMITIVE_TOPOLOGY* topology) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::VSGetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView** shader_resource_views) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::VSGetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState** samplers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::GetPredication(ID3D11Predicate** predicate,
+ BOOL* predicate_value) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::GSGetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView** shader_resource_views) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::GSGetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState** samplers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::OMGetRenderTargets(
+ UINT num_views,
+ ID3D11RenderTargetView** render_target_views,
+ ID3D11DepthStencilView** depth_stencil_view) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::OMGetRenderTargetsAndUnorderedAccessViews(
+ UINT num_rtvs,
+ ID3D11RenderTargetView** render_target_views,
+ ID3D11DepthStencilView** depth_stencil_view,
+ UINT uav_start_slot,
+ UINT num_uavs,
+ ID3D11UnorderedAccessView** unordered_access_views) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::OMGetBlendState(ID3D11BlendState** blend_state,
+ FLOAT blend_factor[4],
+ UINT* sample_mask) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::OMGetDepthStencilState(
+ ID3D11DepthStencilState** depth_stencil_state,
+ UINT* stencil_ref) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::SOGetTargets(UINT num_buffers,
+ ID3D11Buffer** so_targets) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::RSGetState(ID3D11RasterizerState** rasterizer_state) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::RSGetViewports(UINT* num_viewports,
+ D3D11_VIEWPORT* viewports) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::RSGetScissorRects(UINT* num_rects, D3D11_RECT* rects) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::HSGetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView** shader_resource_views) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::HSGetShader(ID3D11HullShader** hull_shader,
+ ID3D11ClassInstance** class_instances,
+ UINT* num_class_instances) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::HSGetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState** samplers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::HSGetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** constant_buffers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DSGetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView** shader_resource_views) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DSGetShader(ID3D11DomainShader** domain_shader,
+ ID3D11ClassInstance** class_instances,
+ UINT* num_class_instances) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DSGetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState** samplers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::DSGetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** constant_buffers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::CSGetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView** shader_resource_views) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::CSGetUnorderedAccessViews(
+ UINT start_slot,
+ UINT num_uavs,
+ ID3D11UnorderedAccessView** unordered_access_views) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::CSGetShader(ID3D11ComputeShader** pcomputer_shader,
+ ID3D11ClassInstance** class_instances,
+ UINT* num_class_instances) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::CSGetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState** samplers) {}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::CSGetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** constant_buffers) {}
+IFACEMETHODIMP_(void) MockD3D11DeviceContext::ClearState() {}
+IFACEMETHODIMP_(void) MockD3D11DeviceContext::Flush() {}
+IFACEMETHODIMP_(D3D11_DEVICE_CONTEXT_TYPE) MockD3D11DeviceContext::GetType() {
+ return D3D11_DEVICE_CONTEXT_IMMEDIATE;
+}
+IFACEMETHODIMP_(UINT) MockD3D11DeviceContext::GetContextFlags() {
+ return 0;
+}
+IFACEMETHODIMP MockD3D11DeviceContext::FinishCommandList(
+ BOOL restore_deferred_context_state,
+ ID3D11CommandList** command_list) {
+ return E_NOTIMPL;
+}
+IFACEMETHODIMP_(void)
+MockD3D11DeviceContext::GetDevice(ID3D11Device** device) {}
+IFACEMETHODIMP MockD3D11DeviceContext::GetPrivateData(REFGUID guid,
+ UINT* data_size,
+ void* data) {
+ return E_NOTIMPL;
+}
+IFACEMETHODIMP MockD3D11DeviceContext::SetPrivateData(REFGUID guid,
+ UINT data_size,
+ const void* data) {
+ return E_NOTIMPL;
+}
+IFACEMETHODIMP MockD3D11DeviceContext::SetPrivateDataInterface(
+ REFGUID guid,
+ const IUnknown* data) {
+ return E_NOTIMPL;
+}
+
+MockD3D11Device::MockD3D11Device()
+ : mock_immediate_context_(new MockD3D11DeviceContext()) {}
+MockD3D11Device::~MockD3D11Device() {}
+
+IFACEMETHODIMP MockD3D11Device::CreateBuffer(
+ const D3D11_BUFFER_DESC* desc,
+ const D3D11_SUBRESOURCE_DATA* initial_data,
+ ID3D11Buffer** ppBuffer) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateTexture1D(
+ const D3D11_TEXTURE1D_DESC* desc,
+ const D3D11_SUBRESOURCE_DATA* initial_data,
+ ID3D11Texture1D** texture1D) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateTexture2D(
+ const D3D11_TEXTURE2D_DESC* desc,
+ const D3D11_SUBRESOURCE_DATA* initial_data,
+ ID3D11Texture2D** texture2D) {
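+  // Record the call for test expectations, then hand back a fresh mock
+  // texture; the mocked HRESULT itself is ignored.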
+ OnCreateTexture2D(desc, initial_data, texture2D);
+ Microsoft::WRL::ComPtr<MockD3D11Texture2D> mock_texture(
+ new MockD3D11Texture2D());
+ return mock_texture.CopyTo(IID_PPV_ARGS(texture2D));
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateTexture3D(
+ const D3D11_TEXTURE3D_DESC* desc,
+ const D3D11_SUBRESOURCE_DATA* initial_data,
+    ID3D11Texture3D** texture3D) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateShaderResourceView(
+ ID3D11Resource* resource,
+ const D3D11_SHADER_RESOURCE_VIEW_DESC* desc,
+ ID3D11ShaderResourceView** srv) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateUnorderedAccessView(
+ ID3D11Resource* resource,
+ const D3D11_UNORDERED_ACCESS_VIEW_DESC* desc,
+ ID3D11UnorderedAccessView** uaview) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateRenderTargetView(
+ ID3D11Resource* resource,
+ const D3D11_RENDER_TARGET_VIEW_DESC* desc,
+ ID3D11RenderTargetView** rtv) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateDepthStencilView(
+ ID3D11Resource* resource,
+ const D3D11_DEPTH_STENCIL_VIEW_DESC* desc,
+ ID3D11DepthStencilView** depth_stencil_view) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateInputLayout(
+ const D3D11_INPUT_ELEMENT_DESC* input_element_descs,
+ UINT num_elements,
+ const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11InputLayout** input_layout) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateVertexShader(
+ const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11VertexShader** vertex_shader) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateGeometryShader(
+ const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11GeometryShader** geometry_shader) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateGeometryShaderWithStreamOutput(
+ const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ const D3D11_SO_DECLARATION_ENTRY* so_declaration,
+ UINT num_entries,
+ const UINT* buffer_strides,
+ UINT num_strides,
+ UINT rasterized_stream,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11GeometryShader** geometry_shader) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreatePixelShader(
+ const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11PixelShader** pixel_shader) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateHullShader(
+ const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11HullShader** hull_shader) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateDomainShader(
+ const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11DomainShader** domain_shader) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateComputeShader(
+ const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11ComputeShader** compute_shader) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateClassLinkage(
+ ID3D11ClassLinkage** linkage) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateBlendState(
+ const D3D11_BLEND_DESC* blend_state_desc,
+ ID3D11BlendState** blend_state) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateDepthStencilState(
+ const D3D11_DEPTH_STENCIL_DESC* depth_stencil_desc,
+ ID3D11DepthStencilState** depth_stencil_state) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateRasterizerState(
+ const D3D11_RASTERIZER_DESC* rasterizer_desc,
+ ID3D11RasterizerState** rasterizer_state) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateSamplerState(
+ const D3D11_SAMPLER_DESC* sampler_desc,
+ ID3D11SamplerState** sampler_state) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateQuery(const D3D11_QUERY_DESC* query_desc,
+ ID3D11Query** query) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreatePredicate(
+ const D3D11_QUERY_DESC* predicate_desc,
+ ID3D11Predicate** predicate) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateCounter(
+ const D3D11_COUNTER_DESC* counter_desc,
+ ID3D11Counter** counter) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateDeferredContext(
+ UINT context_flags,
+ ID3D11DeviceContext** deferred_context) {
+ return E_NOTIMPL;
+}
+
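+// The legacy OpenSharedResource() forwards to the same mock as
+// OpenSharedResource1(), so a single expectation covers both entry points.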
+IFACEMETHODIMP MockD3D11Device::OpenSharedResource(HANDLE resource,
+ REFIID returned_interface,
+ void** resource_out) {
+ return DoOpenSharedResource1(resource, returned_interface, resource_out);
+}
+
+IFACEMETHODIMP MockD3D11Device::CheckFormatSupport(DXGI_FORMAT format,
+ UINT* format_support) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CheckMultisampleQualityLevels(
+ DXGI_FORMAT format,
+ UINT sample_count,
+ UINT* num_quality_levels) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP_(void)
+MockD3D11Device::CheckCounterInfo(D3D11_COUNTER_INFO* counter_info) {}
+
+IFACEMETHODIMP MockD3D11Device::CheckCounter(const D3D11_COUNTER_DESC* desc,
+ D3D11_COUNTER_TYPE* type,
+ UINT* active_counters,
+ LPSTR name,
+ UINT* name_length,
+ LPSTR units,
+ UINT* units_length,
+ LPSTR description,
+ UINT* description_length) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CheckFeatureSupport(
+ D3D11_FEATURE feature,
+ void* feature_support_data,
+ UINT feature_support_data_size) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::GetPrivateData(REFGUID guid,
+ UINT* data_size,
+ void* data) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::SetPrivateData(REFGUID guid,
+ UINT data_size,
+ const void* data) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::SetPrivateDataInterface(REFGUID guid,
+ const IUnknown* data) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP_(D3D_FEATURE_LEVEL) MockD3D11Device::GetFeatureLevel() {
+ return D3D_FEATURE_LEVEL_11_1;
+}
+
+IFACEMETHODIMP_(UINT) MockD3D11Device::GetCreationFlags() {
+ return 0;
+}
+
+IFACEMETHODIMP MockD3D11Device::GetDeviceRemovedReason() {
+ return OnGetDeviceRemovedReason();
+}
+
+IFACEMETHODIMP_(void)
+MockD3D11Device::GetImmediateContext(ID3D11DeviceContext** immediate_context) {
+ mock_immediate_context_.CopyTo(immediate_context);
+}
+
+IFACEMETHODIMP MockD3D11Device::SetExceptionMode(UINT raise_flags) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP_(UINT) MockD3D11Device::GetExceptionMode() {
+ return 0;
+}
+
+IFACEMETHODIMP_(void)
+MockD3D11Device::GetImmediateContext1(
+ ID3D11DeviceContext1** immediate_context) {}
+
+IFACEMETHODIMP MockD3D11Device::CreateDeferredContext1(
+ UINT context_flags,
+ ID3D11DeviceContext1** deferred_context) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateBlendState1(
+ const D3D11_BLEND_DESC1* blend_state_desc,
+ ID3D11BlendState1** blend_state) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateRasterizerState1(
+ const D3D11_RASTERIZER_DESC1* rasterizer_desc,
+ ID3D11RasterizerState1** rasterizer_state) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::CreateDeviceContextState(
+ UINT flags,
+ const D3D_FEATURE_LEVEL* feature_levels,
+ UINT feature_level_count,
+ UINT sdk_version,
+ REFIID emulated_interface,
+ D3D_FEATURE_LEVEL* chosen_feature_level,
+ ID3DDeviceContextState** context_state) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockD3D11Device::OpenSharedResource1(HANDLE resource,
+ REFIID returned_interface,
+ void** resource_out) {
+ return DoOpenSharedResource1(resource, returned_interface, resource_out);
+}
+
+IFACEMETHODIMP MockD3D11Device::OpenSharedResourceByName(
+ LPCWSTR name,
+ DWORD desired_access,
+ REFIID returned_interface,
+ void** resource_out) {
+ return E_NOTIMPL;
+}
+
+// Set up default actions for mocked methods.
+void MockD3D11Device::SetupDefaultMocks() {
+ ON_CALL(*this, OnGetDeviceRemovedReason).WillByDefault([]() { return S_OK; });
+ ON_CALL(*this, DoOpenSharedResource1)
+ .WillByDefault([](HANDLE, REFIID, void**) { return E_NOTIMPL; });
+}
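+
+// Usage sketch: tests call SetupDefaultMocks() first, then override
+// individual defaults as needed, e.g. to simulate device loss:
+//   device->SetupDefaultMocks();
+//   EXPECT_CALL(*device, OnGetDeviceRemovedReason())
+//       .WillOnce(Invoke([]() { return DXGI_ERROR_DEVICE_REMOVED; }));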
+
+IFACEMETHODIMP MockDXGIResource::CreateSubresourceSurface(
+ UINT index,
+ IDXGISurface2** surface) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockDXGIResource::CreateSharedHandle(
+ const SECURITY_ATTRIBUTES* attributes,
+ DWORD access,
+ LPCWSTR name,
+ HANDLE* handle) {
+  // The client needs a real handle it can own and later close, so hand out
+  // an event handle as a stand-in for a shared texture handle.
+ *handle = CreateEvent(nullptr, FALSE, FALSE, nullptr);
+ return S_OK;
+}
+
+IFACEMETHODIMP MockDXGIResource::GetSharedHandle(HANDLE* shared_handle) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockDXGIResource::GetUsage(DXGI_USAGE* usage) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockDXGIResource::SetEvictionPriority(UINT eviction_priority) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockDXGIResource::GetEvictionPriority(UINT* eviction_priority) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockDXGIResource::GetDevice(REFIID riid, void** device) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockDXGIResource::SetPrivateData(REFGUID name,
+ UINT data_size,
+ const void* data) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockDXGIResource::SetPrivateDataInterface(
+ REFGUID name,
+ const IUnknown* unknown) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockDXGIResource::GetPrivateData(REFGUID name,
+ UINT* data_size,
+ void* data) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockDXGIResource::GetParent(REFIID riid, void** parent) {
+ return E_NOTIMPL;
+}
+
+IFACEMETHODIMP MockDXGIResource::AcquireSync(UINT64 key, DWORD milliseconds) {
+ return S_OK;
+}
+IFACEMETHODIMP MockDXGIResource::ReleaseSync(UINT64 key) {
+ return S_OK;
+}
+
+MockDXGIResource::~MockDXGIResource() {}
+
+MockD3D11Texture2D::MockD3D11Texture2D(D3D11_TEXTURE2D_DESC desc,
+ ID3D11Device* device)
+ : desc_(desc), device_(device) {}
+MockD3D11Texture2D::MockD3D11Texture2D() {}
+
+IFACEMETHODIMP MockD3D11Texture2D::QueryInterface(REFIID riid, void** object) {
+ if (riid == __uuidof(IDXGIResource1) || riid == __uuidof(IDXGIKeyedMutex)) {
+ if (!mock_resource_) {
+ mock_resource_ = new MockDXGIResource();
+ }
+ return mock_resource_.CopyTo(riid, object);
+ }
+ return MockInterface::QueryInterface(riid, object);
+}
+
+IFACEMETHODIMP_(void) MockD3D11Texture2D::GetDesc(D3D11_TEXTURE2D_DESC* desc) {
+ *desc = desc_;
+}
+IFACEMETHODIMP_(void)
+MockD3D11Texture2D::GetType(D3D11_RESOURCE_DIMENSION* resource_dimension) {}
+IFACEMETHODIMP_(void)
+MockD3D11Texture2D::SetEvictionPriority(UINT eviction_priority) {}
+IFACEMETHODIMP_(UINT) MockD3D11Texture2D::GetEvictionPriority() {
+ return 0;
+}
+IFACEMETHODIMP_(void) MockD3D11Texture2D::GetDevice(ID3D11Device** device) {
+ if (device_) {
+ device_.CopyTo(device);
+ }
+}
+IFACEMETHODIMP MockD3D11Texture2D::GetPrivateData(REFGUID guid,
+ UINT* data_size,
+ void* data) {
+ return E_NOTIMPL;
+}
+IFACEMETHODIMP MockD3D11Texture2D::SetPrivateData(REFGUID guid,
+ UINT data_size,
+ const void* data) {
+ return E_NOTIMPL;
+}
+IFACEMETHODIMP MockD3D11Texture2D::SetPrivateDataInterface(
+ REFGUID guid,
+ const IUnknown* data) {
+ return E_NOTIMPL;
+}
+MockD3D11Texture2D::~MockD3D11Texture2D() {}
+
+} // namespace media \ No newline at end of file
diff --git a/chromium/media/capture/video/win/d3d_capture_test_utils.h b/chromium/media/capture/video/win/d3d_capture_test_utils.h
new file mode 100644
index 00000000000..c9dc9198086
--- /dev/null
+++ b/chromium/media/capture/video/win/d3d_capture_test_utils.h
@@ -0,0 +1,731 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_WIN_D3D_CAPTURE_TEST_UTILS_H_
+#define MEDIA_CAPTURE_VIDEO_WIN_D3D_CAPTURE_TEST_UTILS_H_
+
+#include <d3d11_4.h>
+#include <wrl.h>
+#include "base/memory/ref_counted.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+
+template <class... Interface>
+class MockInterface
+ : public base::RefCountedThreadSafe<MockInterface<Interface...>> {
+ public:
+ // IUnknown
+ IFACEMETHODIMP QueryInterface(REFIID riid, void** object) {
+ if (riid == __uuidof(IUnknown)) {
+ this->AddRef();
+ *object = this;
+ return S_OK;
+ }
+ return E_NOINTERFACE;
+ }
+ IFACEMETHODIMP_(ULONG) AddRef() {
+ base::RefCountedThreadSafe<MockInterface<Interface...>>::AddRef();
+ return 1U;
+ }
+ IFACEMETHODIMP_(ULONG) Release() {
+ base::RefCountedThreadSafe<MockInterface<Interface...>>::Release();
+ return 1U;
+ }
+
+ protected:
+ friend class base::RefCountedThreadSafe<MockInterface<Interface...>>;
+ virtual ~MockInterface() = default;
+};
+
+template <class Interface, class... Interfaces>
+class MockInterface<Interface, Interfaces...>
+ : public MockInterface<Interfaces...>, public Interface {
+ public:
+ IFACEMETHODIMP QueryInterface(REFIID riid, void** object) override {
+ if (riid == __uuidof(Interface)) {
+ this->AddRef();
+ *object = static_cast<Interface*>(this);
+ return S_OK;
+ }
+ return MockInterface<Interfaces...>::QueryInterface(riid, object);
+ }
+
+ IFACEMETHODIMP_(ULONG) AddRef() override {
+ return MockInterface<Interfaces...>::AddRef();
+ }
+
+ IFACEMETHODIMP_(ULONG) Release() override {
+ return MockInterface<Interfaces...>::Release();
+ }
+};
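+
+// For example, MockInterface<IDXGIResource1, IDXGIKeyedMutex> answers
+// QueryInterface() for IDXGIResource1, IDXGIKeyedMutex, and IUnknown by
+// recursing through the chain above; any other IID gets E_NOINTERFACE.
+// MockDXGIResource below uses exactly this combination.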
+
+class MockD3D11DeviceContext final : public MockInterface<ID3D11DeviceContext> {
+ public:
+ MockD3D11DeviceContext();
+
+ // ID3D11DeviceContext
+ IFACEMETHODIMP_(void)
+ VSSetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* constant_buffers_out) override;
+ IFACEMETHODIMP_(void)
+ PSSetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView* const* shader_resource_views_out) override;
+ IFACEMETHODIMP_(void)
+ PSSetShader(ID3D11PixelShader* pixel_shader,
+ ID3D11ClassInstance* const* class_instances,
+ UINT num_class_instances) override;
+ IFACEMETHODIMP_(void)
+ PSSetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState* const* samplers_out) override;
+ IFACEMETHODIMP_(void)
+ VSSetShader(ID3D11VertexShader* vertex_shader,
+ ID3D11ClassInstance* const* class_instances,
+ UINT num_class_instances) override;
+ IFACEMETHODIMP_(void)
+ DrawIndexed(UINT index_count,
+ UINT start_index_location,
+ INT base_vertex_location) override;
+ IFACEMETHODIMP_(void)
+ Draw(UINT vertex_count, UINT start_vertex_location) override;
+ IFACEMETHODIMP Map(ID3D11Resource* resource,
+ UINT subresource,
+ D3D11_MAP MapType,
+ UINT MapFlags,
+ D3D11_MAPPED_SUBRESOURCE* mapped_resource) override;
+ IFACEMETHODIMP_(void)
+ Unmap(ID3D11Resource* resource, UINT subresource) override;
+ IFACEMETHODIMP_(void)
+ PSSetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* constant_buffers) override;
+ IFACEMETHODIMP_(void)
+ IASetInputLayout(ID3D11InputLayout* input_layout) override;
+ IFACEMETHODIMP_(void)
+ IASetVertexBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* vertex_buffers,
+ const UINT* strides,
+ const UINT* offsets) override;
+ IFACEMETHODIMP_(void)
+ IASetIndexBuffer(ID3D11Buffer* index_buffer,
+ DXGI_FORMAT format,
+ UINT offset) override;
+ IFACEMETHODIMP_(void)
+ DrawIndexedInstanced(UINT index_count_per_instance,
+ UINT instance_count,
+ UINT start_index_location,
+ INT base_vertex_location,
+ UINT start_instance_location) override;
+ IFACEMETHODIMP_(void)
+ DrawInstanced(UINT vertex_count_per_instance,
+ UINT instance_count,
+ UINT start_vertex_location,
+ UINT start_instance_location) override;
+ IFACEMETHODIMP_(void)
+ GSSetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* constant_buffers) override;
+ IFACEMETHODIMP_(void)
+ GSSetShader(ID3D11GeometryShader* shader,
+ ID3D11ClassInstance* const* class_instances,
+ UINT num_class_instances) override;
+ IFACEMETHODIMP_(void)
+ IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY topology) override;
+ IFACEMETHODIMP_(void)
+ VSSetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView* const* shader_resource_views) override;
+ IFACEMETHODIMP_(void)
+ VSSetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState* const* samplers) override;
+ IFACEMETHODIMP_(void) Begin(ID3D11Asynchronous* async) override;
+ IFACEMETHODIMP_(void) End(ID3D11Asynchronous* async) override;
+ IFACEMETHODIMP GetData(ID3D11Asynchronous* async,
+ void* data,
+ UINT data_size,
+ UINT get_data_flags) override;
+ IFACEMETHODIMP_(void)
+ SetPredication(ID3D11Predicate* pPredicate, BOOL PredicateValue) override;
+ IFACEMETHODIMP_(void)
+ GSSetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView* const* shader_resource_views) override;
+ IFACEMETHODIMP_(void)
+ GSSetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState* const* samplers) override;
+ IFACEMETHODIMP_(void)
+ OMSetRenderTargets(UINT num_views,
+ ID3D11RenderTargetView* const* render_target_views,
+ ID3D11DepthStencilView* depth_stencil_view) override;
+ IFACEMETHODIMP_(void)
+ OMSetRenderTargetsAndUnorderedAccessViews(
+ UINT num_rtvs,
+ ID3D11RenderTargetView* const* render_target_views,
+ ID3D11DepthStencilView* depth_stencil_view,
+ UINT uav_start_slot,
+ UINT num_uavs,
+ ID3D11UnorderedAccessView* const* unordered_access_views,
+ const UINT* uav_initial_counts) override;
+ IFACEMETHODIMP_(void)
+ OMSetBlendState(ID3D11BlendState* blend_state,
+ const FLOAT blend_factor[4],
+ UINT sample_mask) override;
+ IFACEMETHODIMP_(void)
+ OMSetDepthStencilState(ID3D11DepthStencilState* depth_stencil_state,
+ UINT stencil_ref) override;
+ IFACEMETHODIMP_(void)
+ SOSetTargets(UINT num_buffers,
+ ID3D11Buffer* const* so_targets,
+ const UINT* offsets) override;
+ IFACEMETHODIMP_(void) DrawAuto() override;
+ IFACEMETHODIMP_(void)
+ DrawIndexedInstancedIndirect(ID3D11Buffer* buffer_for_args,
+ UINT aligned_byte_offset_for_args) override;
+ IFACEMETHODIMP_(void)
+ DrawInstancedIndirect(ID3D11Buffer* buffer_for_args,
+ UINT aligned_byte_offset_for_args) override;
+ IFACEMETHODIMP_(void)
+ Dispatch(UINT thread_group_count_x,
+ UINT thread_group_count_y,
+ UINT thread_group_count_z) override;
+ IFACEMETHODIMP_(void)
+ DispatchIndirect(ID3D11Buffer* buffer_for_args,
+ UINT aligned_byte_offset_for_args) override;
+ IFACEMETHODIMP_(void)
+ RSSetState(ID3D11RasterizerState* rasterizer_state) override;
+ IFACEMETHODIMP_(void)
+ RSSetViewports(UINT num_viewports, const D3D11_VIEWPORT* viewports) override;
+ IFACEMETHODIMP_(void)
+ RSSetScissorRects(UINT num_rects, const D3D11_RECT* rects) override;
+ IFACEMETHODIMP_(void)
+ CopySubresourceRegion(ID3D11Resource* dest_resource,
+ UINT dest_subresource,
+ UINT dest_x,
+ UINT dest_y,
+ UINT dest_z,
+ ID3D11Resource* source_resource,
+ UINT source_subresource,
+ const D3D11_BOX* source_box) override;
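+  // CopySubresourceRegion() forwards its arguments to this mock so tests can
+  // set expectations on it.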
+ MOCK_METHOD8(OnCopySubresourceRegion,
+ void(ID3D11Resource*,
+ UINT,
+ UINT,
+ UINT,
+ UINT,
+ ID3D11Resource*,
+ UINT,
+ const D3D11_BOX*));
+ IFACEMETHODIMP_(void)
+ CopyResource(ID3D11Resource* dest_resource,
+ ID3D11Resource* source_resource) override;
+ IFACEMETHODIMP_(void)
+ UpdateSubresource(ID3D11Resource* dest_resource,
+ UINT dest_subresource,
+ const D3D11_BOX* dest_box,
+ const void* source_data,
+ UINT source_row_pitch,
+ UINT source_depth_pitch) override;
+ IFACEMETHODIMP_(void)
+ CopyStructureCount(ID3D11Buffer* dest_buffer,
+ UINT dest_aligned_byte_offset,
+ ID3D11UnorderedAccessView* source_view) override;
+ IFACEMETHODIMP_(void)
+ ClearRenderTargetView(ID3D11RenderTargetView* render_target_view,
+ const FLOAT color_rgba[4]) override;
+ IFACEMETHODIMP_(void)
+ ClearUnorderedAccessViewUint(ID3D11UnorderedAccessView* unordered_access_view,
+ const UINT values[4]) override;
+ IFACEMETHODIMP_(void)
+ ClearUnorderedAccessViewFloat(
+ ID3D11UnorderedAccessView* unordered_access_view,
+ const FLOAT values[4]) override;
+ IFACEMETHODIMP_(void)
+ ClearDepthStencilView(ID3D11DepthStencilView* depth_stencil_view,
+ UINT clear_flags,
+ FLOAT depth,
+ UINT8 stencil) override;
+ IFACEMETHODIMP_(void)
+ GenerateMips(ID3D11ShaderResourceView* shader_resource_view) override;
+ IFACEMETHODIMP_(void)
+ SetResourceMinLOD(ID3D11Resource* resource, FLOAT min_lod) override;
+ IFACEMETHODIMP_(FLOAT) GetResourceMinLOD(ID3D11Resource* resource) override;
+ IFACEMETHODIMP_(void)
+ ResolveSubresource(ID3D11Resource* dest_resource,
+ UINT dest_subresource,
+ ID3D11Resource* source_resource,
+ UINT source_subresource,
+ DXGI_FORMAT format) override;
+ IFACEMETHODIMP_(void)
+ ExecuteCommandList(ID3D11CommandList* command_list,
+ BOOL restore_context_state) override;
+ IFACEMETHODIMP_(void)
+ HSSetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView* const* shader_resource_views) override;
+ IFACEMETHODIMP_(void)
+ HSSetShader(ID3D11HullShader* hull_shader,
+ ID3D11ClassInstance* const* class_instances,
+ UINT num_class_instances) override;
+ IFACEMETHODIMP_(void)
+ HSSetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState* const* samplers) override;
+ IFACEMETHODIMP_(void)
+ HSSetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* constant_buffers) override;
+ IFACEMETHODIMP_(void)
+ DSSetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView* const* shader_resource_views) override;
+ IFACEMETHODIMP_(void)
+ DSSetShader(ID3D11DomainShader* domain_shader,
+ ID3D11ClassInstance* const* class_instances,
+ UINT num_class_instances) override;
+ IFACEMETHODIMP_(void)
+ DSSetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState* const* samplers) override;
+ IFACEMETHODIMP_(void)
+ DSSetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* constant_buffers) override;
+ IFACEMETHODIMP_(void)
+ CSSetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView* const* shader_resource_views) override;
+ IFACEMETHODIMP_(void)
+ CSSetUnorderedAccessViews(
+ UINT start_slot,
+ UINT num_uavs,
+ ID3D11UnorderedAccessView* const* unordered_access_views,
+ const UINT* uav_initial_counts) override;
+ IFACEMETHODIMP_(void)
+  CSSetShader(ID3D11ComputeShader* compute_shader,
+ ID3D11ClassInstance* const* class_instances,
+ UINT num_class_instances) override;
+ IFACEMETHODIMP_(void)
+ CSSetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState* const* samplers) override;
+ IFACEMETHODIMP_(void)
+ CSSetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer* const* constant_buffers) override;
+ IFACEMETHODIMP_(void)
+ VSGetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** constant_buffers) override;
+ IFACEMETHODIMP_(void)
+ PSGetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView** shader_resource_views) override;
+ IFACEMETHODIMP_(void)
+ PSGetShader(ID3D11PixelShader** pixel_shader,
+ ID3D11ClassInstance** class_instances,
+ UINT* num_class_instances) override;
+ IFACEMETHODIMP_(void)
+ PSGetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState** samplers) override;
+ IFACEMETHODIMP_(void)
+ VSGetShader(ID3D11VertexShader** vertex_shader,
+ ID3D11ClassInstance** class_instances,
+ UINT* num_class_instances) override;
+ IFACEMETHODIMP_(void)
+ PSGetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** constant_buffers) override;
+ IFACEMETHODIMP_(void)
+ IAGetInputLayout(ID3D11InputLayout** input_layout) override;
+ IFACEMETHODIMP_(void)
+ IAGetVertexBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** vertex_buffers,
+ UINT* strides,
+ UINT* offsets) override;
+ IFACEMETHODIMP_(void)
+ IAGetIndexBuffer(ID3D11Buffer** index_buffer,
+ DXGI_FORMAT* format,
+ UINT* offset) override;
+ IFACEMETHODIMP_(void)
+ GSGetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** constant_buffers) override;
+ IFACEMETHODIMP_(void)
+ GSGetShader(ID3D11GeometryShader** geometry_shader,
+ ID3D11ClassInstance** class_instances,
+ UINT* num_class_instances) override;
+ IFACEMETHODIMP_(void)
+ IAGetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY* topology) override;
+ IFACEMETHODIMP_(void)
+ VSGetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView** shader_resource_views) override;
+ IFACEMETHODIMP_(void)
+ VSGetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState** samplers) override;
+ IFACEMETHODIMP_(void)
+ GetPredication(ID3D11Predicate** predicate, BOOL* predicate_value) override;
+ IFACEMETHODIMP_(void)
+ GSGetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView** shader_resource_views) override;
+ IFACEMETHODIMP_(void)
+ GSGetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState** samplers) override;
+ IFACEMETHODIMP_(void)
+ OMGetRenderTargets(UINT num_views,
+ ID3D11RenderTargetView** render_target_views,
+ ID3D11DepthStencilView** depth_stencil_view) override;
+ IFACEMETHODIMP_(void)
+ OMGetRenderTargetsAndUnorderedAccessViews(
+ UINT num_rtvs,
+ ID3D11RenderTargetView** render_target_views,
+ ID3D11DepthStencilView** depth_stencil_view,
+ UINT uav_start_slot,
+ UINT num_uavs,
+ ID3D11UnorderedAccessView** unordered_access_views) override;
+ IFACEMETHODIMP_(void)
+ OMGetBlendState(ID3D11BlendState** blend_state,
+ FLOAT blend_factor[4],
+ UINT* sample_mask) override;
+ IFACEMETHODIMP_(void)
+ OMGetDepthStencilState(ID3D11DepthStencilState** depth_stencil_state,
+ UINT* stencil_ref) override;
+ IFACEMETHODIMP_(void)
+ SOGetTargets(UINT num_buffers, ID3D11Buffer** so_targets) override;
+ IFACEMETHODIMP_(void)
+ RSGetState(ID3D11RasterizerState** rasterizer_state) override;
+ IFACEMETHODIMP_(void)
+ RSGetViewports(UINT* num_viewports, D3D11_VIEWPORT* viewports) override;
+ IFACEMETHODIMP_(void)
+ RSGetScissorRects(UINT* num_rects, D3D11_RECT* rects) override;
+ IFACEMETHODIMP_(void)
+ HSGetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView** shader_resource_views) override;
+ IFACEMETHODIMP_(void)
+ HSGetShader(ID3D11HullShader** hull_shader,
+ ID3D11ClassInstance** class_instances,
+ UINT* num_class_instances) override;
+ IFACEMETHODIMP_(void)
+ HSGetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState** samplers) override;
+ IFACEMETHODIMP_(void)
+ HSGetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** constant_buffers) override;
+ IFACEMETHODIMP_(void)
+ DSGetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView** shader_resource_views) override;
+ IFACEMETHODIMP_(void)
+ DSGetShader(ID3D11DomainShader** domain_shader,
+ ID3D11ClassInstance** class_instances,
+ UINT* num_class_instances) override;
+ IFACEMETHODIMP_(void)
+ DSGetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState** samplers) override;
+ IFACEMETHODIMP_(void)
+ DSGetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** constant_buffers) override;
+ IFACEMETHODIMP_(void)
+ CSGetShaderResources(
+ UINT start_slot,
+ UINT num_views,
+ ID3D11ShaderResourceView** shader_resource_views) override;
+ IFACEMETHODIMP_(void)
+ CSGetUnorderedAccessViews(
+ UINT start_slot,
+ UINT num_uavs,
+ ID3D11UnorderedAccessView** unordered_access_views) override;
+ IFACEMETHODIMP_(void)
+  CSGetShader(ID3D11ComputeShader** compute_shader,
+ ID3D11ClassInstance** class_instances,
+ UINT* num_class_instances) override;
+ IFACEMETHODIMP_(void)
+ CSGetSamplers(UINT start_slot,
+ UINT num_samplers,
+ ID3D11SamplerState** samplers) override;
+ IFACEMETHODIMP_(void)
+ CSGetConstantBuffers(UINT start_slot,
+ UINT num_buffers,
+ ID3D11Buffer** constant_buffers) override;
+ IFACEMETHODIMP_(void) ClearState() override;
+ IFACEMETHODIMP_(void) Flush() override;
+ IFACEMETHODIMP_(D3D11_DEVICE_CONTEXT_TYPE) GetType() override;
+ IFACEMETHODIMP_(UINT) GetContextFlags() override;
+ IFACEMETHODIMP FinishCommandList(BOOL restore_deferred_context_state,
+ ID3D11CommandList** command_list) override;
+
+ // ID3D11DeviceChild
+ IFACEMETHODIMP_(void) GetDevice(ID3D11Device** device) override;
+ IFACEMETHODIMP GetPrivateData(REFGUID guid,
+ UINT* data_size,
+ void* data) override;
+ IFACEMETHODIMP SetPrivateData(REFGUID guid,
+ UINT data_size,
+ const void* data) override;
+ IFACEMETHODIMP SetPrivateDataInterface(REFGUID guid,
+ const IUnknown* data) override;
+
+ private:
+ ~MockD3D11DeviceContext() override;
+};
+
+class MockD3D11Device final : public MockInterface<ID3D11Device1> {
+ public:
+ MockD3D11Device();
+
+ IFACEMETHODIMP QueryInterface(REFIID riid, void** object) override {
+ if (riid == __uuidof(ID3D11Device)) {
+ this->AddRef();
+ *object = static_cast<ID3D11Device*>(this);
+ return S_OK;
+ }
+ return MockInterface::QueryInterface(riid, object);
+ }
+
+ // ID3D11Device
+ IFACEMETHODIMP CreateBuffer(const D3D11_BUFFER_DESC* desc,
+ const D3D11_SUBRESOURCE_DATA* initial_data,
+ ID3D11Buffer** ppBuffer);
+ IFACEMETHODIMP CreateTexture1D(const D3D11_TEXTURE1D_DESC* desc,
+ const D3D11_SUBRESOURCE_DATA* initial_data,
+ ID3D11Texture1D** texture1D);
+ IFACEMETHODIMP CreateTexture2D(const D3D11_TEXTURE2D_DESC* desc,
+ const D3D11_SUBRESOURCE_DATA* initial_data,
+ ID3D11Texture2D** texture2D);
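+  // CreateTexture2D() forwards to this mock before returning a fresh
+  // MockD3D11Texture2D (see the .cc).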
+ MOCK_METHOD3(OnCreateTexture2D,
+ HRESULT(const D3D11_TEXTURE2D_DESC*,
+ const D3D11_SUBRESOURCE_DATA*,
+ ID3D11Texture2D**));
+ IFACEMETHODIMP CreateTexture3D(const D3D11_TEXTURE3D_DESC* desc,
+ const D3D11_SUBRESOURCE_DATA* initial_data,
+                                 ID3D11Texture3D** texture3D);
+ IFACEMETHODIMP CreateShaderResourceView(
+ ID3D11Resource* resource,
+ const D3D11_SHADER_RESOURCE_VIEW_DESC* desc,
+ ID3D11ShaderResourceView** srv);
+ IFACEMETHODIMP CreateUnorderedAccessView(
+ ID3D11Resource* resource,
+ const D3D11_UNORDERED_ACCESS_VIEW_DESC* desc,
+ ID3D11UnorderedAccessView** uaview);
+ IFACEMETHODIMP CreateRenderTargetView(
+ ID3D11Resource* resource,
+ const D3D11_RENDER_TARGET_VIEW_DESC* desc,
+ ID3D11RenderTargetView** rtv);
+ IFACEMETHODIMP CreateDepthStencilView(
+ ID3D11Resource* resource,
+ const D3D11_DEPTH_STENCIL_VIEW_DESC* desc,
+ ID3D11DepthStencilView** depth_stencil_view);
+ IFACEMETHODIMP CreateInputLayout(
+ const D3D11_INPUT_ELEMENT_DESC* input_element_descs,
+ UINT num_elements,
+ const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11InputLayout** input_layout);
+ IFACEMETHODIMP CreateVertexShader(const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11VertexShader** vertex_shader);
+ IFACEMETHODIMP CreateGeometryShader(const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11GeometryShader** geometry_shader);
+ IFACEMETHODIMP CreateGeometryShaderWithStreamOutput(
+ const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ const D3D11_SO_DECLARATION_ENTRY* so_declaration,
+ UINT num_entries,
+ const UINT* buffer_strides,
+ UINT num_strides,
+ UINT rasterized_stream,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11GeometryShader** geometry_shader);
+ IFACEMETHODIMP CreatePixelShader(const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11PixelShader** pixel_shader);
+ IFACEMETHODIMP CreateHullShader(const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11HullShader** hull_shader);
+ IFACEMETHODIMP CreateDomainShader(const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11DomainShader** domain_shader);
+ IFACEMETHODIMP CreateComputeShader(const void* shader_bytecode,
+ SIZE_T bytecode_length,
+ ID3D11ClassLinkage* class_linkage,
+ ID3D11ComputeShader** compute_shader);
+ IFACEMETHODIMP CreateClassLinkage(ID3D11ClassLinkage** linkage);
+ IFACEMETHODIMP CreateBlendState(const D3D11_BLEND_DESC* blend_state_desc,
+ ID3D11BlendState** blend_state);
+ IFACEMETHODIMP CreateDepthStencilState(
+ const D3D11_DEPTH_STENCIL_DESC* depth_stencil_desc,
+ ID3D11DepthStencilState** depth_stencil_state);
+ IFACEMETHODIMP CreateRasterizerState(
+ const D3D11_RASTERIZER_DESC* rasterizer_desc,
+ ID3D11RasterizerState** rasterizer_state);
+ IFACEMETHODIMP CreateSamplerState(const D3D11_SAMPLER_DESC* sampler_desc,
+ ID3D11SamplerState** sampler_state);
+ IFACEMETHODIMP CreateQuery(const D3D11_QUERY_DESC* query_desc,
+ ID3D11Query** query);
+ IFACEMETHODIMP CreatePredicate(const D3D11_QUERY_DESC* predicate_desc,
+ ID3D11Predicate** predicate);
+ IFACEMETHODIMP CreateCounter(const D3D11_COUNTER_DESC* counter_desc,
+ ID3D11Counter** counter);
+ IFACEMETHODIMP CreateDeferredContext(UINT context_flags,
+ ID3D11DeviceContext** deferred_context);
+ IFACEMETHODIMP OpenSharedResource(HANDLE resource,
+ REFIID returned_interface,
+ void** resource_out);
+ IFACEMETHODIMP CheckFormatSupport(DXGI_FORMAT format, UINT* format_support);
+ IFACEMETHODIMP CheckMultisampleQualityLevels(DXGI_FORMAT format,
+ UINT sample_count,
+ UINT* num_quality_levels);
+ IFACEMETHODIMP_(void) CheckCounterInfo(D3D11_COUNTER_INFO* counter_info);
+ IFACEMETHODIMP CheckCounter(const D3D11_COUNTER_DESC* desc,
+ D3D11_COUNTER_TYPE* type,
+ UINT* active_counters,
+ LPSTR name,
+ UINT* name_length,
+ LPSTR units,
+ UINT* units_length,
+ LPSTR description,
+ UINT* description_length);
+ IFACEMETHODIMP CheckFeatureSupport(D3D11_FEATURE feature,
+ void* feature_support_data,
+ UINT feature_support_data_size);
+ IFACEMETHODIMP GetPrivateData(REFGUID guid, UINT* data_size, void* data);
+ IFACEMETHODIMP SetPrivateData(REFGUID guid, UINT data_size, const void* data);
+ IFACEMETHODIMP SetPrivateDataInterface(REFGUID guid, const IUnknown* data);
+ IFACEMETHODIMP_(D3D_FEATURE_LEVEL) GetFeatureLevel();
+ IFACEMETHODIMP_(UINT) GetCreationFlags();
+ IFACEMETHODIMP GetDeviceRemovedReason();
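+  // GetDeviceRemovedReason() forwards to this mock; SetupDefaultMocks()
+  // makes it return S_OK by default.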
+ MOCK_METHOD0(OnGetDeviceRemovedReason, HRESULT());
+ IFACEMETHODIMP_(void)
+ GetImmediateContext(ID3D11DeviceContext** immediate_context);
+ IFACEMETHODIMP SetExceptionMode(UINT raise_flags);
+ IFACEMETHODIMP_(UINT) GetExceptionMode();
+
+ // ID3D11Device1
+ IFACEMETHODIMP_(void)
+ GetImmediateContext1(ID3D11DeviceContext1** immediate_context);
+ IFACEMETHODIMP CreateDeferredContext1(
+ UINT context_flags,
+ ID3D11DeviceContext1** deferred_context);
+ IFACEMETHODIMP CreateBlendState1(const D3D11_BLEND_DESC1* blend_state_desc,
+ ID3D11BlendState1** blend_state);
+ IFACEMETHODIMP CreateRasterizerState1(
+ const D3D11_RASTERIZER_DESC1* rasterizer_desc,
+ ID3D11RasterizerState1** rasterizer_state);
+ IFACEMETHODIMP CreateDeviceContextState(
+ UINT flags,
+ const D3D_FEATURE_LEVEL* feature_levels,
+ UINT feature_level_count,
+ UINT sdk_version,
+ REFIID emulated_interface,
+ D3D_FEATURE_LEVEL* chosen_feature_level,
+ ID3DDeviceContextState** context_state);
+ IFACEMETHODIMP OpenSharedResource1(HANDLE resource,
+ REFIID returned_interface,
+ void** resource_out);
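+  // Both OpenSharedResource() and OpenSharedResource1() forward to this
+  // mock.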
+ MOCK_METHOD3(DoOpenSharedResource1, HRESULT(HANDLE, REFIID, void**));
+ IFACEMETHODIMP OpenSharedResourceByName(LPCWSTR name,
+ DWORD desired_access,
+ REFIID returned_interface,
+ void** resource_out);
+
+ void SetupDefaultMocks();
+
+ Microsoft::WRL::ComPtr<MockD3D11DeviceContext> mock_immediate_context_;
+
+ private:
+ ~MockD3D11Device();
+};
+
+class MockDXGIResource final
+ : public MockInterface<IDXGIResource1, IDXGIKeyedMutex> {
+ public:
+ // IDXGIResource1
+ IFACEMETHODIMP CreateSubresourceSurface(UINT index, IDXGISurface2** surface);
+ IFACEMETHODIMP CreateSharedHandle(const SECURITY_ATTRIBUTES* attributes,
+ DWORD access,
+ LPCWSTR name,
+ HANDLE* handle);
+ // IDXGIResource
+ IFACEMETHODIMP GetSharedHandle(HANDLE* shared_handle);
+ IFACEMETHODIMP GetUsage(DXGI_USAGE* usage);
+ IFACEMETHODIMP SetEvictionPriority(UINT eviction_priority);
+ IFACEMETHODIMP GetEvictionPriority(UINT* eviction_priority);
+ // IDXGIDeviceSubObject
+ IFACEMETHODIMP GetDevice(REFIID riid, void** device);
+ // IDXGIObject
+ IFACEMETHODIMP SetPrivateData(REFGUID name, UINT data_size, const void* data);
+ IFACEMETHODIMP SetPrivateDataInterface(REFGUID name, const IUnknown* unknown);
+ IFACEMETHODIMP GetPrivateData(REFGUID name, UINT* data_size, void* data);
+ IFACEMETHODIMP GetParent(REFIID riid, void** parent);
+ // IDXGIKeyedMutex
+ IFACEMETHODIMP AcquireSync(UINT64 key, DWORD milliseconds) override;
+ IFACEMETHODIMP ReleaseSync(UINT64 key) override;
+
+ private:
+ ~MockDXGIResource() override;
+};
+
+class MockD3D11Texture2D final : public MockInterface<ID3D11Texture2D> {
+ public:
+ MockD3D11Texture2D(D3D11_TEXTURE2D_DESC desc, ID3D11Device* device);
+ MockD3D11Texture2D();
+ // IUnknown
+ IFACEMETHODIMP QueryInterface(REFIID riid, void** object) override;
+ // ID3D11Texture2D
+ IFACEMETHODIMP_(void) GetDesc(D3D11_TEXTURE2D_DESC* desc);
+ // ID3D11Resource
+ IFACEMETHODIMP_(void) GetType(D3D11_RESOURCE_DIMENSION* resource_dimension);
+ IFACEMETHODIMP_(void) SetEvictionPriority(UINT eviction_priority);
+ IFACEMETHODIMP_(UINT) GetEvictionPriority();
+ // ID3D11DeviceChild
+ IFACEMETHODIMP_(void) GetDevice(ID3D11Device** device);
+ IFACEMETHODIMP GetPrivateData(REFGUID guid, UINT* data_size, void* data);
+ IFACEMETHODIMP SetPrivateData(REFGUID guid, UINT data_size, const void* data);
+ IFACEMETHODIMP SetPrivateDataInterface(REFGUID guid, const IUnknown* data);
+
+ Microsoft::WRL::ComPtr<MockDXGIResource> mock_resource_;
+
+ private:
+ ~MockD3D11Texture2D() override;
+ D3D11_TEXTURE2D_DESC desc_ = {};
+ Microsoft::WRL::ComPtr<ID3D11Device> device_;
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_WIN_D3D_CAPTURE_TEST_UTILS_H_ \ No newline at end of file
diff --git a/chromium/media/capture/video/win/gpu_memory_buffer_tracker.cc b/chromium/media/capture/video/win/gpu_memory_buffer_tracker.cc
new file mode 100644
index 00000000000..c0c4b84694d
--- /dev/null
+++ b/chromium/media/capture/video/win/gpu_memory_buffer_tracker.cc
@@ -0,0 +1,151 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/win/gpu_memory_buffer_tracker.h"
+
+#include "base/check.h"
+#include "base/notreached.h"
+#include "base/win/scoped_handle.h"
+#include "gpu/ipc/common/gpu_memory_buffer_impl_dxgi.h"
+#include "media/capture/video/video_capture_buffer_handle.h"
+#include "ui/gfx/geometry/size.h"
+
+#include <dxgi1_2.h>
+
+namespace media {
+
+namespace {
+
+base::win::ScopedHandle CreateNV12Texture(ID3D11Device* d3d11_device,
+ const gfx::Size& size) {
+ const DXGI_FORMAT dxgi_format = DXGI_FORMAT_NV12;
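+  // D3D11_RESOURCE_MISC_SHARED_NTHANDLE is required for the
+  // CreateSharedHandle() call below; D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX
+  // lets consumers synchronize access to the texture via IDXGIKeyedMutex.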
+ D3D11_TEXTURE2D_DESC desc = {
+ .Width = size.width(),
+ .Height = size.height(),
+ .MipLevels = 1,
+ .ArraySize = 1,
+ .Format = dxgi_format,
+ .SampleDesc = {1, 0},
+ .Usage = D3D11_USAGE_DEFAULT,
+ .BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET,
+ .CPUAccessFlags = 0,
+ .MiscFlags = D3D11_RESOURCE_MISC_SHARED_NTHANDLE |
+ D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX};
+
+ Microsoft::WRL::ComPtr<ID3D11Texture2D> d3d11_texture;
+
+ HRESULT hr = d3d11_device->CreateTexture2D(&desc, nullptr, &d3d11_texture);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Failed to create D3D11 texture: "
+ << logging::SystemErrorCodeToString(hr);
+ return base::win::ScopedHandle();
+ }
+
+ Microsoft::WRL::ComPtr<IDXGIResource1> dxgi_resource;
+ hr = d3d11_texture.As(&dxgi_resource);
+ CHECK(SUCCEEDED(hr));
+
+ HANDLE texture_handle;
+ hr = dxgi_resource->CreateSharedHandle(
+ nullptr, DXGI_SHARED_RESOURCE_READ | DXGI_SHARED_RESOURCE_WRITE, nullptr,
+ &texture_handle);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Failed to create shared D3D11 texture handle: "
+ << logging::SystemErrorCodeToString(hr);
+ return base::win::ScopedHandle();
+ }
+ return base::win::ScopedHandle(texture_handle);
+}
+
+} // namespace
+
+GpuMemoryBufferTracker::GpuMemoryBufferTracker(
+ scoped_refptr<DXGIDeviceManager> dxgi_device_manager)
+ : dxgi_device_manager_(std::move(dxgi_device_manager)),
+ d3d_device_(dxgi_device_manager_->GetDevice()) {}
+
+GpuMemoryBufferTracker::~GpuMemoryBufferTracker() = default;
+
+bool GpuMemoryBufferTracker::Init(const gfx::Size& dimensions,
+ VideoPixelFormat format,
+ const mojom::PlaneStridesPtr& strides) {
+  // Only NV12 is supported.
+ if (format != PIXEL_FORMAT_NV12) {
+ NOTREACHED() << "Unsupported VideoPixelFormat " << format;
+ return false;
+ }
+
+ buffer_size_ = dimensions;
+
+ return CreateBufferInternal();
+}
+
+bool GpuMemoryBufferTracker::CreateBufferInternal() {
+ gfx::GpuMemoryBufferHandle buffer_handle;
+ buffer_handle.dxgi_handle =
+ CreateNV12Texture(d3d_device_.Get(), buffer_size_);
+
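+  // Wrap the shared NT handle so the rest of the capture pipeline can use it
+  // like any other NV12-formatted GpuMemoryBuffer.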
+ buffer_ = gpu::GpuMemoryBufferImplDXGI::CreateFromHandle(
+ std::move(buffer_handle), buffer_size_,
+ gfx::BufferFormat::YUV_420_BIPLANAR, gfx::BufferUsage::GPU_READ,
+ gpu::GpuMemoryBufferImpl::DestructionCallback());
+ if (!buffer_) {
+ NOTREACHED() << "Failed to create GPU memory buffer";
+ return false;
+ }
+ return true;
+}
+
+bool GpuMemoryBufferTracker::EnsureD3DDevice() {
+ // Check for and handle device loss by recreating the texture
+ if (FAILED(d3d_device_->GetDeviceRemovedReason())) {
+ DVLOG(1) << "Detected device loss.";
+ dxgi_device_manager_->ResetDevice();
+ d3d_device_ = dxgi_device_manager_->GetDevice();
+ if (!d3d_device_) {
+ return false;
+ }
+
+ return CreateBufferInternal();
+ }
+ return true;
+}
+
+bool GpuMemoryBufferTracker::IsReusableForFormat(
+ const gfx::Size& dimensions,
+ VideoPixelFormat format,
+ const mojom::PlaneStridesPtr& strides) {
+ return (format == PIXEL_FORMAT_NV12) && (dimensions == buffer_->GetSize());
+}
+
+std::unique_ptr<VideoCaptureBufferHandle>
+GpuMemoryBufferTracker::GetMemoryMappedAccess() {
+ NOTREACHED() << "Unsupported operation";
+ return std::make_unique<NullHandle>();
+}
+
+base::UnsafeSharedMemoryRegion
+GpuMemoryBufferTracker::DuplicateAsUnsafeRegion() {
+ NOTREACHED() << "Unsupported operation";
+ return base::UnsafeSharedMemoryRegion();
+}
+
+mojo::ScopedSharedBufferHandle GpuMemoryBufferTracker::DuplicateAsMojoBuffer() {
+ NOTREACHED() << "Unsupported operation";
+ return mojo::ScopedSharedBufferHandle();
+}
+
+gfx::GpuMemoryBufferHandle GpuMemoryBufferTracker::GetGpuMemoryBufferHandle() {
+ if (!EnsureD3DDevice()) {
+ return gfx::GpuMemoryBufferHandle();
+ }
+ return buffer_->CloneHandle();
+}
+
+uint32_t GpuMemoryBufferTracker::GetMemorySizeInBytes() {
+ DCHECK(buffer_);
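+  // NV12 layout: a full-resolution Y plane (width * height bytes) plus an
+  // interleaved, half-height UV plane (width * height / 2 bytes), i.e. 3/2
+  // bytes per pixel.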
+ return (buffer_->GetSize().width() * buffer_->GetSize().height() * 3) / 2;
+}
+
+} // namespace media \ No newline at end of file
diff --git a/chromium/media/capture/video/win/gpu_memory_buffer_tracker.h b/chromium/media/capture/video/win/gpu_memory_buffer_tracker.h
new file mode 100644
index 00000000000..c960abc6596
--- /dev/null
+++ b/chromium/media/capture/video/win/gpu_memory_buffer_tracker.h
@@ -0,0 +1,54 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_WIN_GPU_MEMORY_BUFFER_TRACKER_H_
+#define MEDIA_CAPTURE_VIDEO_WIN_GPU_MEMORY_BUFFER_TRACKER_H_
+
+#include "media/base/win/dxgi_device_manager.h"
+#include "media/capture/video/video_capture_buffer_tracker.h"
+
+#include <d3d11.h>
+#include <wrl.h>
+
+namespace gfx {
+class Size;
+} // namespace gfx
+
+namespace media {
+
+// Tracker specifics for Windows GpuMemoryBuffer.
+class CAPTURE_EXPORT GpuMemoryBufferTracker final
+ : public VideoCaptureBufferTracker {
+ public:
+ explicit GpuMemoryBufferTracker(
+ scoped_refptr<DXGIDeviceManager> dxgi_device_manager);
+ ~GpuMemoryBufferTracker() override;
+
+ // Implementation of VideoCaptureBufferTracker:
+ bool Init(const gfx::Size& dimensions,
+ VideoPixelFormat format,
+ const mojom::PlaneStridesPtr& strides) override;
+ bool IsReusableForFormat(const gfx::Size& dimensions,
+ VideoPixelFormat format,
+ const mojom::PlaneStridesPtr& strides) override;
+ uint32_t GetMemorySizeInBytes() override;
+ std::unique_ptr<VideoCaptureBufferHandle> GetMemoryMappedAccess() override;
+ base::UnsafeSharedMemoryRegion DuplicateAsUnsafeRegion() override;
+ mojo::ScopedSharedBufferHandle DuplicateAsMojoBuffer() override;
+ gfx::GpuMemoryBufferHandle GetGpuMemoryBufferHandle() override;
+
+ private:
+ std::unique_ptr<gfx::GpuMemoryBuffer> buffer_;
+ scoped_refptr<DXGIDeviceManager> dxgi_device_manager_;
+ Microsoft::WRL::ComPtr<ID3D11Device> d3d_device_;
+ gfx::Size buffer_size_;
+ bool CreateBufferInternal();
+ bool EnsureD3DDevice();
+
+ DISALLOW_COPY_AND_ASSIGN(GpuMemoryBufferTracker);
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_WIN_GPU_MEMORY_BUFFER_TRACKER_H_ \ No newline at end of file
diff --git a/chromium/media/capture/video/win/gpu_memory_buffer_tracker_unittest.cc b/chromium/media/capture/video/win/gpu_memory_buffer_tracker_unittest.cc
new file mode 100644
index 00000000000..267cfc5e696
--- /dev/null
+++ b/chromium/media/capture/video/win/gpu_memory_buffer_tracker_unittest.cc
@@ -0,0 +1,167 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <mfidl.h>
+
+#include <dxgi1_2.h>
+#include <mfapi.h>
+#include <mferror.h>
+#include <wrl.h>
+#include <wrl/client.h>
+
+#include "base/memory/scoped_refptr.h"
+#include "base/test/task_environment.h"
+#include "base/win/windows_version.h"
+#include "media/capture/video/win/d3d_capture_test_utils.h"
+#include "media/capture/video/win/gpu_memory_buffer_tracker.h"
+#include "media/capture/video/win/video_capture_device_factory_win.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ::testing::_;
+using ::testing::AllOf;
+using ::testing::Field;
+using ::testing::Invoke;
+using ::testing::Mock;
+using ::testing::Pointee;
+
+namespace media {
+
+namespace {
+
+class MockDXGIDeviceManager : public DXGIDeviceManager {
+ public:
+ MockDXGIDeviceManager()
+ : DXGIDeviceManager(nullptr, 0),
+ mock_d3d_device_(new MockD3D11Device()) {}
+
+  // Associates a new D3D device with the DXGI device manager (a no-op in
+  // this mock).
+ HRESULT ResetDevice() override { return S_OK; }
+
+ // Directly access D3D device stored in DXGI device manager
+ Microsoft::WRL::ComPtr<ID3D11Device> GetDevice() override {
+ Microsoft::WRL::ComPtr<ID3D11Device> device;
+ mock_d3d_device_.As(&device);
+ return device;
+ }
+
+ Microsoft::WRL::ComPtr<MockD3D11Device> GetMockDevice() {
+ return mock_d3d_device_;
+ }
+
+ protected:
+ ~MockDXGIDeviceManager() override {}
+ Microsoft::WRL::ComPtr<MockD3D11Device> mock_d3d_device_;
+};
+
+} // namespace
+
+class GpuMemoryBufferTrackerTest : public ::testing::Test {
+ protected:
+ GpuMemoryBufferTrackerTest()
+ : media_foundation_supported_(
+ VideoCaptureDeviceFactoryWin::PlatformSupportsMediaFoundation()) {}
+
+ bool ShouldSkipTest() {
+ if (!media_foundation_supported_) {
+ DVLOG(1) << "Media foundation is not supported by the current platform. "
+ "Skipping test.";
+ return true;
+ }
+    // D3D11 is only supported with Media Foundation on Windows 8 or later.
+    if (base::win::GetVersion() < base::win::Version::WIN8) {
+      DVLOG(1) << "D3D11 with Media Foundation is not supported by the "
+                  "current platform. Skipping test.";
+ return true;
+ }
+ return false;
+ }
+
+ void SetUp() override {
+ if (ShouldSkipTest()) {
+ GTEST_SKIP();
+ }
+
+ dxgi_device_manager_ =
+ scoped_refptr<MockDXGIDeviceManager>(new MockDXGIDeviceManager());
+ }
+
+ base::test::TaskEnvironment task_environment_;
+ const bool media_foundation_supported_;
+ scoped_refptr<MockDXGIDeviceManager> dxgi_device_manager_;
+};
+
+TEST_F(GpuMemoryBufferTrackerTest, TextureCreation) {
+  // Verify that GpuMemoryBufferTracker creates a D3D11 texture with the
+  // correct properties.
+ const gfx::Size expected_buffer_size = {1920, 1080};
+ const DXGI_FORMAT expected_buffer_format = DXGI_FORMAT_NV12;
+ dxgi_device_manager_->GetMockDevice()->SetupDefaultMocks();
+ EXPECT_CALL(*(dxgi_device_manager_->GetMockDevice().Get()),
+ OnCreateTexture2D(
+ Pointee(AllOf(Field(&D3D11_TEXTURE2D_DESC::Format,
+ expected_buffer_format),
+ Field(&D3D11_TEXTURE2D_DESC::Width,
+ static_cast<const unsigned int>(
+ expected_buffer_size.width())),
+ Field(&D3D11_TEXTURE2D_DESC::Height,
+ static_cast<const unsigned int>(
+ expected_buffer_size.height())))),
+ _, _));
+ std::unique_ptr<VideoCaptureBufferTracker> tracker =
+ std::make_unique<GpuMemoryBufferTracker>(dxgi_device_manager_);
+  EXPECT_TRUE(tracker->Init(expected_buffer_size, PIXEL_FORMAT_NV12, nullptr));
+}
+
+TEST_F(GpuMemoryBufferTrackerTest, TextureRecreationOnDeviceLoss) {
+ // Verify that GpuMemoryBufferTracker recreates a D3D11 texture with the
+  // correct properties when there is a device loss.
+ const gfx::Size expected_buffer_size = {1920, 1080};
+ const DXGI_FORMAT expected_buffer_format = DXGI_FORMAT_NV12;
+ dxgi_device_manager_->GetMockDevice()->SetupDefaultMocks();
+ // Expect two texture creation calls (the second occurs on device loss
+  // recovery).
+ EXPECT_CALL(*(dxgi_device_manager_->GetMockDevice().Get()),
+ OnCreateTexture2D(
+ Pointee(AllOf(Field(&D3D11_TEXTURE2D_DESC::Format,
+ expected_buffer_format),
+ Field(&D3D11_TEXTURE2D_DESC::Width,
+ static_cast<const unsigned int>(
+ expected_buffer_size.width())),
+ Field(&D3D11_TEXTURE2D_DESC::Height,
+ static_cast<const unsigned int>(
+ expected_buffer_size.height())))),
+ _, _))
+ .Times(2);
+ // Mock device loss
+ EXPECT_CALL(*(dxgi_device_manager_->GetMockDevice().Get()),
+ OnGetDeviceRemovedReason())
+ .WillOnce(Invoke([]() { return DXGI_ERROR_DEVICE_REMOVED; }));
+ // Create and init tracker (causes initial texture creation)
+ std::unique_ptr<VideoCaptureBufferTracker> tracker =
+ std::make_unique<GpuMemoryBufferTracker>(dxgi_device_manager_);
+  EXPECT_TRUE(tracker->Init(expected_buffer_size, PIXEL_FORMAT_NV12, nullptr));
+ // Get GpuMemoryBufferHandle (should trigger device/texture recreation)
+ gfx::GpuMemoryBufferHandle gmb = tracker->GetGpuMemoryBufferHandle();
+}
+
+TEST_F(GpuMemoryBufferTrackerTest, GetMemorySizeInBytes) {
+ // Verify that GpuMemoryBufferTracker returns an expected value from
+  // GetMemorySizeInBytes.
+ const gfx::Size expected_buffer_size = {1920, 1080};
+ dxgi_device_manager_->GetMockDevice()->SetupDefaultMocks();
+ std::unique_ptr<VideoCaptureBufferTracker> tracker =
+ std::make_unique<GpuMemoryBufferTracker>(dxgi_device_manager_);
+  EXPECT_TRUE(tracker->Init(expected_buffer_size, PIXEL_FORMAT_NV12, nullptr));
+
+  const uint32_t expected_size_in_bytes =
+      (expected_buffer_size.width() * expected_buffer_size.height() * 3) / 2;
+  EXPECT_EQ(tracker->GetMemorySizeInBytes(), expected_size_in_bytes);
+}
+
+} // namespace media \ No newline at end of file
diff --git a/chromium/media/capture/video/win/video_capture_buffer_tracker_factory_win.cc b/chromium/media/capture/video/win/video_capture_buffer_tracker_factory_win.cc
new file mode 100644
index 00000000000..25f2207c464
--- /dev/null
+++ b/chromium/media/capture/video/win/video_capture_buffer_tracker_factory_win.cc
@@ -0,0 +1,37 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/win/video_capture_buffer_tracker_factory_win.h"
+
+#include <memory>
+
+#include "media/capture/video/shared_memory_buffer_tracker.h"
+#include "media/capture/video/win/gpu_memory_buffer_tracker.h"
+
+namespace media {
+
+VideoCaptureBufferTrackerFactoryWin::VideoCaptureBufferTrackerFactoryWin()
+ : dxgi_device_manager_(DXGIDeviceManager::Create()) {}
+
+VideoCaptureBufferTrackerFactoryWin::~VideoCaptureBufferTrackerFactoryWin() {}
+
+std::unique_ptr<VideoCaptureBufferTracker>
+VideoCaptureBufferTrackerFactoryWin::CreateTracker(
+ VideoCaptureBufferType buffer_type) {
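+  // GPU memory buffers get the DXGI-backed tracker; everything else falls
+  // back to plain shared memory.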
+ switch (buffer_type) {
+ case VideoCaptureBufferType::kGpuMemoryBuffer:
+ return std::make_unique<GpuMemoryBufferTracker>(dxgi_device_manager_);
+ default:
+ return std::make_unique<SharedMemoryBufferTracker>();
+ }
+}
+
+std::unique_ptr<VideoCaptureBufferTracker>
+VideoCaptureBufferTrackerFactoryWin::CreateTrackerForExternalGpuMemoryBuffer(
+ const gfx::GpuMemoryBufferHandle& handle) {
+ // Not supported
+ return nullptr;
+}
+
+} // namespace media \ No newline at end of file
diff --git a/chromium/media/capture/video/win/video_capture_buffer_tracker_factory_win.h b/chromium/media/capture/video/win/video_capture_buffer_tracker_factory_win.h
new file mode 100644
index 00000000000..d7d5958a261
--- /dev/null
+++ b/chromium/media/capture/video/win/video_capture_buffer_tracker_factory_win.h
@@ -0,0 +1,35 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_WIN_VIDEO_CAPTURE_BUFFER_TRACKER_FACTORY_WIN_H_
+#define MEDIA_CAPTURE_VIDEO_WIN_VIDEO_CAPTURE_BUFFER_TRACKER_FACTORY_WIN_H_
+
+#include <memory>
+
+#include "base/memory/weak_ptr.h"
+#include "media/base/win/dxgi_device_manager.h"
+#include "media/capture/capture_export.h"
+#include "media/capture/video/video_capture_buffer_tracker_factory.h"
+
+namespace media {
+
+class CAPTURE_EXPORT VideoCaptureBufferTrackerFactoryWin
+ : public VideoCaptureBufferTrackerFactory {
+ public:
+ VideoCaptureBufferTrackerFactoryWin();
+ ~VideoCaptureBufferTrackerFactoryWin() override;
+ std::unique_ptr<VideoCaptureBufferTracker> CreateTracker(
+ VideoCaptureBufferType buffer_type) override;
+ std::unique_ptr<VideoCaptureBufferTracker>
+ CreateTrackerForExternalGpuMemoryBuffer(
+ const gfx::GpuMemoryBufferHandle& handle) override;
+
+ private:
+ scoped_refptr<DXGIDeviceManager> dxgi_device_manager_;
+ base::WeakPtrFactory<VideoCaptureBufferTrackerFactoryWin> weak_factory_{this};
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_WIN_VIDEO_CAPTURE_BUFFER_TRACKER_FACTORY_WIN_H_ \ No newline at end of file
diff --git a/chromium/media/capture/video/win/video_capture_device_factory_win.cc b/chromium/media/capture/video/win/video_capture_device_factory_win.cc
index fe412a57c28..c82499b8ae1 100644
--- a/chromium/media/capture/video/win/video_capture_device_factory_win.cc
+++ b/chromium/media/capture/video/win/video_capture_device_factory_win.cc
@@ -133,18 +133,30 @@ const char* const kDisplayNamesBlockedForMediaFoundation[] = {
const std::vector<
std::pair<VideoCaptureApi, std::vector<std::pair<GUID, GUID>>>>&
GetMFAttributes() {
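+  // When IR camera enumeration is enabled, sensor (depth/IR) cameras are
+  // enumerated via KSCATEGORY_SENSOR_CAMERA in addition to regular video
+  // capture devices; otherwise only the regular VIDCAP source type is used.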
+ if (base::FeatureList::IsEnabled(
+ media::kIncludeIRCamerasInDeviceEnumeration)) {
+ static const base::NoDestructor<std::vector<
+ std::pair<VideoCaptureApi, std::vector<std::pair<GUID, GUID>>>>>
+ mf_attributes({{{VideoCaptureApi::WIN_MEDIA_FOUNDATION,
+ {
+ {MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
+ MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID},
+ }},
+ {VideoCaptureApi::WIN_MEDIA_FOUNDATION_SENSOR,
+ {{MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
+ MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID},
+ {MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_CATEGORY,
+ KSCATEGORY_SENSOR_CAMERA}}}}});
+ return *mf_attributes;
+ }
+
static const base::NoDestructor<std::vector<
std::pair<VideoCaptureApi, std::vector<std::pair<GUID, GUID>>>>>
- mf_attributes({{{VideoCaptureApi::WIN_MEDIA_FOUNDATION,
- {
- {MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
- MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID},
- }},
- {VideoCaptureApi::WIN_MEDIA_FOUNDATION_SENSOR,
- {{MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
- MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID},
- {MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_CATEGORY,
- KSCATEGORY_SENSOR_CAMERA}}}}});
+ mf_attributes({{VideoCaptureApi::WIN_MEDIA_FOUNDATION,
+ {
+ {MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE,
+ MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID},
+ }}});
return *mf_attributes;
}
@@ -312,6 +324,8 @@ bool VideoCaptureDeviceFactoryWin::PlatformSupportsMediaFoundation() {
VideoCaptureDeviceFactoryWin::VideoCaptureDeviceFactoryWin()
: use_media_foundation_(
base::FeatureList::IsEnabled(media::kMediaFoundationVideoCapture)),
+ use_d3d11_with_media_foundation_(base::FeatureList::IsEnabled(
+ media::kMediaFoundationD3D11VideoCapture)),
com_thread_("Windows Video Capture COM Thread") {
if (use_media_foundation_ && !PlatformSupportsMediaFoundation()) {
use_media_foundation_ = false;
@@ -705,7 +719,7 @@ DevicesInfo VideoCaptureDeviceFactoryWin::GetDevicesInfoMediaFoundation() {
DevicesInfo devices_info;
if (use_d3d11_with_media_foundation_ && !dxgi_device_manager_) {
- dxgi_device_manager_ = VideoCaptureDXGIDeviceManager::Create();
+ dxgi_device_manager_ = DXGIDeviceManager::Create();
}
// Recent non-RGB (depth, IR) cameras could be marked as sensor cameras in
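
The GetMFAttributes() change above gates the sensor-camera (IR) attribute set behind kIncludeIRCamerasInDeviceEnumeration: with the feature enabled the old two-entry table (VIDCAP plus SENSOR_CAMERA) is returned, otherwise only the regular VIDCAP set, so IR cameras drop out of Media Foundation enumeration by default. Reduced to its essentials, selecting between two function-local statics on a base::Feature looks like this (a generic sketch with hypothetical names, not code from this CL):

#include <vector>
#include "base/feature_list.h"
#include "base/no_destructor.h"

const base::Feature kExtendedTable{"ExtendedTable",
                                   base::FEATURE_DISABLED_BY_DEFAULT};

const std::vector<int>& GetTable() {
  if (base::FeatureList::IsEnabled(kExtendedTable)) {
    // Constructed once on first use; never destructed (safe at shutdown).
    static const base::NoDestructor<std::vector<int>> extended({1, 2, 3});
    return *extended;
  }
  static const base::NoDestructor<std::vector<int>> basic({1});
  return *basic;
}
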
diff --git a/chromium/media/capture/video/win/video_capture_device_factory_win.h b/chromium/media/capture/video/win/video_capture_device_factory_win.h
index bd8b86ce161..43047d7b53f 100644
--- a/chromium/media/capture/video/win/video_capture_device_factory_win.h
+++ b/chromium/media/capture/video/win/video_capture_device_factory_win.h
@@ -17,9 +17,9 @@
#include "base/macros.h"
#include "base/threading/thread.h"
+#include "media/base/win/dxgi_device_manager.h"
#include "media/base/win/mf_initializer.h"
#include "media/capture/video/video_capture_device_factory.h"
-#include "media/capture/video/win/video_capture_dxgi_device_manager.h"
namespace media {
@@ -77,8 +77,7 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryWin
return use_d3d11_with_media_foundation_;
}
- scoped_refptr<VideoCaptureDXGIDeviceManager>
- dxgi_device_manager_for_testing() {
+ scoped_refptr<DXGIDeviceManager> dxgi_device_manager_for_testing() {
return dxgi_device_manager_;
}
@@ -97,7 +96,7 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryWin
std::vector<VideoCaptureDeviceInfo> GetDevicesInfoDirectShow();
bool use_media_foundation_;
- bool use_d3d11_with_media_foundation_ = false;
+ bool use_d3d11_with_media_foundation_;
MFSessionLifetime session_;
// For calling WinRT methods on a COM initiated thread.
@@ -105,7 +104,7 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryWin
scoped_refptr<base::SingleThreadTaskRunner> origin_task_runner_;
std::unordered_set<IAsyncOperation<DeviceInformationCollection*>*> async_ops_;
// For hardware acceleration in MediaFoundation capture engine
- scoped_refptr<VideoCaptureDXGIDeviceManager> dxgi_device_manager_;
+ scoped_refptr<DXGIDeviceManager> dxgi_device_manager_;
base::WeakPtrFactory<VideoCaptureDeviceFactoryWin> weak_ptr_factory_{this};
DISALLOW_COPY_AND_ASSIGN(VideoCaptureDeviceFactoryWin);
diff --git a/chromium/media/capture/video/win/video_capture_device_factory_win_unittest.cc b/chromium/media/capture/video/win/video_capture_device_factory_win_unittest.cc
index 7cd97c083b3..274bab4fdac 100644
--- a/chromium/media/capture/video/win/video_capture_device_factory_win_unittest.cc
+++ b/chromium/media/capture/video/win/video_capture_device_factory_win_unittest.cc
@@ -18,8 +18,10 @@
#include "base/run_loop.h"
#include "base/strings/sys_string_conversions.h"
#include "base/test/bind.h"
+#include "base/test/scoped_feature_list.h"
#include "base/test/task_environment.h"
#include "base/win/windows_version.h"
+#include "media/base/media_switches.h"
#include "media/capture/video/win/video_capture_device_factory_win.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -1353,6 +1355,104 @@ TEST_P(VideoCaptureDeviceFactoryMFWinTest, GetDevicesInfo) {
}));
run_loop.Run();
+ EXPECT_EQ(devices_info.size(), 6U);
+ for (auto it = devices_info.begin(); it != devices_info.end(); it++) {
+ // Verify that there are no duplicates.
+ EXPECT_EQ(
+ FindDeviceInRange(devices_info.begin(), it, it->descriptor.device_id),
+ it);
+ }
+ iterator it = FindDeviceInRange(devices_info.begin(), devices_info.end(),
+ base::SysWideToUTF8(kMFDeviceId0));
+ ASSERT_NE(it, devices_info.end());
+ EXPECT_EQ(it->descriptor.capture_api, VideoCaptureApi::WIN_MEDIA_FOUNDATION);
+ EXPECT_EQ(it->descriptor.display_name(), base::SysWideToUTF8(kMFDeviceName0));
+ // No IAMCameraControl and no IAMVideoProcAmp interfaces.
+ EXPECT_FALSE(it->descriptor.control_support().pan);
+ EXPECT_FALSE(it->descriptor.control_support().tilt);
+ EXPECT_FALSE(it->descriptor.control_support().zoom);
+
+ it = FindDeviceInRange(devices_info.begin(), devices_info.end(),
+ base::SysWideToUTF8(kMFDeviceId1));
+ ASSERT_NE(it, devices_info.end());
+ EXPECT_EQ(it->descriptor.capture_api, VideoCaptureApi::WIN_MEDIA_FOUNDATION);
+ EXPECT_EQ(it->descriptor.display_name(), base::SysWideToUTF8(kMFDeviceName1));
+ // No pan/tilt/zoom in IAMCameraControl interface.
+ EXPECT_FALSE(it->descriptor.control_support().pan);
+ EXPECT_FALSE(it->descriptor.control_support().tilt);
+ EXPECT_FALSE(it->descriptor.control_support().zoom);
+
+ it = FindDeviceInRange(devices_info.begin(), devices_info.end(),
+ base::SysWideToUTF8(kDirectShowDeviceId3));
+ ASSERT_NE(it, devices_info.end());
+ EXPECT_EQ(it->descriptor.capture_api, VideoCaptureApi::WIN_DIRECT_SHOW);
+ EXPECT_EQ(it->descriptor.display_name(),
+ base::SysWideToUTF8(kDirectShowDeviceName3));
+ // No ICameraControl interface.
+ EXPECT_FALSE(it->descriptor.control_support().pan);
+ EXPECT_FALSE(it->descriptor.control_support().tilt);
+ EXPECT_FALSE(it->descriptor.control_support().zoom);
+
+ it = FindDeviceInRange(devices_info.begin(), devices_info.end(),
+ base::SysWideToUTF8(kDirectShowDeviceId4));
+ ASSERT_NE(it, devices_info.end());
+ EXPECT_EQ(it->descriptor.capture_api, VideoCaptureApi::WIN_DIRECT_SHOW);
+ EXPECT_EQ(it->descriptor.display_name(),
+ base::SysWideToUTF8(kDirectShowDeviceName4));
+ // No IVideoProcAmp interface.
+ EXPECT_FALSE(it->descriptor.control_support().pan);
+ EXPECT_FALSE(it->descriptor.control_support().tilt);
+ EXPECT_FALSE(it->descriptor.control_support().zoom);
+
+ // Devices that are listed in MediaFoundation but only report supported
+ // formats in DirectShow are expected to get enumerated with
+ VideoCaptureApi::WIN_DIRECT_SHOW.
+ it = FindDeviceInRange(devices_info.begin(), devices_info.end(),
+ base::SysWideToUTF8(kDirectShowDeviceId5));
+ ASSERT_NE(it, devices_info.end());
+ EXPECT_EQ(it->descriptor.capture_api, VideoCaptureApi::WIN_DIRECT_SHOW);
+ EXPECT_EQ(it->descriptor.display_name(),
+ base::SysWideToUTF8(kDirectShowDeviceName5));
+ // No pan, tilt, or zoom ranges in ICameraControl interface.
+ EXPECT_FALSE(it->descriptor.control_support().pan);
+ EXPECT_FALSE(it->descriptor.control_support().tilt);
+ EXPECT_FALSE(it->descriptor.control_support().zoom);
+
+ // Devices that are listed in both MediaFoundation and DirectShow but are
+ // blocked for use with MediaFoundation are expected to get enumerated with
+ // VideoCaptureApi::WIN_DIRECT_SHOW.
+ it = FindDeviceInRange(devices_info.begin(), devices_info.end(),
+ base::SysWideToUTF8(kDirectShowDeviceId6));
+ ASSERT_NE(it, devices_info.end());
+ EXPECT_EQ(it->descriptor.capture_api, VideoCaptureApi::WIN_DIRECT_SHOW);
+ EXPECT_EQ(it->descriptor.display_name(),
+ base::SysWideToUTF8(kDirectShowDeviceName6));
+ EXPECT_TRUE(it->descriptor.control_support().pan);
+ EXPECT_TRUE(it->descriptor.control_support().tilt);
+ EXPECT_TRUE(it->descriptor.control_support().zoom);
+}
+
+TEST_P(VideoCaptureDeviceFactoryMFWinTest, GetDevicesInfo_IncludeIRCameras) {
+ base::test::ScopedFeatureList feature_list;
+ feature_list.InitAndEnableFeature(kIncludeIRCamerasInDeviceEnumeration);
+
+ if (ShouldSkipMFTest())
+ return;
+
+ const bool use_d3d11 = GetParam();
+ if (use_d3d11 && ShouldSkipD3D11Test())
+ return;
+ factory_.set_use_d3d11_with_media_foundation_for_testing(use_d3d11);
+
+ std::vector<VideoCaptureDeviceInfo> devices_info;
+ base::RunLoop run_loop;
+ factory_.GetDevicesInfo(base::BindLambdaForTesting(
+ [&devices_info, &run_loop](std::vector<VideoCaptureDeviceInfo> result) {
+ devices_info = std::move(result);
+ run_loop.Quit();
+ }));
+ run_loop.Run();
+
EXPECT_EQ(devices_info.size(), 7U);
for (auto it = devices_info.begin(); it != devices_info.end(); it++) {
// Verify that there are no duplicates.
diff --git a/chromium/media/capture/video/win/video_capture_device_mf_win.cc b/chromium/media/capture/video/win/video_capture_device_mf_win.cc
index 18c9acd16ee..f56c71b4c10 100644
--- a/chromium/media/capture/video/win/video_capture_device_mf_win.cc
+++ b/chromium/media/capture/video/win/video_capture_device_mf_win.cc
@@ -4,6 +4,7 @@
#include "media/capture/video/win/video_capture_device_mf_win.h"
+#include <d3d11_4.h>
#include <mfapi.h>
#include <mferror.h>
#include <stddef.h>
@@ -464,6 +465,88 @@ mojom::RangePtr RetrieveControlRangeAndCurrent(
},
supported_modes, current_mode, value_converter, step_converter);
}
+
+HRESULT GetTextureFromMFBuffer(IMFMediaBuffer* mf_buffer,
+ ID3D11Texture2D** texture_out) {
+ Microsoft::WRL::ComPtr<IMFDXGIBuffer> dxgi_buffer;
+ HRESULT hr = mf_buffer->QueryInterface(IID_PPV_ARGS(&dxgi_buffer));
+ DLOG_IF_FAILED_WITH_HRESULT("Failed to retrieve IMFDXGIBuffer", hr);
+
+ Microsoft::WRL::ComPtr<ID3D11Texture2D> d3d_texture;
+ if (SUCCEEDED(hr)) {
+ hr = dxgi_buffer->GetResource(IID_PPV_ARGS(&d3d_texture));
+ DLOG_IF_FAILED_WITH_HRESULT("Failed to retrieve ID3D11Texture2D", hr);
+ }
+
+ *texture_out = d3d_texture.Detach();
+ if (SUCCEEDED(hr)) {
+ CHECK(*texture_out);
+ }
+ return hr;
+}
+
+void GetTextureSizeAndFormat(ID3D11Texture2D* texture,
+ gfx::Size& size,
+ VideoPixelFormat& format) {
+ D3D11_TEXTURE2D_DESC desc;
+ texture->GetDesc(&desc);
+ size.set_width(desc.Width);
+ size.set_height(desc.Height);
+
+ switch (desc.Format) {
+ // Only NV12 is supported.
+ case DXGI_FORMAT_NV12:
+ format = PIXEL_FORMAT_NV12;
+ break;
+ default:
+ DLOG(ERROR) << "Unsupported camera DXGI texture format: " << desc.Format;
+ format = PIXEL_FORMAT_UNKNOWN;
+ break;
+ }
+}
+
+HRESULT CopyTextureToGpuMemoryBuffer(ID3D11Texture2D* texture,
+ gfx::GpuMemoryBufferHandle gmb_handle) {
+ Microsoft::WRL::ComPtr<ID3D11Device> texture_device;
+ texture->GetDevice(&texture_device);
+
+ Microsoft::WRL::ComPtr<ID3D11Device1> device1;
+ HRESULT hr = texture_device.As(&device1);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Failed to get ID3D11Device1: "
+ << logging::SystemErrorCodeToString(hr);
+ return hr;
+ }
+
+ // Open shared resource from GpuMemoryBuffer on source texture D3D11 device
+ Microsoft::WRL::ComPtr<ID3D11Texture2D> target_texture;
+ hr = device1->OpenSharedResource1(gmb_handle.dxgi_handle.Get(),
+ IID_PPV_ARGS(&target_texture));
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Failed to open shared camera target texture: "
+ << logging::SystemErrorCodeToString(hr);
+ return hr;
+ }
+
+ Microsoft::WRL::ComPtr<ID3D11DeviceContext> device_context;
+ texture_device->GetImmediateContext(&device_context);
+
+ Microsoft::WRL::ComPtr<IDXGIKeyedMutex> keyed_mutex;
+ hr = target_texture.As(&keyed_mutex);
+ CHECK(SUCCEEDED(hr));
+
+ keyed_mutex->AcquireSync(0, INFINITE);
+ device_context->CopySubresourceRegion(target_texture.Get(), 0, 0, 0, 0,
+ texture, 0, nullptr);
+ keyed_mutex->ReleaseSync(0);
+
+ // Flush the context to ensure that other devices receive the updated
+ // contents of the shared resource.
+ device_context->Flush();
+
+ return S_OK;
+}
+
} // namespace
class MFVideoCallback final
@@ -534,16 +617,8 @@ class MFVideoCallback final
ComPtr<IMFMediaBuffer> buffer;
sample->GetBufferByIndex(i, &buffer);
if (buffer) {
- ScopedBufferLock locked_buffer(buffer);
- if (locked_buffer.data()) {
- observer_->OnIncomingCapturedData(locked_buffer.data(),
- locked_buffer.length(),
- reference_time, timestamp);
- } else {
- observer_->OnFrameDropped(
- VideoCaptureFrameDropReason::
- kWinMediaFoundationLockingBufferDelieveredNullptr);
- }
+ observer_->OnIncomingCapturedData(buffer.Get(), reference_time,
+ timestamp);
} else {
observer_->OnFrameDropped(
VideoCaptureFrameDropReason::
@@ -738,7 +813,7 @@ HRESULT VideoCaptureDeviceMFWin::FillCapabilities(
VideoCaptureDeviceMFWin::VideoCaptureDeviceMFWin(
const VideoCaptureDeviceDescriptor& device_descriptor,
ComPtr<IMFMediaSource> source,
- scoped_refptr<VideoCaptureDXGIDeviceManager> dxgi_device_manager)
+ scoped_refptr<DXGIDeviceManager> dxgi_device_manager)
: VideoCaptureDeviceMFWin(device_descriptor,
source,
std::move(dxgi_device_manager),
@@ -747,7 +822,7 @@ VideoCaptureDeviceMFWin::VideoCaptureDeviceMFWin(
VideoCaptureDeviceMFWin::VideoCaptureDeviceMFWin(
const VideoCaptureDeviceDescriptor& device_descriptor,
ComPtr<IMFMediaSource> source,
- scoped_refptr<VideoCaptureDXGIDeviceManager> dxgi_device_manager,
+ scoped_refptr<DXGIDeviceManager> dxgi_device_manager,
ComPtr<IMFCaptureEngine> engine)
: facing_mode_(device_descriptor.facing),
create_mf_photo_callback_(base::BindRepeating(&CreateMFPhotoCallback)),
@@ -1365,17 +1440,105 @@ void VideoCaptureDeviceMFWin::SetPhotoOptions(
std::move(callback).Run(true);
}
-
void VideoCaptureDeviceMFWin::OnIncomingCapturedData(
- const uint8_t* data,
- int length,
+ IMFMediaBuffer* buffer,
base::TimeTicks reference_time,
base::TimeDelta timestamp) {
+ VideoCaptureFrameDropReason frame_drop_reason =
+ VideoCaptureFrameDropReason::kNone;
+ OnIncomingCapturedDataInternal(buffer, reference_time, timestamp,
+ frame_drop_reason);
+ if (frame_drop_reason != VideoCaptureFrameDropReason::kNone) {
+ OnFrameDropped(frame_drop_reason);
+ }
+}
+
+HRESULT VideoCaptureDeviceMFWin::DeliverTextureToClient(
+ ID3D11Texture2D* texture,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp) {
+ // Check for device loss
+ Microsoft::WRL::ComPtr<ID3D11Device> texture_device;
+ texture->GetDevice(&texture_device);
+
+ HRESULT hr = texture_device->GetDeviceRemovedReason();
+
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Camera texture device lost.";
+ // Keep the reset outside the DCHECK: DCHECK arguments are not evaluated
+ // in builds where DCHECKs are compiled out, which would skip the reset.
+ const bool reset_ok = dxgi_device_manager_->ResetDevice();
+ DCHECK(reset_ok);
+ return hr;
+ }
+
+ gfx::Size texture_size;
+ VideoPixelFormat pixel_format;
+ GetTextureSizeAndFormat(texture, texture_size, pixel_format);
+
+ if (pixel_format != PIXEL_FORMAT_NV12) {
+ return MF_E_UNSUPPORTED_FORMAT;
+ }
+
+ VideoCaptureDevice::Client::Buffer capture_buffer;
+ constexpr int kDummyFrameFeedbackId = 0;
+ auto result = client_->ReserveOutputBuffer(
+ texture_size, pixel_format, kDummyFrameFeedbackId, &capture_buffer);
+ if (result != VideoCaptureDevice::Client::ReserveResult::kSucceeded) {
+ DLOG(ERROR) << "Failed to reserve output capture buffer: " << (int)result;
+ return MF_E_UNEXPECTED;
+ }
+
+ hr = CopyTextureToGpuMemoryBuffer(
+ texture, capture_buffer.handle_provider->GetGpuMemoryBufferHandle());
+
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Failed to copy camera device texture to output texture: "
+ << logging::SystemErrorCodeToString(hr);
+ return hr;
+ }
+
+ VideoRotation frame_rotation = VIDEO_ROTATION_0;
+ DCHECK(camera_rotation_.has_value());
+ switch (camera_rotation_.value()) {
+ case 0:
+ frame_rotation = VIDEO_ROTATION_0;
+ break;
+ case 90:
+ frame_rotation = VIDEO_ROTATION_90;
+ break;
+ case 180:
+ frame_rotation = VIDEO_ROTATION_180;
+ break;
+ case 270:
+ frame_rotation = VIDEO_ROTATION_270;
+ break;
+ default:
+ break;
+ }
+
+ VideoFrameMetadata frame_metadata;
+ frame_metadata.transformation = VideoTransformation(frame_rotation);
+
+ client_->OnIncomingCapturedBufferExt(
+ std::move(capture_buffer),
+ VideoCaptureFormat(
+ texture_size, selected_video_capability_->supported_format.frame_rate,
+ pixel_format),
+ gfx::ColorSpace(), reference_time, timestamp, gfx::Rect(texture_size),
+ frame_metadata);
+
+ return hr;
+}
+
+void VideoCaptureDeviceMFWin::OnIncomingCapturedDataInternal(
+ IMFMediaBuffer* buffer,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp,
+ VideoCaptureFrameDropReason& frame_drop_reason) {
base::AutoLock lock(lock_);
- DCHECK(data);
SendOnStartedIfNotYetSent();
+ bool delivered_texture = false;
+
if (client_.get()) {
if (!has_sent_on_started_to_client_) {
has_sent_on_started_to_client_ = true;
@@ -1387,13 +1550,38 @@ void VideoCaptureDeviceMFWin::OnIncomingCapturedData(
if (!camera_rotation_.has_value() || IsAutoRotationEnabled())
camera_rotation_ = GetCameraRotation(facing_mode_);
+ Microsoft::WRL::ComPtr<ID3D11Texture2D> texture;
+ if (dxgi_device_manager_ &&
+ SUCCEEDED(GetTextureFromMFBuffer(buffer, &texture))) {
+ HRESULT hr =
+ DeliverTextureToClient(texture.Get(), reference_time, timestamp);
+ DLOG_IF_FAILED_WITH_HRESULT("Failed to deliver D3D11 texture to client.",
+ hr);
+ delivered_texture = SUCCEEDED(hr);
+ }
+ }
+
+ if (delivered_texture && video_stream_take_photo_callbacks_.empty()) {
+ return;
+ }
+
+ ScopedBufferLock locked_buffer(buffer);
+ if (!locked_buffer.data()) {
+ DLOG(ERROR) << "Locked buffer delivered nullptr";
+ frame_drop_reason = VideoCaptureFrameDropReason::
+ kWinMediaFoundationLockingBufferDelieveredNullptr;
+ return;
+ }
+
+ if (!delivered_texture && client_.get()) {
// TODO(julien.isorce): retrieve the color space information using Media
// Foundation API, MFGetAttributeSize/MF_MT_VIDEO_PRIMARIES, in order to
// build a gfx::ColorSpace. See http://crbug.com/959988.
client_->OnIncomingCapturedData(
- data, length, selected_video_capability_->supported_format,
- gfx::ColorSpace(), camera_rotation_.value(), false /* flip_y */,
- reference_time, timestamp);
+ locked_buffer.data(), locked_buffer.length(),
+ selected_video_capability_->supported_format, gfx::ColorSpace(),
+ camera_rotation_.value(), false /* flip_y */, reference_time,
+ timestamp);
}
while (!video_stream_take_photo_callbacks_.empty()) {
@@ -1401,8 +1589,9 @@ void VideoCaptureDeviceMFWin::OnIncomingCapturedData(
std::move(video_stream_take_photo_callbacks_.front());
video_stream_take_photo_callbacks_.pop();
- mojom::BlobPtr blob = RotateAndBlobify(
- data, length, selected_video_capability_->supported_format, 0);
+ mojom::BlobPtr blob =
+ RotateAndBlobify(locked_buffer.data(), locked_buffer.length(),
+ selected_video_capability_->supported_format, 0);
if (!blob) {
LogWindowsImageCaptureOutcome(
VideoCaptureWinBackend::kMediaFoundation,
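
CopyTextureToGpuMemoryBuffer() in this file is the producer half of a keyed-mutex handoff: it acquires key 0, copies into the shared texture, releases the key, and flushes. A consumer on a different D3D11 device would mirror that roughly as follows; this is a sketch that assumes the consumer already holds the shared NT handle and a destination texture of matching size/format:

#include <d3d11_1.h>
#include <wrl/client.h>

HRESULT ReadSharedTexture(ID3D11Device1* device,
                          HANDLE shared_handle,
                          ID3D11Texture2D* local_copy) {
  Microsoft::WRL::ComPtr<ID3D11Texture2D> shared;
  HRESULT hr =
      device->OpenSharedResource1(shared_handle, IID_PPV_ARGS(&shared));
  if (FAILED(hr))
    return hr;

  Microsoft::WRL::ComPtr<IDXGIKeyedMutex> keyed_mutex;
  hr = shared.As(&keyed_mutex);
  if (FAILED(hr))
    return hr;

  Microsoft::WRL::ComPtr<ID3D11DeviceContext> context;
  device->GetImmediateContext(&context);

  // Key 0 pairs with the producer's AcquireSync(0, ...)/ReleaseSync(0).
  hr = keyed_mutex->AcquireSync(0, INFINITE);
  if (FAILED(hr))
    return hr;
  context->CopyResource(local_copy, shared.Get());
  return keyed_mutex->ReleaseSync(0);
}
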
diff --git a/chromium/media/capture/video/win/video_capture_device_mf_win.h b/chromium/media/capture/video/win/video_capture_device_mf_win.h
index 116b6d964b2..817c697e93d 100644
--- a/chromium/media/capture/video/win/video_capture_device_mf_win.h
+++ b/chromium/media/capture/video/win/video_capture_device_mf_win.h
@@ -22,11 +22,11 @@
#include "base/macros.h"
#include "base/optional.h"
#include "base/sequence_checker.h"
+#include "media/base/win/dxgi_device_manager.h"
#include "media/capture/capture_export.h"
#include "media/capture/video/video_capture_device.h"
#include "media/capture/video/win/capability_list_win.h"
#include "media/capture/video/win/metrics.h"
-#include "media/capture/video/win/video_capture_dxgi_device_manager.h"
interface IMFSourceReader;
@@ -49,11 +49,11 @@ class CAPTURE_EXPORT VideoCaptureDeviceMFWin : public VideoCaptureDevice {
explicit VideoCaptureDeviceMFWin(
const VideoCaptureDeviceDescriptor& device_descriptor,
Microsoft::WRL::ComPtr<IMFMediaSource> source,
- scoped_refptr<VideoCaptureDXGIDeviceManager> dxgi_device_manager);
+ scoped_refptr<DXGIDeviceManager> dxgi_device_manager);
explicit VideoCaptureDeviceMFWin(
const VideoCaptureDeviceDescriptor& device_descriptor,
Microsoft::WRL::ComPtr<IMFMediaSource> source,
- scoped_refptr<VideoCaptureDXGIDeviceManager> dxgi_device_manager,
+ scoped_refptr<DXGIDeviceManager> dxgi_device_manager,
Microsoft::WRL::ComPtr<IMFCaptureEngine> engine);
~VideoCaptureDeviceMFWin() override;
@@ -72,8 +72,7 @@ class CAPTURE_EXPORT VideoCaptureDeviceMFWin : public VideoCaptureDevice {
SetPhotoOptionsCallback callback) override;
// Captured new video data.
- void OnIncomingCapturedData(const uint8_t* data,
- int length,
+ void OnIncomingCapturedData(IMFMediaBuffer* buffer,
base::TimeTicks reference_time,
base::TimeDelta timestamp);
void OnFrameDropped(VideoCaptureFrameDropReason reason);
@@ -101,7 +100,7 @@ class CAPTURE_EXPORT VideoCaptureDeviceMFWin : public VideoCaptureDevice {
}
void set_dxgi_device_manager_for_testing(
- scoped_refptr<VideoCaptureDXGIDeviceManager> dxgi_device_manager) {
+ scoped_refptr<DXGIDeviceManager> dxgi_device_manager) {
dxgi_device_manager_ = std::move(dxgi_device_manager);
}
@@ -132,6 +131,14 @@ class CAPTURE_EXPORT VideoCaptureDeviceMFWin : public VideoCaptureDevice {
const char* message);
void SendOnStartedIfNotYetSent();
HRESULT WaitOnCaptureEvent(GUID capture_event_guid);
+ HRESULT DeliverTextureToClient(ID3D11Texture2D* texture,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp);
+ void OnIncomingCapturedDataInternal(
+ IMFMediaBuffer* buffer,
+ base::TimeTicks reference_time,
+ base::TimeDelta timestamp,
+ VideoCaptureFrameDropReason& frame_drop_reason);
VideoFacingMode facing_mode_;
CreateMFPhotoCallbackCB create_mf_photo_callback_;
@@ -162,7 +169,7 @@ class CAPTURE_EXPORT VideoCaptureDeviceMFWin : public VideoCaptureDevice {
base::queue<TakePhotoCallback> video_stream_take_photo_callbacks_;
base::WaitableEvent capture_initialize_;
base::WaitableEvent capture_error_;
- scoped_refptr<VideoCaptureDXGIDeviceManager> dxgi_device_manager_;
+ scoped_refptr<DXGIDeviceManager> dxgi_device_manager_;
base::Optional<int> camera_rotation_;
SEQUENCE_CHECKER(sequence_checker_);
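
The new OnIncomingCapturedDataInternal() declared above threads a VideoCaptureFrameDropReason out-parameter back to the public entry point, so every early return still gets reported from a single site. The shape of that pattern, reduced to a self-contained sketch with hypothetical names:

#include <iostream>

enum class FrameDropReason { kNone, kBufferLockFailed };

void ProcessFrameInternal(bool lock_ok, FrameDropReason& reason) {
  if (!lock_ok) {
    reason = FrameDropReason::kBufferLockFailed;
    return;  // Early return; the caller still reports the drop.
  }
  // ... deliver the frame ...
}

void OnFrame(bool lock_ok) {
  FrameDropReason reason = FrameDropReason::kNone;
  ProcessFrameInternal(lock_ok, reason);
  if (reason != FrameDropReason::kNone)  // Single reporting site.
    std::cout << "dropped, reason " << static_cast<int>(reason) << '\n';
}
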
diff --git a/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc b/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc
index d38980b0924..4e6fef1f5ce 100644
--- a/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc
+++ b/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc
@@ -13,12 +13,13 @@
#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/test/task_environment.h"
+#include "base/win/scoped_handle.h"
#include "base/win/windows_version.h"
#include "media/base/win/mf_helpers.h"
+#include "media/capture/video/win/d3d_capture_test_utils.h"
#include "media/capture/video/win/sink_filter_win.h"
#include "media/capture/video/win/video_capture_device_factory_win.h"
#include "media/capture/video/win/video_capture_device_mf_win.h"
-#include "media/capture/video/win/video_capture_dxgi_device_manager.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -67,9 +68,8 @@ class MockClient : public VideoCaptureDevice::Client {
int frame_feedback_id = 0) override {}
void OnIncomingCapturedExternalBuffer(
- gfx::GpuMemoryBufferHandle handle,
- const VideoCaptureFormat& format,
- const gfx::ColorSpace& color_space,
+ CapturedExternalVideoBuffer buffer,
+ std::vector<CapturedExternalVideoBuffer> scaled_buffers,
base::TimeTicks reference_time,
base::TimeDelta timestamp) override {}
@@ -81,14 +81,14 @@ class MockClient : public VideoCaptureDevice::Client {
base::TimeTicks reference_,
base::TimeDelta timestamp) override {}
- void OnIncomingCapturedBufferExt(
- Buffer buffer,
- const VideoCaptureFormat& format,
- const gfx::ColorSpace& color_space,
- base::TimeTicks reference_time,
- base::TimeDelta timestamp,
- gfx::Rect visible_rect,
- const VideoFrameMetadata& additional_metadata) override {}
+ MOCK_METHOD7(OnIncomingCapturedBufferExt,
+ void(Buffer,
+ const VideoCaptureFormat&,
+ const gfx::ColorSpace&,
+ base::TimeTicks,
+ base::TimeDelta,
+ gfx::Rect,
+ const VideoFrameMetadata&));
MOCK_METHOD3(OnError,
void(VideoCaptureError,
@@ -970,6 +970,40 @@ struct DepthDeviceParams {
// Depth device sometimes provides multiple video streams.
bool additional_i420_video_stream;
};
+
+class MockCaptureHandleProvider
+ : public VideoCaptureDevice::Client::Buffer::HandleProvider {
+ public:
+ // Duplicate as an writable (unsafe) shared memory region.
+ base::UnsafeSharedMemoryRegion DuplicateAsUnsafeRegion() override {
+ return base::UnsafeSharedMemoryRegion();
+ }
+
+ // Duplicate as a writable (unsafe) mojo buffer.
+ mojo::ScopedSharedBufferHandle DuplicateAsMojoBuffer() override {
+ return mojo::ScopedSharedBufferHandle();
+ }
+
+ // Access a |VideoCaptureBufferHandle| for local, writable memory.
+ std::unique_ptr<VideoCaptureBufferHandle> GetHandleForInProcessAccess()
+ override {
+ return nullptr;
+ }
+
+ // Clone a |GpuMemoryBufferHandle| for IPC.
+ gfx::GpuMemoryBufferHandle GetGpuMemoryBufferHandle() override {
+ // Create a fake DXGI buffer handle
+ // (ensure that the fake is still a valid NT handle by using an event
+ // handle)
+ base::win::ScopedHandle fake_dxgi_handle(
+ CreateEvent(nullptr, FALSE, FALSE, nullptr));
+ gfx::GpuMemoryBufferHandle handle;
+ handle.type = gfx::GpuMemoryBufferType::DXGI_SHARED_HANDLE;
+ handle.dxgi_handle = std::move(fake_dxgi_handle);
+ return handle;
+ }
+};
+
} // namespace
const int kArbitraryValidVideoWidth = 1920;
@@ -1222,7 +1256,7 @@ class VideoCaptureDeviceMFWinTest : public ::testing::Test {
scoped_refptr<MockMFCaptureSource> capture_source_;
scoped_refptr<MockCapturePreviewSink> capture_preview_sink_;
base::test::TaskEnvironment task_environment_;
- scoped_refptr<VideoCaptureDXGIDeviceManager> dxgi_device_manager_;
+ scoped_refptr<DXGIDeviceManager> dxgi_device_manager_;
private:
const bool media_foundation_supported_;
@@ -1731,7 +1765,7 @@ class VideoCaptureDeviceMFWinTestWithDXGI : public VideoCaptureDeviceMFWinTest {
if (ShouldSkipD3D11Test())
GTEST_SKIP();
- dxgi_device_manager_ = VideoCaptureDXGIDeviceManager::Create();
+ dxgi_device_manager_ = DXGIDeviceManager::Create();
VideoCaptureDeviceMFWinTest::SetUp();
}
};
@@ -1784,4 +1818,95 @@ TEST_F(VideoCaptureDeviceMFWinTestWithDXGI, EnsureNV12SinkSubtype) {
capture_preview_sink_->sample_callback->OnSample(nullptr);
}
+TEST_F(VideoCaptureDeviceMFWinTestWithDXGI, DeliverGMBCaptureBuffers) {
+ if (ShouldSkipTest())
+ return;
+
+ const GUID expected_subtype = MFVideoFormat_NV12;
+ PrepareMFDeviceWithOneVideoStream(expected_subtype);
+
+ const gfx::Size expected_size(640, 480);
+
+ // Verify that an output capture buffer is reserved from the client
+ EXPECT_CALL(*client_, ReserveOutputBuffer)
+ .WillOnce(Invoke(
+ [expected_size](const gfx::Size& size, VideoPixelFormat format,
+ int feedback_id,
+ VideoCaptureDevice::Client::Buffer* capture_buffer) {
+ EXPECT_EQ(size.width(), expected_size.width());
+ EXPECT_EQ(size.height(), expected_size.height());
+ EXPECT_EQ(format, PIXEL_FORMAT_NV12);
+ capture_buffer->handle_provider =
+ std::make_unique<MockCaptureHandleProvider>();
+ return VideoCaptureDevice::Client::ReserveResult::kSucceeded;
+ }));
+
+ Microsoft::WRL::ComPtr<MockD3D11Device> mock_device(new MockD3D11Device());
+
+ // Create mock source texture (to be provided to capture device from MF
+ // capture API)
+ D3D11_TEXTURE2D_DESC mock_desc = {};
+ mock_desc.Format = DXGI_FORMAT_NV12;
+ mock_desc.Width = expected_size.width();
+ mock_desc.Height = expected_size.height();
+ Microsoft::WRL::ComPtr<ID3D11Texture2D> mock_source_texture_2d;
+ Microsoft::WRL::ComPtr<MockD3D11Texture2D> mock_source_texture(
+ new MockD3D11Texture2D(mock_desc, mock_device.Get()));
+ EXPECT_TRUE(SUCCEEDED(
+ mock_source_texture.CopyTo(IID_PPV_ARGS(&mock_source_texture_2d))));
+
+ // Create mock target texture with matching dimensions/format
+ // (to be provided from the capture device to the capture client)
+ Microsoft::WRL::ComPtr<ID3D11Texture2D> mock_target_texture_2d;
+ Microsoft::WRL::ComPtr<MockD3D11Texture2D> mock_target_texture(
+ new MockD3D11Texture2D(mock_desc, mock_device.Get()));
+ EXPECT_TRUE(SUCCEEDED(
+ mock_target_texture.CopyTo(IID_PPV_ARGS(&mock_target_texture_2d))));
+ // Mock OpenSharedResource call on mock D3D device to return target texture
+ EXPECT_CALL(*mock_device.Get(), DoOpenSharedResource1)
+ .WillOnce(Invoke([&mock_target_texture_2d](HANDLE resource,
+ REFIID returned_interface,
+ void** resource_out) {
+ return mock_target_texture_2d.CopyTo(returned_interface, resource_out);
+ }));
+ // Expect call to copy source texture to target on immediate context
+ ID3D11Resource* expected_source =
+ static_cast<ID3D11Resource*>(mock_source_texture_2d.Get());
+ ID3D11Resource* expected_target =
+ static_cast<ID3D11Resource*>(mock_target_texture_2d.Get());
+ EXPECT_CALL(*mock_device->mock_immediate_context_.Get(),
+ OnCopySubresourceRegion(expected_target, _, _, _, _,
+ expected_source, _, _))
+ .Times(1);
+ // Expect the client to receive a buffer whose GMB carries the expected
+ // fake DXGI handle.
+ EXPECT_CALL(*client_, OnIncomingCapturedBufferExt)
+ .WillOnce(Invoke([](VideoCaptureDevice::Client::Buffer buffer,
+ const VideoCaptureFormat&, const gfx::ColorSpace&,
+ base::TimeTicks, base::TimeDelta, gfx::Rect,
+ const VideoFrameMetadata&) {
+ gfx::GpuMemoryBufferHandle gmb_handle =
+ buffer.handle_provider->GetGpuMemoryBufferHandle();
+ EXPECT_EQ(gmb_handle.type,
+ gfx::GpuMemoryBufferType::DXGI_SHARED_HANDLE);
+ }));
+
+ // Init capture
+ VideoCaptureFormat format(expected_size, 30, media::PIXEL_FORMAT_NV12);
+ VideoCaptureParams video_capture_params;
+ video_capture_params.requested_format = format;
+ device_->AllocateAndStart(video_capture_params, std::move(client_));
+
+ // Create MF sample and provide to sample callback on capture device
+ Microsoft::WRL::ComPtr<IMFSample> sample;
+ EXPECT_TRUE(SUCCEEDED(MFCreateSample(&sample)));
+ Microsoft::WRL::ComPtr<IMFMediaBuffer> dxgi_buffer;
+ EXPECT_TRUE(SUCCEEDED(MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D),
+ mock_source_texture_2d.Get(),
+ 0, FALSE, &dxgi_buffer)));
+ EXPECT_TRUE(SUCCEEDED(sample->AddBuffer(dxgi_buffer.Get())));
+
+ capture_preview_sink_->sample_callback->OnSample(sample.Get());
+}
+
} // namespace media
diff --git a/chromium/media/capture/video/win/video_capture_dxgi_device_manager.cc b/chromium/media/capture/video/win/video_capture_dxgi_device_manager.cc
deleted file mode 100644
index 94650aabece..00000000000
--- a/chromium/media/capture/video/win/video_capture_dxgi_device_manager.cc
+++ /dev/null
@@ -1,92 +0,0 @@
-// Copyright (c) 2020 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/capture/video/win/video_capture_dxgi_device_manager.h"
-
-#include <d3d11.h>
-#include <mfcaptureengine.h>
-#include <mfreadwrite.h>
-#include "base/logging.h"
-
-using Microsoft::WRL::ComPtr;
-
-namespace media {
-
-scoped_refptr<VideoCaptureDXGIDeviceManager>
-VideoCaptureDXGIDeviceManager::Create() {
- ComPtr<IMFDXGIDeviceManager> mf_dxgi_device_manager;
- UINT d3d_device_reset_token = 0;
- HRESULT hr = MFCreateDXGIDeviceManager(&d3d_device_reset_token,
- &mf_dxgi_device_manager);
- if (FAILED(hr)) {
- DLOG(ERROR) << "Failed to create MF DXGI device manager: "
- << logging::SystemErrorCodeToString(hr);
- return scoped_refptr<VideoCaptureDXGIDeviceManager>();
- }
- scoped_refptr<VideoCaptureDXGIDeviceManager>
- video_capture_dxgi_device_manager(new VideoCaptureDXGIDeviceManager(
- std::move(mf_dxgi_device_manager), d3d_device_reset_token));
- if (!video_capture_dxgi_device_manager->ResetDevice()) {
- // If setting a device failed, ensure that an empty scoped_refptr is
- // returned so that we fall back to software mode
- return scoped_refptr<VideoCaptureDXGIDeviceManager>();
- }
- return video_capture_dxgi_device_manager;
-}
-
-VideoCaptureDXGIDeviceManager::VideoCaptureDXGIDeviceManager(
- Microsoft::WRL::ComPtr<IMFDXGIDeviceManager> mf_dxgi_device_manager,
- UINT d3d_device_reset_token)
- : mf_dxgi_device_manager_(std::move(mf_dxgi_device_manager)),
- d3d_device_reset_token_(d3d_device_reset_token) {}
-
-VideoCaptureDXGIDeviceManager::~VideoCaptureDXGIDeviceManager() {}
-
-bool VideoCaptureDXGIDeviceManager::ResetDevice() {
- Microsoft::WRL::ComPtr<ID3D11Device> d3d_device;
- constexpr uint32_t device_flags =
- (D3D11_CREATE_DEVICE_VIDEO_SUPPORT | D3D11_CREATE_DEVICE_BGRA_SUPPORT);
- HRESULT hr = D3D11CreateDevice(nullptr, D3D_DRIVER_TYPE_HARDWARE, nullptr,
- device_flags, nullptr, 0, D3D11_SDK_VERSION,
- &d3d_device, nullptr, nullptr);
- if (FAILED(hr)) {
- DLOG(ERROR) << "D3D11 device creation failed: "
- << logging::SystemErrorCodeToString(hr);
- return false;
- }
- hr = mf_dxgi_device_manager_->ResetDevice(d3d_device.Get(),
- d3d_device_reset_token_);
- if (FAILED(hr)) {
- DLOG(ERROR) << "Failed to reset device on MF DXGI device manager: "
- << logging::SystemErrorCodeToString(hr);
- return false;
- }
- return true;
-}
-
-void VideoCaptureDXGIDeviceManager::RegisterInCaptureEngineAttributes(
- IMFAttributes* attributes) {
- HRESULT result = attributes->SetUnknown(MF_CAPTURE_ENGINE_D3D_MANAGER,
- mf_dxgi_device_manager_.Get());
- DCHECK(SUCCEEDED(result));
-}
-
-void VideoCaptureDXGIDeviceManager::RegisterInSourceReaderAttributes(
- IMFAttributes* attributes) {
- HRESULT result = attributes->SetUnknown(MF_SOURCE_READER_D3D_MANAGER,
- mf_dxgi_device_manager_.Get());
- DCHECK(SUCCEEDED(result));
-}
-
-void VideoCaptureDXGIDeviceManager::RegisterWithMediaSource(
- ComPtr<IMFMediaSource> media_source) {
- ComPtr<IMFMediaSourceEx> source_ext;
- if (FAILED(media_source.As(&source_ext))) {
- DCHECK(false);
- return;
- }
- source_ext->SetD3DManager(mf_dxgi_device_manager_.Get());
-}
-
-} // namespace media
\ No newline at end of file
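
The deleted class survives as media::DXGIDeviceManager in media/base/win/dxgi_device_manager.h, and the call sites updated above suggest the same create-or-fall-back contract. A sketch of typical use, assuming Create() still returns null when no usable D3D11 device can be set:

#include "media/base/win/dxgi_device_manager.h"

scoped_refptr<media::DXGIDeviceManager> manager =
    media::DXGIDeviceManager::Create();
if (!manager) {
  // No hardware D3D11 device could be created/registered; fall back to
  // software (shared-memory) capture buffers.
}
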
diff --git a/chromium/media/capture/video/win/video_capture_dxgi_device_manager.h b/chromium/media/capture/video/win/video_capture_dxgi_device_manager.h
deleted file mode 100644
index d4c1bde2d44..00000000000
--- a/chromium/media/capture/video/win/video_capture_dxgi_device_manager.h
+++ /dev/null
@@ -1,50 +0,0 @@
-// Copyright (c) 2020 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_CAPTURE_VIDEO_WIN_VIDEO_CAPTURE_DXGI_DEVICE_MANAGER_H_
-#define MEDIA_CAPTURE_VIDEO_WIN_VIDEO_CAPTURE_DXGI_DEVICE_MANAGER_H_
-
-#include <mfapi.h>
-#include <mfidl.h>
-#include <wrl/client.h>
-#include "base/memory/ref_counted.h"
-#include "base/memory/scoped_refptr.h"
-#include "media/capture/capture_export.h"
-
-namespace media {
-
-class CAPTURE_EXPORT VideoCaptureDXGIDeviceManager
- : public base::RefCounted<VideoCaptureDXGIDeviceManager> {
- public:
- // Returns a VideoCaptureDXGIDeviceManager with associated D3D device set, or
- // nullptr on failure.
- static scoped_refptr<VideoCaptureDXGIDeviceManager> Create();
-
- // Associates a new D3D device with the DXGI Device Manager
- bool ResetDevice();
-
- // Registers this manager in capture engine attributes.
- void RegisterInCaptureEngineAttributes(IMFAttributes* attributes);
-
- // Registers this manager in source reader attributes.
- void RegisterInSourceReaderAttributes(IMFAttributes* attributes);
-
- // Registers this manager with a media source
- void RegisterWithMediaSource(
- Microsoft::WRL::ComPtr<IMFMediaSource> media_source);
-
- protected:
- friend class base::RefCounted<VideoCaptureDXGIDeviceManager>;
- VideoCaptureDXGIDeviceManager(
- Microsoft::WRL::ComPtr<IMFDXGIDeviceManager> mf_dxgi_device_manager,
- UINT d3d_device_reset_token);
- virtual ~VideoCaptureDXGIDeviceManager();
-
- Microsoft::WRL::ComPtr<IMFDXGIDeviceManager> mf_dxgi_device_manager_;
- UINT d3d_device_reset_token_ = 0;
-};
-
-} // namespace media
-
-#endif // MEDIA_CAPTURE_VIDEO_WIN_VIDEO_CAPTURE_DXGI_DEVICE_MANAGER_H_
\ No newline at end of file
diff --git a/chromium/media/capture/video_capture_types.cc b/chromium/media/capture/video_capture_types.cc
index bbee389d99f..36db9294356 100644
--- a/chromium/media/capture/video_capture_types.cc
+++ b/chromium/media/capture/video_capture_types.cc
@@ -30,12 +30,12 @@ VideoCaptureFormat::VideoCaptureFormat(const gfx::Size& frame_size,
pixel_format(pixel_format) {}
bool VideoCaptureFormat::IsValid() const {
- return (frame_size.width() < media::limits::kMaxDimension) &&
- (frame_size.height() < media::limits::kMaxDimension) &&
+ return (frame_size.width() <= media::limits::kMaxDimension) &&
+ (frame_size.height() <= media::limits::kMaxDimension) &&
(frame_size.GetArea() >= 0) &&
- (frame_size.GetArea() < media::limits::kMaxCanvas) &&
+ (frame_size.GetArea() <= media::limits::kMaxCanvas) &&
(frame_rate >= 0.0f) &&
- (frame_rate < media::limits::kMaxFramesPerSecond) &&
+ (frame_rate <= media::limits::kMaxFramesPerSecond) &&
(pixel_format >= PIXEL_FORMAT_UNKNOWN &&
pixel_format <= PIXEL_FORMAT_MAX);
}
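
The comparisons flip from exclusive to inclusive, so a format sitting exactly on a media::limits bound now validates. For example:

VideoCaptureFormat vga(gfx::Size(640, 480), 30.0f, PIXEL_FORMAT_I420);
DCHECK(vga.IsValid());

// Exactly at the frame-rate limit: rejected before this change, valid now.
VideoCaptureFormat at_limit(gfx::Size(640, 480),
                            media::limits::kMaxFramesPerSecond,
                            PIXEL_FORMAT_I420);
DCHECK(at_limit.IsValid());
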
diff --git a/chromium/media/capture/video_capture_types.h b/chromium/media/capture/video_capture_types.h
index 23e1826aa12..f05b51dac9f 100644
--- a/chromium/media/capture/video_capture_types.h
+++ b/chromium/media/capture/video_capture_types.h
@@ -190,7 +190,9 @@ enum class VideoCaptureError {
kFuchsiaUnsupportedPixelFormat = 121,
kFuchsiaFailedToMapSysmemBuffer = 122,
kCrosHalV3DeviceContextDuplicatedClient = 123,
- kMaxValue = 123
+ kDesktopCaptureDeviceMacFailedStreamCreate = 124,
+ kDesktopCaptureDeviceMacFailedStreamStart = 125,
+ kMaxValue = 125
};
// WARNING: Do not change the values assigned to the entries. They are used for
diff --git a/chromium/media/capture/video_capturer_source.h b/chromium/media/capture/video_capturer_source.h
index c48032be48e..2039ba26996 100644
--- a/chromium/media/capture/video_capturer_source.h
+++ b/chromium/media/capture/video_capturer_source.h
@@ -42,9 +42,17 @@ class CAPTURE_EXPORT VideoCapturerSource {
// frame relative to the first frame generated by the corresponding source.
// Because a source can start generating frames before a subscriber is added,
// the first video frame delivered may not have timestamp equal to 0.
- using VideoCaptureDeliverFrameCB =
- base::RepeatingCallback<void(scoped_refptr<media::VideoFrame> video_frame,
- base::TimeTicks estimated_capture_time)>;
+ //
+ // |scaled_video_frames| are alternative representations of |video_frame|
+ // where scaling has already been applied. Scaled frames may be produced by
+ // some capturers that listen to media::VideoFrameFeedback. However, because
+ // not all capturers support in-capturer scaling, and because of delays in
+ // the pipeline, listeners must be prepared for scaled video frames to be
+ // absent, or to arrive with sizes other than the one most recently requested.
+ using VideoCaptureDeliverFrameCB = base::RepeatingCallback<void(
+ scoped_refptr<media::VideoFrame> video_frame,
+ std::vector<scoped_refptr<media::VideoFrame>> scaled_video_frames,
+ base::TimeTicks estimated_capture_time)>;
using VideoCaptureDeviceFormatsCB =
base::OnceCallback<void(const media::VideoCaptureFormats&)>;
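
With the widened signature, every subscriber must now accept the scaled-frame vector even if it ignores it. A sketch of a conforming callback (fragment; the lambda body is hypothetical):

media::VideoCapturerSource::VideoCaptureDeliverFrameCB deliver_cb =
    base::BindRepeating(
        [](scoped_refptr<media::VideoFrame> video_frame,
           std::vector<scoped_refptr<media::VideoFrame>> scaled_video_frames,
           base::TimeTicks estimated_capture_time) {
          // Use a pre-scaled frame when one matches the wanted size;
          // otherwise fall back to |video_frame| and scale locally.
        });
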
diff --git a/chromium/media/cast/BUILD.gn b/chromium/media/cast/BUILD.gn
index e8c3619f86b..5051d4dedc1 100644
--- a/chromium/media/cast/BUILD.gn
+++ b/chromium/media/cast/BUILD.gn
@@ -364,14 +364,13 @@ test("cast_unittests") {
}
}
-if (is_win || is_mac || is_linux || is_lacros) {
+if (is_win || is_mac || is_linux || is_chromeos_lacros) {
# This is a target for the collection of cast development tools. They are
# not built/linked into the Chromium browser.
group("testing_tools") {
testonly = true
deps = [
":cast_benchmarks",
- ":cast_receiver_app",
":cast_sender_app",
":cast_simulator",
":generate_barcode_video",
@@ -398,36 +397,6 @@ if (is_win || is_mac || is_linux || is_lacros) {
]
}
- executable("cast_receiver_app") {
- testonly = true
- sources = [ "test/receiver.cc" ]
- public_deps = []
- deps = [
- ":common",
- ":net",
- ":receiver",
- ":test_support",
- "//base",
- "//build/win:default_exe_manifest",
- "//media:test_support",
- "//net",
- "//ui/base:features",
- "//ui/gfx/geometry",
- ]
-
- if (use_x11) {
- sources += [
- "test/linux_output_window.cc",
- "test/linux_output_window.h",
- ]
- public_deps += [ "//ui/gfx/x" ]
- deps += [
- "//cc/paint",
- "//ui/base/x",
- ]
- }
- }
-
executable("cast_sender_app") {
testonly = true
sources = [ "test/sender.cc" ]
@@ -496,13 +465,13 @@ if (is_win || is_mac || is_linux || is_lacros) {
"//net",
]
}
-} else { # !(is_win || is_mac || is_linux || is_lacros)
+} else { # !(is_win || is_mac || is_linux || is_chromeos_lacros)
# The testing tools are only built for the desktop platforms.
group("testing_tools") {
}
}
-if (is_linux || is_lacros) {
+if (is_linux || is_chromeos_lacros) {
test("tap_proxy") {
sources = [ "test/utility/tap_proxy.cc" ]
diff --git a/chromium/media/cast/OWNERS b/chromium/media/cast/OWNERS
index 90320c9bd1b..866a0313dc6 100644
--- a/chromium/media/cast/OWNERS
+++ b/chromium/media/cast/OWNERS
@@ -1,2 +1,2 @@
-miu@chromium.org
+jophba@chromium.org
mfoltz@chromium.org
diff --git a/chromium/media/cast/README.md b/chromium/media/cast/README.md
new file mode 100644
index 00000000000..5d28612985f
--- /dev/null
+++ b/chromium/media/cast/README.md
@@ -0,0 +1,28 @@
+# media/cast/
+
+This directory contains a collection of components related to streaming using
+the Cast Streaming Protocol (over UDP network sockets). They encode/decode raw
+audio or video frames, and send/receive encoded data reliably over a local area
+network.
+
+NOTE: This implementation is **deprecated** and will soon be replaced by the
+one found in `../../third_party/openscreen/src/cast/streaming/`. Contact
+jophba@chromium.org for details.
+
+# Directory Breakdown
+
+* common/ - Collection of shared utility code and constants.
+
+* logging/ - Packet/Frame logging, for study/experimentation of the protocol at
+ runtime.
+
+* net/ - Wire-level packetization and pacing.
+
+* receiver/ - A minimal receiver implementation, used only for end-to-end
+ testing.
+
+* sender/ - Encoder front-ends and frame-level sender implementation for
+ audio/video.
+
+* test/ - A collection of end-to-end tests, experiments, benchmarks, and related
+ utility code.
diff --git a/chromium/media/cast/cast_config.h b/chromium/media/cast/cast_config.h
index f01e9a1d2b6..67d156c604a 100644
--- a/chromium/media/cast/cast_config.h
+++ b/chromium/media/cast/cast_config.h
@@ -246,15 +246,15 @@ struct FrameReceiverConfig {
};
// TODO(miu): Remove the CreateVEA callbacks. http://crbug.com/454029
-typedef base::Callback<void(scoped_refptr<base::SingleThreadTaskRunner>,
- std::unique_ptr<media::VideoEncodeAccelerator>)>
+typedef base::OnceCallback<void(scoped_refptr<base::SingleThreadTaskRunner>,
+ std::unique_ptr<media::VideoEncodeAccelerator>)>
ReceiveVideoEncodeAcceleratorCallback;
-typedef base::Callback<void(const ReceiveVideoEncodeAcceleratorCallback&)>
+typedef base::RepeatingCallback<void(ReceiveVideoEncodeAcceleratorCallback)>
CreateVideoEncodeAcceleratorCallback;
-typedef base::Callback<void(base::UnsafeSharedMemoryRegion)>
+typedef base::OnceCallback<void(base::UnsafeSharedMemoryRegion)>
ReceiveVideoEncodeMemoryCallback;
-typedef base::Callback<void(size_t size,
- const ReceiveVideoEncodeMemoryCallback&)>
+typedef base::RepeatingCallback<void(size_t size,
+ ReceiveVideoEncodeMemoryCallback)>
CreateVideoEncodeMemoryCallback;
} // namespace cast
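
The typedef migration above makes the "receive" callbacks move-only (OnceCallback) while the "create" factory callbacks stay repeating. The contract difference in base::Bind terms, as a generic sketch rather than code from this CL:

#include "base/bind.h"
#include "base/callback.h"

void Demo() {
  base::OnceCallback<void(int)> once = base::BindOnce([](int) {});
  std::move(once).Run(1);  // Consumed; |once| is null afterwards.

  base::RepeatingCallback<void(int)> repeating =
      base::BindRepeating([](int) {});
  repeating.Run(1);
  repeating.Run(2);  // Still valid; may run any number of times.
}
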
diff --git a/chromium/media/cast/net/udp_packet_pipe_unittest.cc b/chromium/media/cast/net/udp_packet_pipe_unittest.cc
index d02b4655ba7..69ca2d17814 100644
--- a/chromium/media/cast/net/udp_packet_pipe_unittest.cc
+++ b/chromium/media/cast/net/udp_packet_pipe_unittest.cc
@@ -26,11 +26,13 @@ constexpr uint32_t kDefaultDataPipeCapacityBytes = 10;
class UdpPacketPipeTest : public ::testing::Test {
public:
UdpPacketPipeTest() {
- mojo::DataPipe data_pipe(kDefaultDataPipeCapacityBytes);
- writer_ = std::make_unique<UdpPacketPipeWriter>(
- std::move(data_pipe.producer_handle));
- reader_ = std::make_unique<UdpPacketPipeReader>(
- std::move(data_pipe.consumer_handle));
+ mojo::ScopedDataPipeProducerHandle producer_handle;
+ mojo::ScopedDataPipeConsumerHandle consumer_handle;
+ CHECK_EQ(mojo::CreateDataPipe(kDefaultDataPipeCapacityBytes,
+ producer_handle, consumer_handle),
+ MOJO_RESULT_OK);
+ writer_ = std::make_unique<UdpPacketPipeWriter>(std::move(producer_handle));
+ reader_ = std::make_unique<UdpPacketPipeReader>(std::move(consumer_handle));
}
~UdpPacketPipeTest() override = default;
diff --git a/chromium/media/cast/net/udp_transport_unittest.cc b/chromium/media/cast/net/udp_transport_unittest.cc
index aa655636bb2..0946281ac72 100644
--- a/chromium/media/cast/net/udp_transport_unittest.cc
+++ b/chromium/media/cast/net/udp_transport_unittest.cc
@@ -139,9 +139,12 @@ TEST_F(UdpTransportImplTest, UdpTransportSendAndReceive) {
recv_transport_->StartReceiving(
packet_receiver_on_receiver.packet_receiver());
- mojo::DataPipe data_pipe(5);
- send_transport_->StartSending(std::move(data_pipe.consumer_handle));
- UdpPacketPipeWriter writer(std::move(data_pipe.producer_handle));
+ mojo::ScopedDataPipeProducerHandle producer_handle;
+ mojo::ScopedDataPipeConsumerHandle consumer_handle;
+ ASSERT_EQ(mojo::CreateDataPipe(5, producer_handle, consumer_handle),
+ MOJO_RESULT_OK);
+ send_transport_->StartSending(std::move(consumer_handle));
+ UdpPacketPipeWriter writer(std::move(producer_handle));
base::MockCallback<base::OnceClosure> done_callback;
EXPECT_CALL(done_callback, Run()).Times(1);
writer.Write(new base::RefCountedData<Packet>(packet), done_callback.Get());
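
Both test fixtures migrate from the removed mojo::DataPipe convenience struct to the two-out-parameter factory, which surfaces creation failure explicitly. The minimal shape of the migration (fragment):

mojo::ScopedDataPipeProducerHandle producer_handle;
mojo::ScopedDataPipeConsumerHandle consumer_handle;
// CreateDataPipe() can fail (e.g. on resource exhaustion), hence the check.
CHECK_EQ(mojo::CreateDataPipe(/*capacity_num_bytes=*/64, producer_handle,
                              consumer_handle),
         MOJO_RESULT_OK);
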
diff --git a/chromium/media/cast/sender/external_video_encoder.cc b/chromium/media/cast/sender/external_video_encoder.cc
index a85cbdf429c..f9688c6ea85 100644
--- a/chromium/media/cast/sender/external_video_encoder.cc
+++ b/chromium/media/cast/sender/external_video_encoder.cc
@@ -182,7 +182,7 @@ class ExternalVideoEncoder::VEAClientImpl final
create_video_encode_memory_cb_.Run(
media::VideoFrame::AllocationSize(media::PIXEL_FORMAT_I420,
frame_coded_size_),
- base::Bind(&VEAClientImpl::OnCreateInputSharedMemory, this));
+ base::BindOnce(&VEAClientImpl::OnCreateInputSharedMemory, this));
}
AbortLatestEncodeAttemptDueToErrors();
return;
@@ -258,7 +258,7 @@ class ExternalVideoEncoder::VEAClientImpl final
for (size_t j = 0; j < kOutputBufferCount; ++j) {
create_video_encode_memory_cb_.Run(
output_buffer_size,
- base::Bind(&VEAClientImpl::OnCreateSharedMemory, this));
+ base::BindOnce(&VEAClientImpl::OnCreateSharedMemory, this));
}
}
@@ -326,7 +326,7 @@ class ExternalVideoEncoder::VEAClientImpl final
// If FRAME_DURATION metadata was provided in the source VideoFrame,
// compute the utilization metrics.
base::TimeDelta frame_duration =
- request.video_frame->metadata()->frame_duration.value_or(
+ request.video_frame->metadata().frame_duration.value_or(
base::TimeDelta());
if (frame_duration > base::TimeDelta()) {
// Compute encoder utilization in terms of the number of frames in
@@ -657,9 +657,9 @@ ExternalVideoEncoder::ExternalVideoEncoder(
DCHECK_GT(bit_rate_, 0);
create_vea_cb.Run(
- base::Bind(&ExternalVideoEncoder::OnCreateVideoEncodeAccelerator,
- weak_factory_.GetWeakPtr(), video_config, first_frame_id,
- std::move(status_change_cb)));
+ base::BindOnce(&ExternalVideoEncoder::OnCreateVideoEncodeAccelerator,
+ weak_factory_.GetWeakPtr(), video_config, first_frame_id,
+ std::move(status_change_cb)));
}
ExternalVideoEncoder::~ExternalVideoEncoder() {
diff --git a/chromium/media/cast/sender/fake_video_encode_accelerator_factory.cc b/chromium/media/cast/sender/fake_video_encode_accelerator_factory.cc
index ed8a56f2d3e..b9c7ea280be 100644
--- a/chromium/media/cast/sender/fake_video_encode_accelerator_factory.cc
+++ b/chromium/media/cast/sender/fake_video_encode_accelerator_factory.cc
@@ -38,7 +38,7 @@ void FakeVideoEncodeAcceleratorFactory::SetAutoRespond(bool auto_respond) {
}
void FakeVideoEncodeAcceleratorFactory::CreateVideoEncodeAccelerator(
- const ReceiveVideoEncodeAcceleratorCallback& callback) {
+ ReceiveVideoEncodeAcceleratorCallback callback) {
DCHECK(!callback.is_null());
DCHECK(!next_response_vea_);
@@ -46,18 +46,19 @@ void FakeVideoEncodeAcceleratorFactory::CreateVideoEncodeAccelerator(
new FakeVideoEncodeAccelerator(task_runner_);
vea->SetWillInitializationSucceed(will_init_succeed_);
next_response_vea_.reset(vea);
- vea_response_callback_ = callback;
+ vea_response_callback_ = std::move(callback);
if (auto_respond_)
RespondWithVideoEncodeAccelerator();
}
void FakeVideoEncodeAcceleratorFactory::CreateSharedMemory(
- size_t size, const ReceiveVideoEncodeMemoryCallback& callback) {
+ size_t size,
+ ReceiveVideoEncodeMemoryCallback callback) {
DCHECK(!callback.is_null());
DCHECK(!next_response_shm_.IsValid());
next_response_shm_ = base::UnsafeSharedMemoryRegion::Create(size);
- shm_response_callback_ = callback;
+ shm_response_callback_ = std::move(callback);
if (auto_respond_)
RespondWithSharedMemory();
}
diff --git a/chromium/media/cast/sender/fake_video_encode_accelerator_factory.h b/chromium/media/cast/sender/fake_video_encode_accelerator_factory.h
index 238ec163beb..81984145719 100644
--- a/chromium/media/cast/sender/fake_video_encode_accelerator_factory.h
+++ b/chromium/media/cast/sender/fake_video_encode_accelerator_factory.h
@@ -45,13 +45,12 @@ class FakeVideoEncodeAcceleratorFactory {
// Creates a media::FakeVideoEncodeAccelerator. If in auto-respond mode,
// |callback| is run synchronously (i.e., before this method returns).
void CreateVideoEncodeAccelerator(
- const ReceiveVideoEncodeAcceleratorCallback& callback);
+ ReceiveVideoEncodeAcceleratorCallback callback);
// Creates shared memory of the requested |size|. If in auto-respond mode,
// |callback| is run synchronously (i.e., before this method returns).
- void CreateSharedMemory(
- size_t size,
- const ReceiveVideoEncodeMemoryCallback& callback);
+ void CreateSharedMemory(size_t size,
+ ReceiveVideoEncodeMemoryCallback callback);
// Runs the |callback| provided to the last call to
// CreateVideoEncodeAccelerator() with the new VideoEncodeAccelerator
diff --git a/chromium/media/cast/sender/performance_metrics_overlay.cc b/chromium/media/cast/sender/performance_metrics_overlay.cc
index 49328234f9c..0bc1e9770a3 100644
--- a/chromium/media/cast/sender/performance_metrics_overlay.cc
+++ b/chromium/media/cast/sender/performance_metrics_overlay.cc
@@ -265,7 +265,7 @@ scoped_refptr<VideoFrame> MaybeRenderPerformanceMetricsOverlay(
memcpy(dst, src, bytes_per_row);
}
}
- frame->metadata()->MergeMetadataFrom(source->metadata());
+ frame->metadata().MergeMetadataFrom(source->metadata());
// Important: After all consumers are done with the frame, copy-back the
// changed/new metadata to the source frame, as it contains feedback signals
// that need to propagate back up the video stack. The destruction callback
@@ -273,18 +273,18 @@ scoped_refptr<VideoFrame> MaybeRenderPerformanceMetricsOverlay(
// the source frame has the right metadata before its destruction observers
// are invoked.
frame->AddDestructionObserver(base::BindOnce(
- [](const VideoFrameMetadata* sent_frame_metadata,
+ [](const VideoFrameMetadata& sent_frame_metadata,
scoped_refptr<VideoFrame> source_frame) {
- source_frame->set_metadata(*sent_frame_metadata);
+ source_frame->set_metadata(sent_frame_metadata);
},
frame->metadata(), std::move(source)));
// Line 3: Frame duration, resolution, and timestamp.
int frame_duration_ms = 0;
int frame_duration_ms_frac = 0;
- if (frame->metadata()->frame_duration.has_value()) {
+ if (frame->metadata().frame_duration.has_value()) {
const int decimilliseconds = base::saturated_cast<int>(
- frame->metadata()->frame_duration->InMicroseconds() / 100.0 + 0.5);
+ frame->metadata().frame_duration->InMicroseconds() / 100.0 + 0.5);
frame_duration_ms = decimilliseconds / 10;
frame_duration_ms_frac = decimilliseconds % 10;
}
@@ -309,11 +309,11 @@ scoped_refptr<VideoFrame> MaybeRenderPerformanceMetricsOverlay(
// Line 2: Capture duration, target playout delay, low-latency mode, and
// target bitrate.
int capture_duration_ms = 0;
- if (frame->metadata()->capture_begin_time &&
- frame->metadata()->capture_end_time) {
+ if (frame->metadata().capture_begin_time &&
+ frame->metadata().capture_end_time) {
capture_duration_ms =
- base::saturated_cast<int>((*frame->metadata()->capture_end_time -
- *frame->metadata()->capture_begin_time)
+ base::saturated_cast<int>((*frame->metadata().capture_end_time -
+ *frame->metadata().capture_begin_time)
.InMillisecondsF() +
0.5);
}
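
These call sites track a VideoFrame API change: metadata() now returns a VideoFrameMetadata reference instead of a pointer, and set_metadata() takes one by const reference. In short (fragment):

// Read a field directly off the reference.
base::TimeDelta duration =
    frame->metadata().frame_duration.value_or(base::TimeDelta());

// Copy metadata out and back in (as the destruction observer above does).
media::VideoFrameMetadata snapshot = frame->metadata();
source_frame->set_metadata(snapshot);
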
diff --git a/chromium/media/cast/sender/video_encoder_unittest.cc b/chromium/media/cast/sender/video_encoder_unittest.cc
index 290da56b605..ad082761982 100644
--- a/chromium/media/cast/sender/video_encoder_unittest.cc
+++ b/chromium/media/cast/sender/video_encoder_unittest.cc
@@ -40,8 +40,7 @@ class VideoEncoderTest
protected:
VideoEncoderTest()
: task_runner_(new FakeSingleThreadTaskRunner(&testing_clock_)),
- thread_task_runner_override_reverter_(
- base::ThreadTaskRunnerHandle::OverrideForTesting(task_runner_)),
+ task_runner_handle_override_(task_runner_),
cast_environment_(new CastEnvironment(&testing_clock_,
task_runner_,
task_runner_,
@@ -74,11 +73,12 @@ class VideoEncoderTest
cast_environment_, video_config_,
base::BindRepeating(&VideoEncoderTest::OnOperationalStatusChange,
base::Unretained(this)),
- base::Bind(
+ base::BindRepeating(
&FakeVideoEncodeAcceleratorFactory::CreateVideoEncodeAccelerator,
base::Unretained(vea_factory_.get())),
- base::Bind(&FakeVideoEncodeAcceleratorFactory::CreateSharedMemory,
- base::Unretained(vea_factory_.get())));
+ base::BindRepeating(
+ &FakeVideoEncodeAcceleratorFactory::CreateSharedMemory,
+ base::Unretained(vea_factory_.get())));
RunTasksAndAdvanceClock();
if (is_encoder_present())
ASSERT_EQ(STATUS_INITIALIZED, operational_status_);
@@ -193,7 +193,7 @@ class VideoEncoderTest
base::SimpleTestTickClock testing_clock_;
const scoped_refptr<FakeSingleThreadTaskRunner> task_runner_;
- base::ScopedClosureRunner thread_task_runner_override_reverter_;
+ base::ThreadTaskRunnerHandleOverrideForTesting task_runner_handle_override_;
const scoped_refptr<CastEnvironment> cast_environment_;
FrameSenderConfig video_config_;
std::unique_ptr<FakeVideoEncodeAcceleratorFactory> vea_factory_;
diff --git a/chromium/media/cast/sender/video_sender.cc b/chromium/media/cast/sender/video_sender.cc
index a2960f8aaee..aa8bacf69cf 100644
--- a/chromium/media/cast/sender/video_sender.cc
+++ b/chromium/media/cast/sender/video_sender.cc
@@ -63,11 +63,10 @@ void LogVideoCaptureTimestamps(CastEnvironment* cast_environment,
capture_end_event->width = video_frame.visible_rect().width();
capture_end_event->height = video_frame.visible_rect().height();
- if (video_frame.metadata()->capture_begin_time.has_value() &&
- video_frame.metadata()->capture_end_time.has_value()) {
- capture_begin_event->timestamp =
- *video_frame.metadata()->capture_begin_time;
- capture_end_event->timestamp = *video_frame.metadata()->capture_end_time;
+ if (video_frame.metadata().capture_begin_time.has_value() &&
+ video_frame.metadata().capture_end_time.has_value()) {
+ capture_begin_event->timestamp = *video_frame.metadata().capture_begin_time;
+ capture_end_event->timestamp = *video_frame.metadata().capture_end_time;
} else {
// The frame capture timestamps were not provided by the video capture
// source. Simply log the events as happening right now.
@@ -150,7 +149,7 @@ void VideoSender::InsertRawVideoFrame(
"rtp_timestamp", rtp_timestamp.lower_32_bits());
{
- bool new_low_latency_mode = video_frame->metadata()->interactive_content;
+ bool new_low_latency_mode = video_frame->metadata().interactive_content;
if (new_low_latency_mode && !low_latency_mode_) {
VLOG(1) << "Interactive mode playout time " << min_playout_delay_;
playout_delay_change_cb_.Run(min_playout_delay_);
diff --git a/chromium/media/cast/sender/video_sender_unittest.cc b/chromium/media/cast/sender/video_sender_unittest.cc
index f4b4b829173..bcb8b58dfbf 100644
--- a/chromium/media/cast/sender/video_sender_unittest.cc
+++ b/chromium/media/cast/sender/video_sender_unittest.cc
@@ -193,11 +193,12 @@ class VideoSenderTest : public ::testing::Test {
video_sender_.reset(new PeerVideoSender(
cast_environment_, video_config,
base::BindRepeating(&SaveOperationalStatus, &operational_status_),
- base::Bind(
+ base::BindRepeating(
&FakeVideoEncodeAcceleratorFactory::CreateVideoEncodeAccelerator,
base::Unretained(&vea_factory_)),
- base::Bind(&FakeVideoEncodeAcceleratorFactory::CreateSharedMemory,
- base::Unretained(&vea_factory_)),
+ base::BindRepeating(
+ &FakeVideoEncodeAcceleratorFactory::CreateSharedMemory,
+ base::Unretained(&vea_factory_)),
transport_sender_.get()));
} else {
video_sender_.reset(new PeerVideoSender(
diff --git a/chromium/media/cast/sender/vp8_encoder.cc b/chromium/media/cast/sender/vp8_encoder.cc
index 70fab743a8d..93348f787e3 100644
--- a/chromium/media/cast/sender/vp8_encoder.cc
+++ b/chromium/media/cast/sender/vp8_encoder.cc
@@ -232,7 +232,7 @@ void Vp8Encoder::Encode(scoped_refptr<media::VideoFrame> video_frame,
base::TimeDelta::FromSecondsD(static_cast<double>(kRestartFramePeriods) /
cast_config_.max_frame_rate);
base::TimeDelta predicted_frame_duration =
- video_frame->metadata()->frame_duration.value_or(base::TimeDelta());
+ video_frame->metadata().frame_duration.value_or(base::TimeDelta());
if (predicted_frame_duration <= base::TimeDelta()) {
// The source of the video frame did not provide the frame duration. Use
// the actual amount of time between the current and previous frame as a
diff --git a/chromium/media/cdm/aes_decryptor.cc b/chromium/media/cdm/aes_decryptor.cc
index d18d6ebeccd..7a1ffda30be 100644
--- a/chromium/media/cdm/aes_decryptor.cc
+++ b/chromium/media/cdm/aes_decryptor.cc
@@ -511,12 +511,12 @@ void AesDecryptor::InitializeVideoDecoder(const VideoDecoderConfig& config,
}
void AesDecryptor::DecryptAndDecodeAudio(scoped_refptr<DecoderBuffer> encrypted,
- const AudioDecodeCB& audio_decode_cb) {
+ AudioDecodeCB audio_decode_cb) {
NOTREACHED() << "AesDecryptor does not support audio decoding";
}
void AesDecryptor::DecryptAndDecodeVideo(scoped_refptr<DecoderBuffer> encrypted,
- const VideoDecodeCB& video_decode_cb) {
+ VideoDecodeCB video_decode_cb) {
NOTREACHED() << "AesDecryptor does not support video decoding";
}
diff --git a/chromium/media/cdm/aes_decryptor.h b/chromium/media/cdm/aes_decryptor.h
index 228a45593a5..962c27a1aed 100644
--- a/chromium/media/cdm/aes_decryptor.h
+++ b/chromium/media/cdm/aes_decryptor.h
@@ -78,9 +78,9 @@ class MEDIA_EXPORT AesDecryptor : public ContentDecryptionModule,
void InitializeVideoDecoder(const VideoDecoderConfig& config,
DecoderInitCB init_cb) override;
void DecryptAndDecodeAudio(scoped_refptr<DecoderBuffer> encrypted,
- const AudioDecodeCB& audio_decode_cb) override;
+ AudioDecodeCB audio_decode_cb) override;
void DecryptAndDecodeVideo(scoped_refptr<DecoderBuffer> encrypted,
- const VideoDecodeCB& video_decode_cb) override;
+ VideoDecodeCB video_decode_cb) override;
void ResetDecoder(StreamType stream_type) override;
void DeinitializeDecoder(StreamType stream_type) override;
bool CanAlwaysDecrypt() override;
diff --git a/chromium/media/cdm/aes_decryptor_unittest.cc b/chromium/media/cdm/aes_decryptor_unittest.cc
index f2b32bb9dec..49a298e4cd8 100644
--- a/chromium/media/cdm/aes_decryptor_unittest.cc
+++ b/chromium/media/cdm/aes_decryptor_unittest.cc
@@ -252,14 +252,15 @@ class AesDecryptorTest : public testing::TestWithParam<TestType> {
void SetUp() override {
if (GetParam() == TestType::kAesDecryptor) {
OnCdmCreated(
- new AesDecryptor(base::Bind(&MockCdmClient::OnSessionMessage,
- base::Unretained(&cdm_client_)),
- base::Bind(&MockCdmClient::OnSessionClosed,
- base::Unretained(&cdm_client_)),
- base::Bind(&MockCdmClient::OnSessionKeysChange,
- base::Unretained(&cdm_client_)),
- base::Bind(&MockCdmClient::OnSessionExpirationUpdate,
- base::Unretained(&cdm_client_))),
+ new AesDecryptor(
+ base::BindRepeating(&MockCdmClient::OnSessionMessage,
+ base::Unretained(&cdm_client_)),
+ base::BindRepeating(&MockCdmClient::OnSessionClosed,
+ base::Unretained(&cdm_client_)),
+ base::BindRepeating(&MockCdmClient::OnSessionKeysChange,
+ base::Unretained(&cdm_client_)),
+ base::BindRepeating(&MockCdmClient::OnSessionExpirationUpdate,
+ base::Unretained(&cdm_client_))),
std::string());
} else if (GetParam() == TestType::kCdmAdapter) {
#if BUILDFLAG(ENABLE_LIBRARY_CDMS)
@@ -283,18 +284,19 @@ class AesDecryptorTest : public testing::TestWithParam<TestType> {
std::unique_ptr<CdmAllocator> allocator(new SimpleCdmAllocator());
std::unique_ptr<CdmAuxiliaryHelper> cdm_helper(
new MockCdmAuxiliaryHelper(std::move(allocator)));
- CdmAdapter::Create(helper_->KeySystemName(),
- cdm_config, create_cdm_func, std::move(cdm_helper),
- base::Bind(&MockCdmClient::OnSessionMessage,
- base::Unretained(&cdm_client_)),
- base::Bind(&MockCdmClient::OnSessionClosed,
- base::Unretained(&cdm_client_)),
- base::Bind(&MockCdmClient::OnSessionKeysChange,
- base::Unretained(&cdm_client_)),
- base::Bind(&MockCdmClient::OnSessionExpirationUpdate,
- base::Unretained(&cdm_client_)),
- base::BindOnce(&AesDecryptorTest::OnCdmCreated,
- base::Unretained(this)));
+ CdmAdapter::Create(
+ helper_->KeySystemName(), cdm_config, create_cdm_func,
+ std::move(cdm_helper),
+ base::BindRepeating(&MockCdmClient::OnSessionMessage,
+ base::Unretained(&cdm_client_)),
+ base::BindRepeating(&MockCdmClient::OnSessionClosed,
+ base::Unretained(&cdm_client_)),
+ base::BindRepeating(&MockCdmClient::OnSessionKeysChange,
+ base::Unretained(&cdm_client_)),
+ base::BindRepeating(&MockCdmClient::OnSessionExpirationUpdate,
+ base::Unretained(&cdm_client_)),
+ base::BindOnce(&AesDecryptorTest::OnCdmCreated,
+ base::Unretained(this)));
base::RunLoop().RunUntilIdle();
#else
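
These hunks are part of the base::Bind deprecation: ambiguous base::Bind calls become explicit base::BindRepeating or base::BindOnce. A small illustrative sketch of the distinction, assuming the base callback API of this era; OnEvent and Demo are placeholders:

#include <utility>

#include "base/bind.h"
#include "base/callback.h"

void OnEvent(int value) {}

void Demo() {
  // A RepeatingCallback may be Run() any number of times.
  base::RepeatingCallback<void(int)> repeating = base::BindRepeating(&OnEvent);
  repeating.Run(1);
  repeating.Run(2);

  // A OnceCallback is consumed by its single Run() and must be moved into it.
  // This is also why Decryptor's AudioDecodeCB/VideoDecodeCB below are now
  // taken by value and invoked via std::move(cb).Run(...).
  base::OnceCallback<void(int)> once = base::BindOnce(&OnEvent);
  std::move(once).Run(3);
}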
diff --git a/chromium/media/cdm/cdm_adapter.cc b/chromium/media/cdm/cdm_adapter.cc
index af8394152d7..145c8e69bcc 100644
--- a/chromium/media/cdm/cdm_adapter.cc
+++ b/chromium/media/cdm/cdm_adapter.cc
@@ -226,14 +226,14 @@ CdmAdapter::CdmAdapter(
DCHECK(session_expiration_update_cb_);
helper_->SetFileReadCB(
- base::Bind(&CdmAdapter::OnFileRead, weak_factory_.GetWeakPtr()));
+ base::BindRepeating(&CdmAdapter::OnFileRead, weak_factory_.GetWeakPtr()));
}
CdmAdapter::~CdmAdapter() {
DVLOG(1) << __func__;
// Reject any outstanding promises and close all the existing sessions.
- cdm_promise_adapter_.Clear();
+ cdm_promise_adapter_.Clear(CdmPromiseAdapter::ClearReason::kDestruction);
if (audio_init_cb_)
std::move(audio_init_cb_).Run(false);
@@ -540,7 +540,7 @@ void CdmAdapter::InitializeVideoDecoder(const VideoDecoderConfig& config,
}
void CdmAdapter::DecryptAndDecodeAudio(scoped_refptr<DecoderBuffer> encrypted,
- const AudioDecodeCB& audio_decode_cb) {
+ AudioDecodeCB audio_decode_cb) {
DVLOG(3) << __func__ << ": "
<< encrypted->AsHumanReadableString(/*verbose=*/true);
DCHECK(task_runner_->BelongsToCurrentThread());
@@ -562,7 +562,8 @@ void CdmAdapter::DecryptAndDecodeAudio(scoped_refptr<DecoderBuffer> encrypted,
const Decryptor::AudioFrames empty_frames;
if (status != cdm::kSuccess) {
DVLOG(1) << __func__ << ": status = " << status;
- audio_decode_cb.Run(ToMediaDecryptorStatus(status), empty_frames);
+ std::move(audio_decode_cb)
+ .Run(ToMediaDecryptorStatus(status), empty_frames);
return;
}
@@ -571,15 +572,15 @@ void CdmAdapter::DecryptAndDecodeAudio(scoped_refptr<DecoderBuffer> encrypted,
if (!AudioFramesDataToAudioFrames(std::move(audio_frames),
&audio_frame_list)) {
DVLOG(1) << __func__ << " unable to convert Audio Frames";
- audio_decode_cb.Run(Decryptor::kError, empty_frames);
+ std::move(audio_decode_cb).Run(Decryptor::kError, empty_frames);
return;
}
- audio_decode_cb.Run(Decryptor::kSuccess, audio_frame_list);
+ std::move(audio_decode_cb).Run(Decryptor::kSuccess, audio_frame_list);
}
void CdmAdapter::DecryptAndDecodeVideo(scoped_refptr<DecoderBuffer> encrypted,
- const VideoDecodeCB& video_decode_cb) {
+ VideoDecodeCB video_decode_cb) {
DVLOG(3) << __func__ << ": "
<< encrypted->AsHumanReadableString(/*verbose=*/true);
DCHECK(task_runner_->BelongsToCurrentThread());
@@ -604,7 +605,7 @@ void CdmAdapter::DecryptAndDecodeVideo(scoped_refptr<DecoderBuffer> encrypted,
if (status != cdm::kSuccess) {
DVLOG(1) << __func__ << ": status = " << status;
- video_decode_cb.Run(ToMediaDecryptorStatus(status), nullptr);
+ std::move(video_decode_cb).Run(ToMediaDecryptorStatus(status), nullptr);
return;
}
@@ -613,13 +614,13 @@ void CdmAdapter::DecryptAndDecodeVideo(scoped_refptr<DecoderBuffer> encrypted,
GetNaturalSize(visible_rect, pixel_aspect_ratio_));
if (!decoded_frame) {
DLOG(ERROR) << __func__ << ": TransformToVideoFrame failed.";
- video_decode_cb.Run(Decryptor::kError, nullptr);
+ std::move(video_decode_cb).Run(Decryptor::kError, nullptr);
return;
}
- decoded_frame->metadata()->protected_video = is_video_encrypted_;
+ decoded_frame->metadata().protected_video = is_video_encrypted_;
- video_decode_cb.Run(Decryptor::kSuccess, decoded_frame);
+ std::move(video_decode_cb).Run(Decryptor::kSuccess, decoded_frame);
}
void CdmAdapter::ResetDecoder(StreamType stream_type) {
diff --git a/chromium/media/cdm/cdm_adapter.h b/chromium/media/cdm/cdm_adapter.h
index 5a8b0e93d77..812f8ff07ab 100644
--- a/chromium/media/cdm/cdm_adapter.h
+++ b/chromium/media/cdm/cdm_adapter.h
@@ -106,9 +106,9 @@ class MEDIA_EXPORT CdmAdapter final : public ContentDecryptionModule,
void InitializeVideoDecoder(const VideoDecoderConfig& config,
DecoderInitCB init_cb) final;
void DecryptAndDecodeAudio(scoped_refptr<DecoderBuffer> encrypted,
- const AudioDecodeCB& audio_decode_cb) final;
+ AudioDecodeCB audio_decode_cb) final;
void DecryptAndDecodeVideo(scoped_refptr<DecoderBuffer> encrypted,
- const VideoDecodeCB& video_decode_cb) final;
+ VideoDecodeCB video_decode_cb) final;
void ResetDecoder(StreamType stream_type) final;
void DeinitializeDecoder(StreamType stream_type) final;
diff --git a/chromium/media/cdm/cdm_adapter_unittest.cc b/chromium/media/cdm/cdm_adapter_unittest.cc
index 3b8cc935370..6e27a40a24b 100644
--- a/chromium/media/cdm/cdm_adapter_unittest.cc
+++ b/chromium/media/cdm/cdm_adapter_unittest.cc
@@ -140,18 +140,19 @@ class CdmAdapterTestBase : public testing::Test,
std::unique_ptr<StrictMock<MockCdmAuxiliaryHelper>> cdm_helper(
new StrictMock<MockCdmAuxiliaryHelper>(std::move(allocator)));
cdm_helper_ = cdm_helper.get();
- CdmAdapter::Create(GetKeySystemName(), cdm_config, GetCreateCdmFunc(),
- std::move(cdm_helper),
- base::Bind(&MockCdmClient::OnSessionMessage,
- base::Unretained(&cdm_client_)),
- base::Bind(&MockCdmClient::OnSessionClosed,
- base::Unretained(&cdm_client_)),
- base::Bind(&MockCdmClient::OnSessionKeysChange,
- base::Unretained(&cdm_client_)),
- base::Bind(&MockCdmClient::OnSessionExpirationUpdate,
- base::Unretained(&cdm_client_)),
- base::BindOnce(&CdmAdapterTestBase::OnCdmCreated,
- base::Unretained(this), expected_result));
+ CdmAdapter::Create(
+ GetKeySystemName(), cdm_config, GetCreateCdmFunc(),
+ std::move(cdm_helper),
+ base::BindRepeating(&MockCdmClient::OnSessionMessage,
+ base::Unretained(&cdm_client_)),
+ base::BindRepeating(&MockCdmClient::OnSessionClosed,
+ base::Unretained(&cdm_client_)),
+ base::BindRepeating(&MockCdmClient::OnSessionKeysChange,
+ base::Unretained(&cdm_client_)),
+ base::BindRepeating(&MockCdmClient::OnSessionExpirationUpdate,
+ base::Unretained(&cdm_client_)),
+ base::BindOnce(&CdmAdapterTestBase::OnCdmCreated,
+ base::Unretained(this), expected_result));
RunUntilIdle();
ASSERT_EQ(expected_result == SUCCESS, !!cdm_);
}
diff --git a/chromium/media/cdm/cdm_paths_unittest.cc b/chromium/media/cdm/cdm_paths_unittest.cc
index 2e0fe96ea54..1cc84ce8ba5 100644
--- a/chromium/media/cdm/cdm_paths_unittest.cc
+++ b/chromium/media/cdm/cdm_paths_unittest.cc
@@ -18,7 +18,7 @@
// TODO(crbug.com/971433). Move the CDMs out of the install directory on
// ChromeOS.
#if (defined(OS_MAC) || defined(OS_WIN) || defined(OS_LINUX) || \
- BUILDFLAG(IS_LACROS))
+ BUILDFLAG(IS_CHROMEOS_LACROS))
#define CDM_USE_PLATFORM_SPECIFIC_PATH
#endif
@@ -37,9 +37,9 @@ const char kComponentPlatform[] =
"mac";
#elif defined(OS_WIN)
"win";
-#elif BUILDFLAG(IS_ASH)
+#elif BUILDFLAG(IS_CHROMEOS_ASH)
"cros";
-#elif defined(OS_LINUX)
+#elif defined(OS_LINUX) || BUILDFLAG(IS_CHROMEOS_LACROS)
"linux";
#else
"unsupported_platform";
diff --git a/chromium/media/cdm/library_cdm/cdm_paths.gni b/chromium/media/cdm/library_cdm/cdm_paths.gni
index 1842847e7e7..21fd5f6c0f1 100644
--- a/chromium/media/cdm/library_cdm/cdm_paths.gni
+++ b/chromium/media/cdm/library_cdm/cdm_paths.gni
@@ -11,9 +11,9 @@ import("//build/config/chromeos/ui_mode.gni")
# OS name for components is close to "target_os" but has some differences.
# Explicitly define what we use to avoid confusion.
-if (is_ash) {
+if (is_chromeos_ash) {
component_os = "cros"
-} else if (is_linux) {
+} else if (is_linux || is_chromeos_lacros) {
component_os = "linux"
} else if (is_win) {
component_os = "win"
@@ -31,7 +31,7 @@ component_arch = "$current_cpu"
# - |cdm_platform_specific_path| is exported as a BUILDFLAG to cdm_paths.cc.
# - When updating the condition here, also update the condition on the define
# of CDM_USE_PLATFORM_SPECIFIC_PATH in cdm_paths_unittest.cc.
-if (is_win || is_mac || is_linux) {
+if (is_win || is_mac || is_linux || is_chromeos_lacros) {
cdm_platform_specific_path =
"_platform_specific/$component_os" + "_" + "$component_arch"
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/BUILD.gn b/chromium/media/cdm/library_cdm/clear_key_cdm/BUILD.gn
index 25b1dfee1ef..90e72773781 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/BUILD.gn
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/BUILD.gn
@@ -35,7 +35,6 @@ loadable_module("clear_key_cdm") {
"//media:shared_memory_support", # For media::AudioBus.
"//media/cdm:cdm_api", # For content_decryption_module.h
"//media/cdm/library_cdm:cdm_host_proxy",
- "//third_party/libaom:libaom_buildflags",
"//third_party/libyuv",
"//url",
]
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_video_decoder.cc b/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_video_decoder.cc
index c1fc8867088..ddd43745e84 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_video_decoder.cc
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_video_decoder.cc
@@ -27,7 +27,6 @@
#include "media/cdm/cdm_type_conversion.h"
#include "media/cdm/library_cdm/cdm_host_proxy.h"
#include "media/media_buildflags.h"
-#include "third_party/libaom/libaom_buildflags.h"
#include "third_party/libyuv/include/libyuv/planar_functions.h"
#if BUILDFLAG(ENABLE_LIBVPX)
@@ -257,7 +256,7 @@ class VideoDecoderAdapter final : public CdmVideoDecoder {
void OnVideoFrameReady(scoped_refptr<VideoFrame> video_frame) {
// Do not queue EOS frames; they are not needed.
- if (video_frame->metadata()->end_of_stream)
+ if (video_frame->metadata().end_of_stream)
return;
decoded_video_frames_.push(std::move(video_frame));
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.cc b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.cc
index 5ad5056e830..1211f1bcd61 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.cc
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_cdm.cc
@@ -327,11 +327,14 @@ ClearKeyCdm::ClearKeyCdm(HostInterface* host, const std::string& key_system)
cdm_host_proxy_(new CdmHostProxyImpl<HostInterface>(host)),
cdm_(new ClearKeyPersistentSessionCdm(
cdm_host_proxy_.get(),
- base::Bind(&ClearKeyCdm::OnSessionMessage, base::Unretained(this)),
- base::Bind(&ClearKeyCdm::OnSessionClosed, base::Unretained(this)),
- base::Bind(&ClearKeyCdm::OnSessionKeysChange, base::Unretained(this)),
- base::Bind(&ClearKeyCdm::OnSessionExpirationUpdate,
- base::Unretained(this)))),
+ base::BindRepeating(&ClearKeyCdm::OnSessionMessage,
+ base::Unretained(this)),
+ base::BindRepeating(&ClearKeyCdm::OnSessionClosed,
+ base::Unretained(this)),
+ base::BindRepeating(&ClearKeyCdm::OnSessionKeysChange,
+ base::Unretained(this)),
+ base::BindRepeating(&ClearKeyCdm::OnSessionExpirationUpdate,
+ base::Unretained(this)))),
key_system_(key_system) {
DCHECK(g_is_cdm_module_initialized);
}
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_persistent_session_cdm.cc b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_persistent_session_cdm.cc
index 6e2dcba4167..c3428123dd4 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_persistent_session_cdm.cc
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/clear_key_persistent_session_cdm.cc
@@ -99,10 +99,10 @@ ClearKeyPersistentSessionCdm::ClearKeyPersistentSessionCdm(
session_message_cb_(session_message_cb),
session_closed_cb_(session_closed_cb) {
cdm_ = base::MakeRefCounted<AesDecryptor>(
- base::Bind(&ClearKeyPersistentSessionCdm::OnSessionMessage,
- weak_factory_.GetWeakPtr()),
- base::Bind(&ClearKeyPersistentSessionCdm::OnSessionClosed,
- weak_factory_.GetWeakPtr()),
+ base::BindRepeating(&ClearKeyPersistentSessionCdm::OnSessionMessage,
+ weak_factory_.GetWeakPtr()),
+ base::BindRepeating(&ClearKeyPersistentSessionCdm::OnSessionClosed,
+ weak_factory_.GetWeakPtr()),
session_keys_change_cb, session_expiration_update_cb);
}
@@ -129,8 +129,8 @@ void ClearKeyPersistentSessionCdm::CreateSessionAndGenerateRequest(
// Since it's a persistent session, we need to save the session ID after
// it's been created.
new_promise = std::make_unique<NewPersistentSessionCdmPromise>(
- base::Bind(&ClearKeyPersistentSessionCdm::AddPersistentSession,
- weak_factory_.GetWeakPtr()),
+ base::BindOnce(&ClearKeyPersistentSessionCdm::AddPersistentSession,
+ weak_factory_.GetWeakPtr()),
std::move(promise));
}
cdm_->CreateSessionAndGenerateRequest(session_type, init_data_type, init_data,
diff --git a/chromium/media/cdm/win/media_foundation_cdm_factory.cc b/chromium/media/cdm/win/media_foundation_cdm_factory.cc
index 3d3a924a06a..c314a5d9175 100644
--- a/chromium/media/cdm/win/media_foundation_cdm_factory.cc
+++ b/chromium/media/cdm/win/media_foundation_cdm_factory.cc
@@ -185,7 +185,7 @@ HRESULT MediaFoundationCdmFactory::CreateMFCdmFactory(
RETURN_IF_FAILED(CoCreateInstance(CLSID_MFMediaEngineClassFactory, nullptr,
CLSCTX_INPROC_SERVER,
IID_PPV_ARGS(&class_factory)));
- auto key_system_str = base::UTF8ToUTF16(key_system);
+ auto key_system_str = base::UTF8ToWide(key_system);
RETURN_IF_FAILED(class_factory->CreateContentDecryptionModuleFactory(
key_system_str.c_str(), IID_PPV_ARGS(&cdm_factory)));
return S_OK;
@@ -198,7 +198,7 @@ HRESULT MediaFoundationCdmFactory::CreateCdmInternal(
ComPtr<IMFContentDecryptionModuleFactory> cdm_factory;
RETURN_IF_FAILED(CreateMFCdmFactory(key_system, cdm_factory));
- auto key_system_str = base::UTF8ToUTF16(key_system);
+ auto key_system_str = base::UTF8ToWide(key_system);
if (!cdm_factory->IsTypeSupported(key_system_str.c_str(), nullptr)) {
DLOG(ERROR) << key_system << " not supported by MF CdmFactory";
return MF_NOT_SUPPORTED_ERR;
diff --git a/chromium/media/cdm/win/media_foundation_cdm_session.cc b/chromium/media/cdm/win/media_foundation_cdm_session.cc
index 37b5666dda8..e5bbe438a5f 100644
--- a/chromium/media/cdm/win/media_foundation_cdm_session.cc
+++ b/chromium/media/cdm/win/media_foundation_cdm_session.cc
@@ -282,7 +282,7 @@ bool MediaFoundationCdmSession::SetSessionId() {
return false;
}
- auto session_id_str = base::UTF16ToUTF8(session_id.get());
+ auto session_id_str = base::WideToUTF8(session_id.get());
if (session_id_str.empty()) {
bool success = std::move(session_id_cb_).Run("");
DCHECK(!success) << "Empty session ID should not be accepted";
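
The two hunks above swap base::UTF8ToUTF16/UTF16ToUTF8 for the wide-string variants, since the Media Foundation COM interfaces take wchar_t*. A minimal sketch, assuming base/strings from this revision; Demo is illustrative:

#include <string>

#include "base/check_op.h"
#include "base/strings/utf_string_conversions.h"

// Sketch only. On Windows, wchar_t strings are what COM expects, so the wide
// helpers avoid a cast from char16_t even though the two types share a size
// there.
void Demo(const std::string& key_system) {
  std::wstring wide = base::UTF8ToWide(key_system);
  const wchar_t* for_com = wide.c_str();  // suitable for COM method arguments
  std::string round_trip = base::WideToUTF8(wide);
  DCHECK_EQ(round_trip, key_system);
}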
diff --git a/chromium/media/ffmpeg/ffmpeg_common.cc b/chromium/media/ffmpeg/ffmpeg_common.cc
index 336edbb492f..a67f5f3fff4 100644
--- a/chromium/media/ffmpeg/ffmpeg_common.cc
+++ b/chromium/media/ffmpeg/ffmpeg_common.cc
@@ -619,6 +619,14 @@ bool AVStreamToVideoDecoderConfig(const AVStream* stream,
color_space = (natural_size.height() < 720) ? VideoColorSpace::REC601()
: VideoColorSpace::REC709();
}
+ } else if (codec_context->codec_id == AV_CODEC_ID_H264 &&
+ codec_context->colorspace == AVCOL_SPC_RGB &&
+ AVPixelFormatToVideoPixelFormat(codec_context->pix_fmt) ==
+ PIXEL_FORMAT_I420) {
+ // Some H.264 videos contain a VUI that specifies a color matrix of GBR,
+ // when they are actually ordinary YUV. Only 4:2:0 formats are checked,
+ // because GBR is reasonable for 4:4:4 content. See crbug.com/1067377.
+ color_space = VideoColorSpace::REC709();
}
// AVCodecContext occasionally has invalid extra data. See
diff --git a/chromium/media/ffmpeg/ffmpeg_regression_tests.cc b/chromium/media/ffmpeg/ffmpeg_regression_tests.cc
index ebd1babb4d3..a450eb827f1 100644
--- a/chromium/media/ffmpeg/ffmpeg_regression_tests.cc
+++ b/chromium/media/ffmpeg/ffmpeg_regression_tests.cc
@@ -84,7 +84,10 @@ class FlakyFFmpegRegressionTest
FFMPEG_TEST_CASE(Cr47325, "security/47325.mp4", PIPELINE_OK, PIPELINE_OK);
FFMPEG_TEST_CASE(Cr47761, "crbug47761.ogg", PIPELINE_OK, PIPELINE_OK);
FFMPEG_TEST_CASE(Cr50045, "crbug50045.mp4", PIPELINE_OK, PIPELINE_OK);
-FFMPEG_TEST_CASE(Cr62127, "crbug62127.webm", PIPELINE_OK, PIPELINE_OK);
+FFMPEG_TEST_CASE(Cr62127,
+ "crbug62127.webm",
+ PIPELINE_ERROR_DECODE,
+ PIPELINE_ERROR_DECODE);
FFMPEG_TEST_CASE(Cr93620, "security/93620.ogg", PIPELINE_OK, PIPELINE_OK);
FFMPEG_TEST_CASE(Cr100492,
"security/100492.webm",
@@ -195,10 +198,7 @@ FFMPEG_TEST_CASE(Cr635422,
"security/635422.ogg",
DEMUXER_ERROR_COULD_NOT_OPEN,
DEMUXER_ERROR_COULD_NOT_OPEN);
-FFMPEG_TEST_CASE(Cr637428,
- "security/637428.ogg",
- PIPELINE_ERROR_DECODE,
- PIPELINE_ERROR_DECODE);
+FFMPEG_TEST_CASE(Cr637428, "security/637428.ogg", PIPELINE_OK, PIPELINE_OK);
FFMPEG_TEST_CASE(Cr639961,
"security/639961.flac",
PIPELINE_ERROR_INITIALIZATION_FAILED,
@@ -335,8 +335,8 @@ FFMPEG_TEST_CASE(OGV_16,
DECODER_ERROR_NOT_SUPPORTED);
FFMPEG_TEST_CASE(OGV_17,
"security/vorbis.482086.ogv",
- PIPELINE_OK,
- PIPELINE_OK);
+ DEMUXER_ERROR_COULD_NOT_OPEN,
+ DEMUXER_ERROR_COULD_NOT_OPEN);
FFMPEG_TEST_CASE(OGV_18,
"security/wav.711.ogv",
DECODER_ERROR_NOT_SUPPORTED,
diff --git a/chromium/media/filters/BUILD.gn b/chromium/media/filters/BUILD.gn
index 573ef0ee042..3cc37a4379f 100644
--- a/chromium/media/filters/BUILD.gn
+++ b/chromium/media/filters/BUILD.gn
@@ -4,7 +4,6 @@
import("//media/gpu/args.gni")
import("//media/media_options.gni")
-import("//third_party/libaom/options.gni")
source_set("filters") {
# Do not expand the visibility here without double-checking with OWNERS, this
@@ -300,6 +299,7 @@ source_set("unit_tests") {
"video_renderer_algorithm_unittest.cc",
"vp9_parser_unittest.cc",
"vp9_raw_bits_reader_unittest.cc",
+ "vp9_uncompressed_header_parser_unittest.cc",
]
deps = [
diff --git a/chromium/media/filters/android/media_codec_audio_decoder.cc b/chromium/media/filters/android/media_codec_audio_decoder.cc
index 587b78c1a30..5119af0a8bd 100644
--- a/chromium/media/filters/android/media_codec_audio_decoder.cc
+++ b/chromium/media/filters/android/media_codec_audio_decoder.cc
@@ -52,6 +52,10 @@ std::string MediaCodecAudioDecoder::GetDisplayName() const {
return "MediaCodecAudioDecoder";
}
+AudioDecoderType MediaCodecAudioDecoder::GetDecoderType() const {
+ return AudioDecoderType::kMediaCodec;
+}
+
void MediaCodecAudioDecoder::Initialize(const AudioDecoderConfig& config,
CdmContext* cdm_context,
InitCB init_cb,
diff --git a/chromium/media/filters/android/media_codec_audio_decoder.h b/chromium/media/filters/android/media_codec_audio_decoder.h
index 54414590150..f5d7238913c 100644
--- a/chromium/media/filters/android/media_codec_audio_decoder.h
+++ b/chromium/media/filters/android/media_codec_audio_decoder.h
@@ -86,6 +86,7 @@ class MEDIA_EXPORT MediaCodecAudioDecoder : public AudioDecoder,
// AudioDecoder implementation.
std::string GetDisplayName() const override;
+ AudioDecoderType GetDecoderType() const override;
void Initialize(const AudioDecoderConfig& config,
CdmContext* cdm_context,
InitCB init_cb,
diff --git a/chromium/media/filters/audio_decoder_stream_unittest.cc b/chromium/media/filters/audio_decoder_stream_unittest.cc
index 483b0fa0b6d..9e6475c2042 100644
--- a/chromium/media/filters/audio_decoder_stream_unittest.cc
+++ b/chromium/media/filters/audio_decoder_stream_unittest.cc
@@ -110,8 +110,7 @@ class AudioDecoderStreamTest : public testing::Test {
}
void OnAudioBufferReadDone(base::OnceClosure closure,
- AudioDecoderStream::ReadStatus status,
- scoped_refptr<AudioBuffer> audio_buffer) {
+ AudioDecoderStream::ReadResult result) {
std::move(closure).Run();
}
diff --git a/chromium/media/filters/audio_decoder_unittest.cc b/chromium/media/filters/audio_decoder_unittest.cc
index c02f516e192..d54d50b2c85 100644
--- a/chromium/media/filters/audio_decoder_unittest.cc
+++ b/chromium/media/filters/audio_decoder_unittest.cc
@@ -62,7 +62,7 @@ namespace {
// The number of packets to read and then decode from each file.
const size_t kDecodeRuns = 3;
-enum AudioDecoderType {
+enum TestAudioDecoderType {
FFMPEG,
#if defined(OS_ANDROID)
MEDIA_CODEC,
@@ -123,7 +123,7 @@ void SetDiscardPadding(AVPacket* packet,
} // namespace
class AudioDecoderTest
- : public TestWithParam<std::tuple<AudioDecoderType, TestParams>> {
+ : public TestWithParam<std::tuple<TestAudioDecoderType, TestParams>> {
public:
AudioDecoderTest()
: decoder_type_(std::get<0>(GetParam())),
@@ -301,12 +301,12 @@ class AudioDecoderTest
decoded_audio_.push_back(std::move(buffer));
}
- void DecodeFinished(const base::Closure& quit_closure, Status status) {
+ void DecodeFinished(base::OnceClosure quit_closure, Status status) {
EXPECT_TRUE(pending_decode_);
EXPECT_FALSE(pending_reset_);
pending_decode_ = false;
last_decode_status_ = std::move(status);
- quit_closure.Run();
+ std::move(quit_closure).Run();
}
void ResetFinished() {
@@ -392,7 +392,7 @@ class AudioDecoderTest
const Status& last_decode_status() const { return last_decode_status_; }
private:
- const AudioDecoderType decoder_type_;
+ const TestAudioDecoderType decoder_type_;
// Current TestParams used to initialize the test and decoder. The initial
// value is std::get<1>(GetParam()). Could be overridden by set_param() so
diff --git a/chromium/media/filters/audio_file_reader.cc b/chromium/media/filters/audio_file_reader.cc
index cb81d920def..df95bac9730 100644
--- a/chromium/media/filters/audio_file_reader.cc
+++ b/chromium/media/filters/audio_file_reader.cc
@@ -276,9 +276,25 @@ bool AudioFileReader::OnNewFrame(
sizeof(float) * frames_read);
}
} else {
- audio_bus->FromInterleaved(
- frame->data[0], frames_read,
- av_get_bytes_per_sample(codec_context_->sample_fmt));
+ int bytes_per_sample = av_get_bytes_per_sample(codec_context_->sample_fmt);
+ switch (bytes_per_sample) {
+ case 1:
+ audio_bus->FromInterleaved<UnsignedInt8SampleTypeTraits>(
+ reinterpret_cast<const uint8_t*>(frame->data[0]), frames_read);
+ break;
+ case 2:
+ audio_bus->FromInterleaved<SignedInt16SampleTypeTraits>(
+ reinterpret_cast<const int16_t*>(frame->data[0]), frames_read);
+ break;
+ case 4:
+ audio_bus->FromInterleaved<SignedInt32SampleTypeTraits>(
+ reinterpret_cast<const int32_t*>(frame->data[0]), frames_read);
+ break;
+ default:
+ NOTREACHED() << "Unsupported bytes per sample encountered: "
+ << bytes_per_sample;
+ audio_bus->ZeroFrames(frames_read);
+ }
}
(*total_frames) += frames_read;
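
The replacement above moves AudioFileReader off the deprecated bytes-per-sample FromInterleaved() overload onto the trait-based template. A sketch of the typed API, assuming the media/base headers of this revision; Deinterleave is illustrative:

#include <cstdint>
#include <memory>

#include "media/base/audio_bus.h"
#include "media/base/audio_sample_types.h"

void Deinterleave(const int16_t* interleaved, int frames, int channels) {
  std::unique_ptr<media::AudioBus> bus =
      media::AudioBus::Create(channels, frames);
  // The trait names the source sample type and supplies the conversion into
  // the planar float format AudioBus stores internally.
  bus->FromInterleaved<media::SignedInt16SampleTypeTraits>(interleaved, frames);
}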
diff --git a/chromium/media/filters/audio_video_metadata_extractor_unittest.cc b/chromium/media/filters/audio_video_metadata_extractor_unittest.cc
index df1cb131eb3..b26dd2c3a4c 100644
--- a/chromium/media/filters/audio_video_metadata_extractor_unittest.cc
+++ b/chromium/media/filters/audio_video_metadata_extractor_unittest.cc
@@ -55,6 +55,19 @@ const std::string GetTagValue(
return tag_data->second;
}
+const std::string TagsToString(
+ const media::AudioVideoMetadataExtractor::TagDictionary& tags) {
+ std::string result;
+ for (auto& kv : tags) {
+ if (!result.empty())
+ result += " | ";
+ result += kv.first;
+ result += ": ";
+ result += kv.second;
+ }
+ return result;
+}
+
TEST(AudioVideoMetadataExtractorTest, InvalidFile) {
GetExtractor("ten_byte_file", true, false, 0, -1, -1);
}
@@ -172,16 +185,19 @@ TEST(AudioVideoMetadataExtractorTest, AndroidRotatedMP4Video) {
GetTagValue(extractor->stream_infos()[0].tags, "minor_version"));
EXPECT_EQ("h264", extractor->stream_infos()[1].type);
- EXPECT_EQ(5u, extractor->stream_infos()[1].tags.size());
+ EXPECT_EQ(6u, extractor->stream_infos()[1].tags.size())
+ << "Tags: " << TagsToString(extractor->stream_infos()[1].tags);
EXPECT_EQ("2014-02-11T00:39:25.000000Z",
GetTagValue(extractor->stream_infos()[1].tags, "creation_time"));
EXPECT_EQ("VideoHandle",
GetTagValue(extractor->stream_infos()[1].tags, "handler_name"));
+ EXPECT_EQ("MOTO", GetTagValue(extractor->stream_infos()[1].tags, "encoder"));
EXPECT_EQ("eng", GetTagValue(extractor->stream_infos()[1].tags, "language"));
EXPECT_EQ("90", GetTagValue(extractor->stream_infos()[1].tags, "rotate"));
EXPECT_EQ("aac", extractor->stream_infos()[2].type);
- EXPECT_EQ(3u, extractor->stream_infos()[2].tags.size());
+ EXPECT_EQ(4u, extractor->stream_infos()[2].tags.size())
+ << "Tags: " << TagsToString(extractor->stream_infos()[2].tags);
EXPECT_EQ("2014-02-11T00:39:25.000000Z",
GetTagValue(extractor->stream_infos()[2].tags, "creation_time"));
EXPECT_EQ("SoundHandle",
@@ -258,7 +274,8 @@ TEST(AudioVideoMetadataExtractorTest, AudioFLACInMp4) {
GetTagValue(extractor->stream_infos()[0].tags, "encoder"));
EXPECT_EQ("flac", extractor->stream_infos()[1].type);
- EXPECT_EQ(2u, extractor->stream_infos()[1].tags.size());
+ EXPECT_EQ(3u, extractor->stream_infos()[1].tags.size())
+ << "Tags: " << TagsToString(extractor->stream_infos()[1].tags);
EXPECT_EQ("SoundHandler",
GetTagValue(extractor->stream_infos()[1].tags, "handler_name"));
EXPECT_EQ("und", GetTagValue(extractor->stream_infos()[1].tags, "language"));
diff --git a/chromium/media/filters/chunk_demuxer.cc b/chromium/media/filters/chunk_demuxer.cc
index 9f0f2cc5999..2165cd2f121 100644
--- a/chromium/media/filters/chunk_demuxer.cc
+++ b/chromium/media/filters/chunk_demuxer.cc
@@ -624,6 +624,58 @@ void ChunkDemuxer::CancelPendingSeek(TimeDelta seek_time) {
RunSeekCB_Locked(PIPELINE_OK);
}
+ChunkDemuxer::Status ChunkDemuxer::AddId(
+ const std::string& id,
+ std::unique_ptr<AudioDecoderConfig> audio_config) {
+ DCHECK(audio_config);
+  DVLOG(1) << __func__ << " id=" << id
+ << " audio_config=" << audio_config->AsHumanReadableString();
+ base::AutoLock auto_lock(lock_);
+
+  // Any valid audio config provided by WebCodecs is bufferable here, though a
+  // decode error may occur later.
+ if (!audio_config->IsValidConfig())
+ return ChunkDemuxer::kNotSupported;
+
+ if ((state_ != WAITING_FOR_INIT && state_ != INITIALIZING) || IsValidId(id))
+ return kReachedIdLimit;
+
+ DCHECK(init_cb_);
+
+ std::string expected_codec = GetCodecName(audio_config->codec());
+ std::unique_ptr<media::StreamParser> stream_parser(
+ media::StreamParserFactory::Create(std::move(audio_config)));
+ DCHECK(stream_parser);
+
+ return AddIdInternal(id, std::move(stream_parser), expected_codec);
+}
+
+ChunkDemuxer::Status ChunkDemuxer::AddId(
+ const std::string& id,
+ std::unique_ptr<VideoDecoderConfig> video_config) {
+ DCHECK(video_config);
+  DVLOG(1) << __func__ << " id=" << id
+ << " video_config=" << video_config->AsHumanReadableString();
+ base::AutoLock auto_lock(lock_);
+
+  // Any valid video config provided by WebCodecs is bufferable here, though a
+  // decode error may occur later.
+ if (!video_config->IsValidConfig())
+ return ChunkDemuxer::kNotSupported;
+
+ if ((state_ != WAITING_FOR_INIT && state_ != INITIALIZING) || IsValidId(id))
+ return kReachedIdLimit;
+
+ DCHECK(init_cb_);
+
+ std::string expected_codec = GetCodecName(video_config->codec());
+ std::unique_ptr<media::StreamParser> stream_parser(
+ media::StreamParserFactory::Create(std::move(video_config)));
+ DCHECK(stream_parser);
+
+ return AddIdInternal(id, std::move(stream_parser), expected_codec);
+}
+
ChunkDemuxer::Status ChunkDemuxer::AddId(const std::string& id,
const std::string& content_type,
const std::string& codecs) {
@@ -646,6 +698,18 @@ ChunkDemuxer::Status ChunkDemuxer::AddId(const std::string& id,
return ChunkDemuxer::kNotSupported;
}
+ return AddIdInternal(id, std::move(stream_parser),
+ ExpectedCodecs(content_type, codecs));
+}
+
+ChunkDemuxer::Status ChunkDemuxer::AddIdInternal(
+ const std::string& id,
+ std::unique_ptr<media::StreamParser> stream_parser,
+ std::string expected_codecs) {
+ DVLOG(2) << __func__ << " id=" << id
+ << " expected_codecs=" << expected_codecs;
+ lock_.AssertAcquired();
+
std::unique_ptr<FrameProcessor> frame_processor =
std::make_unique<FrameProcessor>(
base::BindRepeating(&ChunkDemuxer::IncreaseDurationIfNecessary,
@@ -670,8 +734,8 @@ ChunkDemuxer::Status ChunkDemuxer::AddId(const std::string& id,
source_state->Init(base::BindOnce(&ChunkDemuxer::OnSourceInitDone,
base::Unretained(this), id),
- ExpectedCodecs(content_type, codecs),
- encrypted_media_init_data_cb_, base::NullCallback());
+ expected_codecs, encrypted_media_init_data_cb_,
+ base::NullCallback());
// TODO(wolenetz): Change to DCHECKs once less verification in release build
// is needed. See https://crbug.com/786975.
@@ -900,6 +964,66 @@ bool ChunkDemuxer::AppendData(const std::string& id,
return true;
}
+bool ChunkDemuxer::AppendChunks(
+ const std::string& id,
+ std::unique_ptr<StreamParser::BufferQueue> buffer_queue,
+ base::TimeDelta append_window_start,
+ base::TimeDelta append_window_end,
+ base::TimeDelta* timestamp_offset) {
+ DCHECK(buffer_queue);
+ DVLOG(1) << __func__ << ": " << id
+ << ", buffer_queue size()=" << buffer_queue->size();
+
+ DCHECK(!id.empty());
+ DCHECK(timestamp_offset);
+
+ Ranges<TimeDelta> ranges;
+
+ {
+ base::AutoLock auto_lock(lock_);
+ DCHECK_NE(state_, ENDED);
+
+    // Record whether any of the SourceBuffers are waiting for data before we
+    // start buffering new chunks.
+ bool old_waiting_for_data = IsSeekWaitingForData_Locked();
+
+ if (buffer_queue->size() == 0u)
+ return true;
+
+ switch (state_) {
+ case INITIALIZING:
+ case INITIALIZED:
+ DCHECK(IsValidId(id));
+ if (!source_state_map_[id]->AppendChunks(
+ std::move(buffer_queue), append_window_start, append_window_end,
+ timestamp_offset)) {
+ ReportError_Locked(CHUNK_DEMUXER_ERROR_APPEND_FAILED);
+ return false;
+ }
+ break;
+
+ case PARSE_ERROR:
+ case WAITING_FOR_INIT:
+ case ENDED:
+ case SHUTDOWN:
+ DVLOG(1) << "AppendChunks(): called in unexpected state " << state_;
+ return false;
+ }
+
+ // Check to see if data was appended at the pending seek point. This
+ // indicates we have parsed enough data to complete the seek. Work is still
+ // in progress at this point, but it's okay since |seek_cb_| will post.
+ if (old_waiting_for_data && !IsSeekWaitingForData_Locked() && seek_cb_)
+ RunSeekCB_Locked(PIPELINE_OK);
+
+ ranges = GetBufferedRanges_Locked();
+ }
+
+ host_->OnBufferedTimeRangesChanged(ranges);
+ progress_cb_.Run();
+ return true;
+}
+
void ChunkDemuxer::ResetParserState(const std::string& id,
TimeDelta append_window_start,
TimeDelta append_window_end,
@@ -1294,7 +1418,7 @@ ChunkDemuxerStream* ChunkDemuxer::CreateDemuxerStream(
DemuxerStream::Type type) {
// New ChunkDemuxerStreams can be created only during initialization segment
// processing, which happens when a new chunk of data is appended and the
- // lock_ must be held by ChunkDemuxer::AppendData.
+ // lock_ must be held by ChunkDemuxer::AppendData/Chunks.
lock_.AssertAcquired();
MediaTrack::Id media_track_id = GenerateMediaTrackId();
diff --git a/chromium/media/filters/chunk_demuxer.h b/chromium/media/filters/chunk_demuxer.h
index f86d931e9a0..68a2488a13a 100644
--- a/chromium/media/filters/chunk_demuxer.h
+++ b/chromium/media/filters/chunk_demuxer.h
@@ -33,6 +33,9 @@ class MEDIA_EXPORT SourceBufferStream;
namespace media {
+class AudioDecoderConfig;
+class VideoDecoderConfig;
+
class MEDIA_EXPORT ChunkDemuxerStream : public DemuxerStream {
public:
using BufferQueue = base::circular_deque<scoped_refptr<StreamParserBuffer>>;
@@ -199,7 +202,7 @@ class MEDIA_EXPORT ChunkDemuxer : public Demuxer {
};
// |open_cb| Run when Initialize() is called to signal that the demuxer
- // is ready to receive media data via AppendData().
+ // is ready to receive media data via AppendData/Chunks().
// |progress_cb| Run each time data is appended.
// |encrypted_media_init_data_cb| Run when the demuxer determines that an
// encryption key is needed to decrypt the content.
@@ -233,17 +236,25 @@ class MEDIA_EXPORT ChunkDemuxer : public Demuxer {
void StartWaitingForSeek(base::TimeDelta seek_time) override;
void CancelPendingSeek(base::TimeDelta seek_time) override;
- // Registers a new |id| to use for AppendData() calls. |content_type|
+ // Registers a new |id| to use for AppendData/Chunks() calls. |content_type|
// indicates the MIME type's ContentType and |codecs| indicates the MIME
// type's "codecs" parameter string (if any) for the data that we intend to
// append for this ID. kOk is returned if the demuxer has enough resources to
// support another ID and supports the format indicated by |content_type| and
- // |codecs|. kReachedIdLimit is returned if the demuxer cannot handle another
- // ID right now. kNotSupported is returned if |content_type| and |codecs| is
+ // |codecs|. kReachedIdLimit is returned if the demuxer cannot handle another
+  // ID right now. kNotSupported is returned if |content_type| and |codecs| are
// not a supported format.
+ // The |audio_config| and |video_config| overloads behave similarly, except
+ // the caller must provide valid, supported decoder configs; those overloads'
+ // usage indicates that we intend to append WebCodecs encoded audio or video
+ // chunks for this ID.
Status AddId(const std::string& id,
const std::string& content_type,
const std::string& codecs);
+ Status AddId(const std::string& id,
+ std::unique_ptr<AudioDecoderConfig> audio_config);
+ Status AddId(const std::string& id,
+ std::unique_ptr<VideoDecoderConfig> video_config);
// Notifies a caller via |tracks_updated_cb| that the set of media tracks
// for a given |id| has changed.
@@ -286,6 +297,16 @@ class MEDIA_EXPORT ChunkDemuxer : public Demuxer {
base::TimeDelta append_window_end,
base::TimeDelta* timestamp_offset);
+  // Appends WebCodecs encoded chunks (already converted by the caller into a
+  // BufferQueue of StreamParserBuffers) to the source buffer associated with
+  // |id|, with the same semantics for the other parameters and return value
+  // as AppendData().
+ bool AppendChunks(const std::string& id,
+ std::unique_ptr<StreamParser::BufferQueue> buffer_queue,
+ base::TimeDelta append_window_start,
+ base::TimeDelta append_window_end,
+ base::TimeDelta* timestamp_offset);
+
// Aborts parsing the current segment and reset the parser to a state where
// it can accept a new segment.
// Some pending frames can be emitted during that process. These frames are
@@ -393,6 +414,14 @@ class MEDIA_EXPORT ChunkDemuxer : public Demuxer {
SHUTDOWN,
};
+  // Helper for AddId(): creates the FrameProcessor, then creates, initializes
+  // and tracks the SourceBufferState in source_state_map_.
+ ChunkDemuxer::Status AddIdInternal(
+ const std::string& id,
+ std::unique_ptr<media::StreamParser> stream_parser,
+ std::string expected_codecs);
+
// Helper for video and audio track changing.
void FindAndEnableProperTracks(const std::vector<MediaTrack::Id>& track_ids,
base::TimeDelta curr_time,
@@ -499,7 +528,7 @@ class MEDIA_EXPORT ChunkDemuxer : public Demuxer {
base::TimeDelta duration_;
// The duration passed to the last SetDuration(). If
- // SetDuration() is never called or an AppendData() call or
+ // SetDuration() is never called or an AppendData/Chunks() call or
// a EndOfStream() call changes |duration_|, then this
// variable is set to < 0 to indicate that the |duration_| represents
// the actual duration instead of a user specified value.
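
A hypothetical caller-side sketch of the new WebCodecs entry points declared above, assuming an already-initialized demuxer; the id, config, and chunk queue are placeholders and error handling is reduced to a bool:

#include <memory>
#include <string>
#include <utility>

#include "base/time/time.h"
#include "media/base/stream_parser.h"
#include "media/base/video_decoder_config.h"
#include "media/filters/chunk_demuxer.h"

bool AddWebCodecsVideo(
    media::ChunkDemuxer* demuxer,
    std::unique_ptr<media::VideoDecoderConfig> config,
    std::unique_ptr<media::StreamParser::BufferQueue> chunks) {
  const std::string id = "webcodecs-video";  // illustrative id
  // Register the config-backed id, then append the pre-converted buffers.
  if (demuxer->AddId(id, std::move(config)) != media::ChunkDemuxer::kOk)
    return false;
  base::TimeDelta timestamp_offset;
  return demuxer->AppendChunks(id, std::move(chunks),
                               /*append_window_start=*/base::TimeDelta(),
                               /*append_window_end=*/base::TimeDelta::Max(),
                               &timestamp_offset);
}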
diff --git a/chromium/media/filters/chunk_demuxer_unittest.cc b/chromium/media/filters/chunk_demuxer_unittest.cc
index 2ed13c82723..a3de1346858 100644
--- a/chromium/media/filters/chunk_demuxer_unittest.cc
+++ b/chromium/media/filters/chunk_demuxer_unittest.cc
@@ -4388,10 +4388,10 @@ TEST_F(ChunkDemuxerTest,
}
namespace {
-void QuitLoop(base::Closure quit_closure,
+void QuitLoop(base::OnceClosure quit_closure,
DemuxerStream::Type type,
const std::vector<DemuxerStream*>& streams) {
- quit_closure.Run();
+ std::move(quit_closure).Run();
}
void DisableAndEnableDemuxerTracks(
diff --git a/chromium/media/filters/dav1d_video_decoder.cc b/chromium/media/filters/dav1d_video_decoder.cc
index bb31801e439..7e5eaf5b245 100644
--- a/chromium/media/filters/dav1d_video_decoder.cc
+++ b/chromium/media/filters/dav1d_video_decoder.cc
@@ -129,6 +129,16 @@ struct ScopedDav1dPictureFree {
}
};
+// static
+SupportedVideoDecoderConfigs Dav1dVideoDecoder::SupportedConfigs() {
+ return {{/*profile_min=*/AV1PROFILE_PROFILE_MAIN,
+ /*profile_max=*/AV1PROFILE_PROFILE_HIGH,
+ /*coded_size_min=*/kDefaultSwDecodeSizeMin,
+ /*coded_size_max=*/kDefaultSwDecodeSizeMax,
+ /*allow_encrypted=*/false,
+ /*require_encrypted=*/false}};
+}
+
Dav1dVideoDecoder::Dav1dVideoDecoder(MediaLog* media_log,
OffloadState offload_state)
: media_log_(media_log),
@@ -145,6 +155,10 @@ std::string Dav1dVideoDecoder::GetDisplayName() const {
return "Dav1dVideoDecoder";
}
+VideoDecoderType Dav1dVideoDecoder::GetDecoderType() const {
+ return VideoDecoderType::kDav1d;
+}
+
void Dav1dVideoDecoder::Initialize(const VideoDecoderConfig& config,
bool low_delay,
CdmContext* /* cdm_context */,
@@ -368,7 +382,7 @@ bool Dav1dVideoDecoder::DecodeBuffer(scoped_refptr<DecoderBuffer> buffer) {
color_space = config_.color_space_info();
frame->set_color_space(color_space.ToGfxColorSpace());
- frame->metadata()->power_efficient = false;
+ frame->metadata().power_efficient = false;
frame->set_hdr_metadata(config_.hdr_metadata());
// When we use bind mode, our image data is dependent on the Dav1dPicture,
diff --git a/chromium/media/filters/dav1d_video_decoder.h b/chromium/media/filters/dav1d_video_decoder.h
index 22c4e1fd381..689628ae07e 100644
--- a/chromium/media/filters/dav1d_video_decoder.h
+++ b/chromium/media/filters/dav1d_video_decoder.h
@@ -11,6 +11,7 @@
#include "base/macros.h"
#include "base/memory/ref_counted_memory.h"
#include "base/sequence_checker.h"
+#include "media/base/supported_video_decoder_config.h"
#include "media/base/video_decoder.h"
#include "media/base/video_decoder_config.h"
#include "media/base/video_frame.h"
@@ -24,11 +25,14 @@ class MediaLog;
class MEDIA_EXPORT Dav1dVideoDecoder : public OffloadableVideoDecoder {
public:
+ static SupportedVideoDecoderConfigs SupportedConfigs();
+
Dav1dVideoDecoder(MediaLog* media_log,
OffloadState offload_state = OffloadState::kNormal);
~Dav1dVideoDecoder() override;
// VideoDecoder implementation.
+ VideoDecoderType GetDecoderType() const override;
std::string GetDisplayName() const override;
void Initialize(const VideoDecoderConfig& config,
bool low_delay,
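
A sketch of querying the new static support list declared above, using only the SupportedVideoDecoderConfig field names visible in this patch (profile_min/profile_max); the header for VideoCodecProfile is assumed:

#include "media/base/video_codecs.h"
#include "media/filters/dav1d_video_decoder.h"

bool SupportsAV1MainProfile() {
  // Walk the decoder's advertised configs looking for one whose profile
  // range covers AV1 main profile.
  for (const auto& config : media::Dav1dVideoDecoder::SupportedConfigs()) {
    if (config.profile_min <= media::AV1PROFILE_PROFILE_MAIN &&
        media::AV1PROFILE_PROFILE_MAIN <= config.profile_max)
      return true;
  }
  return false;
}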
diff --git a/chromium/media/filters/dav1d_video_decoder_unittest.cc b/chromium/media/filters/dav1d_video_decoder_unittest.cc
index 19572a06873..0711b2f9e7e 100644
--- a/chromium/media/filters/dav1d_video_decoder_unittest.cc
+++ b/chromium/media/filters/dav1d_video_decoder_unittest.cc
@@ -169,7 +169,7 @@ class Dav1dVideoDecoderTest : public testing::Test {
}
void FrameReady(scoped_refptr<VideoFrame> frame) {
- DCHECK(!frame->metadata()->end_of_stream);
+ DCHECK(!frame->metadata().end_of_stream);
output_frames_.push_back(std::move(frame));
}
diff --git a/chromium/media/filters/decoder_selector.cc b/chromium/media/filters/decoder_selector.cc
index adba332ab89..ac99eae91ca 100644
--- a/chromium/media/filters/decoder_selector.cc
+++ b/chromium/media/filters/decoder_selector.cc
@@ -33,9 +33,25 @@ namespace {
const char kSelectDecoderTrace[] = "DecoderSelector::SelectDecoder";
+bool SkipDecoderForRTC(const AudioDecoderConfig& /*config*/,
+ const AudioDecoder& /*decoder*/) {
+ return false;
+}
+
+bool SkipDecoderForRTC(const VideoDecoderConfig& config,
+ const VideoDecoder& decoder) {
+ // For now, we assume that RTC decoders are able to decode non-RTC streams,
+  // presumably by configuring themselves based on the config's rtc bit. Since
+  // no decoder takes any action based on that bit yet, this assumption is as
+  // good as any.
+ return config.is_rtc() && !decoder.IsOptimizedForRTC();
+}
+
template <typename ConfigT, typename DecoderT>
-DecoderPriority NormalDecoderPriority(const ConfigT& /*config*/,
- const DecoderT& /*decoder*/) {
+DecoderPriority NormalDecoderPriority(const ConfigT& config,
+ const DecoderT& decoder) {
+ if (SkipDecoderForRTC(config, decoder))
+ return DecoderPriority::kSkipped;
+
return DecoderPriority::kNormal;
}
@@ -43,12 +59,15 @@ DecoderPriority ResolutionBasedDecoderPriority(const VideoDecoderConfig& config,
const VideoDecoder& decoder) {
#if defined(OS_ANDROID)
constexpr auto kSoftwareDecoderHeightCutoff = 360;
-#elif BUILDFLAG(IS_ASH)
+#elif BUILDFLAG(IS_CHROMEOS_ASH)
constexpr auto kSoftwareDecoderHeightCutoff = 360;
#else
constexpr auto kSoftwareDecoderHeightCutoff = 720;
#endif
+ if (SkipDecoderForRTC(config, decoder))
+ return DecoderPriority::kSkipped;
+
// We only do a height check to err on the side of prioritizing platform
// decoders.
const auto at_or_above_software_cutoff =
@@ -62,8 +81,11 @@ DecoderPriority ResolutionBasedDecoderPriority(const VideoDecoderConfig& config,
}
template <typename ConfigT, typename DecoderT>
-DecoderPriority SkipNonPlatformDecoders(const ConfigT& /*config*/,
+DecoderPriority SkipNonPlatformDecoders(const ConfigT& config,
const DecoderT& decoder) {
+ if (SkipDecoderForRTC(config, decoder))
+ return DecoderPriority::kSkipped;
+
return decoder.IsPlatformDecoder() ? DecoderPriority::kNormal
: DecoderPriority::kSkipped;
}
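
The helpers above compose an RTC filter into each priority rule. A sketch of a custom rule in the same shape, reusing only names visible in this patch (kSkipped/kNormal, is_rtc(), IsOptimizedForRTC()); the header hosting DecoderPriority is assumed:

#include "media/base/video_decoder.h"
#include "media/base/video_decoder_config.h"
#include "media/filters/decoder_selector.h"  // assumed home of DecoderPriority

media::DecoderPriority PreferRTCCapableDecoders(
    const media::VideoDecoderConfig& config,
    const media::VideoDecoder& decoder) {
  // Mirror SkipDecoderForRTC(): never hand an RTC stream to a decoder that
  // is not optimized for it.
  if (config.is_rtc() && !decoder.IsOptimizedForRTC())
    return media::DecoderPriority::kSkipped;
  return media::DecoderPriority::kNormal;
}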
diff --git a/chromium/media/filters/decoder_selector_unittest.cc b/chromium/media/filters/decoder_selector_unittest.cc
index db84317a22c..e32c8d0e32c 100644
--- a/chromium/media/filters/decoder_selector_unittest.cc
+++ b/chromium/media/filters/decoder_selector_unittest.cc
@@ -164,6 +164,8 @@ class AudioDecoderSelectorTestParam {
static void ExpectNotInitialize(MockDecoder* decoder) {
EXPECT_CALL(*decoder, Initialize_(_, _, _, _, _)).Times(0);
}
+
+ static void SetRTCDecoderness(MockDecoder* decoder, bool is_rtc_decoder) {}
};
// Allocate storage for the member variables.
@@ -246,6 +248,11 @@ class VideoDecoderSelectorTestParam {
static void ExpectNotInitialize(MockDecoder* decoder) {
EXPECT_CALL(*decoder, Initialize_(_, _, _, _, _, _)).Times(0);
}
+
+ static void SetRTCDecoderness(MockDecoder* decoder, bool is_optimized) {
+ EXPECT_CALL(*decoder, IsOptimizedForRTC())
+ .WillRepeatedly(Return(is_optimized));
+ }
};
// Allocate storage for the member variables.
@@ -289,6 +296,7 @@ class DecoderSelectorTest : public ::testing::Test {
bool supports_decryption;
bool is_platform_decoder;
bool expect_not_initialized;
+ bool is_rtc_decoder = false;
};
DecoderSelectorTest()
@@ -330,6 +338,14 @@ class DecoderSelectorTest : public ::testing::Test {
AddMockDecoder(std::move(args));
}
+ void AddMockRTCPlatformDecoder(const std::string& decoder_name,
+ DecoderCapability capability) {
+ auto args = MockDecoderArgs::Create(std::move(decoder_name), capability);
+ args.is_rtc_decoder = true;
+ args.is_platform_decoder = true;
+ AddMockDecoder(std::move(args));
+ }
+
void AddMockDecoder(MockDecoderArgs args) {
// Actual decoders are created in CreateDecoders(), which may be called
// multiple times by the DecoderSelector.
@@ -357,6 +373,7 @@ class DecoderSelectorTest : public ::testing::Test {
} else {
TypeParam::ExpectInitialize(decoder.get(), args.capability);
}
+ TypeParam::SetRTCDecoderness(decoder.get(), args.is_rtc_decoder);
decoders.push_back(std::move(decoder));
}
@@ -1049,4 +1066,54 @@ TEST_F(VideoDecoderSelectorTest, EncryptedStream_PrioritizePlatformDecoders) {
this->SelectDecoder();
}
+// Tests that the normal decoder selector rule skips non-RTC decoders for RTC.
+TEST_F(VideoDecoderSelectorTest, RTC_NormalPriority) {
+ base::test::ScopedFeatureList features;
+
+ this->AddMockDecoder(kDecoder1, kAlwaysSucceed);
+ this->AddMockRTCPlatformDecoder(kDecoder2, kAlwaysSucceed);
+
+ auto config = TestVideoConfig::Custom(gfx::Size(4096, 4096));
+ config.set_is_rtc(true);
+ this->demuxer_stream_.set_video_decoder_config(config);
+ this->CreateDecoderSelector();
+
+ EXPECT_CALL(*this, OnDecoderSelected(kDecoder2, IsNull()));
+ this->SelectDecoder();
+}
+
+// Tests that the resolution-based rule skips non-RTC decoders for RTC.
+TEST_F(VideoDecoderSelectorTest, RTC_DecoderBasedPriority) {
+ base::test::ScopedFeatureList features;
+ features.InitAndEnableFeature(kResolutionBasedDecoderPriority);
+
+ this->AddMockDecoder(kDecoder1, kAlwaysSucceed);
+ this->AddMockRTCPlatformDecoder(kDecoder2, kAlwaysSucceed);
+
+ auto config = TestVideoConfig::Custom(gfx::Size(4096, 4096));
+ config.set_is_rtc(true);
+ this->demuxer_stream_.set_video_decoder_config(config);
+ this->CreateDecoderSelector();
+
+ EXPECT_CALL(*this, OnDecoderSelected(kDecoder2, IsNull()));
+ this->SelectDecoder();
+}
+
+// Tests that the hardware-based rule skips non-RTC decoders for RTC.
+TEST_F(VideoDecoderSelectorTest, RTC_ForceHardwareDecoders) {
+ base::test::ScopedFeatureList features;
+ features.InitAndEnableFeature(kForceHardwareVideoDecoders);
+
+ this->AddMockPlatformDecoder(kDecoder1, kAlwaysSucceed);
+ this->AddMockRTCPlatformDecoder(kDecoder2, kAlwaysSucceed);
+
+ auto config = TestVideoConfig::Custom(gfx::Size(4096, 4096));
+ config.set_is_rtc(true);
+ this->demuxer_stream_.set_video_decoder_config(config);
+ this->CreateDecoderSelector();
+
+ EXPECT_CALL(*this, OnDecoderSelected(kDecoder2, IsNull()));
+ this->SelectDecoder();
+}
+
} // namespace media
diff --git a/chromium/media/filters/decoder_stream.cc b/chromium/media/filters/decoder_stream.cc
index c4b4c806d6c..bf80bb13684 100644
--- a/chromium/media/filters/decoder_stream.cc
+++ b/chromium/media/filters/decoder_stream.cc
@@ -78,17 +78,14 @@ const char* GetPrepareTraceString<DemuxerStream::AUDIO>() {
return "AudioDecoderStream::PrepareOutput";
}
-template <DemuxerStream::Type StreamType>
-const char* GetStatusString(
- typename DecoderStream<StreamType>::ReadStatus status) {
- switch (status) {
- case DecoderStream<StreamType>::OK:
+const char* GetStatusString(const Status& status) {
+ // TODO(crbug.com/1129662): Replace this with generic Status-to-string.
+ switch (status.code()) {
+ case StatusCode::kOk:
return "okay";
- case DecoderStream<StreamType>::ABORTED:
+ case StatusCode::kAborted:
return "aborted";
- case DecoderStream<StreamType>::DEMUXER_READ_ABORTED:
- return "demuxer_read_aborted";
- case DecoderStream<StreamType>::DECODE_ERROR:
+ default:
return "decode_error";
}
@@ -130,7 +127,7 @@ DecoderStream<StreamType>::~DecoderStream() {
}
if (read_cb_) {
read_cb_ = BindToCurrentLoop(std::move(read_cb_));
- SatisfyRead(ABORTED, nullptr);
+ SatisfyRead(StatusCode::kAborted);
}
if (reset_cb_)
task_runner_->PostTask(FROM_HERE, std::move(reset_cb_));
@@ -189,20 +186,22 @@ void DecoderStream<StreamType>::Read(ReadCB read_cb) {
TRACE_EVENT_ASYNC_BEGIN0("media", GetReadTraceString<StreamType>(), this);
if (state_ == STATE_ERROR) {
read_cb_ = BindToCurrentLoop(std::move(read_cb));
- SatisfyRead(DECODE_ERROR, nullptr);
+ // TODO(crbug.com/1129662): Consider attaching a caused-by of the original
+ // error as well.
+ SatisfyRead(StatusCode::kDecoderStreamInErrorState);
return;
}
if (state_ == STATE_END_OF_STREAM && ready_outputs_.empty() &&
unprepared_outputs_.empty()) {
read_cb_ = BindToCurrentLoop(std::move(read_cb));
- SatisfyRead(OK, StreamTraits::CreateEOSOutput());
+ SatisfyRead(StreamTraits::CreateEOSOutput());
return;
}
if (!ready_outputs_.empty()) {
read_cb_ = BindToCurrentLoop(std::move(read_cb));
- SatisfyRead(OK, ready_outputs_.front());
+ SatisfyRead(ready_outputs_.front());
ready_outputs_.pop_front();
MaybePrepareAnotherOutput();
} else {
@@ -224,7 +223,7 @@ void DecoderStream<StreamType>::Reset(base::OnceClosure closure) {
if (read_cb_) {
read_cb_ = BindToCurrentLoop(std::move(read_cb_));
- SatisfyRead(ABORTED, nullptr);
+ SatisfyRead(StatusCode::kAborted);
}
ClearOutputs();
@@ -411,7 +410,7 @@ void DecoderStream<StreamType>::OnDecoderSelected(
media_log_->SetProperty<StreamTraits::kIsDecryptingDemuxerStream>(
!!decrypting_demuxer_stream_);
media_log_->SetProperty<StreamTraits::kDecoderName>(
- decoder_->GetDisplayName());
+ decoder_->GetDecoderType());
media_log_->SetProperty<StreamTraits::kIsPlatformDecoder>(
decoder_->IsPlatformDecoder());
@@ -440,12 +439,11 @@ void DecoderStream<StreamType>::OnDecoderSelected(
}
template <DemuxerStream::Type StreamType>
-void DecoderStream<StreamType>::SatisfyRead(ReadStatus status,
- scoped_refptr<Output> output) {
+void DecoderStream<StreamType>::SatisfyRead(ReadResult result) {
DCHECK(read_cb_);
TRACE_EVENT_ASYNC_END1("media", GetReadTraceString<StreamType>(), this,
- "status", GetStatusString<StreamType>(status));
- std::move(read_cb_).Run(status, std::move(output));
+ "status", GetStatusString(result.code()));
+ std::move(read_cb_).Run(std::move(result));
}
template <DemuxerStream::Type StreamType>
@@ -505,7 +503,7 @@ void DecoderStream<StreamType>::DecodeInternal(
std::move(buffer),
base::BindOnce(&DecoderStream<StreamType>::OnDecodeDone,
fallback_weak_factory_.GetWeakPtr(), buffer_size,
- decoding_eos_, base::Passed(&trace_event)));
+ decoding_eos_, std::move(trace_event)));
}
template <DemuxerStream::Type StreamType>
@@ -567,7 +565,7 @@ void DecoderStream<StreamType>::OnDecodeDone(
if (end_of_stream) {
state_ = STATE_END_OF_STREAM;
if (ready_outputs_.empty() && unprepared_outputs_.empty() && read_cb_)
- SatisfyRead(OK, StreamTraits::CreateEOSOutput());
+ SatisfyRead(StreamTraits::CreateEOSOutput());
return;
}
@@ -594,14 +592,14 @@ void DecoderStream<StreamType>::OnDecodeDone(
state_ = STATE_REINITIALIZING_DECODER;
SelectDecoder();
} else {
- media_log_->NotifyError(std::move(status));
+ media_log_->NotifyError(status);
MEDIA_LOG(ERROR, media_log_)
<< GetStreamTypeString() << " decode error!";
state_ = STATE_ERROR;
ClearOutputs();
if (read_cb_)
- SatisfyRead(DECODE_ERROR, nullptr);
+ SatisfyRead(std::move(status));
}
return;
}
@@ -653,7 +651,7 @@ void DecoderStream<StreamType>::OnDecodeOutputReady(
// If |ready_outputs_| was non-empty, the read would have already been
// satisfied by Read().
DCHECK(ready_outputs_.empty());
- SatisfyRead(OK, std::move(output));
+ SatisfyRead(std::move(output));
return;
}
@@ -743,7 +741,7 @@ void DecoderStream<StreamType>::OnBufferReady(
pending_buffers_.clear();
ClearOutputs();
if (read_cb_)
- SatisfyRead(DECODE_ERROR, nullptr);
+ SatisfyRead(StatusCode::kDecoderStreamDemuxerError);
}
// Decoding has been stopped.
@@ -819,7 +817,7 @@ void DecoderStream<StreamType>::OnBufferReady(
if (status == DemuxerStream::kAborted) {
if (read_cb_)
- SatisfyRead(DEMUXER_READ_ABORTED, nullptr);
+ SatisfyRead(StatusCode::kAborted);
return;
}
@@ -862,7 +860,7 @@ void DecoderStream<StreamType>::CompleteDecoderReinitialization(bool success) {
if (state_ == STATE_ERROR) {
MEDIA_LOG(ERROR, media_log_)
<< GetStreamTypeString() << " decoder reinitialization failed";
- SatisfyRead(DECODE_ERROR, nullptr);
+ SatisfyRead(StatusCode::kDecoderStreamReinitFailed);
return;
}
@@ -981,7 +979,7 @@ void DecoderStream<StreamType>::OnPreparedOutputReady(
if (!read_cb_)
ready_outputs_.emplace_back(std::move(output));
else
- SatisfyRead(OK, std::move(output));
+ SatisfyRead(std::move(output));
MaybePrepareAnotherOutput();
diff --git a/chromium/media/filters/decoder_stream.h b/chromium/media/filters/decoder_stream.h
index 0d881689024..6fe5739c8d3 100644
--- a/chromium/media/filters/decoder_stream.h
+++ b/chromium/media/filters/decoder_stream.h
@@ -14,7 +14,7 @@
#include "base/containers/circular_deque.h"
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
-#include "base/util/type_safety/pass_key.h"
+#include "base/types/pass_key.h"
#include "media/base/audio_decoder.h"
#include "media/base/audio_timestamp_helper.h"
#include "media/base/demuxer_stream.h"
@@ -46,13 +46,6 @@ class MEDIA_EXPORT DecoderStream {
using Output = typename StreamTraits::OutputType;
using DecoderConfig = typename StreamTraits::DecoderConfigType;
- enum ReadStatus {
- OK, // Everything went as planned.
- ABORTED, // Read aborted due to Reset() during pending read.
- DEMUXER_READ_ABORTED, // Demuxer returned aborted read.
- DECODE_ERROR, // Decoder returned decode error.
- };
-
// Callback to create a list of decoders.
using CreateDecodersCB =
base::RepeatingCallback<std::vector<std::unique_ptr<Decoder>>()>;
@@ -61,7 +54,8 @@ class MEDIA_EXPORT DecoderStream {
using InitCB = base::OnceCallback<void(bool success)>;
// Indicates completion of a DecoderStream read.
- using ReadCB = base::OnceCallback<void(ReadStatus, scoped_refptr<Output>)>;
+ using ReadResult = StatusOr<scoped_refptr<Output>>;
+ using ReadCB = base::OnceCallback<void(ReadResult)>;
DecoderStream(std::unique_ptr<DecoderStreamTraits<StreamType>> traits,
scoped_refptr<base::SequencedTaskRunner> task_runner,
@@ -131,9 +125,10 @@ class MEDIA_EXPORT DecoderStream {
config_change_observer_cb_ = config_change_observer;
}
- // Allows tests to keep track the currently selected decoder.
+  // Allow interested folks to keep track of the currently selected decoder. The
+ // provided decoder is valid only during the scope of the callback.
using DecoderChangeObserverCB = base::RepeatingCallback<void(Decoder*)>;
- void set_decoder_change_observer_for_testing(
+ void set_decoder_change_observer(
DecoderChangeObserverCB decoder_change_observer_cb) {
decoder_change_observer_cb_ = std::move(decoder_change_observer_cb);
}
@@ -149,7 +144,7 @@ class MEDIA_EXPORT DecoderStream {
bool is_demuxer_read_pending() const { return pending_demuxer_read_; }
DecoderSelector<StreamType>& GetDecoderSelectorForTesting(
- util::PassKey<class VideoDecoderStreamTest>) {
+ base::PassKey<class VideoDecoderStreamTest>) {
return decoder_selector_;
}
@@ -185,8 +180,8 @@ class MEDIA_EXPORT DecoderStream {
std::unique_ptr<Decoder> selected_decoder,
std::unique_ptr<DecryptingDemuxerStream> decrypting_demuxer_stream);
- // Satisfy pending |read_cb_| with |status| and |output|.
- void SatisfyRead(ReadStatus status, scoped_refptr<Output> output);
+ // Satisfy pending |read_cb_| with |result|.
+ void SatisfyRead(ReadResult result);
// Decodes |buffer| and returns the result via OnDecodeOutputReady().
// Saves |buffer| into |pending_buffers_| if appropriate.
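
A caller-side sketch of the StatusOr-based ReadCB defined above. Only code() and StatusCode::kOk appear in this patch; the value() accessor is assumed from the media::StatusOr of this era, and OnRead is illustrative:

#include <utility>

#include "base/logging.h"
#include "media/base/audio_buffer.h"
#include "media/base/status.h"
#include "media/filters/decoder_stream.h"

void OnRead(media::AudioDecoderStream::ReadResult result) {
  if (result.code() != media::StatusCode::kOk) {
    DLOG(ERROR) << "read failed, code=" << static_cast<int>(result.code());
    return;
  }
  // value() consumes the StatusOr (assumed accessor for this revision).
  scoped_refptr<media::AudioBuffer> buffer = std::move(result).value();
}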
diff --git a/chromium/media/filters/decoder_stream_traits.cc b/chromium/media/filters/decoder_stream_traits.cc
index c9e20bfd654..1222c802007 100644
--- a/chromium/media/filters/decoder_stream_traits.cc
+++ b/chromium/media/filters/decoder_stream_traits.cc
@@ -48,7 +48,9 @@ void DecoderStreamTraits<DemuxerStream::AUDIO>::SetIsDecryptingDemuxerStream(
DecoderStreamTraits<DemuxerStream::AUDIO>::DecoderStreamTraits(
MediaLog* media_log,
ChannelLayout initial_hw_layout)
- : media_log_(media_log), initial_hw_layout_(initial_hw_layout) {}
+ : media_log_(media_log), initial_hw_layout_(initial_hw_layout) {
+ weak_this_ = weak_factory_.GetWeakPtr();
+}
DecoderStreamTraits<DemuxerStream::AUDIO>::DecoderConfigType
DecoderStreamTraits<DemuxerStream::AUDIO>::GetDecoderConfig(
@@ -80,9 +82,24 @@ void DecoderStreamTraits<DemuxerStream::AUDIO>::InitializeDecoder(
OnConfigChanged(config);
config_ = config;
- stats_.audio_decoder_info.decoder_name = decoder->GetDisplayName();
- decoder->Initialize(config, cdm_context, std::move(init_cb), output_cb,
- waiting_cb);
+ stats_.audio_decoder_info.decoder_type = AudioDecoderType::kUnknown;
+ // Both |this| and |decoder| are owned by a DecoderSelector and will stay
+ // alive at least until |init_cb| is finished executing.
+ decoder->Initialize(
+ config, cdm_context,
+ base::BindOnce(
+ &DecoderStreamTraits<DemuxerStream::AUDIO>::OnDecoderInitialized,
+ weak_this_, base::Unretained(decoder), std::move(init_cb)),
+ output_cb, waiting_cb);
+}
+
+void DecoderStreamTraits<DemuxerStream::AUDIO>::OnDecoderInitialized(
+ DecoderType* decoder,
+ InitCB cb,
+ Status result) {
+ if (result.is_ok())
+ stats_.audio_decoder_info.decoder_type = decoder->GetDecoderType();
+ std::move(cb).Run(result);
}
void DecoderStreamTraits<DemuxerStream::AUDIO>::OnStreamReset(
@@ -147,7 +164,9 @@ void DecoderStreamTraits<DemuxerStream::VIDEO>::SetIsDecryptingDemuxerStream(
DecoderStreamTraits<DemuxerStream::VIDEO>::DecoderStreamTraits(
MediaLog* media_log)
// Randomly selected number of samples to keep.
- : keyframe_distance_average_(16) {}
+ : keyframe_distance_average_(16) {
+ weak_this_ = weak_factory_.GetWeakPtr();
+}
DecoderStreamTraits<DemuxerStream::VIDEO>::DecoderConfigType
DecoderStreamTraits<DemuxerStream::VIDEO>::GetDecoderConfig(
@@ -181,10 +200,25 @@ void DecoderStreamTraits<DemuxerStream::VIDEO>::InitializeDecoder(
const OutputCB& output_cb,
const WaitingCB& waiting_cb) {
DCHECK(config.IsValidConfig());
- stats_.video_decoder_info.decoder_name = decoder->GetDisplayName();
- DVLOG(2) << stats_.video_decoder_info.decoder_name;
- decoder->Initialize(config, low_delay, cdm_context, std::move(init_cb),
- output_cb, waiting_cb);
+ stats_.video_decoder_info.decoder_type = VideoDecoderType::kUnknown;
+ DVLOG(2) << decoder->GetDisplayName();
+ // |decoder| is owned by a DecoderSelector and will stay
+ // alive at least until |init_cb| is finished executing.
+ decoder->Initialize(
+ config, low_delay, cdm_context,
+ base::BindOnce(
+ &DecoderStreamTraits<DemuxerStream::VIDEO>::OnDecoderInitialized,
+ weak_this_, base::Unretained(decoder), std::move(init_cb)),
+ output_cb, waiting_cb);
+}
+
+void DecoderStreamTraits<DemuxerStream::VIDEO>::OnDecoderInitialized(
+ DecoderType* decoder,
+ InitCB cb,
+ Status result) {
+ if (result.is_ok())
+ stats_.video_decoder_info.decoder_type = decoder->GetDecoderType();
+ std::move(cb).Run(result);
}
void DecoderStreamTraits<DemuxerStream::VIDEO>::OnStreamReset(
@@ -234,8 +268,8 @@ PostDecodeAction DecoderStreamTraits<DemuxerStream::VIDEO>::OnDecodeDone(
return PostDecodeAction::DELIVER;
// Add a timestamp here to enable buffering delay measurements down the line.
- buffer->metadata()->decode_begin_time = it->second.decode_begin_time;
- buffer->metadata()->decode_end_time = base::TimeTicks::Now();
+ buffer->metadata().decode_begin_time = it->second.decode_begin_time;
+ buffer->metadata().decode_end_time = base::TimeTicks::Now();
auto action = it->second.should_drop ? PostDecodeAction::DROP
: PostDecodeAction::DELIVER;
@@ -243,7 +277,7 @@ PostDecodeAction DecoderStreamTraits<DemuxerStream::VIDEO>::OnDecodeDone(
// Provide duration information to help the rendering algorithm on the very
// first and very last frames.
if (it->second.duration != kNoTimestamp)
- buffer->metadata()->frame_duration = it->second.duration;
+ buffer->metadata().frame_duration = it->second.duration;
// We erase from the beginning onward to our target frame since frames should
// be returned in presentation order. It's possible to accumulate entries in
@@ -255,12 +289,12 @@ PostDecodeAction DecoderStreamTraits<DemuxerStream::VIDEO>::OnDecodeDone(
void DecoderStreamTraits<DemuxerStream::VIDEO>::OnOutputReady(
OutputType* buffer) {
- if (!buffer->metadata()->decode_begin_time.has_value())
+ if (!buffer->metadata().decode_begin_time.has_value())
return;
// Tag buffer with elapsed time since creation.
- buffer->metadata()->processing_time =
- base::TimeTicks::Now() - *buffer->metadata()->decode_begin_time;
+ buffer->metadata().processing_time =
+ base::TimeTicks::Now() - *buffer->metadata().decode_begin_time;
}
} // namespace media
diff --git a/chromium/media/filters/decoder_stream_traits.h b/chromium/media/filters/decoder_stream_traits.h
index 66fe7cd85e9..08aa2112881 100644
--- a/chromium/media/filters/decoder_stream_traits.h
+++ b/chromium/media/filters/decoder_stream_traits.h
@@ -65,6 +65,7 @@ class MEDIA_EXPORT DecoderStreamTraits<DemuxerStream::AUDIO> {
InitCB init_cb,
const OutputCB& output_cb,
const WaitingCB& waiting_cb);
+ void OnDecoderInitialized(DecoderType* decoder, InitCB cb, Status status);
DecoderConfigType GetDecoderConfig(DemuxerStream* stream);
void OnDecode(const DecoderBuffer& buffer);
PostDecodeAction OnDecodeDone(OutputType* buffer);
@@ -84,6 +85,10 @@ class MEDIA_EXPORT DecoderStreamTraits<DemuxerStream::AUDIO> {
ChannelLayout initial_hw_layout_;
PipelineStatistics stats_;
AudioDecoderConfig config_;
+
+ base::WeakPtr<DecoderStreamTraits<DemuxerStream::AUDIO>> weak_this_;
+ base::WeakPtrFactory<DecoderStreamTraits<DemuxerStream::AUDIO>> weak_factory_{
+ this};
};
template <>
@@ -118,6 +123,7 @@ class MEDIA_EXPORT DecoderStreamTraits<DemuxerStream::VIDEO> {
InitCB init_cb,
const OutputCB& output_cb,
const WaitingCB& waiting_cb);
+ void OnDecoderInitialized(DecoderType* decoder, InitCB cb, Status status);
void OnDecode(const DecoderBuffer& buffer);
PostDecodeAction OnDecodeDone(OutputType* buffer);
void OnStreamReset(DemuxerStream* stream);
@@ -136,6 +142,10 @@ class MEDIA_EXPORT DecoderStreamTraits<DemuxerStream::VIDEO> {
base::flat_map<base::TimeDelta, FrameMetadata> frame_metadata_;
PipelineStatistics stats_;
+
+ base::WeakPtr<DecoderStreamTraits<DemuxerStream::VIDEO>> weak_this_;
+ base::WeakPtrFactory<DecoderStreamTraits<DemuxerStream::VIDEO>> weak_factory_{
+ this};
};
} // namespace media
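For reference, a condensed sketch of the weak-pointer pattern both traits specializations adopt above: the WeakPtr is cached once in the constructor and reused when binding the Initialize() completion callback, so a traits object destroyed mid-initialization simply drops the notification. The class name, members, and the single-argument Initialize() are simplified placeholders:

class TraitsLike {
 public:
  TraitsLike() { weak_this_ = weak_factory_.GetWeakPtr(); }

  void InitializeDecoder(DecoderType* decoder, InitCB init_cb) {
    // |decoder| is owned by a DecoderSelector and outlives |init_cb|, so
    // base::Unretained is safe here; |this| is protected by the WeakPtr.
    decoder->Initialize(base::BindOnce(&TraitsLike::OnDecoderInitialized,
                                       weak_this_, base::Unretained(decoder),
                                       std::move(init_cb)));
  }

 private:
  void OnDecoderInitialized(DecoderType* decoder, InitCB cb, Status result) {
    // Record per-decoder stats here, then forward the result.
    std::move(cb).Run(result);
  }

  base::WeakPtr<TraitsLike> weak_this_;
  // Declared last so outstanding weak pointers are invalidated before any
  // other member is destroyed.
  base::WeakPtrFactory<TraitsLike> weak_factory_{this};
};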
diff --git a/chromium/media/filters/decrypting_audio_decoder.cc b/chromium/media/filters/decrypting_audio_decoder.cc
index e42c00d8c6a..9415f532058 100644
--- a/chromium/media/filters/decrypting_audio_decoder.cc
+++ b/chromium/media/filters/decrypting_audio_decoder.cc
@@ -46,6 +46,10 @@ std::string DecryptingAudioDecoder::GetDisplayName() const {
return "DecryptingAudioDecoder";
}
+AudioDecoderType DecryptingAudioDecoder::GetDecoderType() const {
+ return AudioDecoderType::kDecrypting;
+}
+
void DecryptingAudioDecoder::Initialize(const AudioDecoderConfig& config,
CdmContext* cdm_context,
InitCB init_cb,
diff --git a/chromium/media/filters/decrypting_audio_decoder.h b/chromium/media/filters/decrypting_audio_decoder.h
index aff41e2c9b7..9c77c521e1f 100644
--- a/chromium/media/filters/decrypting_audio_decoder.h
+++ b/chromium/media/filters/decrypting_audio_decoder.h
@@ -42,6 +42,7 @@ class MEDIA_EXPORT DecryptingAudioDecoder : public AudioDecoder {
// Decoder implementation
bool SupportsDecryption() const override;
+ AudioDecoderType GetDecoderType() const override;
std::string GetDisplayName() const override;
// AudioDecoder implementation.
diff --git a/chromium/media/filters/decrypting_audio_decoder_unittest.cc b/chromium/media/filters/decrypting_audio_decoder_unittest.cc
index b88ec863f1f..c796c8f302d 100644
--- a/chromium/media/filters/decrypting_audio_decoder_unittest.cc
+++ b/chromium/media/filters/decrypting_audio_decoder_unittest.cc
@@ -12,6 +12,7 @@
#include "base/run_loop.h"
#include "base/stl_util.h"
#include "base/test/gmock_callback_support.h"
+#include "base/test/gmock_move_support.h"
#include "base/test/task_environment.h"
#include "media/base/audio_buffer.h"
#include "media/base/decoder_buffer.h"
@@ -23,12 +24,10 @@
#include "media/filters/decrypting_audio_decoder.h"
#include "testing/gmock/include/gmock/gmock.h"
-using ::base::test::RunCallback;
using ::base::test::RunOnceCallback;
using ::testing::_;
using ::testing::AtMost;
using ::testing::Return;
-using ::testing::SaveArg;
using ::testing::StrictMock;
namespace media {
@@ -148,20 +147,21 @@ class DecryptingAudioDecoderTest : public testing::Test {
// Helper function to simulate the decrypting and decoding process in the
// |decryptor_| with a decoding delay of kDecodingDelay buffers.
void DecryptAndDecodeAudio(scoped_refptr<DecoderBuffer> encrypted,
- const Decryptor::AudioDecodeCB& audio_decode_cb) {
+ Decryptor::AudioDecodeCB audio_decode_cb) {
num_decrypt_and_decode_calls_++;
if (!encrypted->end_of_stream())
num_frames_in_decryptor_++;
if (num_decrypt_and_decode_calls_ <= kDecodingDelay ||
num_frames_in_decryptor_ == 0) {
- audio_decode_cb.Run(Decryptor::kNeedMoreData, Decryptor::AudioFrames());
+ std::move(audio_decode_cb)
+ .Run(Decryptor::kNeedMoreData, Decryptor::AudioFrames());
return;
}
num_frames_in_decryptor_--;
- audio_decode_cb.Run(Decryptor::kSuccess,
- Decryptor::AudioFrames(1, decoded_frame_));
+ std::move(audio_decode_cb)
+ .Run(Decryptor::kSuccess, Decryptor::AudioFrames(1, decoded_frame_));
}
// Sets up expectations and actions to put DecryptingAudioDecoder in an
@@ -189,7 +189,7 @@ class DecryptingAudioDecoderTest : public testing::Test {
void EnterPendingDecodeState() {
EXPECT_TRUE(!pending_audio_decode_cb_);
EXPECT_CALL(*decryptor_, DecryptAndDecodeAudio(encrypted_buffer_, _))
- .WillOnce(SaveArg<1>(&pending_audio_decode_cb_));
+ .WillOnce(MoveArg<1>(&pending_audio_decode_cb_));
decoder_->Decode(encrypted_buffer_,
base::BindOnce(&DecryptingAudioDecoderTest::DecodeDone,
@@ -203,7 +203,7 @@ class DecryptingAudioDecoderTest : public testing::Test {
void EnterWaitingForKeyState() {
EXPECT_CALL(*decryptor_, DecryptAndDecodeAudio(encrypted_buffer_, _))
.WillRepeatedly(
- RunCallback<1>(Decryptor::kNoKey, Decryptor::AudioFrames()));
+ RunOnceCallback<1>(Decryptor::kNoKey, Decryptor::AudioFrames()));
EXPECT_CALL(*this, OnWaiting(WaitingReason::kNoDecryptionKey));
decoder_->Decode(encrypted_buffer_,
base::BindOnce(&DecryptingAudioDecoderTest::DecodeDone,
@@ -326,7 +326,7 @@ TEST_F(DecryptingAudioDecoderTest, DecryptAndDecode_DecodeError) {
EXPECT_CALL(*decryptor_, DecryptAndDecodeAudio(_, _))
.WillRepeatedly(
- RunCallback<1>(Decryptor::kError, Decryptor::AudioFrames()));
+ RunOnceCallback<1>(Decryptor::kError, Decryptor::AudioFrames()));
DecodeAndExpect(encrypted_buffer_, DecodeStatus::DECODE_ERROR);
}
@@ -347,7 +347,7 @@ TEST_F(DecryptingAudioDecoderTest, DecryptAndDecode_MultipleFrames) {
decoded_frame_list_.push_back(frame_b);
EXPECT_CALL(*decryptor_, DecryptAndDecodeAudio(_, _))
- .WillOnce(RunCallback<1>(Decryptor::kSuccess, decoded_frame_list_));
+ .WillOnce(RunOnceCallback<1>(Decryptor::kSuccess, decoded_frame_list_));
EXPECT_CALL(*this, FrameReady(decoded_frame_));
EXPECT_CALL(*this, FrameReady(frame_a));
@@ -413,7 +413,8 @@ TEST_F(DecryptingAudioDecoderTest, KeyAdded_DuringWaitingForKey) {
EnterWaitingForKeyState();
EXPECT_CALL(*decryptor_, DecryptAndDecodeAudio(_, _))
- .WillRepeatedly(RunCallback<1>(Decryptor::kSuccess, decoded_frame_list_));
+ .WillRepeatedly(
+ RunOnceCallback<1>(Decryptor::kSuccess, decoded_frame_list_));
EXPECT_CALL(*this, FrameReady(decoded_frame_));
EXPECT_CALL(*this, DecodeDone(IsOkStatus()));
event_cb_.Run(CdmContext::Event::kHasAdditionalUsableKey);
@@ -427,7 +428,8 @@ TEST_F(DecryptingAudioDecoderTest, KeyAdded_DruingPendingDecode) {
EnterPendingDecodeState();
EXPECT_CALL(*decryptor_, DecryptAndDecodeAudio(_, _))
- .WillRepeatedly(RunCallback<1>(Decryptor::kSuccess, decoded_frame_list_));
+ .WillRepeatedly(
+ RunOnceCallback<1>(Decryptor::kSuccess, decoded_frame_list_));
EXPECT_CALL(*this, FrameReady(decoded_frame_));
EXPECT_CALL(*this, DecodeDone(IsOkStatus()));
// The audio decode callback is returned after the correct decryption key is
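For reference, the two gmock action substitutions this test (and its video counterpart below) makes now that Decryptor decode callbacks are OnceCallbacks; |pending_cb| and |frames| are placeholders:

Decryptor::AudioDecodeCB pending_cb;

// Capturing a move-only callback for later use (previously SaveArg<1>):
EXPECT_CALL(*decryptor_, DecryptAndDecodeAudio(_, _))
    .WillOnce(MoveArg<1>(&pending_cb));

// Or running it immediately with canned arguments (previously RunCallback<1>):
EXPECT_CALL(*decryptor_, DecryptAndDecodeAudio(_, _))
    .WillRepeatedly(RunOnceCallback<1>(Decryptor::kSuccess, frames));

MoveArg comes from base/test/gmock_move_support.h, added to the includes above.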
diff --git a/chromium/media/filters/decrypting_video_decoder.cc b/chromium/media/filters/decrypting_video_decoder.cc
index 43601b56c5d..548d9ba61b4 100644
--- a/chromium/media/filters/decrypting_video_decoder.cc
+++ b/chromium/media/filters/decrypting_video_decoder.cc
@@ -28,6 +28,10 @@ DecryptingVideoDecoder::DecryptingVideoDecoder(
DETACH_FROM_SEQUENCE(sequence_checker_);
}
+VideoDecoderType DecryptingVideoDecoder::GetDecoderType() const {
+ return VideoDecoderType::kDecrypting;
+}
+
std::string DecryptingVideoDecoder::GetDisplayName() const {
return kDecoderName;
}
@@ -298,7 +302,7 @@ void DecryptingVideoDecoder::DeliverFrame(Decryptor::Status status,
CHECK(frame);
// Frame returned with kSuccess should not be an end-of-stream frame.
- DCHECK(!frame->metadata()->end_of_stream);
+ DCHECK(!frame->metadata().end_of_stream);
// If color space is not set, use the color space in the |config_|.
if (!frame->ColorSpace().IsValid()) {
diff --git a/chromium/media/filters/decrypting_video_decoder.h b/chromium/media/filters/decrypting_video_decoder.h
index 6f7c92402ec..e8715db88be 100644
--- a/chromium/media/filters/decrypting_video_decoder.h
+++ b/chromium/media/filters/decrypting_video_decoder.h
@@ -41,6 +41,7 @@ class MEDIA_EXPORT DecryptingVideoDecoder : public VideoDecoder {
bool SupportsDecryption() const override;
// VideoDecoder implementation.
+ VideoDecoderType GetDecoderType() const override;
std::string GetDisplayName() const override;
void Initialize(const VideoDecoderConfig& config,
bool low_delay,
diff --git a/chromium/media/filters/decrypting_video_decoder_unittest.cc b/chromium/media/filters/decrypting_video_decoder_unittest.cc
index 525fdb247f5..82cc59ed390 100644
--- a/chromium/media/filters/decrypting_video_decoder_unittest.cc
+++ b/chromium/media/filters/decrypting_video_decoder_unittest.cc
@@ -12,6 +12,7 @@
#include "base/run_loop.h"
#include "base/stl_util.h"
#include "base/test/gmock_callback_support.h"
+#include "base/test/gmock_move_support.h"
#include "base/test/task_environment.h"
#include "media/base/decoder_buffer.h"
#include "media/base/decrypt_config.h"
@@ -22,12 +23,10 @@
#include "media/filters/decrypting_video_decoder.h"
#include "testing/gmock/include/gmock/gmock.h"
-using ::base::test::RunCallback;
using ::base::test::RunOnceCallback;
using ::testing::_;
using ::testing::Invoke;
using ::testing::Return;
-using ::testing::SaveArg;
using ::testing::StrictMock;
using ::testing::WithArg;
@@ -135,20 +134,20 @@ class DecryptingVideoDecoderTest : public testing::Test {
// Helper function to simulate the decrypting and decoding process in the
// |decryptor_| with a decoding delay of kDecodingDelay buffers.
void DecryptAndDecodeVideo(scoped_refptr<DecoderBuffer> encrypted,
- const Decryptor::VideoDecodeCB& video_decode_cb) {
+ Decryptor::VideoDecodeCB video_decode_cb) {
num_decrypt_and_decode_calls_++;
if (!encrypted->end_of_stream())
num_frames_in_decryptor_++;
if (num_decrypt_and_decode_calls_ <= kDecodingDelay ||
num_frames_in_decryptor_ == 0) {
- video_decode_cb.Run(Decryptor::kNeedMoreData,
- scoped_refptr<VideoFrame>());
+ std::move(video_decode_cb)
+ .Run(Decryptor::kNeedMoreData, scoped_refptr<VideoFrame>());
return;
}
num_frames_in_decryptor_--;
- video_decode_cb.Run(Decryptor::kSuccess, decoded_video_frame_);
+ std::move(video_decode_cb).Run(Decryptor::kSuccess, decoded_video_frame_);
}
// Sets up expectations and actions to put DecryptingVideoDecoder in an
@@ -176,7 +175,7 @@ class DecryptingVideoDecoderTest : public testing::Test {
void EnterPendingDecodeState() {
EXPECT_TRUE(!pending_video_decode_cb_);
EXPECT_CALL(*decryptor_, DecryptAndDecodeVideo(encrypted_buffer_, _))
- .WillOnce(SaveArg<1>(&pending_video_decode_cb_));
+ .WillOnce(MoveArg<1>(&pending_video_decode_cb_));
decoder_->Decode(encrypted_buffer_,
base::BindOnce(&DecryptingVideoDecoderTest::DecodeDone,
@@ -189,7 +188,8 @@ class DecryptingVideoDecoderTest : public testing::Test {
void EnterWaitingForKeyState() {
EXPECT_CALL(*decryptor_, DecryptAndDecodeVideo(_, _))
- .WillRepeatedly(RunCallback<1>(Decryptor::kNoKey, null_video_frame_));
+ .WillRepeatedly(
+ RunOnceCallback<1>(Decryptor::kNoKey, null_video_frame_));
EXPECT_CALL(*this, OnWaiting(WaitingReason::kNoDecryptionKey));
decoder_->Decode(encrypted_buffer_,
base::BindOnce(&DecryptingVideoDecoderTest::DecodeDone,
@@ -319,8 +319,8 @@ TEST_F(DecryptingVideoDecoderTest, DecryptAndDecode_DecodeError) {
Initialize();
EXPECT_CALL(*decryptor_, DecryptAndDecodeVideo(_, _))
- .WillRepeatedly(RunCallback<1>(Decryptor::kError,
- scoped_refptr<VideoFrame>(nullptr)));
+ .WillRepeatedly(RunOnceCallback<1>(Decryptor::kError,
+ scoped_refptr<VideoFrame>(nullptr)));
DecodeAndExpectError(encrypted_buffer_);
@@ -343,7 +343,7 @@ TEST_F(DecryptingVideoDecoderTest, KeyAdded_DuringWaitingForKey) {
EXPECT_CALL(*decryptor_, DecryptAndDecodeVideo(_, _))
.WillRepeatedly(
- RunCallback<1>(Decryptor::kSuccess, decoded_video_frame_));
+ RunOnceCallback<1>(Decryptor::kSuccess, decoded_video_frame_));
EXPECT_CALL(*this, FrameReady(decoded_video_frame_));
EXPECT_CALL(*this, DecodeDone(IsOkStatus()));
event_cb_.Run(CdmContext::Event::kHasAdditionalUsableKey);
@@ -358,7 +358,7 @@ TEST_F(DecryptingVideoDecoderTest, KeyAdded_DuringPendingDecode) {
EXPECT_CALL(*decryptor_, DecryptAndDecodeVideo(_, _))
.WillRepeatedly(
- RunCallback<1>(Decryptor::kSuccess, decoded_video_frame_));
+ RunOnceCallback<1>(Decryptor::kSuccess, decoded_video_frame_));
EXPECT_CALL(*this, FrameReady(decoded_video_frame_));
EXPECT_CALL(*this, DecodeDone(IsOkStatus()));
// The video decode callback is returned after the correct decryption key is
diff --git a/chromium/media/filters/demuxer_perftest.cc b/chromium/media/filters/demuxer_perftest.cc
index d444f4d06f9..3bb2619c3e8 100644
--- a/chromium/media/filters/demuxer_perftest.cc
+++ b/chromium/media/filters/demuxer_perftest.cc
@@ -45,10 +45,10 @@ class DemuxerHostImpl : public media::DemuxerHost {
DISALLOW_COPY_AND_ASSIGN(DemuxerHostImpl);
};
-static void QuitLoopWithStatus(base::Closure quit_cb,
+static void QuitLoopWithStatus(base::OnceClosure quit_cb,
media::PipelineStatus status) {
CHECK_EQ(status, media::PIPELINE_OK);
- quit_cb.Run();
+ std::move(quit_cb).Run();
}
static void OnEncryptedMediaInitData(EmeInitDataType init_data_type,
@@ -81,7 +81,7 @@ class StreamReader {
private:
void OnReadDone(scoped_refptr<base::SingleThreadTaskRunner> task_runner,
- const base::Closure& quit_when_idle_closure,
+ base::OnceClosure quit_when_idle_closure,
bool* end_of_stream,
base::TimeDelta* timestamp,
media::DemuxerStream::Status status,
@@ -139,7 +139,7 @@ bool StreamReader::IsDone() {
void StreamReader::OnReadDone(
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
- const base::Closure& quit_when_idle_closure,
+ base::OnceClosure quit_when_idle_closure,
bool* end_of_stream,
base::TimeDelta* timestamp,
media::DemuxerStream::Status status,
@@ -148,7 +148,7 @@ void StreamReader::OnReadDone(
CHECK(buffer);
*end_of_stream = buffer->end_of_stream();
*timestamp = *end_of_stream ? media::kNoTimestamp : buffer->timestamp();
- task_runner->PostTask(FROM_HERE, quit_when_idle_closure);
+ task_runner->PostTask(FROM_HERE, std::move(quit_when_idle_closure));
}
int StreamReader::GetNextStreamIndexToRead() {
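For reference, a minimal sketch of the base::Closure to base::OnceClosure migration applied throughout this file: a OnceClosure is move-only and single-shot, so it is taken by value and consumed with std::move():

void QuitLoop(base::OnceClosure quit_cb) {
  std::move(quit_cb).Run();  // |quit_cb| is null afterwards and cannot re-run.
}

void Example() {
  base::RunLoop run_loop;
  QuitLoop(run_loop.QuitClosure());  // QuitClosure() converts to OnceClosure.
  run_loop.Run();                    // Returns immediately; Quit() already ran.
}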
diff --git a/chromium/media/filters/fake_video_decoder.cc b/chromium/media/filters/fake_video_decoder.cc
index e2f4726f28f..4e8593fd530 100644
--- a/chromium/media/filters/fake_video_decoder.cc
+++ b/chromium/media/filters/fake_video_decoder.cc
@@ -68,6 +68,10 @@ std::string FakeVideoDecoder::GetDisplayName() const {
return decoder_name_;
}
+VideoDecoderType FakeVideoDecoder::GetDecoderType() const {
+ return VideoDecoderType::kUnknown;
+}
+
void FakeVideoDecoder::Initialize(const VideoDecoderConfig& config,
bool low_delay,
CdmContext* cdm_context,
diff --git a/chromium/media/filters/fake_video_decoder.h b/chromium/media/filters/fake_video_decoder.h
index 898f4dab778..45e920a9496 100644
--- a/chromium/media/filters/fake_video_decoder.h
+++ b/chromium/media/filters/fake_video_decoder.h
@@ -49,6 +49,7 @@ class FakeVideoDecoder : public VideoDecoder {
bool SupportsDecryption() const override;
bool IsPlatformDecoder() const override;
std::string GetDisplayName() const override;
+ VideoDecoderType GetDecoderType() const override;
// VideoDecoder implementation
void Initialize(const VideoDecoderConfig& config,
diff --git a/chromium/media/filters/fake_video_decoder_unittest.cc b/chromium/media/filters/fake_video_decoder_unittest.cc
index 49130158ce1..58baa499a53 100644
--- a/chromium/media/filters/fake_video_decoder_unittest.cc
+++ b/chromium/media/filters/fake_video_decoder_unittest.cc
@@ -91,7 +91,7 @@ class FakeVideoDecoderTest
}
void FrameReady(scoped_refptr<VideoFrame> frame) {
- DCHECK(!frame->metadata()->end_of_stream);
+ DCHECK(!frame->metadata().end_of_stream);
last_decoded_frame_ = std::move(frame);
num_decoded_frames_++;
}
diff --git a/chromium/media/filters/ffmpeg_audio_decoder.cc b/chromium/media/filters/ffmpeg_audio_decoder.cc
index e5daea40328..7ff037ac847 100644
--- a/chromium/media/filters/ffmpeg_audio_decoder.cc
+++ b/chromium/media/filters/ffmpeg_audio_decoder.cc
@@ -69,6 +69,10 @@ std::string FFmpegAudioDecoder::GetDisplayName() const {
return "FFmpegAudioDecoder";
}
+AudioDecoderType FFmpegAudioDecoder::GetDecoderType() const {
+ return AudioDecoderType::kFFmpeg;
+}
+
void FFmpegAudioDecoder::Initialize(const AudioDecoderConfig& config,
CdmContext* /* cdm_context */,
InitCB init_cb,
diff --git a/chromium/media/filters/ffmpeg_audio_decoder.h b/chromium/media/filters/ffmpeg_audio_decoder.h
index 2111b89a797..5b3d20380e7 100644
--- a/chromium/media/filters/ffmpeg_audio_decoder.h
+++ b/chromium/media/filters/ffmpeg_audio_decoder.h
@@ -40,6 +40,7 @@ class MEDIA_EXPORT FFmpegAudioDecoder : public AudioDecoder {
~FFmpegAudioDecoder() override;
// AudioDecoder implementation.
+ AudioDecoderType GetDecoderType() const override;
std::string GetDisplayName() const override;
void Initialize(const AudioDecoderConfig& config,
CdmContext* cdm_context,
diff --git a/chromium/media/filters/ffmpeg_demuxer.cc b/chromium/media/filters/ffmpeg_demuxer.cc
index fa39c5698ed..5d8134ceaa6 100644
--- a/chromium/media/filters/ffmpeg_demuxer.cc
+++ b/chromium/media/filters/ffmpeg_demuxer.cc
@@ -12,6 +12,7 @@
#include "base/base64.h"
#include "base/bind.h"
#include "base/callback_helpers.h"
+#include "base/feature_list.h"
#include "base/macros.h"
#include "base/memory/ptr_util.h"
#include "base/metrics/histogram_functions.h"
@@ -32,6 +33,7 @@
#include "media/base/decrypt_config.h"
#include "media/base/demuxer_memory_limit.h"
#include "media/base/limits.h"
+#include "media/base/media_switches.h"
#include "media/base/media_tracks.h"
#include "media/base/media_types.h"
#include "media/base/sample_rates.h"
@@ -214,6 +216,17 @@ std::unique_ptr<FFmpegDemuxerStream> FFmpegDemuxerStream::Create(
std::unique_ptr<AudioDecoderConfig> audio_config;
std::unique_ptr<VideoDecoderConfig> video_config;
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ if (base::FeatureList::IsEnabled(kDeprecateLowUsageCodecs)) {
+ const auto codec_id = stream->codecpar->codec_id;
+ if (codec_id == AV_CODEC_ID_AMR_NB || codec_id == AV_CODEC_ID_AMR_WB ||
+ codec_id == AV_CODEC_ID_GSM_MS) {
+ MEDIA_LOG(ERROR, media_log) << "AMR and GSM are deprecated on ChromeOS.";
+ return nullptr;
+ }
+ }
+#endif
+
if (stream->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
audio_config = std::make_unique<AudioDecoderConfig>();
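For reference, a sketch of the feature gate consulted above. The real kDeprecateLowUsageCodecs constant lives behind media/base/media_switches.h (now included); its name is from this patch, while the declaration and default state shown here are only an assumption for illustration:

// Illustrative declaration (media/base/media_switches.cc):
const base::Feature kDeprecateLowUsageCodecs{"DeprecateLowUsageCodecs",
                                             base::FEATURE_ENABLED_BY_DEFAULT};

// Query site, as in FFmpegDemuxerStream::Create() above:
if (base::FeatureList::IsEnabled(kDeprecateLowUsageCodecs)) {
  // Reject AMR-NB, AMR-WB and GSM-MS before a DemuxerStream is built.
}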
diff --git a/chromium/media/filters/ffmpeg_demuxer_unittest.cc b/chromium/media/filters/ffmpeg_demuxer_unittest.cc
index 5bdd717becc..698990ed696 100644
--- a/chromium/media/filters/ffmpeg_demuxer_unittest.cc
+++ b/chromium/media/filters/ffmpeg_demuxer_unittest.cc
@@ -779,7 +779,7 @@ TEST_F(FFmpegDemuxerTest, Read_AudioNegativeStartTimeAndOpusDiscard_Sync) {
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
TEST_F(FFmpegDemuxerTest, TestAudioNegativeTimestamps) {
// Note: This test will _crash_ the browser if negative timestamp
// values are skipped, since this file is heavily truncated to avoid
@@ -795,7 +795,7 @@ TEST_F(FFmpegDemuxerTest, TestAudioNegativeTimestamps) {
Read(audio, FROM_HERE, 104, 77619, true);
Read(audio, FROM_HERE, 104, 103492, true);
}
-#endif // BUILDFLAG(IS_ASH)
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
// Similar to the test above, but using an opus clip plus h264 b-frames to
// ensure we don't apply chained ogg workarounds to other content.
@@ -1653,10 +1653,10 @@ TEST_F(FFmpegDemuxerTest, Seek_FallbackToDisabledAudioStream) {
}
namespace {
-void QuitLoop(base::Closure quit_closure,
+void QuitLoop(base::OnceClosure quit_closure,
DemuxerStream::Type type,
const std::vector<DemuxerStream*>& streams) {
- quit_closure.Run();
+ std::move(quit_closure).Run();
}
void DisableAndEnableDemuxerTracks(
diff --git a/chromium/media/filters/ffmpeg_glue_unittest.cc b/chromium/media/filters/ffmpeg_glue_unittest.cc
index 53db86c0dc8..e37437d683b 100644
--- a/chromium/media/filters/ffmpeg_glue_unittest.cc
+++ b/chromium/media/filters/ffmpeg_glue_unittest.cc
@@ -316,7 +316,7 @@ TEST_F(FFmpegGlueContainerTest, AAC) {
ExpectContainer(container_names::CONTAINER_AAC);
}
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
TEST_F(FFmpegGlueContainerTest, AVI) {
InitializeAndOpen("bear.avi");
ExpectContainer(container_names::CONTAINER_AVI);
@@ -326,7 +326,7 @@ TEST_F(FFmpegGlueContainerTest, AMR) {
InitializeAndOpen("bear.amr");
ExpectContainer(container_names::CONTAINER_AMR);
}
-#endif // BUILDFLAG(IS_ASH)
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
// Probe something unsupported to ensure we fall back to our internal guess.
diff --git a/chromium/media/filters/ffmpeg_video_decoder.cc b/chromium/media/filters/ffmpeg_video_decoder.cc
index d703189571a..6e4abe2ae76 100644
--- a/chromium/media/filters/ffmpeg_video_decoder.cc
+++ b/chromium/media/filters/ffmpeg_video_decoder.cc
@@ -83,6 +83,30 @@ bool FFmpegVideoDecoder::IsCodecSupported(VideoCodec codec) {
return avcodec_find_decoder(VideoCodecToCodecID(codec)) != nullptr;
}
+// static
+SupportedVideoDecoderConfigs FFmpegVideoDecoder::SupportedConfigsForWebRTC() {
+ SupportedVideoDecoderConfigs supported_configs;
+
+ if (IsCodecSupported(kCodecH264)) {
+ supported_configs.emplace_back(/*profile_min=*/H264PROFILE_BASELINE,
+ /*profile_max=*/H264PROFILE_HIGH,
+ /*coded_size_min=*/kDefaultSwDecodeSizeMin,
+ /*coded_size_max=*/kDefaultSwDecodeSizeMax,
+ /*allow_encrypted=*/false,
+ /*require_encrypted=*/false);
+ }
+ if (IsCodecSupported(kCodecVP8)) {
+ supported_configs.emplace_back(/*profile_min=*/VP8PROFILE_ANY,
+ /*profile_max=*/VP8PROFILE_ANY,
+ /*coded_size_min=*/kDefaultSwDecodeSizeMin,
+ /*coded_size_max=*/kDefaultSwDecodeSizeMax,
+ /*allow_encrypted=*/false,
+ /*require_encrypted=*/false);
+ }
+
+ return supported_configs;
+}
+
FFmpegVideoDecoder::FFmpegVideoDecoder(MediaLog* media_log)
: media_log_(media_log), state_(kUninitialized), decode_nalus_(false) {
DVLOG(1) << __func__;
@@ -161,6 +185,13 @@ int FFmpegVideoDecoder::GetVideoBuffer(struct AVCodecContext* codec_context,
if (codec_context->color_range == AVCOL_RANGE_JPEG) {
video_frame->set_color_space(gfx::ColorSpace::CreateJpeg());
}
+ } else if (codec_context->codec_id == AV_CODEC_ID_H264 &&
+ codec_context->colorspace == AVCOL_SPC_RGB &&
+ format == PIXEL_FORMAT_I420) {
+ // Some H.264 videos contain a VUI that specifies a color matrix of GBR,
+ // when they are actually ordinary YUV. Only 4:2:0 formats are checked,
+ // because GBR is reasonable for 4:4:4 content. See crbug.com/1067377.
+ video_frame->set_color_space(gfx::ColorSpace::CreateREC709());
} else if (codec_context->color_primaries != AVCOL_PRI_UNSPECIFIED ||
codec_context->color_trc != AVCOL_TRC_UNSPECIFIED ||
codec_context->colorspace != AVCOL_SPC_UNSPECIFIED) {
@@ -196,6 +227,10 @@ int FFmpegVideoDecoder::GetVideoBuffer(struct AVCodecContext* codec_context,
return 0;
}
+VideoDecoderType FFmpegVideoDecoder::GetDecoderType() const {
+ return VideoDecoderType::kFFmpeg;
+}
+
std::string FFmpegVideoDecoder::GetDisplayName() const {
return "FFmpegVideoDecoder";
}
@@ -359,7 +394,7 @@ bool FFmpegVideoDecoder::OnNewFrame(AVFrame* frame) {
reinterpret_cast<VideoFrame*>(av_buffer_get_opaque(frame->buf[0]));
video_frame->set_timestamp(
base::TimeDelta::FromMicroseconds(frame->reordered_opaque));
- video_frame->metadata()->power_efficient = false;
+ video_frame->metadata().power_efficient = false;
output_cb_.Run(video_frame);
return true;
}
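For reference, a sketch of how a caller might consult the new SupportedConfigsForWebRTC() listing; the profile-range test mirrors the SupportedVideoDecoderConfig fields populated above, while the wrapper function itself is illustrative:

bool IsProfileSoftwareDecodable(VideoCodecProfile profile) {
  for (const auto& config : FFmpegVideoDecoder::SupportedConfigsForWebRTC()) {
    if (profile >= config.profile_min && profile <= config.profile_max)
      return true;  // Size and encryption constraints elided in this sketch.
  }
  return false;
}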
diff --git a/chromium/media/filters/ffmpeg_video_decoder.h b/chromium/media/filters/ffmpeg_video_decoder.h
index 4ea30459b48..de1ff5390d0 100644
--- a/chromium/media/filters/ffmpeg_video_decoder.h
+++ b/chromium/media/filters/ffmpeg_video_decoder.h
@@ -12,6 +12,7 @@
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "base/sequence_checker.h"
+#include "media/base/supported_video_decoder_config.h"
#include "media/base/video_decoder.h"
#include "media/base/video_decoder_config.h"
#include "media/base/video_frame_pool.h"
@@ -29,6 +30,7 @@ class MediaLog;
class MEDIA_EXPORT FFmpegVideoDecoder : public VideoDecoder {
public:
static bool IsCodecSupported(VideoCodec codec);
+ static SupportedVideoDecoderConfigs SupportedConfigsForWebRTC();
explicit FFmpegVideoDecoder(MediaLog* media_log);
~FFmpegVideoDecoder() override;
@@ -38,6 +40,7 @@ class MEDIA_EXPORT FFmpegVideoDecoder : public VideoDecoder {
void set_decode_nalus(bool decode_nalus) { decode_nalus_ = decode_nalus; }
// VideoDecoder implementation.
+ VideoDecoderType GetDecoderType() const override;
std::string GetDisplayName() const override;
void Initialize(const VideoDecoderConfig& config,
bool low_delay,
diff --git a/chromium/media/filters/ffmpeg_video_decoder_unittest.cc b/chromium/media/filters/ffmpeg_video_decoder_unittest.cc
index 599fb12c7fe..3a07a387ad1 100644
--- a/chromium/media/filters/ffmpeg_video_decoder_unittest.cc
+++ b/chromium/media/filters/ffmpeg_video_decoder_unittest.cc
@@ -197,7 +197,7 @@ class FFmpegVideoDecoderTest : public testing::Test {
}
void FrameReady(scoped_refptr<VideoFrame> frame) {
- DCHECK(!frame->metadata()->end_of_stream);
+ DCHECK(!frame->metadata().end_of_stream);
output_frames_.push_back(std::move(frame));
}
diff --git a/chromium/media/filters/fuchsia/fuchsia_video_decoder.cc b/chromium/media/filters/fuchsia/fuchsia_video_decoder.cc
index d17b79cb657..76149fddb7b 100644
--- a/chromium/media/filters/fuchsia/fuchsia_video_decoder.cc
+++ b/chromium/media/filters/fuchsia/fuchsia_video_decoder.cc
@@ -123,7 +123,7 @@ class OutputMailbox {
coded_size, visible_rect, natural_size, timestamp);
// Request a fence we'll wait on before reusing the buffer.
- frame->metadata()->read_lock_fences_enabled = true;
+ frame->metadata().read_lock_fences_enabled = true;
return frame;
}
@@ -134,7 +134,7 @@ class OutputMailbox {
// The mailbox is referenced by a VideoFrame. It will be deleted as soon
// as the frame is destroyed.
DCHECK(reuse_callback_);
- reuse_callback_ = base::Closure();
+ reuse_callback_ = base::OnceClosure();
} else {
delete this;
}
@@ -197,6 +197,7 @@ class FuchsiaVideoDecoder : public VideoDecoder,
bool IsPlatformDecoder() const override;
bool SupportsDecryption() const override;
std::string GetDisplayName() const override;
+ VideoDecoderType GetDecoderType() const override;
// VideoDecoder implementation.
void Initialize(const VideoDecoderConfig& config,
@@ -334,6 +335,7 @@ FuchsiaVideoDecoder::FuchsiaVideoDecoder(
enable_sw_decoding_(enable_sw_decoding),
use_overlays_for_video_(base::CommandLine::ForCurrentProcess()->HasSwitch(
switches::kUseOverlaysForVideo)),
+ sysmem_allocator_("CrFuchsiaVideoDecoder"),
client_native_pixmap_factory_(ui::CreateClientNativePixmapFactoryOzone()),
weak_factory_(this) {
DCHECK(raster_context_provider_);
@@ -361,6 +363,10 @@ std::string FuchsiaVideoDecoder::GetDisplayName() const {
return "FuchsiaVideoDecoder";
}
+VideoDecoderType FuchsiaVideoDecoder::GetDecoderType() const {
+ return VideoDecoderType::kFuchsia;
+}
+
void FuchsiaVideoDecoder::Initialize(const VideoDecoderConfig& config,
bool low_delay,
CdmContext* cdm_context,
@@ -684,8 +690,9 @@ void FuchsiaVideoDecoder::SendInputPacket(
DCHECK(in_flight_input_packets_.find(packet.buffer_index()) ==
in_flight_input_packets_.end());
+ const size_t buffer_index = packet.buffer_index();
in_flight_input_packets_.insert_or_assign(
- packet.buffer_index(), InputDecoderPacket{std::move(packet)});
+ buffer_index, InputDecoderPacket{std::move(packet)});
}
void FuchsiaVideoDecoder::ProcessEndOfStream() {
@@ -765,14 +772,18 @@ void FuchsiaVideoDecoder::OnOutputConstraints(
fuchsia::sysmem::BufferCollectionTokenPtr collection_token;
sysmem_allocator_.raw()->AllocateSharedCollection(
collection_token.NewRequest());
+ collection_token->SetName(100u, "ChromiumVideoDecoderOutput");
+ collection_token->SetDebugClientInfo("chromium_video_decoder", 0u);
// Create sysmem tokens for the gpu process and the codec.
fuchsia::sysmem::BufferCollectionTokenPtr collection_token_for_codec;
collection_token->Duplicate(ZX_RIGHT_SAME_RIGHTS,
collection_token_for_codec.NewRequest());
+ collection_token_for_codec->SetDebugClientInfo("codec", 0u);
fuchsia::sysmem::BufferCollectionTokenPtr collection_token_for_gpu;
collection_token->Duplicate(ZX_RIGHT_SAME_RIGHTS,
collection_token_for_gpu.NewRequest());
+ collection_token_for_gpu->SetDebugClientInfo("chromium_gpu", 0u);
// Convert the token to a BufferCollection connection.
sysmem_allocator_.raw()->BindSharedCollection(
@@ -944,11 +955,11 @@ void FuchsiaVideoDecoder::OnOutputPacket(fuchsia::media::Packet output_packet,
// codec may still decode on hardware even when |enable_sw_decoding_| is set
// (i.e. power_efficient flag would not be set correctly in that case). It
// doesn't matter because software decoders can be enabled only for tests.
- frame->metadata()->power_efficient = !enable_sw_decoding_;
+ frame->metadata().power_efficient = !enable_sw_decoding_;
// Allow this video frame to be promoted as an overlay, because it was
// registered with an ImagePipe.
- frame->metadata()->allow_overlay = use_overlays_for_video_;
+ frame->metadata().allow_overlay = use_overlays_for_video_;
output_cb_.Run(std::move(frame));
}
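For reference, the sysmem debug-labeling pattern introduced above, which makes buffer collections attributable in Fuchsia memory diagnostics; the strings and the 100u name priority are taken verbatim from this patch:

fuchsia::sysmem::BufferCollectionTokenPtr token;
sysmem_allocator_.raw()->AllocateSharedCollection(token.NewRequest());
token->SetName(100u, "ChromiumVideoDecoderOutput");
token->SetDebugClientInfo("chromium_video_decoder", 0u);

// Each duplicated token participant gets its own label.
fuchsia::sysmem::BufferCollectionTokenPtr token_for_gpu;
token->Duplicate(ZX_RIGHT_SAME_RIGHTS, token_for_gpu.NewRequest());
token_for_gpu->SetDebugClientInfo("chromium_gpu", 0u);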
diff --git a/chromium/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc b/chromium/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc
index 5e4c36998e3..2b319eae494 100644
--- a/chromium/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc
+++ b/chromium/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc
@@ -13,6 +13,7 @@
#include "base/containers/flat_set.h"
#include "base/fuchsia/fuchsia_logging.h"
#include "base/fuchsia/process_context.h"
+#include "base/process/process_handle.h"
#include "base/test/bind.h"
#include "base/test/task_environment.h"
#include "components/viz/common/gpu/raster_context_provider.h"
@@ -24,6 +25,7 @@
#include "media/base/video_decoder.h"
#include "media/base/video_frame.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "ui/gfx/gpu_fence.h"
#include "ui/gfx/gpu_memory_buffer.h"
namespace media {
@@ -40,6 +42,8 @@ class TestBufferCollection {
ZX_LOG(FATAL, status)
<< "The fuchsia.sysmem.Allocator channel was terminated.";
});
+ sysmem_allocator_->SetDebugClientInfo("CrTestBufferCollection",
+ base::GetCurrentProcId());
sysmem_allocator_->BindSharedCollection(
fidl::InterfaceHandle<fuchsia::sysmem::BufferCollectionToken>(
@@ -212,8 +216,8 @@ class TestRasterContextProvider
TestRasterContextProvider(TestRasterContextProvider&) = delete;
TestRasterContextProvider& operator=(TestRasterContextProvider&) = delete;
- void SetOnDestroyedClosure(base::Closure on_destroyed) {
- on_destroyed_ = on_destroyed;
+ void SetOnDestroyedClosure(base::OnceClosure on_destroyed) {
+ on_destroyed_ = std::move(on_destroyed);
}
// viz::RasterContextProvider implementation;
@@ -277,7 +281,7 @@ class TestRasterContextProvider
TestSharedImageInterface shared_image_interface_;
viz::TestContextSupport gpu_context_support_;
- base::Closure on_destroyed_;
+ base::OnceClosure on_destroyed_;
};
} // namespace
diff --git a/chromium/media/filters/gav1_video_decoder.cc b/chromium/media/filters/gav1_video_decoder.cc
index 8e0e69c3aac..729c562c123 100644
--- a/chromium/media/filters/gav1_video_decoder.cc
+++ b/chromium/media/filters/gav1_video_decoder.cc
@@ -224,13 +224,23 @@ scoped_refptr<VideoFrame> FormatVideoFrame(
color_space = container_color_space;
frame->set_color_space(color_space.ToGfxColorSpace());
- frame->metadata()->power_efficient = false;
+ frame->metadata().power_efficient = false;
return frame;
}
} // namespace
+// static
+SupportedVideoDecoderConfigs Gav1VideoDecoder::SupportedConfigs() {
+ return {{/*profile_min=*/AV1PROFILE_PROFILE_MAIN,
+ /*profile_max=*/AV1PROFILE_PROFILE_HIGH,
+ /*coded_size_min=*/kDefaultSwDecodeSizeMin,
+ /*coded_size_max=*/kDefaultSwDecodeSizeMax,
+ /*allow_encrypted=*/false,
+ /*require_encrypted=*/false}};
+}
+
Gav1VideoDecoder::Gav1VideoDecoder(MediaLog* media_log,
OffloadState offload_state)
: media_log_(media_log),
@@ -247,6 +257,10 @@ std::string Gav1VideoDecoder::GetDisplayName() const {
return "Gav1VideoDecoder";
}
+VideoDecoderType Gav1VideoDecoder::GetDecoderType() const {
+ return VideoDecoderType::kGav1;
+}
+
void Gav1VideoDecoder::Initialize(const VideoDecoderConfig& config,
bool low_delay,
CdmContext* /* cdm_context */,
diff --git a/chromium/media/filters/gav1_video_decoder.h b/chromium/media/filters/gav1_video_decoder.h
index 22cd0053923..b4e78e07f04 100644
--- a/chromium/media/filters/gav1_video_decoder.h
+++ b/chromium/media/filters/gav1_video_decoder.h
@@ -15,6 +15,7 @@
#include "base/memory/scoped_refptr.h"
#include "base/sequence_checker.h"
#include "media/base/media_export.h"
+#include "media/base/supported_video_decoder_config.h"
#include "media/base/video_decoder_config.h"
#include "media/base/video_frame_pool.h"
#include "media/filters/offloading_video_decoder.h"
@@ -28,6 +29,8 @@ class MediaLog;
class MEDIA_EXPORT Gav1VideoDecoder : public OffloadableVideoDecoder {
public:
+ static SupportedVideoDecoderConfigs SupportedConfigs();
+
explicit Gav1VideoDecoder(MediaLog* media_log,
OffloadState offload_state = OffloadState::kNormal);
~Gav1VideoDecoder() override;
@@ -36,6 +39,7 @@ class MEDIA_EXPORT Gav1VideoDecoder : public OffloadableVideoDecoder {
// VideoDecoder implementation.
std::string GetDisplayName() const override;
+ VideoDecoderType GetDecoderType() const override;
void Initialize(const VideoDecoderConfig& config,
bool low_delay,
CdmContext* cdm_context,
diff --git a/chromium/media/filters/gav1_video_decoder_unittest.cc b/chromium/media/filters/gav1_video_decoder_unittest.cc
index 4169f610d4f..33ccf13e2c4 100644
--- a/chromium/media/filters/gav1_video_decoder_unittest.cc
+++ b/chromium/media/filters/gav1_video_decoder_unittest.cc
@@ -194,7 +194,7 @@ class Gav1VideoDecoderTest : public testing::Test {
}
void FrameReady(scoped_refptr<VideoFrame> frame) {
- DCHECK(!frame->metadata()->end_of_stream);
+ DCHECK(!frame->metadata().end_of_stream);
output_frames_.push_back(std::move(frame));
}
diff --git a/chromium/media/filters/memory_data_source.h b/chromium/media/filters/memory_data_source.h
index 716cb776157..d78dee1530a 100644
--- a/chromium/media/filters/memory_data_source.h
+++ b/chromium/media/filters/memory_data_source.h
@@ -41,7 +41,10 @@ class MEDIA_EXPORT MemoryDataSource final : public DataSource {
const uint8_t* data_ = nullptr;
const size_t size_ = 0;
- bool is_stopped_ = false;
+ // Stop may be called from the render thread while this class is being used by
+ // the media thread. It's harmless if we fulfill a read after Stop() has been
+ // called, so an atomic without a lock is safe.
+ std::atomic<bool> is_stopped_{false};
DISALLOW_COPY_AND_ASSIGN(MemoryDataSource);
};
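For reference, a standalone sketch of the lock-free stop flag the comment above describes; because the flag guards no other shared state, the default sequentially consistent loads and stores are already sufficient:

#include <atomic>
#include <cstddef>

class StoppableSource {
 public:
  void Stop() { is_stopped_.store(true); }  // e.g. render thread

  size_t Read(size_t bytes) {               // e.g. media thread
    if (is_stopped_.load())
      return 0;  // A read racing with Stop() is harmless either way.
    return bytes;  // Placeholder for the actual copy.
  }

 private:
  std::atomic<bool> is_stopped_{false};
};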
diff --git a/chromium/media/filters/offloading_video_decoder.cc b/chromium/media/filters/offloading_video_decoder.cc
index 242190f8009..c5379ebf0ec 100644
--- a/chromium/media/filters/offloading_video_decoder.cc
+++ b/chromium/media/filters/offloading_video_decoder.cc
@@ -75,6 +75,11 @@ OffloadingVideoDecoder::~OffloadingVideoDecoder() {
offload_task_runner_->DeleteSoon(FROM_HERE, std::move(helper_));
}
+VideoDecoderType OffloadingVideoDecoder::GetDecoderType() const {
+ // This call is expected to be static and safe to call from any thread.
+ return helper_->decoder()->GetDecoderType();
+}
+
std::string OffloadingVideoDecoder::GetDisplayName() const {
// This call is expected to be static and safe to call from any thread.
return helper_->decoder()->GetDisplayName();
diff --git a/chromium/media/filters/offloading_video_decoder.h b/chromium/media/filters/offloading_video_decoder.h
index 0ff14bb3d45..5caf47895f8 100644
--- a/chromium/media/filters/offloading_video_decoder.h
+++ b/chromium/media/filters/offloading_video_decoder.h
@@ -88,6 +88,7 @@ class MEDIA_EXPORT OffloadingVideoDecoder : public VideoDecoder {
~OffloadingVideoDecoder() override;
// VideoDecoder implementation.
+ VideoDecoderType GetDecoderType() const override;
std::string GetDisplayName() const override;
void Initialize(const VideoDecoderConfig& config,
bool low_delay,
diff --git a/chromium/media/filters/offloading_video_decoder_unittest.cc b/chromium/media/filters/offloading_video_decoder_unittest.cc
index 29813ad3120..b6d2abf4971 100644
--- a/chromium/media/filters/offloading_video_decoder_unittest.cc
+++ b/chromium/media/filters/offloading_video_decoder_unittest.cc
@@ -39,6 +39,11 @@ class MockOffloadableVideoDecoder : public OffloadableVideoDecoder {
std::string GetDisplayName() const override {
return "MockOffloadableVideoDecoder";
}
+
+ VideoDecoderType GetDecoderType() const override {
+ return VideoDecoderType::kUnknown;
+ }
+
void Initialize(const VideoDecoderConfig& config,
bool low_delay,
CdmContext* cdm_context,
diff --git a/chromium/media/filters/pipeline_controller.cc b/chromium/media/filters/pipeline_controller.cc
index 97e430651a7..68b39ebe31a 100644
--- a/chromium/media/filters/pipeline_controller.cc
+++ b/chromium/media/filters/pipeline_controller.cc
@@ -393,6 +393,10 @@ void PipelineController::SetPreservesPitch(bool preserves_pitch) {
pipeline_->SetPreservesPitch(preserves_pitch);
}
+void PipelineController::SetAutoplayInitiated(bool autoplay_initiated) {
+ pipeline_->SetAutoplayInitiated(autoplay_initiated);
+}
+
base::TimeDelta PipelineController::GetMediaTime() const {
return pipeline_->GetMediaTime();
}
diff --git a/chromium/media/filters/pipeline_controller.h b/chromium/media/filters/pipeline_controller.h
index 08db9dcf214..a77ab00085b 100644
--- a/chromium/media/filters/pipeline_controller.h
+++ b/chromium/media/filters/pipeline_controller.h
@@ -133,6 +133,7 @@ class MEDIA_EXPORT PipelineController {
void SetVolume(float volume);
void SetLatencyHint(base::Optional<base::TimeDelta> latency_hint);
void SetPreservesPitch(bool preserves_pitch);
+ void SetAutoplayInitiated(bool autoplay_initiated);
base::TimeDelta GetMediaTime() const;
Ranges<base::TimeDelta> GetBufferedTimeRanges() const;
base::TimeDelta GetMediaDuration() const;
diff --git a/chromium/media/filters/pipeline_controller_unittest.cc b/chromium/media/filters/pipeline_controller_unittest.cc
index 0bced42367d..390dbc69409 100644
--- a/chromium/media/filters/pipeline_controller_unittest.cc
+++ b/chromium/media/filters/pipeline_controller_unittest.cc
@@ -152,8 +152,8 @@ class PipelineControllerTest : public ::testing::Test, public Pipeline::Client {
void OnVideoOpacityChange(bool opaque) override {}
void OnVideoFrameRateChange(base::Optional<int>) override {}
void OnVideoAverageKeyframeDistanceUpdate() override {}
- void OnAudioDecoderChange(const PipelineDecoderInfo& info) override {}
- void OnVideoDecoderChange(const PipelineDecoderInfo& info) override {}
+ void OnAudioDecoderChange(const AudioDecoderInfo& info) override {}
+ void OnVideoDecoderChange(const VideoDecoderInfo& info) override {}
base::test::SingleThreadTaskEnvironment task_environment_;
diff --git a/chromium/media/filters/source_buffer_state.cc b/chromium/media/filters/source_buffer_state.cc
index 9ce7a5934b6..3b4c9e16f79 100644
--- a/chromium/media/filters/source_buffer_state.cc
+++ b/chromium/media/filters/source_buffer_state.cc
@@ -16,6 +16,7 @@
#include "media/filters/chunk_demuxer.h"
#include "media/filters/frame_processor.h"
#include "media/filters/source_buffer_stream.h"
+#include "media/media_buildflags.h"
namespace media {
@@ -214,8 +215,8 @@ bool SourceBufferState::Append(const uint8_t* data,
append_window_end_during_append_ = append_window_end;
timestamp_offset_during_append_ = timestamp_offset;
- // TODO(wolenetz/acolwell): Curry and pass a NewBuffersCB here bound with
- // append window and timestamp offset pointer. See http://crbug.com/351454.
+ // TODO(wolenetz): Curry and pass a NewBuffersCB here bound with append window
+ // and timestamp offset pointer. See http://crbug.com/351454.
bool result = stream_parser_->Parse(data, length);
if (!result) {
MEDIA_LOG(ERROR, media_log_)
@@ -229,6 +230,31 @@ bool SourceBufferState::Append(const uint8_t* data,
return result;
}
+bool SourceBufferState::AppendChunks(
+ std::unique_ptr<StreamParser::BufferQueue> buffer_queue,
+ TimeDelta append_window_start,
+ TimeDelta append_window_end,
+ TimeDelta* timestamp_offset) {
+ append_in_progress_ = true;
+ DCHECK(timestamp_offset);
+ DCHECK(!timestamp_offset_during_append_);
+ append_window_start_during_append_ = append_window_start;
+ append_window_end_during_append_ = append_window_end;
+ timestamp_offset_during_append_ = timestamp_offset;
+
+ // TODO(wolenetz): Curry and pass a NewBuffersCB here bound with append window
+ // and timestamp offset pointer. See http://crbug.com/351454.
+ bool result = stream_parser_->ProcessChunks(std::move(buffer_queue));
+ if (!result) {
+ MEDIA_LOG(ERROR, media_log_)
+ << __func__ << ": Processing encoded chunks for buffering failed.";
+ }
+
+ timestamp_offset_during_append_ = nullptr;
+ append_in_progress_ = false;
+ return result;
+}
+
void SourceBufferState::ResetParserState(TimeDelta append_window_start,
TimeDelta append_window_end,
base::TimeDelta* timestamp_offset) {
@@ -689,6 +715,31 @@ bool SourceBufferState::OnNewConfigs(
<< " config: " << video_config.AsHumanReadableString();
DCHECK(video_config.IsValidConfig());
+ if (video_config.codec() == kCodecHEVC) {
+#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
+#if BUILDFLAG(USE_CHROMEOS_PROTECTED_MEDIA)
+ // On ChromeOS, HEVC is only supported through EME, so require the
+ // config to be for an encrypted track if on ChromeOS. Even so,
+ // conditionally allow clear HEVC on ChromeOS if cmdline has test
+ // override.
+ if (video_config.encryption_scheme() ==
+ EncryptionScheme::kUnencrypted &&
+ !base::CommandLine::ForCurrentProcess()->HasSwitch(
+ switches::kEnableClearHevcForTesting)) {
+ MEDIA_LOG(ERROR, media_log_)
+ << "MSE playback of HEVC on ChromeOS is only supported via "
+ "platform decryptor, but the provided HEVC track is not "
+ "encrypted.";
+ return false;
+ }
+#endif // BUILDFLAG(USE_CHROMEOS_PROTECTED_MEDIA)
+#else
+ NOTREACHED()
+ << "MSE parser must not emit HEVC tracks on build configurations "
+ "that do not support HEVC playback via platform.";
+#endif // BUILDFLAG(ENABLE_PLATFORM_HEVC)
+ }
+
const auto& it = std::find(expected_vcodecs.begin(),
expected_vcodecs.end(), video_config.codec());
if (it == expected_vcodecs.end()) {
diff --git a/chromium/media/filters/source_buffer_state.h b/chromium/media/filters/source_buffer_state.h
index ae4c76abbc2..fdf6be2f9c8 100644
--- a/chromium/media/filters/source_buffer_state.h
+++ b/chromium/media/filters/source_buffer_state.h
@@ -62,11 +62,16 @@ class MEDIA_EXPORT SourceBufferState {
// append. |append_window_start| and |append_window_end| correspond to the MSE
// spec's similarly named source buffer attributes that are used in coded
// frame processing.
+ // AppendChunks does the same for a provided queue of WebCodecs encoded
+ // chunks.
bool Append(const uint8_t* data,
size_t length,
TimeDelta append_window_start,
TimeDelta append_window_end,
TimeDelta* timestamp_offset);
+ bool AppendChunks(std::unique_ptr<StreamParser::BufferQueue> buffer_queue,
+ TimeDelta append_window_start,
+ TimeDelta append_window_end,
+ TimeDelta* timestamp_offset);
// Aborts the current append sequence and resets the parser.
void ResetParserState(TimeDelta append_window_start,
diff --git a/chromium/media/filters/stream_parser_factory.cc b/chromium/media/filters/stream_parser_factory.cc
index 126c1814a16..29fb715ec7e 100644
--- a/chromium/media/filters/stream_parser_factory.cc
+++ b/chromium/media/filters/stream_parser_factory.cc
@@ -14,13 +14,16 @@
#include "base/strings/string_split.h"
#include "base/strings/string_util.h"
#include "build/build_config.h"
+#include "media/base/audio_decoder_config.h"
#include "media/base/media.h"
#include "media/base/media_switches.h"
#include "media/base/media_util.h"
#include "media/base/video_codecs.h"
+#include "media/base/video_decoder_config.h"
#include "media/formats/mp4/mp4_stream_parser.h"
#include "media/formats/mpeg/adts_stream_parser.h"
#include "media/formats/mpeg/mpeg1_audio_stream_parser.h"
+#include "media/formats/webcodecs/webcodecs_encoded_chunk_stream_parser.h"
#include "media/formats/webm/webm_stream_parser.h"
#include "media/media_buildflags.h"
@@ -517,6 +520,7 @@ static SupportsType CheckTypeAndCodecs(
return IsNotSupported;
}
+// static
SupportsType StreamParserFactory::IsTypeSupported(
const std::string& type,
const std::vector<std::string>& codecs) {
@@ -526,6 +530,7 @@ SupportsType StreamParserFactory::IsTypeSupported(
nullptr);
}
+// static
std::unique_ptr<StreamParser> StreamParserFactory::Create(
const std::string& type,
const std::vector<std::string>& codecs,
@@ -535,18 +540,19 @@ std::unique_ptr<StreamParser> StreamParserFactory::Create(
std::vector<CodecInfo::HistogramTag> audio_codecs;
std::vector<CodecInfo::HistogramTag> video_codecs;
- if (IsSupported == CheckTypeAndCodecs(type, codecs, media_log,
- &factory_function, &audio_codecs,
- &video_codecs)) {
+ // TODO(crbug.com/535738): Relax the requirement for specific codecs (allow
+ // MayBeSupported here), and relocate the logging to the parser configuration
+ // callback. This creation method is called in AddId(), and also in
+ // CanChangeType() and ChangeType(), so potentially overlogs codecs leading to
+ // disproportion versus actually parsed codec configurations from
+ // initialization segments. For this work and also recording when implicit
+ // codec switching occurs (without explicit ChangeType), see
+ // https://crbug.com/535738.
+ SupportsType supportsType = CheckTypeAndCodecs(
+ type, codecs, media_log, &factory_function, &audio_codecs, &video_codecs);
+
+ if (IsSupported == supportsType) {
// Log the expected codecs.
- // TODO(wolenetz): Relax the requirement for specific codecs (allow
- // MayBeSupported here), and relocate the logging to the parser
- // configuration callback. This creation method is called in AddId(), and
- // also in CanChangeType() and ChangeType(), so potentially overlogs codecs
- // leading to disproportion versus actually parsed codec configurations from
- // initialization segments. For this work and also recording when implicit
- // codec switching occurs (without explicit ChangeType), see
- // https://crbug.com/535738.
for (size_t i = 0; i < audio_codecs.size(); ++i) {
UMA_HISTOGRAM_ENUMERATION("Media.MSE.AudioCodec", audio_codecs[i],
CodecInfo::HISTOGRAM_MAX + 1);
@@ -569,4 +575,28 @@ std::unique_ptr<StreamParser> StreamParserFactory::Create(
return stream_parser;
}
+// static
+std::unique_ptr<StreamParser> StreamParserFactory::Create(
+ std::unique_ptr<AudioDecoderConfig> audio_config) {
+ DCHECK(audio_config);
+
+ // TODO(crbug.com/1144908): Histogram-log the codec used for buffering
+ // WebCodecs in MSE?
+
+ return std::make_unique<media::WebCodecsEncodedChunkStreamParser>(
+ std::move(audio_config));
+}
+
+// static
+std::unique_ptr<StreamParser> StreamParserFactory::Create(
+ std::unique_ptr<VideoDecoderConfig> video_config) {
+ DCHECK(video_config);
+
+ // TODO(crbug.com/1144908): Histogram-log the codec used for buffering
+ // WebCodecs in MSE?
+
+ return std::make_unique<media::WebCodecsEncodedChunkStreamParser>(
+ std::move(video_config));
+}
+
} // namespace media
diff --git a/chromium/media/filters/stream_parser_factory.h b/chromium/media/filters/stream_parser_factory.h
index 50544a5a93e..0f28977ab73 100644
--- a/chromium/media/filters/stream_parser_factory.h
+++ b/chromium/media/filters/stream_parser_factory.h
@@ -15,7 +15,9 @@
namespace media {
+class AudioDecoderConfig;
class StreamParser;
+class VideoDecoderConfig;
class MEDIA_EXPORT StreamParserFactory {
public:
@@ -36,10 +38,28 @@ class MEDIA_EXPORT StreamParserFactory {
// Returns a new StreamParser object if |type| and all codecs listed in
// |codecs| are supported.
// Returns NULL otherwise.
+ // The |audio_config| and |video_config| overloads behave similarly, except
+ // the caller must provide a valid, supported decoder config; those overloads'
+ // usage indicates that we intend to buffer WebCodecs encoded audio or video
+ // chunks with this parser's ProcessChunks() method. Note that
+ // these overloads do not check support, unlike the |type| and |codecs|
+ // version. Support checking for WebCodecs-originated decoder configs could be
+ // async, and should be done by the caller if necessary as part of the decoder
+ // config creation rather than relying upon parser creation to do this
+ // potentially expensive step (this step is typically done in a synchronous
+ // API call by the web app, such as addSourceBuffer()). Like the |type| and
+ // |codecs| version, basic IsValidConfig() is done on configs emitted from
+ // the parser. If that fails to catch an unsupported config, an eventual
+ // pipeline error should occur for unsupported or invalid decoder configs
+ // during attempted decode.
static std::unique_ptr<StreamParser> Create(
const std::string& type,
const std::vector<std::string>& codecs,
MediaLog* media_log);
+ static std::unique_ptr<StreamParser> Create(
+ std::unique_ptr<AudioDecoderConfig> audio_config);
+ static std::unique_ptr<StreamParser> Create(
+ std::unique_ptr<VideoDecoderConfig> video_config);
};
} // namespace media
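For reference, a sketch of the config-based creation path documented above; MakeWebCodecsVideoConfig() is a placeholder for caller-side config construction:

std::unique_ptr<VideoDecoderConfig> config = MakeWebCodecsVideoConfig();
DCHECK(config && config->IsValidConfig());  // Validity stays a caller concern.
std::unique_ptr<StreamParser> parser =
    StreamParserFactory::Create(std::move(config));
// Buffering then flows through SourceBufferState::AppendChunks() and
// StreamParser::ProcessChunks() rather than the byte-stream Append() path.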
diff --git a/chromium/media/filters/video_decoder_stream_unittest.cc b/chromium/media/filters/video_decoder_stream_unittest.cc
index 75d0f304c78..9293b3b772e 100644
--- a/chromium/media/filters/video_decoder_stream_unittest.cc
+++ b/chromium/media/filters/video_decoder_stream_unittest.cc
@@ -11,6 +11,7 @@
#include "base/run_loop.h"
#include "base/strings/string_number_conversions.h"
#include "base/test/gmock_callback_support.h"
+#include "base/test/mock_callback.h"
#include "base/test/task_environment.h"
#include "build/build_config.h"
#include "media/base/fake_demuxer_stream.h"
@@ -100,11 +101,10 @@ class VideoDecoderStreamTest
base::BindRepeating(&VideoDecoderStreamTest::CreateVideoDecodersForTest,
base::Unretained(this)),
&media_log_));
- video_decoder_stream_->set_decoder_change_observer_for_testing(
- base::BindRepeating(&VideoDecoderStreamTest::OnDecoderChanged,
- base::Unretained(this)));
+ video_decoder_stream_->set_decoder_change_observer(base::BindRepeating(
+ &VideoDecoderStreamTest::OnDecoderChanged, base::Unretained(this)));
video_decoder_stream_
- ->GetDecoderSelectorForTesting(util::PassKey<VideoDecoderStreamTest>())
+ ->GetDecoderSelectorForTesting(base::PassKey<VideoDecoderStreamTest>())
.OverrideDecoderPriorityCBForTesting(
base::BindRepeating(MockDecoderPriority));
if (GetParam().has_prepare) {
@@ -354,14 +354,15 @@ class VideoDecoderStreamTest
}
// Callback for VideoDecoderStream::Read().
- void FrameReady(VideoDecoderStream::ReadStatus status,
- scoped_refptr<VideoFrame> frame) {
+ void FrameReady(VideoDecoderStream::ReadResult result) {
DCHECK(pending_read_);
+ last_read_status_code_ = result.code();
+ scoped_refptr<VideoFrame> frame = last_read_status_code_ == StatusCode::kOk
+ ? std::move(result).value()
+ : nullptr;
frame_read_ = frame;
- last_read_status_ = status;
- if (frame && !frame->metadata()->end_of_stream) {
- EXPECT_EQ(*frame->metadata()->frame_duration,
- demuxer_stream_->duration());
+ if (frame && !frame->metadata().end_of_stream) {
+ EXPECT_EQ(*frame->metadata().frame_duration, demuxer_stream_->duration());
num_decoded_frames_++;
}
@@ -391,7 +392,7 @@ class VideoDecoderStreamTest
void ReadAllFrames(int expected_decoded_frames) {
do {
ReadOneFrame();
- } while (frame_read_.get() && !frame_read_->metadata()->end_of_stream);
+ } while (frame_read_.get() && !frame_read_->metadata().end_of_stream);
DCHECK_EQ(expected_decoded_frames, num_decoded_frames_);
}
@@ -541,7 +542,7 @@ class VideoDecoderStreamTest
bool pending_stop_;
int num_decoded_bytes_unreported_;
scoped_refptr<VideoFrame> frame_read_;
- VideoDecoderStream::ReadStatus last_read_status_;
+ StatusCode last_read_status_code_;
// Decryptor has no key to decrypt a frame.
bool has_no_key_;
@@ -692,17 +693,17 @@ TEST_P(VideoDecoderStreamTest, Read_ProperMetadata) {
EXPECT_TRUE(frame_read_);
- auto* metadata = frame_read_->metadata();
+ const VideoFrameMetadata& metadata = frame_read_->metadata();
// Verify the decoding metadata is accurate.
- EXPECT_EQ(*metadata->decode_end_time - *metadata->decode_begin_time,
+ EXPECT_EQ(*metadata.decode_end_time - *metadata.decode_begin_time,
kDecodeDelay);
// Verify the processing metadata is accurate.
const base::TimeDelta expected_processing_time =
GetParam().has_prepare ? (kDecodeDelay + kPrepareDelay) : kDecodeDelay;
- EXPECT_EQ(*metadata->processing_time, expected_processing_time);
+ EXPECT_EQ(*metadata.processing_time, expected_processing_time);
}
TEST_P(VideoDecoderStreamTest, Read_BlockedDemuxer) {
@@ -815,11 +816,11 @@ TEST_P(VideoDecoderStreamTest, Read_DuringEndOfStreamDecode) {
decoder_->SatisfySingleDecode();
base::RunLoop().RunUntilIdle();
ASSERT_FALSE(pending_read_);
- EXPECT_EQ(last_read_status_, VideoDecoderStream::OK);
+ EXPECT_EQ(last_read_status_code_, StatusCode::kOk);
// The read output should indicate end of stream.
ASSERT_TRUE(frame_read_.get());
- EXPECT_TRUE(frame_read_->metadata()->end_of_stream);
+ EXPECT_TRUE(frame_read_->metadata().end_of_stream);
}
TEST_P(VideoDecoderStreamTest, Read_DemuxerStreamReadError) {
@@ -838,7 +839,8 @@ TEST_P(VideoDecoderStreamTest, Read_DemuxerStreamReadError) {
base::RunLoop().RunUntilIdle();
ASSERT_FALSE(pending_read_);
- EXPECT_EQ(last_read_status_, VideoDecoderStream::DECODE_ERROR);
+ EXPECT_NE(last_read_status_code_, StatusCode::kOk);
+ EXPECT_NE(last_read_status_code_, StatusCode::kAborted);
}
// No Reset() before initialization is successfully completed.
@@ -1026,7 +1028,7 @@ TEST_P(VideoDecoderStreamTest, FallbackDecoder_DecodeError) {
ASSERT_EQ(GetDecoderName(1), decoder_->GetDisplayName());
ASSERT_FALSE(pending_read_);
- ASSERT_EQ(VideoDecoderStream::OK, last_read_status_);
+ ASSERT_EQ(last_read_status_code_, StatusCode::kOk);
// Check that we fell back to Decoder2.
ASSERT_GT(decoder_->total_bytes_decoded(), 0);
@@ -1066,15 +1068,15 @@ TEST_P(VideoDecoderStreamTest,
// A frame should have been emitted.
EXPECT_FALSE(pending_read_);
- EXPECT_EQ(last_read_status_, VideoDecoderStream::OK);
- EXPECT_FALSE(frame_read_->metadata()->end_of_stream);
+ EXPECT_EQ(last_read_status_code_, StatusCode::kOk);
+ EXPECT_FALSE(frame_read_->metadata().end_of_stream);
EXPECT_GT(decoder_->total_bytes_decoded(), 0);
ReadOneFrame();
EXPECT_FALSE(pending_read_);
EXPECT_EQ(0, video_decoder_stream_->get_fallback_buffers_size_for_testing());
- EXPECT_TRUE(frame_read_->metadata()->end_of_stream);
+ EXPECT_TRUE(frame_read_->metadata().end_of_stream);
}
TEST_P(VideoDecoderStreamTest,
@@ -1140,7 +1142,8 @@ TEST_P(VideoDecoderStreamTest, FallbackDecoder_DecodeErrorRepeated) {
// No decoders left, expect failure.
EXPECT_EQ(decoder_, nullptr);
EXPECT_FALSE(pending_read_);
- EXPECT_EQ(VideoDecoderStream::DECODE_ERROR, last_read_status_);
+ EXPECT_NE(last_read_status_code_, StatusCode::kOk);
+ EXPECT_NE(last_read_status_code_, StatusCode::kAborted);
}
// This tests verifies that we properly fallback to a new decoder if the first
@@ -1161,7 +1164,7 @@ TEST_P(VideoDecoderStreamTest,
// Verify that the first frame was decoded successfully.
EXPECT_FALSE(pending_read_);
EXPECT_GT(decoder_->total_bytes_decoded(), 0);
- EXPECT_EQ(VideoDecoderStream::OK, last_read_status_);
+ EXPECT_EQ(last_read_status_code_, StatusCode::kOk);
// Continue up to the point of reinitialization.
EnterPendingState(DEMUXER_READ_CONFIG_CHANGE);
@@ -1185,7 +1188,7 @@ TEST_P(VideoDecoderStreamTest,
// Verify that fallback happened.
EXPECT_EQ(GetDecoderName(0), decoder_->GetDisplayName());
EXPECT_FALSE(pending_read_);
- EXPECT_EQ(VideoDecoderStream::OK, last_read_status_);
+ EXPECT_EQ(last_read_status_code_, StatusCode::kOk);
EXPECT_GT(decoder_->total_bytes_decoded(), 0);
}
@@ -1222,7 +1225,8 @@ TEST_P(VideoDecoderStreamTest,
// No decoders left.
EXPECT_EQ(decoder_, nullptr);
EXPECT_FALSE(pending_read_);
- EXPECT_EQ(VideoDecoderStream::DECODE_ERROR, last_read_status_);
+ EXPECT_NE(last_read_status_code_, StatusCode::kOk);
+ EXPECT_NE(last_read_status_code_, StatusCode::kAborted);
}
TEST_P(VideoDecoderStreamTest,
@@ -1375,7 +1379,7 @@ TEST_P(VideoDecoderStreamTest, FallbackDecoder_SelectedOnDecodeThenInitErrors) {
ASSERT_EQ(GetDecoderName(2), decoder_->GetDisplayName());
ASSERT_FALSE(pending_read_);
- ASSERT_EQ(VideoDecoderStream::OK, last_read_status_);
+ ASSERT_EQ(last_read_status_code_, StatusCode::kOk);
// Can't check previously selected decoder(s) right now, they might have been
// destroyed already.
@@ -1400,7 +1404,7 @@ TEST_P(VideoDecoderStreamTest, FallbackDecoder_SelectedOnInitThenDecodeErrors) {
ASSERT_EQ(GetDecoderName(2), decoder_->GetDisplayName());
ASSERT_FALSE(pending_read_);
- ASSERT_EQ(VideoDecoderStream::OK, last_read_status_);
+ ASSERT_EQ(last_read_status_code_, StatusCode::kOk);
// Can't check previously selected decoder(s) right now, they might have been
// destroyed already.
@@ -1422,7 +1426,7 @@ TEST_P(VideoDecoderStreamTest,
decoder_->SimulateError();
// The error must surface from Read() as DECODE_ERROR.
- while (last_read_status_ == VideoDecoderStream::OK) {
+ while (last_read_status_code_ == StatusCode::kOk) {
ReadOneFrame();
base::RunLoop().RunUntilIdle();
EXPECT_FALSE(pending_read_);
@@ -1431,7 +1435,8 @@ TEST_P(VideoDecoderStreamTest,
// Verify the error was surfaced, rather than falling back to other decoders.
ASSERT_EQ(GetDecoderName(0), decoder_->GetDisplayName());
EXPECT_FALSE(pending_read_);
- ASSERT_EQ(VideoDecoderStream::DECODE_ERROR, last_read_status_);
+ EXPECT_NE(last_read_status_code_, StatusCode::kOk);
+ EXPECT_NE(last_read_status_code_, StatusCode::kAborted);
}
TEST_P(VideoDecoderStreamTest, DecoderErrorWhenNotReading) {
@@ -1450,12 +1455,13 @@ TEST_P(VideoDecoderStreamTest, DecoderErrorWhenNotReading) {
decoder_->SimulateError();
// The error must surface from Read() as DECODE_ERROR.
- while (last_read_status_ == VideoDecoderStream::OK) {
+ while (last_read_status_code_ == StatusCode::kOk) {
ReadOneFrame();
base::RunLoop().RunUntilIdle();
EXPECT_FALSE(pending_read_);
}
- EXPECT_EQ(VideoDecoderStream::DECODE_ERROR, last_read_status_);
+ EXPECT_NE(last_read_status_code_, StatusCode::kOk);
+ EXPECT_NE(last_read_status_code_, StatusCode::kAborted);
}
TEST_P(VideoDecoderStreamTest, ReinitializeFailure_Once) {
@@ -1516,12 +1522,13 @@ TEST_P(VideoDecoderStreamTest, ReinitializeFailure_NoSupportedDecoder) {
ReadUntilDecoderReinitialized();
// The error will surface from Read() as DECODE_ERROR.
- while (last_read_status_ == VideoDecoderStream::OK) {
+ while (last_read_status_code_ == StatusCode::kOk) {
ReadOneFrame();
base::RunLoop().RunUntilIdle();
EXPECT_FALSE(pending_read_);
}
- EXPECT_EQ(VideoDecoderStream::DECODE_ERROR, last_read_status_);
+ EXPECT_NE(last_read_status_code_, StatusCode::kOk);
+ EXPECT_NE(last_read_status_code_, StatusCode::kAborted);
}
TEST_P(VideoDecoderStreamTest, Destroy_DuringFallbackDecoderSelection) {
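The hunks above track VideoDecoderStream's migration from the ReadStatus enum to a typed ReadResult carrying a StatusCode. A hedged sketch of the new consuming pattern, mirroring FrameReady() above (the function name is illustrative):

void OnFrameRead(media::VideoDecoderStream::ReadResult result) {
  // One result now carries both the status code and, on success, the frame.
  if (result.code() != media::StatusCode::kOk) {
    // Aborts (StatusCode::kAborted) and decode errors are distinguished by
    // code; there is no dedicated DECODE_ERROR value anymore.
    return;
  }
  scoped_refptr<media::VideoFrame> frame = std::move(result).value();
  // metadata() now returns a reference rather than a pointer.
  if (frame->metadata().end_of_stream)
    return;
  // ... use |frame| ...
}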
diff --git a/chromium/media/filters/video_renderer_algorithm.cc b/chromium/media/filters/video_renderer_algorithm.cc
index 0cef397855b..863359fd01a 100644
--- a/chromium/media/filters/video_renderer_algorithm.cc
+++ b/chromium/media/filters/video_renderer_algorithm.cc
@@ -327,12 +327,12 @@ int64_t VideoRendererAlgorithm::GetMemoryUsage() const {
void VideoRendererAlgorithm::EnqueueFrame(scoped_refptr<VideoFrame> frame) {
DCHECK(frame);
- DCHECK(!frame->metadata()->end_of_stream);
+ DCHECK(!frame->metadata().end_of_stream);
// Note: Not all frames have duration. E.g., this class is used with WebRTC
// which does not provide duration information for its frames.
base::TimeDelta metadata_frame_duration =
- frame->metadata()->frame_duration.value_or(base::TimeDelta());
+ frame->metadata().frame_duration.value_or(base::TimeDelta());
auto timestamp = frame->timestamp();
ReadyFrame ready_frame(std::move(frame));
auto it = frame_queue_.empty()
@@ -399,7 +399,7 @@ void VideoRendererAlgorithm::EnqueueFrame(scoped_refptr<VideoFrame> frame) {
wallclock_duration = ready_frame.end_time - ready_frame.start_time;
}
- ready_frame.frame->metadata()->wallclock_frame_duration = wallclock_duration;
+ ready_frame.frame->metadata().wallclock_frame_duration = wallclock_duration;
// The vast majority of cases should always append to the back, but in rare
// circumstance we get out of order timestamps, http://crbug.com/386551.
@@ -481,7 +481,7 @@ void VideoRendererAlgorithm::UpdateFrameStatistics() {
{
const auto& last_frame = frame_queue_.back().frame;
base::TimeDelta metadata_frame_duration =
- last_frame->metadata()->frame_duration.value_or(base::TimeDelta());
+ last_frame->metadata().frame_duration.value_or(base::TimeDelta());
if (metadata_frame_duration > base::TimeDelta()) {
have_metadata_duration = true;
media_timestamps.push_back(last_frame->timestamp() +
diff --git a/chromium/media/filters/video_renderer_algorithm_unittest.cc b/chromium/media/filters/video_renderer_algorithm_unittest.cc
index b3e7b5b9d99..a1511eda89d 100644
--- a/chromium/media/filters/video_renderer_algorithm_unittest.cc
+++ b/chromium/media/filters/video_renderer_algorithm_unittest.cc
@@ -1212,7 +1212,7 @@ TEST_F(VideoRendererAlgorithmTest, RemoveExpiredFramesWithoutRendering) {
// as effective since we know the duration of it. It is not removed since we
// only have one frame in the queue though.
auto frame = CreateFrame(tg.interval(0));
- frame->metadata()->frame_duration = tg.interval(1);
+ frame->metadata().frame_duration = tg.interval(1);
algorithm_.EnqueueFrame(frame);
ASSERT_EQ(0u, algorithm_.RemoveExpiredFrames(tg.current() + tg.interval(3)));
EXPECT_EQ(0u, EffectiveFramesQueued());
@@ -1623,7 +1623,7 @@ TEST_F(VideoRendererAlgorithmTest, InfiniteDurationMetadata) {
TickGenerator tg(tick_clock_->NowTicks(), 50);
auto frame = CreateFrame(kInfiniteDuration);
- frame->metadata()->frame_duration = tg.interval(1);
+ frame->metadata().frame_duration = tg.interval(1);
algorithm_.EnqueueFrame(frame);
// This should not crash or fail.
@@ -1636,7 +1636,7 @@ TEST_F(VideoRendererAlgorithmTest, UsesFrameDuration) {
TickGenerator tg(tick_clock_->NowTicks(), 50);
auto frame = CreateFrame(tg.interval(0));
- frame->metadata()->frame_duration = tg.interval(1);
+ frame->metadata().frame_duration = tg.interval(1);
algorithm_.EnqueueFrame(frame);
// This should not crash or fail.
@@ -1648,7 +1648,7 @@ TEST_F(VideoRendererAlgorithmTest, UsesFrameDuration) {
constexpr base::TimeDelta kLongDuration = base::TimeDelta::FromSeconds(3);
for (int i = 1; i < 4; ++i) {
frame = CreateFrame(tg.interval(i));
- frame->metadata()->frame_duration = i == 3 ? kLongDuration : tg.interval(1);
+ frame->metadata().frame_duration = i == 3 ? kLongDuration : tg.interval(1);
algorithm_.EnqueueFrame(frame);
}
@@ -1670,7 +1670,7 @@ TEST_F(VideoRendererAlgorithmTest, WallClockDurationMetadataSet) {
for (int i = 0; i < frame_count; i++) {
auto frame = CreateFrame(tg.interval(i));
- frame->metadata()->frame_duration = tg.interval(1);
+ frame->metadata().frame_duration = tg.interval(1);
algorithm_.EnqueueFrame(frame);
}
@@ -1680,7 +1680,7 @@ TEST_F(VideoRendererAlgorithmTest, WallClockDurationMetadataSet) {
SCOPED_TRACE(base::StringPrintf("Frame #%d", i));
- EXPECT_EQ(*frame->metadata()->wallclock_frame_duration, intended_duration);
+ EXPECT_EQ(*frame->metadata().wallclock_frame_duration, intended_duration);
EXPECT_EQ(algorithm_.average_frame_duration(), intended_duration);
}
}
diff --git a/chromium/media/filters/vp9_parser.h b/chromium/media/filters/vp9_parser.h
index 4f360385062..f85b5217836 100644
--- a/chromium/media/filters/vp9_parser.h
+++ b/chromium/media/filters/vp9_parser.h
@@ -300,7 +300,7 @@ class MEDIA_EXPORT Vp9Parser {
// The parsing context that persists across frames.
class Context {
public:
- class Vp9FrameContextManager {
+ class MEDIA_EXPORT Vp9FrameContextManager {
public:
Vp9FrameContextManager();
~Vp9FrameContextManager();
diff --git a/chromium/media/filters/vp9_uncompressed_header_parser.cc b/chromium/media/filters/vp9_uncompressed_header_parser.cc
index 6c99ef5b468..294c709ae57 100644
--- a/chromium/media/filters/vp9_uncompressed_header_parser.cc
+++ b/chromium/media/filters/vp9_uncompressed_header_parser.cc
@@ -622,6 +622,11 @@ Vp9UncompressedHeaderParser::Vp9UncompressedHeaderParser(
Vp9Parser::Context* context)
: context_(context) {}
+const Vp9FrameContext&
+Vp9UncompressedHeaderParser::GetVp9DefaultFrameContextForTesting() const {
+ return kVp9DefaultFrameContext;
+}
+
uint8_t Vp9UncompressedHeaderParser::ReadProfile() {
uint8_t profile = 0;
@@ -765,6 +770,8 @@ Vp9InterpolationFilter Vp9UncompressedHeaderParser::ReadInterpolationFilter() {
void Vp9UncompressedHeaderParser::SetupPastIndependence(Vp9FrameHeader* fhdr) {
memset(&context_->segmentation_, 0, sizeof(context_->segmentation_));
+ memset(fhdr->ref_frame_sign_bias, 0, sizeof(fhdr->ref_frame_sign_bias));
+
ResetLoopfilter();
fhdr->frame_context = kVp9DefaultFrameContext;
DCHECK(fhdr->frame_context.IsValid());
@@ -1069,7 +1076,7 @@ bool Vp9UncompressedHeaderParser::Parse(const uint8_t* stream,
fhdr->frame_context_idx_to_save_probs = fhdr->frame_context_idx =
reader_.ReadLiteral(kVp9NumFrameContextsLog2);
- if (fhdr->IsIntra()) {
+ if (fhdr->IsIntra() || fhdr->error_resilient_mode) {
SetupPastIndependence(fhdr);
if (fhdr->IsKeyframe() || fhdr->error_resilient_mode ||
fhdr->reset_frame_context == 3) {
diff --git a/chromium/media/filters/vp9_uncompressed_header_parser.h b/chromium/media/filters/vp9_uncompressed_header_parser.h
index ea85af4769b..1cf5a408eb3 100644
--- a/chromium/media/filters/vp9_uncompressed_header_parser.h
+++ b/chromium/media/filters/vp9_uncompressed_header_parser.h
@@ -8,9 +8,11 @@
#include "media/filters/vp9_parser.h"
#include "media/filters/vp9_raw_bits_reader.h"
+#include "media/base/media_export.h"
+
namespace media {
-class Vp9UncompressedHeaderParser {
+class MEDIA_EXPORT Vp9UncompressedHeaderParser {
public:
Vp9UncompressedHeaderParser(Vp9Parser::Context* context);
@@ -18,7 +20,11 @@ class Vp9UncompressedHeaderParser {
// Returns true if no error.
bool Parse(const uint8_t* stream, off_t frame_size, Vp9FrameHeader* fhdr);
+ const Vp9FrameContext& GetVp9DefaultFrameContextForTesting() const;
+
private:
+ friend class Vp9UncompressedHeaderParserTest;
+
uint8_t ReadProfile();
bool VerifySyncCode();
bool ReadColorConfig(Vp9FrameHeader* fhdr);
diff --git a/chromium/media/filters/vp9_uncompressed_header_parser_unittest.cc b/chromium/media/filters/vp9_uncompressed_header_parser_unittest.cc
new file mode 100644
index 00000000000..53fbfafe261
--- /dev/null
+++ b/chromium/media/filters/vp9_uncompressed_header_parser_unittest.cc
@@ -0,0 +1,65 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/filters/vp9_uncompressed_header_parser.h"
+
+#include "media/filters/vp9_parser.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+
+class Vp9UncompressedHeaderParserTest : public testing::Test {
+ public:
+ void SetupPastIndependence(Vp9FrameHeader* fhdr) {
+ vp9_uncompressed_header_parser_.SetupPastIndependence(fhdr);
+ }
+
+ const Vp9FrameContext& GetVp9DefaultFrameContextForTesting() const {
+ return vp9_uncompressed_header_parser_
+ .GetVp9DefaultFrameContextForTesting();
+ }
+
+ Vp9UncompressedHeaderParserTest()
+      : vp9_uncompressed_header_parser_(&vp9_parser_context_) {}
+
+ protected:
+ const Vp9LoopFilterParams& GetLoopFilter() const {
+ return vp9_parser_context_.loop_filter();
+ }
+
+ Vp9Parser::Context vp9_parser_context_;
+ Vp9UncompressedHeaderParser vp9_uncompressed_header_parser_;
+};
+
+TEST_F(Vp9UncompressedHeaderParserTest, SetupPastIndependence) {
+ Vp9FrameHeader frame_header = {};
+
+ SetupPastIndependence(&frame_header);
+
+ EXPECT_EQ(0, frame_header.ref_frame_sign_bias[VP9_FRAME_INTRA]);
+ EXPECT_EQ(0, frame_header.ref_frame_sign_bias[VP9_FRAME_LAST]);
+ EXPECT_EQ(0, frame_header.ref_frame_sign_bias[VP9_FRAME_GOLDEN]);
+ EXPECT_EQ(0, frame_header.ref_frame_sign_bias[VP9_FRAME_ALTREF]);
+
+ // Verify ResetLoopfilter() result
+ const Vp9LoopFilterParams& lf = GetLoopFilter();
+ EXPECT_TRUE(lf.delta_enabled);
+ EXPECT_TRUE(lf.delta_update);
+ EXPECT_EQ(1, lf.ref_deltas[VP9_FRAME_INTRA]);
+ EXPECT_EQ(0, lf.ref_deltas[VP9_FRAME_LAST]);
+ EXPECT_EQ(-1, lf.ref_deltas[VP9_FRAME_GOLDEN]);
+ EXPECT_EQ(-1, lf.ref_deltas[VP9_FRAME_ALTREF]);
+ EXPECT_EQ(0, lf.mode_deltas[0]);
+ EXPECT_EQ(0, lf.mode_deltas[1]);
+
+ EXPECT_TRUE(frame_header.frame_context.IsValid());
+
+ static_assert(std::is_pod<Vp9FrameContext>::value,
+ "Vp9FrameContext is not POD, rewrite the next EXPECT_TRUE");
+ EXPECT_TRUE(std::memcmp(&frame_header.frame_context,
+ &GetVp9DefaultFrameContextForTesting(),
+ sizeof(GetVp9DefaultFrameContextForTesting())) == 0);
+}
+
+} // namespace media
diff --git a/chromium/media/filters/vpx_video_decoder.cc b/chromium/media/filters/vpx_video_decoder.cc
index 7cac1f7fe78..3351761ff07 100644
--- a/chromium/media/filters/vpx_video_decoder.cc
+++ b/chromium/media/filters/vpx_video_decoder.cc
@@ -98,6 +98,25 @@ static int32_t ReleaseVP9FrameBuffer(void* user_priv,
return 0;
}
+// static
+SupportedVideoDecoderConfigs VpxVideoDecoder::SupportedConfigs() {
+ SupportedVideoDecoderConfigs supported_configs;
+ supported_configs.emplace_back(/*profile_min=*/VP8PROFILE_ANY,
+ /*profile_max=*/VP8PROFILE_ANY,
+ /*coded_size_min=*/kDefaultSwDecodeSizeMin,
+ /*coded_size_max=*/kDefaultSwDecodeSizeMax,
+ /*allow_encrypted=*/false,
+ /*require_encrypted=*/false);
+
+ supported_configs.emplace_back(/*profile_min=*/VP9PROFILE_PROFILE0,
+ /*profile_max=*/VP9PROFILE_PROFILE2,
+ /*coded_size_min=*/kDefaultSwDecodeSizeMin,
+ /*coded_size_max=*/kDefaultSwDecodeSizeMax,
+ /*allow_encrypted=*/false,
+ /*require_encrypted=*/false);
+ return supported_configs;
+}
+
VpxVideoDecoder::VpxVideoDecoder(OffloadState offload_state)
: bind_callbacks_(offload_state == OffloadState::kNormal) {
DETACH_FROM_SEQUENCE(sequence_checker_);
@@ -108,6 +127,10 @@ VpxVideoDecoder::~VpxVideoDecoder() {
CloseDecoder();
}
+VideoDecoderType VpxVideoDecoder::GetDecoderType() const {
+ return VideoDecoderType::kVpx;
+}
+
std::string VpxVideoDecoder::GetDisplayName() const {
return "VpxVideoDecoder";
}
@@ -182,7 +205,7 @@ void VpxVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
// We might get a successful VpxDecode but not a frame if only a partial
// decode happened.
if (video_frame) {
- video_frame->metadata()->power_efficient = false;
+ video_frame->metadata().power_efficient = false;
output_cb_.Run(video_frame);
}
@@ -338,6 +361,7 @@ bool VpxVideoDecoder::VpxDecode(const DecoderBuffer* buffer,
}
(*video_frame)->set_timestamp(buffer->timestamp());
+ (*video_frame)->set_hdr_metadata(config_.hdr_metadata());
// Prefer the color space from the config if available. It generally comes
// from the color tag which is more expressive than the vp8 and vp9 bitstream.
@@ -395,7 +419,6 @@ bool VpxVideoDecoder::VpxDecode(const DecoderBuffer* buffer,
(*video_frame)
->set_color_space(gfx::ColorSpace(primaries, transfer, matrix, range));
}
- (*video_frame)->set_hdr_metadata(config_.hdr_metadata());
return true;
}
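For context, a short sketch of how decoder selection might consult the new static SupportedConfigs() list; IsVideoDecoderConfigSupported() is assumed to be the matching helper from supported_video_decoder_config.h:

#include "media/base/supported_video_decoder_config.h"
#include "media/filters/vpx_video_decoder.h"

bool CanUseVpxFor(const media::VideoDecoderConfig& config) {
  // Matches profile range, coded size range, and encryption requirements
  // against what the decoder advertises (VP8, and VP9 profiles 0-2, here).
  return media::IsVideoDecoderConfigSupported(
      media::VpxVideoDecoder::SupportedConfigs(), config);
}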
diff --git a/chromium/media/filters/vpx_video_decoder.h b/chromium/media/filters/vpx_video_decoder.h
index 98eff915fe8..d4e5f74acb9 100644
--- a/chromium/media/filters/vpx_video_decoder.h
+++ b/chromium/media/filters/vpx_video_decoder.h
@@ -8,6 +8,7 @@
#include "base/callback.h"
#include "base/macros.h"
#include "base/sequence_checker.h"
+#include "media/base/supported_video_decoder_config.h"
#include "media/base/video_decoder.h"
#include "media/base/video_decoder_config.h"
#include "media/base/video_frame.h"
@@ -29,10 +30,13 @@ class FrameBufferPool;
// [1] http://wiki.webmproject.org/alpha-channel
class MEDIA_EXPORT VpxVideoDecoder : public OffloadableVideoDecoder {
public:
+ static SupportedVideoDecoderConfigs SupportedConfigs();
+
explicit VpxVideoDecoder(OffloadState offload_state = OffloadState::kNormal);
~VpxVideoDecoder() override;
// VideoDecoder implementation.
+ VideoDecoderType GetDecoderType() const override;
std::string GetDisplayName() const override;
void Initialize(const VideoDecoderConfig& config,
bool low_delay,
diff --git a/chromium/media/filters/vpx_video_decoder_fuzzertest.cc b/chromium/media/filters/vpx_video_decoder_fuzzertest.cc
index 85cc43f6adc..087d46547e4 100644
--- a/chromium/media/filters/vpx_video_decoder_fuzzertest.cc
+++ b/chromium/media/filters/vpx_video_decoder_fuzzertest.cc
@@ -30,15 +30,15 @@ struct Env {
base::test::SingleThreadTaskEnvironment task_environment;
};
-void OnDecodeComplete(const base::Closure& quit_closure, media::Status status) {
- quit_closure.Run();
+void OnDecodeComplete(base::OnceClosure quit_closure, media::Status status) {
+ std::move(quit_closure).Run();
}
-void OnInitDone(const base::Closure& quit_closure,
+void OnInitDone(base::OnceClosure quit_closure,
bool* success_dest,
media::Status status) {
*success_dest = status.is_ok();
- quit_closure.Run();
+ std::move(quit_closure).Run();
}
void OnOutputComplete(scoped_refptr<media::VideoFrame> frame) {}
diff --git a/chromium/media/filters/vpx_video_decoder_unittest.cc b/chromium/media/filters/vpx_video_decoder_unittest.cc
index be6824bf297..0eab44301f2 100644
--- a/chromium/media/filters/vpx_video_decoder_unittest.cc
+++ b/chromium/media/filters/vpx_video_decoder_unittest.cc
@@ -160,7 +160,7 @@ class VpxVideoDecoderTest : public testing::Test {
}
void FrameReady(scoped_refptr<VideoFrame> frame) {
- DCHECK(!frame->metadata()->end_of_stream);
+ DCHECK(!frame->metadata().end_of_stream);
output_frames_.push_back(std::move(frame));
}
diff --git a/chromium/media/formats/BUILD.gn b/chromium/media/formats/BUILD.gn
index fbb2170607d..1a9089f2b1a 100644
--- a/chromium/media/formats/BUILD.gn
+++ b/chromium/media/formats/BUILD.gn
@@ -45,6 +45,8 @@ source_set("formats") {
"mpeg/mpeg1_audio_stream_parser.h",
"mpeg/mpeg_audio_stream_parser_base.cc",
"mpeg/mpeg_audio_stream_parser_base.h",
+ "webcodecs/webcodecs_encoded_chunk_stream_parser.cc",
+ "webcodecs/webcodecs_encoded_chunk_stream_parser.h",
"webm/webm_audio_client.cc",
"webm/webm_audio_client.h",
"webm/webm_cluster_parser.cc",
diff --git a/chromium/media/formats/mp4/h264_annex_b_fuzz_corpus/pps_neq_sps_config_idr.bin b/chromium/media/formats/mp4/h264_annex_b_fuzz_corpus/pps_neq_sps_config_idr.bin
new file mode 100644
index 00000000000..77706919f14
--- /dev/null
+++ b/chromium/media/formats/mp4/h264_annex_b_fuzz_corpus/pps_neq_sps_config_idr.bin
Binary files differ
diff --git a/chromium/media/formats/mp4/h264_annex_b_to_avc_bitstream_converter.cc b/chromium/media/formats/mp4/h264_annex_b_to_avc_bitstream_converter.cc
index f3c4e8ef0e8..621bd8b7e49 100644
--- a/chromium/media/formats/mp4/h264_annex_b_to_avc_bitstream_converter.cc
+++ b/chromium/media/formats/mp4/h264_annex_b_to_avc_bitstream_converter.cc
@@ -78,7 +78,7 @@ Status H264AnnexBToAvcBitstreamConverter::ConvertChunk(
}
case H264NALU::kSPSExt: {
- NOTREACHED() << "SPS extensions are not supported yet.";
+ // SPS extensions are not supported yet.
break;
}
@@ -95,7 +95,7 @@ Status H264AnnexBToAvcBitstreamConverter::ConvertChunk(
blob(nalu.data, nalu.data + nalu.size));
pps_to_include.insert(pps_id);
if (auto* pps = parser_.GetPPS(pps_id))
- pps_to_include.insert(pps->seq_parameter_set_id);
+ sps_to_include.insert(pps->seq_parameter_set_id);
config_changed = true;
break;
}
diff --git a/chromium/media/formats/mp4/h264_annex_b_to_avc_bitstream_converter_unittest.cc b/chromium/media/formats/mp4/h264_annex_b_to_avc_bitstream_converter_unittest.cc
index 8fadf60e6f2..176dc31063f 100644
--- a/chromium/media/formats/mp4/h264_annex_b_to_avc_bitstream_converter_unittest.cc
+++ b/chromium/media/formats/mp4/h264_annex_b_to_avc_bitstream_converter_unittest.cc
@@ -37,10 +37,11 @@ std::vector<uint8_t> ReadTestFile(std::string name) {
namespace media {
TEST(H264AnnexBToAvcBitstreamConverterTest, Success) {
- std::string chunks[] = {"chunk1-config-idr.bin", "chunk2-non-idr.bin",
- "chunk3-non-idr.bin", "chunk4-non-idr.bin",
- "chunk5-non-idr.bin", "chunk6-config-idr.bin",
- "chunk7-non-idr.bin"};
+ std::string chunks[] = {
+ "chunk1-config-idr.bin", "chunk2-non-idr.bin",
+ "chunk3-non-idr.bin", "chunk4-non-idr.bin",
+ "chunk5-non-idr.bin", "chunk6-config-idr.bin",
+ "chunk7-non-idr.bin", "pps_neq_sps_config_idr.bin"};
H264AnnexBToAvcBitstreamConverter converter;
for (std::string& name : chunks) {
diff --git a/chromium/media/formats/mpeg/mpeg1_audio_stream_parser.cc b/chromium/media/formats/mpeg/mpeg1_audio_stream_parser.cc
index 05a84624583..d12f7cacee4 100644
--- a/chromium/media/formats/mpeg/mpeg1_audio_stream_parser.cc
+++ b/chromium/media/formats/mpeg/mpeg1_audio_stream_parser.cc
@@ -12,28 +12,6 @@ namespace {
constexpr uint32_t kMPEG1StartCodeMask = 0xffe00000;
-// Map that determines which bitrate_index & channel_mode combinations
-// are allowed.
-// Derived from: http://mpgedit.org/mpgedit/mpeg_format/MP3Format.html
-constexpr bool kIsAllowed[17][4] = {
- {true, true, true, true}, // free
- {true, false, false, false}, // 32
- {true, false, false, false}, // 48
- {true, false, false, false}, // 56
- {true, true, true, true}, // 64
- {true, false, false, false}, // 80
- {true, true, true, true}, // 96
- {true, true, true, true}, // 112
- {true, true, true, true}, // 128
- {true, true, true, true}, // 160
- {true, true, true, true}, // 192
- {false, true, true, true}, // 224
- {false, true, true, true}, // 256
- {false, true, true, true}, // 320
- {false, true, true, true}, // 384
- {false, false, false, false} // bad
-};
-
// Maps version and layer information in the frame header
// into an index for the |kBitrateMap|.
// Derived from: http://mpgedit.org/mpgedit/mpeg_format/MP3Format.html
@@ -125,15 +103,8 @@ bool MPEG1AudioStreamParser::ParseHeader(MediaLog* media_log,
return false;
}
- if (layer == kLayer2 && !kIsAllowed[bitrate_index][channel_mode]) {
- if (media_log) {
- LIMITED_MEDIA_LOG(DEBUG, media_log, *media_log_limit, 5)
- << "Invalid MP3 (bitrate_index, channel_mode)"
- << " combination :" << std::hex << " bitrate_index " << bitrate_index
- << " channel_mode " << channel_mode;
- }
- return false;
- }
+  // Note: For MPEG2 we don't check if a given bitrate or channel layout is
+  // allowed per spec, since none of the tested decoders seem to care.
int bitrate = kBitrateMap[bitrate_index][kVersionLayerMap[version][layer]];
diff --git a/chromium/media/formats/webcodecs/webcodecs_encoded_chunk_stream_parser.cc b/chromium/media/formats/webcodecs/webcodecs_encoded_chunk_stream_parser.cc
new file mode 100644
index 00000000000..cb09be7e0bf
--- /dev/null
+++ b/chromium/media/formats/webcodecs/webcodecs_encoded_chunk_stream_parser.cc
@@ -0,0 +1,178 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/formats/webcodecs/webcodecs_encoded_chunk_stream_parser.h"
+
+#include <string>
+
+#include "base/callback.h"
+#include "base/logging.h"
+#include "base/notreached.h"
+#include "media/base/media_log.h"
+#include "media/base/media_track.h"
+#include "media/base/media_tracks.h"
+#include "media/base/stream_parser_buffer.h"
+#include "media/base/text_track_config.h"
+#include "media/base/timestamp_constants.h"
+
+namespace {
+
+// TODO(crbug.com/1144908): Since these must be identical to those generated
+// in the SourceBuffer, consider moving them to a shared location, possibly
+// stream_parser.h. Until then, they must be kept manually in sync with the
+// corresponding constexpr values in SourceBuffer.
+constexpr media::StreamParser::TrackId kWebCodecsAudioTrackId = 1;
+constexpr media::StreamParser::TrackId kWebCodecsVideoTrackId = 2;
+
+} // namespace
+
+namespace media {
+
+WebCodecsEncodedChunkStreamParser::WebCodecsEncodedChunkStreamParser(
+ std::unique_ptr<AudioDecoderConfig> audio_config)
+ : state_(kWaitingForInit), audio_config_(std::move(audio_config)) {
+ DCHECK(audio_config_ && !video_config_);
+}
+
+WebCodecsEncodedChunkStreamParser::WebCodecsEncodedChunkStreamParser(
+ std::unique_ptr<VideoDecoderConfig> video_config)
+ : state_(kWaitingForInit), video_config_(std::move(video_config)) {
+ DCHECK(video_config_ && !audio_config_);
+}
+
+WebCodecsEncodedChunkStreamParser::~WebCodecsEncodedChunkStreamParser() =
+ default;
+
+void WebCodecsEncodedChunkStreamParser::Init(
+ InitCB init_cb,
+ const NewConfigCB& config_cb,
+ const NewBuffersCB& new_buffers_cb,
+ bool /* ignore_text_tracks */,
+ const EncryptedMediaInitDataCB& /* ignored */,
+ const NewMediaSegmentCB& new_segment_cb,
+ const EndMediaSegmentCB& end_of_segment_cb,
+ MediaLog* media_log) {
+ DCHECK_EQ(state_, kWaitingForInit);
+ DCHECK(!init_cb_);
+ DCHECK(init_cb);
+ DCHECK(config_cb);
+ DCHECK(new_buffers_cb);
+ DCHECK(new_segment_cb);
+ DCHECK(end_of_segment_cb);
+
+ ChangeState(kWaitingForConfigEmission);
+ init_cb_ = std::move(init_cb);
+ config_cb_ = config_cb;
+ new_buffers_cb_ = new_buffers_cb;
+ new_segment_cb_ = new_segment_cb;
+ end_of_segment_cb_ = end_of_segment_cb;
+ media_log_ = media_log;
+}
+
+void WebCodecsEncodedChunkStreamParser::Flush() {
+ DCHECK_NE(state_, kWaitingForInit);
+ if (state_ == kWaitingForEncodedChunks)
+ ChangeState(kWaitingForConfigEmission);
+}
+
+bool WebCodecsEncodedChunkStreamParser::GetGenerateTimestampsFlag() const {
+ return false;
+}
+
+bool WebCodecsEncodedChunkStreamParser::Parse(const uint8_t* /* buf */,
+ int /* size */) {
+  // TODO(crbug.com/1144908): Protect against the app reaching this (and the
+  // similar inverse case in other parsers) simply by using the wrong append
+  // method on the SourceBuffer. Maybe a better MEDIA_LOG here would be
+  // sufficient? Or instead have the top-level SourceBuffer throw a synchronous
+  // exception when the wrong append method is attempted, without causing a
+  // parse/decode error?
+ NOTREACHED(); // ProcessChunks() is the method to use instead for this
+ // parser.
+ return false;
+}
+
+bool WebCodecsEncodedChunkStreamParser::ProcessChunks(
+ std::unique_ptr<BufferQueue> buffer_queue) {
+ DCHECK_NE(state_, kWaitingForInit);
+
+ if (state_ == kError)
+ return false;
+
+ if (state_ == kWaitingForConfigEmission) {
+ // Must (still) have only one config. We'll retain ownership.
+ // MediaTracks::AddAudio/VideoTrack copies the config.
+ DCHECK((audio_config_ && !video_config_) ||
+ (video_config_ && !audio_config_));
+ auto media_tracks = std::make_unique<MediaTracks>();
+ if (audio_config_) {
+ media_tracks->AddAudioTrack(
+ *audio_config_, kWebCodecsAudioTrackId, MediaTrack::Kind("main"),
+ MediaTrack::Label(""), MediaTrack::Language(""));
+ } else if (video_config_) {
+ media_tracks->AddVideoTrack(
+ *video_config_, kWebCodecsVideoTrackId, MediaTrack::Kind("main"),
+ MediaTrack::Label(""), MediaTrack::Language(""));
+ }
+
+ if (!config_cb_.Run(std::move(media_tracks), TextTrackConfigMap())) {
+ ChangeState(kError);
+ return false;
+ }
+
+ if (init_cb_) {
+ InitParameters params(kInfiniteDuration);
+ params.liveness = DemuxerStream::LIVENESS_UNKNOWN;
+ if (audio_config_)
+ params.detected_audio_track_count = 1;
+ if (video_config_)
+ params.detected_video_track_count = 1;
+ params.detected_text_track_count = 0;
+ std::move(init_cb_).Run(params);
+ }
+
+ ChangeState(kWaitingForEncodedChunks);
+ }
+
+ DCHECK_EQ(state_, kWaitingForEncodedChunks);
+
+ // All of |buffer_queue| must be of the media type (audio or video)
+  // corresponding to the single type of decoder config we have. Otherwise,
+ // the caller has provided encoded chunks for the wrong kind of config.
+ DemuxerStream::Type expected_type =
+ audio_config_ ? DemuxerStream::AUDIO : DemuxerStream::VIDEO;
+ for (const auto& it : *buffer_queue) {
+ if (it->type() != expected_type) {
+ MEDIA_LOG(ERROR, media_log_)
+ << "Incorrect EncodedChunk type (audio vs video) appended";
+ ChangeState(kError);
+ return false;
+ }
+ }
+
+  // TODO(crbug.com/1144908): Add a different new_buffers_cb type for us to
+  // use, so that we can just std::move the buffer_queue. That would avoid
+  // potential issues with out-of-order timestamps in the caller-provided
+  // queue, which can cause parse failure in MergeBufferQueues under the
+  // current, legacy style of new_buffers_cb; that style depends on parsers
+  // emitting sanely time-ordered groups of frames from *muxed* multi-track
+  // bytestreams. FrameProcessor is capable of handling our buffer_queue
+  // verbatim.
+ BufferQueueMap buffers;
+ if (audio_config_)
+ buffers.insert(std::make_pair(kWebCodecsAudioTrackId, *buffer_queue));
+ else
+ buffers.insert(std::make_pair(kWebCodecsVideoTrackId, *buffer_queue));
+ new_segment_cb_.Run();
+ if (!new_buffers_cb_.Run(buffers))
+ return false;
+ end_of_segment_cb_.Run();
+
+ return true;
+}
+
+void WebCodecsEncodedChunkStreamParser::ChangeState(State new_state) {
+ DVLOG(1) << __func__ << ": " << state_ << " -> " << new_state;
+ state_ = new_state;
+}
+
+} // namespace media
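To make the state machine above concrete, a brief usage sketch under stated assumptions: |parser| was constructed with exactly one config, Init() has already run (so state is kWaitingForConfigEmission), and |chunk_buffer| is a hypothetical StreamParserBuffer whose type matches that config:

auto chunks = std::make_unique<media::StreamParser::BufferQueue>();
chunks->push_back(chunk_buffer);  // Audio chunks for an audio config, etc.

// The first call emits the stored config via config_cb_, resolves init_cb_
// with kInfiniteDuration (encoded chunks carry no intrinsic duration), then
// forwards the chunks as a single media segment.
bool ok = parser->ProcessChunks(std::move(chunks));

// Later calls skip config emission and just forward chunks, until Flush()
// returns the parser to kWaitingForConfigEmission.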
diff --git a/chromium/media/formats/webcodecs/webcodecs_encoded_chunk_stream_parser.h b/chromium/media/formats/webcodecs/webcodecs_encoded_chunk_stream_parser.h
new file mode 100644
index 00000000000..c5dafee217a
--- /dev/null
+++ b/chromium/media/formats/webcodecs/webcodecs_encoded_chunk_stream_parser.h
@@ -0,0 +1,79 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_FORMATS_WEBCODECS_WEBCODECS_ENCODED_CHUNK_STREAM_PARSER_H_
+#define MEDIA_FORMATS_WEBCODECS_WEBCODECS_ENCODED_CHUNK_STREAM_PARSER_H_
+
+#include <stdint.h>
+
+#include <memory>
+
+#include "base/callback_forward.h"
+#include "base/macros.h"
+#include "base/memory/ref_counted.h"
+#include "media/base/audio_decoder_config.h"
+#include "media/base/media_export.h"
+#include "media/base/stream_parser.h"
+#include "media/base/video_decoder_config.h"
+
+namespace media {
+
+class MEDIA_EXPORT WebCodecsEncodedChunkStreamParser : public StreamParser {
+ public:
+ explicit WebCodecsEncodedChunkStreamParser(
+ std::unique_ptr<AudioDecoderConfig> audio_config);
+ explicit WebCodecsEncodedChunkStreamParser(
+ std::unique_ptr<VideoDecoderConfig> video_config);
+ ~WebCodecsEncodedChunkStreamParser() override;
+
+ // StreamParser implementation.
+ void Init(InitCB init_cb,
+ const NewConfigCB& config_cb,
+ const NewBuffersCB& new_buffers_cb,
+ bool ignore_text_tracks /* must be true */,
+ const EncryptedMediaInitDataCB& encrypted_media_init_data_cb,
+ const NewMediaSegmentCB& new_segment_cb,
+ const EndMediaSegmentCB& end_of_segment_cb,
+ MediaLog* media_log) override;
+ void Flush() override;
+ bool GetGenerateTimestampsFlag() const override;
+ bool Parse(const uint8_t* buf, int size) override;
+
+ // Processes and emits buffers from |buffer_queue|. If state is
+ // kWaitingForConfigEmission, first emit the config.
+ bool ProcessChunks(std::unique_ptr<BufferQueue> buffer_queue) override;
+
+ private:
+ enum State {
+ kWaitingForInit,
+ kWaitingForConfigEmission,
+ kWaitingForEncodedChunks,
+ kError
+ };
+
+ void ChangeState(State new_state);
+
+ State state_;
+
+  // Exactly one of these configs is populated by the ctor. A copy of the
+  // appropriate config is emitted on demand when "parsing" newly appended
+  // encoded chunks, if that append occurs while state is
+  // kWaitingForConfigEmission. Note: only one type of config (not both) can
+  // be emitted for a given instance of this parser.
+ std::unique_ptr<AudioDecoderConfig> audio_config_;
+ std::unique_ptr<VideoDecoderConfig> video_config_;
+
+ InitCB init_cb_;
+ NewConfigCB config_cb_;
+ NewBuffersCB new_buffers_cb_;
+
+ NewMediaSegmentCB new_segment_cb_;
+ EndMediaSegmentCB end_of_segment_cb_;
+ MediaLog* media_log_;
+
+ DISALLOW_COPY_AND_ASSIGN(WebCodecsEncodedChunkStreamParser);
+};
+
+} // namespace media
+
+#endif // MEDIA_FORMATS_WEBCODECS_WEBCODECS_ENCODED_CHUNK_STREAM_PARSER_H_
diff --git a/chromium/media/fuchsia/audio/fake_audio_consumer.h b/chromium/media/fuchsia/audio/fake_audio_consumer.h
index c42469a0e90..da1fcefb88c 100644
--- a/chromium/media/fuchsia/audio/fake_audio_consumer.h
+++ b/chromium/media/fuchsia/audio/fake_audio_consumer.h
@@ -156,8 +156,7 @@ class FakeAudioConsumerService
// Not-implemented handler for SessionAudioConsumerFactory_TestBase.
void NotImplemented_(const std::string& name) final;
- base::fuchsia::ScopedServiceBinding<
- fuchsia::media::SessionAudioConsumerFactory>
+ base::ScopedServiceBinding<fuchsia::media::SessionAudioConsumerFactory>
binding_;
std::vector<std::unique_ptr<FakeAudioConsumer>> audio_consumers_;
diff --git a/chromium/media/fuchsia/audio/fuchsia_audio_renderer.cc b/chromium/media/fuchsia/audio/fuchsia_audio_renderer.cc
index e71abab05ec..871c566c89f 100644
--- a/chromium/media/fuchsia/audio/fuchsia_audio_renderer.cc
+++ b/chromium/media/fuchsia/audio/fuchsia_audio_renderer.cc
@@ -255,6 +255,8 @@ void FuchsiaAudioRenderer::SetLatencyHint(
void FuchsiaAudioRenderer::SetPreservesPitch(bool preserves_pitch) {}
+void FuchsiaAudioRenderer::SetAutoplayInitiated(bool autoplay_initiated) {}
+
void FuchsiaAudioRenderer::StartTicking() {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
@@ -285,7 +287,7 @@ void FuchsiaAudioRenderer::StopTicking() {
audio_consumer_->Stop();
base::AutoLock lock(timeline_lock_);
- UpdateTimelineAfterStop();
+ UpdateTimelineOnStop();
SetPlaybackState(PlaybackState::kStopped);
}
@@ -294,9 +296,17 @@ void FuchsiaAudioRenderer::SetPlaybackRate(double playback_rate) {
audio_consumer_->SetRate(playback_rate);
+  // AudioConsumer updates the media timeline asynchronously; that update is
+  // processed in OnAudioConsumerStatusChanged(). It could move the clock
+  // backward, which is undesirable, e.g. because VideoRenderer could drop
+  // video frames that should be shown when the stream is resumed. To avoid
+  // this, update the timeline synchronously here. OnAudioConsumerStatusChanged()
+  // will still process the update from AudioConsumer, to record the position
+  // at which the stream was actually paused, but that update will not move
+  // the clock backward.
if (playback_rate == 0.0) {
base::AutoLock lock(timeline_lock_);
- UpdateTimelineAfterStop();
+ UpdateTimelineOnStop();
}
}
@@ -310,8 +320,8 @@ void FuchsiaAudioRenderer::SetMediaTime(base::TimeDelta time) {
// Reset reference timestamp. This is necessary to ensure that the correct
// value is returned from GetWallClockTimes() until playback is resumed:
- // the interface requires to return 0 wall clock between SetMediaTime() and
- // StartTicking().
+ // GetWallClockTimes() is required to return 0 wall clock between
+ // SetMediaTime() and StartTicking().
reference_time_ = base::TimeTicks();
}
@@ -460,7 +470,7 @@ void FuchsiaAudioRenderer::ScheduleReadDemuxerStream() {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
if (!demuxer_stream_ || read_timer_.IsRunning() || is_demuxer_read_pending_ ||
- GetPlaybackState() == PlaybackState::kEndOfStream ||
+ is_at_end_of_stream_ ||
num_pending_packets_ >= stream_sink_buffers_.size()) {
return;
}
@@ -525,13 +535,10 @@ void FuchsiaAudioRenderer::OnDemuxerStreamReadDone(
}
if (buffer->end_of_stream()) {
- {
- base::AutoLock lock(timeline_lock_);
- SetPlaybackState(PlaybackState::kEndOfStream);
- }
+ is_at_end_of_stream_ = true;
stream_sink_->EndOfStream();
- // No more data is going to be biffered. Update buffering state to ensure
+ // No more data is going to be buffered. Update buffering state to ensure
// RendererImpl starts playback in case it was waiting for buffering to
// finish.
SetBufferState(BUFFERING_HAVE_ENOUGH);
@@ -608,13 +615,13 @@ void FuchsiaAudioRenderer::SetBufferState(BufferingState buffer_state) {
void FuchsiaAudioRenderer::FlushInternal() {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- DCHECK(GetPlaybackState() == PlaybackState::kStopped ||
- GetPlaybackState() == PlaybackState::kEndOfStream);
+ DCHECK(GetPlaybackState() == PlaybackState::kStopped || is_at_end_of_stream_);
stream_sink_->DiscardAllPacketsNoReply();
SetBufferState(BUFFERING_HAVE_NOTHING);
last_packet_timestamp_ = base::TimeDelta::Min();
read_timer_.Stop();
+ is_at_end_of_stream_ = false;
if (is_demuxer_read_pending_) {
drop_next_demuxer_read_result_ = true;
@@ -627,12 +634,10 @@ void FuchsiaAudioRenderer::OnEndOfStream() {
}
bool FuchsiaAudioRenderer::IsTimeMoving() {
- return (state_ == PlaybackState::kPlaying ||
- state_ == PlaybackState::kEndOfStream) &&
- (media_delta_ > 0);
+ return state_ == PlaybackState::kPlaying && media_delta_ > 0;
}
-void FuchsiaAudioRenderer::UpdateTimelineAfterStop() {
+void FuchsiaAudioRenderer::UpdateTimelineOnStop() {
if (!IsTimeMoving())
return;
diff --git a/chromium/media/fuchsia/audio/fuchsia_audio_renderer.h b/chromium/media/fuchsia/audio/fuchsia_audio_renderer.h
index faa75cc9ab9..65dd995cd9e 100644
--- a/chromium/media/fuchsia/audio/fuchsia_audio_renderer.h
+++ b/chromium/media/fuchsia/audio/fuchsia_audio_renderer.h
@@ -41,6 +41,7 @@ class FuchsiaAudioRenderer : public AudioRenderer, public TimeSource {
void SetVolume(float volume) final;
void SetLatencyHint(base::Optional<base::TimeDelta> latency_hint) final;
void SetPreservesPitch(bool preserves_pitch) final;
+ void SetAutoplayInitiated(bool autoplay_initiated) final;
// TimeSource implementation.
void StartTicking() final;
@@ -59,11 +60,9 @@ class FuchsiaAudioRenderer : public AudioRenderer, public TimeSource {
// should not be used yet.
kStarting,
+ // Playback is active. When the stream reaches EOS it stays in the kPlaying
+ // state.
kPlaying,
-
- // Received end-of-stream packet from the |demuxer_stream_|. Waiting for
- // EndOfStream event from |audio_consumer_|.
- kEndOfStream,
};
// Struct used to store state of an input buffer shared with the
@@ -123,13 +122,13 @@ class FuchsiaAudioRenderer : public AudioRenderer, public TimeSource {
// Updates TimelineFunction parameters after StopTicking() or
// SetPlaybackRate(0.0). Normally these parameters are provided by
- // AudioConsumer, but this happens asynchronously, while we need to make sure
+ // AudioConsumer, but this happens asynchronously and we need to make sure
// that StopTicking() and SetPlaybackRate(0.0) stop the media clock
- // synchronously.
- void UpdateTimelineAfterStop() EXCLUSIVE_LOCKS_REQUIRED(timeline_lock_);
+ // synchronously. Must be called before updating the |state_|.
+ void UpdateTimelineOnStop() EXCLUSIVE_LOCKS_REQUIRED(timeline_lock_);
// Calculates media position based on the TimelineFunction returned from
- // AudioConsumer. Should be called only when IsTimeMoving() is true.
+ // AudioConsumer. Must be called only when IsTimeMoving() is true.
base::TimeDelta CurrentMediaTimeLocked()
EXCLUSIVE_LOCKS_REQUIRED(timeline_lock_);
@@ -169,6 +168,10 @@ class FuchsiaAudioRenderer : public AudioRenderer, public TimeSource {
base::TimeDelta min_lead_time_;
base::TimeDelta max_lead_time_;
+ // Set to true after we've received end-of-stream from the |demuxer_stream_|.
+ // The renderer may be restarted after Flush().
+ bool is_at_end_of_stream_ = false;
+
// TimeSource interface is not single-threaded. The lock is used to guard
// fields that are accessed in the TimeSource implementation. Note that these
// fields are updated only on the main thread (which corresponds to the
diff --git a/chromium/media/fuchsia/camera/fake_fuchsia_camera.cc b/chromium/media/fuchsia/camera/fake_fuchsia_camera.cc
index e62e58ddc87..e769995eea4 100644
--- a/chromium/media/fuchsia/camera/fake_fuchsia_camera.cc
+++ b/chromium/media/fuchsia/camera/fake_fuchsia_camera.cc
@@ -10,6 +10,7 @@
#include "base/fuchsia/process_context.h"
#include "base/memory/platform_shared_memory_region.h"
#include "base/memory/writable_shared_memory_region.h"
+#include "base/process/process_handle.h"
#include "base/task/current_thread.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -153,7 +154,10 @@ FakeCameraStream::FakeCameraStream()
: binding_(this),
sysmem_allocator_(base::ComponentContextForProcess()
->svc()
- ->Connect<fuchsia::sysmem::Allocator>()) {}
+ ->Connect<fuchsia::sysmem::Allocator>()) {
+ sysmem_allocator_->SetDebugClientInfo("ChromiumFakeCameraStream",
+ base::GetCurrentProcId());
+}
FakeCameraStream::~FakeCameraStream() = default;
diff --git a/chromium/media/fuchsia/cdm/fuchsia_cdm.cc b/chromium/media/fuchsia/cdm/fuchsia_cdm.cc
index fea4129883f..34cd0cf2642 100644
--- a/chromium/media/fuchsia/cdm/fuchsia_cdm.cc
+++ b/chromium/media/fuchsia/cdm/fuchsia_cdm.cc
@@ -60,6 +60,18 @@ fuchsia::media::drm::LicenseServerMessage CreateLicenseServerMessage(
return message;
}
+fuchsia::media::drm::LicenseSessionType ToFuchsiaLicenseSessionType(
+ CdmSessionType session_type) {
+ switch (session_type) {
+ case CdmSessionType::kTemporary:
+ return fuchsia::media::drm::LicenseSessionType::TEMPORARY;
+ case CdmSessionType::kPersistentLicense:
+ return fuchsia::media::drm::LicenseSessionType::PERSISTENT_LICENSE;
+ case CdmSessionType::kPersistentUsageRecord:
+ return fuchsia::media::drm::LicenseSessionType::PERSISTENT_USAGE_RECORD;
+ }
+}
+
CdmMessageType ToCdmMessageType(fuchsia::media::drm::LicenseMessageType type) {
switch (type) {
case fuchsia::media::drm::LicenseMessageType::REQUEST:
@@ -119,6 +131,7 @@ class FuchsiaCdm::CdmSession {
public:
using ResultCB =
base::OnceCallback<void(base::Optional<CdmPromise::Exception>)>;
+ using SessionReadyCB = base::OnceCallback<void(bool success)>;
CdmSession(const FuchsiaCdm::SessionCallbacks* callbacks,
base::RepeatingClosure on_new_key)
@@ -157,6 +170,15 @@ class FuchsiaCdm::CdmSession {
result) { ProcessResult(result); });
}
+ void GenerateLicenseRelease(ResultCB generate_license_release_cb) {
+ DCHECK(!result_cb_);
+ result_cb_ = std::move(generate_license_release_cb);
+ pending_release_ = true;
+ session_->GenerateLicenseRelease(
+ [this](fuchsia::media::drm::LicenseSession_GenerateLicenseRelease_Result
+ result) { ProcessResult(result); });
+ }
+
void ProcessLicenseResponse(const std::vector<uint8_t>& response,
ResultCB process_license_response_cb) {
DCHECK(!result_cb_);
@@ -172,7 +194,20 @@ class FuchsiaCdm::CdmSession {
}
const std::string& session_id() const { return session_id_; }
+ void set_session_ready_cb(SessionReadyCB session_ready_cb) {
+ session_ready_cb_ = std::move(session_ready_cb);
+ session_.events().OnReady =
+ fit::bind_member(this, &CdmSession::OnSessionReady);
+ }
+
+ bool pending_release() const { return pending_release_; }
+
private:
+ void OnSessionReady() {
+ DCHECK(session_ready_cb_);
+ std::move(session_ready_cb_).Run(true);
+ }
+
void OnLicenseMessageGenerated(fuchsia::media::drm::LicenseMessage message) {
DCHECK(!session_id_.empty());
std::string session_msg;
@@ -212,6 +247,10 @@ class FuchsiaCdm::CdmSession {
void OnSessionError(zx_status_t status) {
ZX_LOG(ERROR, status) << "Session error.";
+
+ if (session_ready_cb_)
+ std::move(session_ready_cb_).Run(false);
+
if (result_cb_)
std::move(result_cb_).Run(CdmPromise::Exception::TYPE_ERROR);
}
@@ -231,9 +270,16 @@ class FuchsiaCdm::CdmSession {
fuchsia::media::drm::LicenseSessionPtr session_;
std::string session_id_;
+ // Callback for OnReady.
+ SessionReadyCB session_ready_cb_;
+
// Callback for license operation.
ResultCB result_cb_;
+ // `GenerateLicenseRelease` has been called and the session is waiting for
+  // the license release response from the server.
+ bool pending_release_ = false;
+
DISALLOW_COPY_AND_ASSIGN(CdmSession);
};
@@ -258,7 +304,7 @@ FuchsiaCdm::FuchsiaCdm(fuchsia::media::drm::ContentDecryptionModulePtr cdm,
<< " channel was terminated.";
// Reject all the pending promises.
- promises_.Clear();
+ promises_.Clear(CdmPromiseAdapter::ClearReason::kConnectionError);
// If the channel closed prior to invoking the ready_cb_, we should invoke
// it here with failure.
@@ -335,13 +381,6 @@ void FuchsiaCdm::CreateSessionAndGenerateRequest(
EmeInitDataType init_data_type,
const std::vector<uint8_t>& init_data,
std::unique_ptr<NewSessionCdmPromise> promise) {
- // TODO(crbug.com/1131114): Support persistent license.
- if (session_type != CdmSessionType::kTemporary) {
- promise->reject(CdmPromise::Exception::NOT_SUPPORTED_ERROR, 0,
- "session type is not supported.");
- return;
- }
-
if (init_data_type == EmeInitDataType::UNKNOWN) {
promise->reject(CdmPromise::Exception::NOT_SUPPORTED_ERROR, 0,
"init data type is not supported.");
@@ -358,8 +397,7 @@ void FuchsiaCdm::CreateSessionAndGenerateRequest(
CdmSession* session_ptr = session.get();
cdm_->CreateLicenseSession(
- fuchsia::media::drm::LicenseSessionType::TEMPORARY,
- session_ptr->NewRequest(),
+ ToFuchsiaLicenseSessionType(session_type), session_ptr->NewRequest(),
[this, promise_id,
session = std::move(session)](std::string session_id) mutable {
OnCreateSession(std::move(session), promise_id, session_id);
@@ -416,7 +454,46 @@ void FuchsiaCdm::OnGenerateLicenseRequestStatus(
void FuchsiaCdm::LoadSession(CdmSessionType session_type,
const std::string& session_id,
std::unique_ptr<NewSessionCdmPromise> promise) {
- NOTIMPLEMENTED();
+ DCHECK_NE(session_type, CdmSessionType::kTemporary);
+ DCHECK(!session_id.empty());
+ REJECT_PROMISE_AND_RETURN_IF_BAD_CDM(promise, cdm_);
+
+ if (session_map_.contains(session_id)) {
+ promise->reject(CdmPromise::Exception::QUOTA_EXCEEDED_ERROR, 0,
+ "session already exists.");
+ return;
+ }
+
+ uint32_t promise_id = promises_.SavePromise(std::move(promise));
+
+ auto session = std::make_unique<CdmSession>(
+ &session_callbacks_,
+ base::BindRepeating(&FuchsiaCdm::OnNewKey, base::Unretained(this)));
+ CdmSession* session_ptr = session.get();
+
+ session_ptr->set_session_id(session_id);
+ session_ptr->set_session_ready_cb(
+ base::BindOnce(&FuchsiaCdm::OnSessionLoaded, base::Unretained(this),
+ std::move(session), promise_id));
+
+ cdm_->LoadLicenseSession(session_id, session_ptr->NewRequest());
+}
+
+void FuchsiaCdm::OnSessionLoaded(std::unique_ptr<CdmSession> session,
+ uint32_t promise_id,
+ bool loaded) {
+ if (!loaded) {
+ promises_.ResolvePromise(promise_id, std::string());
+ return;
+ }
+
+ std::string session_id = session->session_id();
+ DCHECK(session_map_.find(session_id) == session_map_.end())
+ << "Duplicated session id " << session_id;
+
+ session_map_.emplace(session_id, std::move(session));
+
+ promises_.ResolvePromise(promise_id, session_id);
}
void FuchsiaCdm::UpdateSession(const std::string& session_id,
@@ -441,10 +518,11 @@ void FuchsiaCdm::UpdateSession(const std::string& session_id,
session->ProcessLicenseResponse(
response, base::BindOnce(&FuchsiaCdm::OnProcessLicenseServerMessageStatus,
- base::Unretained(this), promise_id));
+ base::Unretained(this), session_id, promise_id));
}
void FuchsiaCdm::OnProcessLicenseServerMessageStatus(
+ const std::string& session_id,
uint32_t promise_id,
base::Optional<CdmPromise::Exception> exception) {
if (exception.has_value()) {
@@ -454,6 +532,19 @@ void FuchsiaCdm::OnProcessLicenseServerMessageStatus(
}
promises_.ResolvePromise(promise_id);
+
+ auto it = session_map_.find(session_id);
+ if (it == session_map_.end())
+ return;
+
+ // Close the session if the session is waiting for license release ack.
+ CdmSession* session = it->second.get();
+ DCHECK(session);
+
+ if (!session->pending_release())
+ return;
+
+ session_map_.erase(it);
}
void FuchsiaCdm::CloseSession(const std::string& session_id,
@@ -470,9 +561,41 @@ void FuchsiaCdm::CloseSession(const std::string& session_id,
void FuchsiaCdm::RemoveSession(const std::string& session_id,
std::unique_ptr<SimpleCdmPromise> promise) {
- NOTIMPLEMENTED();
- promise->reject(CdmPromise::Exception::NOT_SUPPORTED_ERROR, 0,
- "not implemented");
+ auto it = session_map_.find(session_id);
+ if (it == session_map_.end()) {
+ promise->reject(CdmPromise::Exception::INVALID_STATE_ERROR, 0,
+ "session doesn't exist.");
+ return;
+ }
+
+ REJECT_PROMISE_AND_RETURN_IF_BAD_CDM(promise, cdm_);
+
+ uint32_t promise_id = promises_.SavePromise(std::move(promise));
+
+ CdmSession* session = it->second.get();
+ DCHECK(session);
+
+  // For a temporary session, the API will remove the keys and close the
+  // session. For persistent license and persistent usage record sessions, the
+  // API will invalidate the keys and generate a license release message.
+ session->GenerateLicenseRelease(
+ base::BindOnce(&FuchsiaCdm::OnGenerateLicenseReleaseStatus,
+ base::Unretained(this), session_id, promise_id));
+}
+
+void FuchsiaCdm::OnGenerateLicenseReleaseStatus(
+ const std::string& session_id,
+ uint32_t promise_id,
+ base::Optional<CdmPromise::Exception> exception) {
+ if (exception.has_value()) {
+ promises_.RejectPromise(promise_id, exception.value(), 0,
+ "Failed to release license.");
+ session_map_.erase(session_id);
+ return;
+ }
+
+ DCHECK(!session_id.empty());
+ promises_.ResolvePromise(promise_id);
}
CdmContext* FuchsiaCdm::GetCdmContext() {
diff --git a/chromium/media/fuchsia/cdm/fuchsia_cdm.h b/chromium/media/fuchsia/cdm/fuchsia_cdm.h
index c81dc1b1da7..6f3c2a10805 100644
--- a/chromium/media/fuchsia/cdm/fuchsia_cdm.h
+++ b/chromium/media/fuchsia/cdm/fuchsia_cdm.h
@@ -89,6 +89,15 @@ class FuchsiaCdm : public ContentDecryptionModule,
uint32_t promise_id,
base::Optional<CdmPromise::Exception> exception);
void OnProcessLicenseServerMessageStatus(
+ const std::string& session_id,
+ uint32_t promise_id,
+ base::Optional<CdmPromise::Exception> exception);
+ void OnSessionLoaded(std::unique_ptr<CdmSession> session,
+ uint32_t promise_id,
+ bool loaded);
+
+ void OnGenerateLicenseReleaseStatus(
+ const std::string& session_id,
uint32_t promise_id,
base::Optional<CdmPromise::Exception> exception);
diff --git a/chromium/media/fuchsia/cdm/fuchsia_decryptor.cc b/chromium/media/fuchsia/cdm/fuchsia_decryptor.cc
index 6df4312e094..c046957b792 100644
--- a/chromium/media/fuchsia/cdm/fuchsia_decryptor.cc
+++ b/chromium/media/fuchsia/cdm/fuchsia_decryptor.cc
@@ -64,16 +64,16 @@ void FuchsiaDecryptor::InitializeVideoDecoder(const VideoDecoderConfig& config,
void FuchsiaDecryptor::DecryptAndDecodeAudio(
scoped_refptr<DecoderBuffer> encrypted,
- const AudioDecodeCB& audio_decode_cb) {
+ AudioDecodeCB audio_decode_cb) {
NOTREACHED();
- audio_decode_cb.Run(Status::kError, AudioFrames());
+ std::move(audio_decode_cb).Run(Status::kError, AudioFrames());
}
void FuchsiaDecryptor::DecryptAndDecodeVideo(
scoped_refptr<DecoderBuffer> encrypted,
- const VideoDecodeCB& video_decode_cb) {
+ VideoDecodeCB video_decode_cb) {
NOTREACHED();
- video_decode_cb.Run(Status::kError, nullptr);
+ std::move(video_decode_cb).Run(Status::kError, nullptr);
}
void FuchsiaDecryptor::ResetDecoder(StreamType stream_type) {
diff --git a/chromium/media/fuchsia/cdm/fuchsia_decryptor.h b/chromium/media/fuchsia/cdm/fuchsia_decryptor.h
index c9397c3e6fa..ae502e70454 100644
--- a/chromium/media/fuchsia/cdm/fuchsia_decryptor.h
+++ b/chromium/media/fuchsia/cdm/fuchsia_decryptor.h
@@ -35,9 +35,9 @@ class FuchsiaDecryptor : public Decryptor {
void InitializeVideoDecoder(const VideoDecoderConfig& config,
DecoderInitCB init_cb) override;
void DecryptAndDecodeAudio(scoped_refptr<DecoderBuffer> encrypted,
- const AudioDecodeCB& audio_decode_cb) override;
+ AudioDecodeCB audio_decode_cb) override;
void DecryptAndDecodeVideo(scoped_refptr<DecoderBuffer> encrypted,
- const VideoDecodeCB& video_decode_cb) override;
+ VideoDecodeCB video_decode_cb) override;
void ResetDecoder(StreamType stream_type) override;
void DeinitializeDecoder(StreamType stream_type) override;
bool CanAlwaysDecrypt() override;
diff --git a/chromium/media/fuchsia/cdm/fuchsia_stream_decryptor.cc b/chromium/media/fuchsia/cdm/fuchsia_stream_decryptor.cc
index 6a45dc9a521..4d5712951b0 100644
--- a/chromium/media/fuchsia/cdm/fuchsia_stream_decryptor.cc
+++ b/chromium/media/fuchsia/cdm/fuchsia_stream_decryptor.cc
@@ -101,7 +101,8 @@ FuchsiaStreamDecryptorBase::FuchsiaStreamDecryptorBase(
fuchsia::media::StreamProcessorPtr processor,
size_t min_buffer_size)
: processor_(std::move(processor), this),
- min_buffer_size_(min_buffer_size) {}
+ min_buffer_size_(min_buffer_size),
+ allocator_("CrFuchsiaStreamDecryptorBase") {}
FuchsiaStreamDecryptorBase::~FuchsiaStreamDecryptorBase() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
diff --git a/chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager.cc b/chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager.cc
index cb2f70ea362..7edfa056a86 100644
--- a/chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager.cc
+++ b/chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager.cc
@@ -171,7 +171,6 @@ class FuchsiaCdmManager::KeySystemClient {
base::Optional<DataStoreId> data_store_id = GetDataStoreIdForPath(
std::move(storage_path), std::move(create_fetcher_callback));
if (!data_store_id) {
- DLOG(ERROR) << "Unable to create DataStore for path: " << storage_path;
request.Close(ZX_ERR_NO_RESOURCES);
return;
}
@@ -292,11 +291,12 @@ void FuchsiaCdmManager::CreateAndProvision(
base::FilePath storage_path = GetStoragePath(key_system, origin);
+ auto task = base::BindOnce(&CreateStorageDirectory, storage_path);
storage_task_runner_->PostTaskAndReplyWithResult(
- FROM_HERE, base::BindOnce(&CreateStorageDirectory, storage_path),
+ FROM_HERE, std::move(task),
base::BindOnce(&FuchsiaCdmManager::CreateCdm, weak_factory_.GetWeakPtr(),
key_system, std::move(create_fetcher_cb),
- std::move(request), storage_path));
+ std::move(request), std::move(storage_path)));
}
void FuchsiaCdmManager::set_on_key_system_disconnect_for_test_callback(
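Besides hoisting the task, the hunk above fixes a use-after-move: the removed
DLOG read |storage_path| after it had been moved into the bound task. A sketch
of the PostTaskAndReplyWithResult idiom it relies on, with hypothetical work
and reply functions and //base include paths as of this Chromium era:

    #include "base/bind.h"
    #include "base/location.h"
    #include "base/memory/scoped_refptr.h"
    #include "base/sequenced_task_runner.h"

    bool DoBlockingWork();     // Hypothetical; runs on |task_runner|.
    void OnWorkDone(bool ok);  // Hypothetical; runs on the posting sequence.

    void Start(scoped_refptr<base::SequencedTaskRunner> task_runner) {
      // The task's return value becomes the reply's argument.
      task_runner->PostTaskAndReplyWithResult(FROM_HERE,
                                              base::BindOnce(&DoBlockingWork),
                                              base::BindOnce(&OnWorkDone));
    }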
diff --git a/chromium/media/fuchsia/cdm/service/provisioning_fetcher_impl_unittest.cc b/chromium/media/fuchsia/cdm/service/provisioning_fetcher_impl_unittest.cc
index 9b3f753bd3d..a9803b326d6 100644
--- a/chromium/media/fuchsia/cdm/service/provisioning_fetcher_impl_unittest.cc
+++ b/chromium/media/fuchsia/cdm/service/provisioning_fetcher_impl_unittest.cc
@@ -112,7 +112,7 @@ TEST_F(ProvisioningFetcherImplTest, NoDefaultProvisioningUrl) {
fetcher.Bind(base::MakeExpectedRunClosure(FROM_HERE));
- fetcher.Fetch(CreateProvisioningRequest(fit::nullopt, kTestRequest),
+ fetcher.Fetch(CreateProvisioningRequest({}, kTestRequest),
[](drm::ProvisioningResponse response) { FAIL(); });
}
diff --git a/chromium/media/fuchsia/common/sysmem_buffer_pool.cc b/chromium/media/fuchsia/common/sysmem_buffer_pool.cc
index 2b4b0b5487a..de5e197c1c3 100644
--- a/chromium/media/fuchsia/common/sysmem_buffer_pool.cc
+++ b/chromium/media/fuchsia/common/sysmem_buffer_pool.cc
@@ -10,6 +10,7 @@
#include "base/bind.h"
#include "base/fuchsia/fuchsia_logging.h"
#include "base/fuchsia/process_context.h"
+#include "base/process/process_handle.h"
#include "media/fuchsia/common/sysmem_buffer_reader.h"
#include "media/fuchsia/common/sysmem_buffer_writer.h"
@@ -35,6 +36,13 @@ SysmemBufferPool::Creator::~Creator() {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
}
+void SysmemBufferPool::Creator::SetName(uint32_t priority,
+ base::StringPiece name) {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ DCHECK(!create_cb_);
+ collection_->SetName(priority, std::string(name));
+}
+
void SysmemBufferPool::Creator::Create(
fuchsia::sysmem::BufferCollectionConstraints constraints,
CreateCB create_cb) {
@@ -125,11 +133,14 @@ void SysmemBufferPool::OnError() {
std::move(create_writer_cb_).Run(nullptr);
}
-BufferAllocator::BufferAllocator() {
+BufferAllocator::BufferAllocator(base::StringPiece client_name) {
allocator_ = base::ComponentContextForProcess()
->svc()
->Connect<fuchsia::sysmem::Allocator>();
+ allocator_->SetDebugClientInfo(std::string(client_name),
+ base::GetCurrentProcId());
+
allocator_.set_error_handler([](zx_status_t status) {
    // Just log a warning. We will handle the BufferCollection failure when
    // trying to create a new BufferCollection.
diff --git a/chromium/media/fuchsia/common/sysmem_buffer_pool.h b/chromium/media/fuchsia/common/sysmem_buffer_pool.h
index a42690b1bb3..84407ccc59c 100644
--- a/chromium/media/fuchsia/common/sysmem_buffer_pool.h
+++ b/chromium/media/fuchsia/common/sysmem_buffer_pool.h
@@ -43,6 +43,11 @@ class SysmemBufferPool {
std::vector<fuchsia::sysmem::BufferCollectionTokenPtr> shared_tokens);
~Creator();
+ // Sets the name of the created buffers. Priority is a number used to choose
+ // which name to use when multiple clients set names. Must be called before
+ // Create. See fuchsia.sysmem/BufferCollection.SetName for a description of
+ // the arguments.
+ void SetName(uint32_t priority, base::StringPiece name);
void Create(fuchsia::sysmem::BufferCollectionConstraints constraints,
CreateCB build_cb);
@@ -95,7 +100,7 @@ class SysmemBufferPool {
// Wrapper of sysmem Allocator.
class BufferAllocator {
public:
- BufferAllocator();
+ explicit BufferAllocator(base::StringPiece client_name);
~BufferAllocator();
fuchsia::sysmem::BufferCollectionTokenPtr CreateNewToken();
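A hypothetical call sequence for the naming hooks added above, assuming a
Creator obtained from the usual pool-creation flow and that CreateCB is the
public callback alias taken by Create(); the priority value and the strings
are made up for illustration:

    #include <utility>
    #include "media/fuchsia/common/sysmem_buffer_pool.h"

    void NameAndCreate(media::SysmemBufferPool::Creator* creator,
                       fuchsia::sysmem::BufferCollectionConstraints constraints,
                       media::SysmemBufferPool::Creator::CreateCB create_cb) {
      // Per the header comment, SetName() must be called before Create().
      creator->SetName(/*priority=*/10u, "CrExampleCollection");
      creator->Create(std::move(constraints), std::move(create_cb));
    }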
diff --git a/chromium/media/gpu/BUILD.gn b/chromium/media/gpu/BUILD.gn
index c82217f283b..f146f3cc213 100644
--- a/chromium/media/gpu/BUILD.gn
+++ b/chromium/media/gpu/BUILD.gn
@@ -146,7 +146,7 @@ component("gpu") {
"//gpu/ipc/common:android_image_reader_utils",
# TODO(crbug.com/789435): This can be removed once CdmManager is removed.
- "//gpu/ipc/common:ipc_common_sources",
+ "//gpu/ipc/common",
"//media/mojo:buildflags",
"//services/service_manager/public/cpp:cpp",
"//ui/gl:gl_jni_headers",
@@ -170,6 +170,8 @@ component("gpu") {
if (is_win) {
sources += [
"windows/av1_guids.h",
+ "windows/d3d11_av1_accelerator.cc",
+ "windows/d3d11_av1_accelerator.h",
"windows/d3d11_com_defs.h",
"windows/d3d11_copying_texture_wrapper.cc",
"windows/d3d11_copying_texture_wrapper.h",
@@ -297,6 +299,7 @@ source_set("common") {
]
}
+ public_deps = []
deps = [
":buildflags",
"//base",
@@ -307,6 +310,16 @@ source_set("common") {
"//ui/gfx:memory_buffer",
"//ui/gfx/geometry",
]
+
+ if (use_libgav1_parser) {
+ sources += [
+ "av1_decoder.cc",
+ "av1_decoder.h",
+ "av1_picture.cc",
+ "av1_picture.h",
+ ]
+ public_deps += [ "//third_party/libgav1:libgav1" ]
+ }
}
source_set("command_buffer_helper") {
@@ -442,42 +455,6 @@ source_set("android_video_decode_accelerator_unittests") {
}
}
-if (use_v4l2_codec || use_vaapi || is_mac || is_win) {
- test("video_encode_accelerator_unittest") {
- testonly = true
- deps = [
- "test:helpers",
- "//base",
- "//base/test:test_support",
- "//build:chromeos_buildflags",
- "//media:test_support",
- "//media/gpu",
- "//media/gpu/test:test_helpers",
- "//media/mojo/common:mojo_shared_buffer_video_frame",
- "//media/parsers",
- "//mojo/core/embedder",
- "//testing/gtest",
- "//third_party/ffmpeg",
- "//third_party/libyuv",
- "//ui/base",
- "//ui/gfx",
- "//ui/gfx:test_support",
- "//ui/gfx/geometry",
- "//ui/gl",
- "//ui/gl:test_support",
- ]
- configs += [ "//third_party/libyuv:libyuv_config" ]
- sources = [ "video_encode_accelerator_unittest.cc" ]
- if (use_x11) {
- deps += [ "//ui/gfx/x" ]
- public_configs = [ "//build/config/linux/libva" ]
- }
- if (use_ozone) {
- deps += [ "//ui/ozone" ]
- }
- }
-}
-
static_library("test_support") {
visibility = [ "//media/gpu/*" ]
testonly = true
@@ -510,7 +487,7 @@ source_set("unit_tests") {
if (proprietary_codecs && enable_platform_hevc) {
sources += [ "h265_decoder_unittest.cc" ]
}
- if (is_ash && (use_v4l2_codec || use_vaapi)) {
+ if (is_chromeos_ash && (use_v4l2_codec || use_vaapi)) {
deps += [ "//media/gpu/chromeos:unit_tests" ]
}
if (use_vaapi) {
@@ -524,6 +501,15 @@ source_set("unit_tests") {
if (use_v4l2_codec || use_vaapi) {
sources += [ "vp8_decoder_unittest.cc" ]
}
+
+ if (use_libgav1_parser) {
+ sources += [ "av1_decoder_unittest.cc" ]
+ deps += [
+ "//build:chromeos_buildflags",
+ "//third_party/ffmpeg",
+ ]
+ }
+
if (is_win && enable_library_cdms) {
sources += [
"windows/d3d11_copying_texture_wrapper_unittest.cc",
@@ -547,6 +533,7 @@ source_set("unit_tests") {
# TODO(crbug.com/1006266): consider using |use_chromeos_video_acceleration|.
if (use_v4l2_codec || use_vaapi) {
test("video_decode_accelerator_tests") {
+ assert(enable_av1_decoder)
testonly = true
sources = [ "video_decode_accelerator_tests.cc" ]
data = [ "//media/test/data/" ]
@@ -555,6 +542,7 @@ if (use_v4l2_codec || use_vaapi) {
"test:frame_file_writer",
"test:frame_validator",
"test:helpers",
+ "test:test_helpers",
"test:video_player",
"test:video_player_test_environment",
"test:video_player_thumbnail_renderer",
@@ -600,9 +588,12 @@ if (use_v4l2_codec || use_vaapi) {
data = [ "//media/test/data/" ]
deps = [
":buildflags",
+ "test:frame_validator",
"test:helpers",
+ "test:test_helpers",
"test:video_encoder",
"test:video_encoder_test_environment",
+ "test:video_encoder_test_environment",
"//media:test_support",
"//testing/gtest",
]
@@ -619,3 +610,14 @@ if (proprietary_codecs && enable_platform_hevc) {
]
}
}
+
+if (use_libgav1_parser) {
+ fuzzer_test("media_av1_decoder_fuzzer") {
+ sources = [ "av1_decoder_fuzzertest.cc" ]
+ deps = [
+ ":common",
+ "//base",
+ "//media",
+ ]
+ }
+}
diff --git a/chromium/media/gpu/accelerated_video_decoder.h b/chromium/media/gpu/accelerated_video_decoder.h
index 5dc4cb4a005..135e4b0bfa6 100644
--- a/chromium/media/gpu/accelerated_video_decoder.h
+++ b/chromium/media/gpu/accelerated_video_decoder.h
@@ -69,13 +69,14 @@ class MEDIA_GPU_EXPORT AcceleratedVideoDecoder {
// we need a new set of them, or when an error occurs.
virtual DecodeResult Decode() WARN_UNUSED_RESULT = 0;
- // Return dimensions/visible rectangle/profile/required number of pictures
- // that client should be ready to provide for the decoder to function properly
- // (of which up to GetNumReferenceFrames() might be needed for internal
- // decoding). To be used after Decode() returns kConfigChange.
+ // Return dimensions/visible rectangle/profile/bit depth/required number of
+ // pictures that client should be ready to provide for the decoder to function
+ // properly (of which up to GetNumReferenceFrames() might be needed for
+ // internal decoding). To be used after Decode() returns kConfigChange.
virtual gfx::Size GetPicSize() const = 0;
virtual gfx::Rect GetVisibleRect() const = 0;
virtual VideoCodecProfile GetProfile() const = 0;
+ virtual uint8_t GetBitDepth() const = 0;
virtual size_t GetRequiredNumOfPictures() const = 0;
virtual size_t GetNumReferenceFrames() const = 0;
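With GetBitDepth() added, a client reacting to kConfigChange has one more
property to query before reallocating pictures. A hedged sketch of that loop;
AllocatePictures() and the surrounding driver code are hypothetical:

    // Hypothetical helper that (re)allocates the decoder's output pictures.
    void AllocatePictures(const gfx::Size& pic_size,
                          const gfx::Rect& visible_rect,
                          media::VideoCodecProfile profile,
                          uint8_t bit_depth,
                          size_t num_pictures);

    void DecodeLoop(media::AcceleratedVideoDecoder* decoder) {
      for (;;) {
        switch (decoder->Decode()) {
          case media::AcceleratedVideoDecoder::kConfigChange:
            AllocatePictures(decoder->GetPicSize(), decoder->GetVisibleRect(),
                             decoder->GetProfile(), decoder->GetBitDepth(),
                             decoder->GetRequiredNumOfPictures());
            continue;  // Resume decoding with the new pictures.
          case media::AcceleratedVideoDecoder::kRanOutOfStreamData:
            return;  // Feed more input via SetStream() and call again.
          default:
            return;  // Errors and surface exhaustion are handled elsewhere.
        }
      }
    }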
diff --git a/chromium/media/gpu/android/codec_allocator.cc b/chromium/media/gpu/android/codec_allocator.cc
index 724c54a4fff..0f80ad4fec6 100644
--- a/chromium/media/gpu/android/codec_allocator.cc
+++ b/chromium/media/gpu/android/codec_allocator.cc
@@ -95,6 +95,16 @@ void CodecAllocator::CreateMediaCodecAsync(
if (force_sw_codecs_)
codec_config->codec_type = CodecType::kSoftware;
+ // If we're still allowed to pick any type we want, then limit to software for
+ // low resolution. https://crbug.com/1166833
+ if (codec_config->codec_type == CodecType::kAny &&
+ (codec_config->initial_expected_coded_size.width() <
+ kMinHardwareResolution.width() ||
+ codec_config->initial_expected_coded_size.height() <
+ kMinHardwareResolution.height())) {
+ codec_config->codec_type = CodecType::kSoftware;
+ }
+
const auto start_time = tick_clock_->NowTicks();
pending_operations_.push_back(start_time);
diff --git a/chromium/media/gpu/android/codec_allocator.h b/chromium/media/gpu/android/codec_allocator.h
index 1340d8bc28f..8f42905e403 100644
--- a/chromium/media/gpu/android/codec_allocator.h
+++ b/chromium/media/gpu/android/codec_allocator.h
@@ -31,6 +31,10 @@ namespace media {
// path is hung up.
class MEDIA_GPU_EXPORT CodecAllocator {
public:
+ // Minimum coded size that we'll allow to get a hardware instance, since not
+  // all hw implementations support it. See crbug.com/1166833.
+ static constexpr gfx::Size kMinHardwareResolution{96, 96};
+
static CodecAllocator* GetInstance(
scoped_refptr<base::SequencedTaskRunner> task_runner);
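A standalone sketch of the low-resolution rule introduced above: if either
dimension of the expected coded size is below the 96x96 floor, the allocator
downgrades a kAny request to the software codec.

    #include "ui/gfx/geometry/size.h"

    constexpr gfx::Size kMinHwResolution{96, 96};  // Mirrors the new constant.

    bool ShouldForceSoftwareCodec(const gfx::Size& coded_size) {
      return coded_size.width() < kMinHwResolution.width() ||
             coded_size.height() < kMinHwResolution.height();
    }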
diff --git a/chromium/media/gpu/android/codec_allocator_unittest.cc b/chromium/media/gpu/android/codec_allocator_unittest.cc
index f9568c33ca9..7b08de5e192 100644
--- a/chromium/media/gpu/android/codec_allocator_unittest.cc
+++ b/chromium/media/gpu/android/codec_allocator_unittest.cc
@@ -108,6 +108,16 @@ class CodecAllocatorTest : public testing::Test {
MOCK_METHOD1(OnCodecCreated, void(CodecType));
MOCK_METHOD0(OnCodecReleased, void());
+ // Allocate and return a config that allows any codec, and is suitable for
+ // hardware decode.
+ std::unique_ptr<VideoCodecConfig> CreateConfig() {
+ auto config = std::make_unique<VideoCodecConfig>();
+ config->codec_type = CodecType::kAny;
+ config->initial_expected_coded_size =
+ CodecAllocator::kMinHardwareResolution;
+ return config;
+ }
+
protected:
// So that we can get the thread's task runner.
base::test::TaskEnvironment task_environment_;
@@ -130,8 +140,7 @@ class CodecAllocatorTest : public testing::Test {
TEST_F(CodecAllocatorTest, NormalCreation) {
ASSERT_FALSE(IsPrimaryTaskRunnerLikelyHung());
- auto config = std::make_unique<VideoCodecConfig>();
- config->codec_type = CodecType::kAny;
+ auto config = CreateConfig();
base::RunLoop run_loop;
allocator_->CreateMediaCodecAsync(
@@ -147,7 +156,7 @@ TEST_F(CodecAllocatorTest, NormalCreation) {
TEST_F(CodecAllocatorTest, NormalSecureCreation) {
ASSERT_FALSE(IsPrimaryTaskRunnerLikelyHung());
- auto config = std::make_unique<VideoCodecConfig>();
+ auto config = CreateConfig();
config->codec_type = CodecType::kSecure;
base::RunLoop run_loop;
@@ -164,8 +173,7 @@ TEST_F(CodecAllocatorTest, NormalSecureCreation) {
TEST_F(CodecAllocatorTest, MultipleCreation) {
ASSERT_FALSE(IsPrimaryTaskRunnerLikelyHung());
- auto config = std::make_unique<VideoCodecConfig>();
- config->codec_type = CodecType::kAny;
+ auto config = CreateConfig();
base::RunLoop run_loop;
allocator_->CreateMediaCodecAsync(
@@ -178,7 +186,7 @@ TEST_F(CodecAllocatorTest, MultipleCreation) {
tick_clock_.Advance(base::TimeDelta::FromMilliseconds(400));
ASSERT_FALSE(IsPrimaryTaskRunnerLikelyHung());
- auto config_secure = std::make_unique<VideoCodecConfig>();
+ auto config_secure = CreateConfig();
config_secure->codec_type = CodecType::kSecure;
allocator_->CreateMediaCodecAsync(
@@ -232,7 +240,7 @@ TEST_F(CodecAllocatorTest, StalledCreateCountsAsHung) {
ASSERT_FALSE(IsPrimaryTaskRunnerLikelyHung());
// Create codec, but don't pump message loop.
- auto config = std::make_unique<VideoCodecConfig>();
+ auto config = CreateConfig();
config->codec_type = CodecType::kSecure;
allocator_->CreateMediaCodecAsync(base::DoNothing(), std::move(config));
tick_clock_.Advance(base::TimeDelta::FromSeconds(1));
@@ -249,7 +257,7 @@ TEST_F(CodecAllocatorTest, SecureCreationFailsWhenHung) {
ASSERT_TRUE(IsPrimaryTaskRunnerLikelyHung());
// Secure creation should fail since we're now using software codecs.
- auto config = std::make_unique<VideoCodecConfig>();
+ auto config = CreateConfig();
config->codec_type = CodecType::kSecure;
base::RunLoop run_loop;
allocator_->CreateMediaCodecAsync(
@@ -280,9 +288,8 @@ TEST_F(CodecAllocatorTest, SoftwareCodecUsedWhenHung) {
tick_clock_.Advance(base::TimeDelta::FromSeconds(1));
ASSERT_TRUE(IsPrimaryTaskRunnerLikelyHung());
- // Secure creation should fail since we're now using software codecs.
- auto config = std::make_unique<VideoCodecConfig>();
- config->codec_type = CodecType::kAny;
+ // Creation should fall back to software.
+ auto config = CreateConfig();
base::RunLoop run_loop;
allocator_->CreateMediaCodecAsync(
base::BindOnce(&CodecAllocatorTest::OnCodecCreatedInternal,
@@ -312,7 +319,7 @@ TEST_F(CodecAllocatorTest, CodecReleasedOnRightTaskRunnerWhenHung) {
ASSERT_TRUE(IsPrimaryTaskRunnerLikelyHung());
// Release software codec, ensure it runs on secondary task runner.
- auto config = std::make_unique<VideoCodecConfig>();
+ auto config = CreateConfig();
config->codec_type = CodecType::kSoftware;
auto sw_codec = MockMediaCodecBridge::CreateVideoDecoder(*config);
reinterpret_cast<MockMediaCodecBridge*>(sw_codec.get())
@@ -354,8 +361,7 @@ TEST_F(CodecAllocatorTest, AllocateAndDestroyCodecOnAllocatorThread) {
{
base::RunLoop run_loop;
- auto config = std::make_unique<VideoCodecConfig>();
- config->codec_type = CodecType::kAny;
+ auto config = CreateConfig();
allocator_->CreateMediaCodecAsync(
base::BindOnce(&CodecAllocatorTest::OnCodecCreatedInternal,
@@ -376,4 +382,18 @@ TEST_F(CodecAllocatorTest, AllocateAndDestroyCodecOnAllocatorThread) {
}
}
+TEST_F(CodecAllocatorTest, LowResolutionGetsSoftware) {
+ auto config = CreateConfig();
+ config->initial_expected_coded_size =
+ CodecAllocator::kMinHardwareResolution - gfx::Size(1, 1);
+ base::RunLoop run_loop;
+ allocator_->CreateMediaCodecAsync(
+ base::BindOnce(&CodecAllocatorTest::OnCodecCreatedInternal,
+ base::Unretained(this), run_loop.QuitClosure()),
+ std::move(config));
+
+ EXPECT_CALL(*this, OnCodecCreated(CodecType::kSoftware));
+ run_loop.Run();
+}
+
} // namespace media
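The new test, like the others in this file, leans on the standard RunLoop
quit-on-callback idiom. A minimal isolated sketch; DoAsyncWork() is a
hypothetical asynchronous API:

    #include "base/callback.h"
    #include "base/run_loop.h"

    void DoAsyncWork(base::OnceClosure done);  // Hypothetical.

    void WaitForAsyncWork() {
      base::RunLoop run_loop;
      DoAsyncWork(run_loop.QuitClosure());
      run_loop.Run();  // Pumps tasks until the quit closure fires.
    }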
diff --git a/chromium/media/gpu/android/codec_wrapper.cc b/chromium/media/gpu/android/codec_wrapper.cc
index a89cc168c2d..128ebadb96c 100644
--- a/chromium/media/gpu/android/codec_wrapper.cc
+++ b/chromium/media/gpu/android/codec_wrapper.cc
@@ -125,7 +125,10 @@ CodecOutputBuffer::~CodecOutputBuffer() {
bool CodecOutputBuffer::ReleaseToSurface() {
was_rendered_ = true;
- return codec_->ReleaseCodecOutputBuffer(id_, true);
+ auto result = codec_->ReleaseCodecOutputBuffer(id_, true);
+ if (render_cb_)
+ std::move(render_cb_).Run();
+ return result;
}
CodecWrapperImpl::CodecWrapperImpl(
diff --git a/chromium/media/gpu/android/codec_wrapper.h b/chromium/media/gpu/android/codec_wrapper.h
index 0716f13d4b4..c49931981ba 100644
--- a/chromium/media/gpu/android/codec_wrapper.h
+++ b/chromium/media/gpu/android/codec_wrapper.h
@@ -44,6 +44,12 @@ class MEDIA_GPU_EXPORT CodecOutputBuffer {
// The size of the image.
gfx::Size size() const { return size_; }
+  // Sets a callback that will be called when this buffer is released to the
+  // surface. It will not be called if the buffer is dropped without rendering.
+ void set_render_cb(base::OnceClosure render_cb) {
+ render_cb_ = std::move(render_cb);
+ }
+
// Note that you can't use the first ctor, since CodecWrapperImpl isn't
// defined here. Use the second, and it'll be nullptr.
template <typename... Args>
@@ -67,6 +73,7 @@ class MEDIA_GPU_EXPORT CodecOutputBuffer {
int64_t id_;
bool was_rendered_ = false;
gfx::Size size_;
+ base::OnceClosure render_cb_;
DISALLOW_COPY_AND_ASSIGN(CodecOutputBuffer);
};
diff --git a/chromium/media/gpu/android/codec_wrapper_unittest.cc b/chromium/media/gpu/android/codec_wrapper_unittest.cc
index 66c48ad7f3e..5791937a67c 100644
--- a/chromium/media/gpu/android/codec_wrapper_unittest.cc
+++ b/chromium/media/gpu/android/codec_wrapper_unittest.cc
@@ -355,4 +355,22 @@ TEST_F(CodecWrapperTest, CodecWrapperPostsReleaseToProvidedThread) {
base::RunLoop().RunUntilIdle();
}
+TEST_F(CodecWrapperTest, RenderCallbackCalledIfRendered) {
+ auto codec_buffer = DequeueCodecOutputBuffer();
+ bool flag = false;
+ codec_buffer->set_render_cb(base::BindOnce([](bool* flag) { *flag = true; },
+ base::Unretained(&flag)));
+ codec_buffer->ReleaseToSurface();
+ EXPECT_TRUE(flag);
+}
+
+TEST_F(CodecWrapperTest, RenderCallbackIsNotCalledIfNotRendered) {
+ auto codec_buffer = DequeueCodecOutputBuffer();
+ bool flag = false;
+ codec_buffer->set_render_cb(base::BindOnce([](bool* flag) { *flag = true; },
+ base::Unretained(&flag)));
+ codec_buffer.reset();
+ EXPECT_FALSE(flag);
+}
+
} // namespace media
diff --git a/chromium/media/gpu/android/direct_shared_image_video_provider.cc b/chromium/media/gpu/android/direct_shared_image_video_provider.cc
index 2a700882f09..11efbbe4db0 100644
--- a/chromium/media/gpu/android/direct_shared_image_video_provider.cc
+++ b/chromium/media/gpu/android/direct_shared_image_video_provider.cc
@@ -64,10 +64,11 @@ DirectSharedImageVideoProvider::~DirectSharedImageVideoProvider() = default;
// TODO(liberato): add a thread hop to create the default texture owner, but
// not as part of this class. just post something from VideoFrameFactory.
void DirectSharedImageVideoProvider::Initialize(GpuInitCB gpu_init_cb) {
- // Note that we do not BindToCurrentLoop |gpu_init_cb|, since it is supposed
- // to be called on the gpu main thread, which is somewhat hacky.
- gpu_factory_.Post(FROM_HERE, &GpuSharedImageVideoFactory::Initialize,
- std::move(gpu_init_cb));
+  // Note that we do not use `AsyncCall()` + `Then()` to call `gpu_init_cb`,
+ // since it is supposed to be called on the gpu main thread, which is somewhat
+ // hacky.
+ gpu_factory_.AsyncCall(&GpuSharedImageVideoFactory::Initialize)
+ .WithArgs(std::move(gpu_init_cb));
}
void DirectSharedImageVideoProvider::RequestImage(
@@ -83,9 +84,11 @@ void DirectSharedImageVideoProvider::RequestImage(
// group anyway. The thing that owns buffer management is all we really
// care about, and that doesn't have anything to do with GLImage.
- gpu_factory_.Post(FROM_HERE, &GpuSharedImageVideoFactory::CreateImage,
- BindToCurrentLoop(std::move(cb)), spec,
- std::move(texture_owner));
+ // Note: `cb` is only run on successful creation, so this does not use
+ // `AsyncCall()` + `Then()` to chain the callbacks.
+ gpu_factory_.AsyncCall(&GpuSharedImageVideoFactory::CreateImage)
+ .WithArgs(BindToCurrentLoop(std::move(cb)), spec,
+ std::move(texture_owner));
}
GpuSharedImageVideoFactory::GpuSharedImageVideoFactory(
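This hunk is part of a repo-wide migration from base::SequenceBound::Post() to
the newer AsyncCall()/WithArgs() spelling, which several later hunks repeat. A
minimal self-contained sketch of the pattern, assuming the //base headers of
this era:

    #include "base/memory/scoped_refptr.h"
    #include "base/sequenced_task_runner.h"
    #include "base/threading/sequence_bound.h"

    class Worker {
     public:
      void DoWork(int value) {}
    };

    void Example(scoped_refptr<base::SequencedTaskRunner> task_runner) {
      // |worker| is constructed, called, and destroyed on |task_runner|.
      base::SequenceBound<Worker> worker(task_runner);
      worker.AsyncCall(&Worker::DoWork).WithArgs(42);
      // Old spelling: worker.Post(FROM_HERE, &Worker::DoWork, 42);
    }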
diff --git a/chromium/media/gpu/android/frame_info_helper.cc b/chromium/media/gpu/android/frame_info_helper.cc
index f46ee94a2c6..6d4b20935e0 100644
--- a/chromium/media/gpu/android/frame_info_helper.cc
+++ b/chromium/media/gpu/android/frame_info_helper.cc
@@ -68,7 +68,7 @@ class FrameInfoHelperImpl : public FrameInfoHelper {
stub_ = nullptr;
}
- void GetFrameInfo(
+ void GetFrameInfoImpl(
std::unique_ptr<CodecOutputBufferRenderer> buffer_renderer,
base::OnceCallback<void(std::unique_ptr<CodecOutputBufferRenderer>,
base::Optional<FrameInfo>)> cb) {
@@ -95,6 +95,21 @@ class FrameInfoHelperImpl : public FrameInfoHelper {
std::move(cb).Run(std::move(buffer_renderer), info);
}
+ void GetFrameInfo(
+ std::unique_ptr<CodecOutputBufferRenderer> buffer_renderer,
+ base::OnceCallback<void(std::unique_ptr<CodecOutputBufferRenderer>,
+ base::Optional<FrameInfo>)> cb) {
+ DCHECK(buffer_renderer);
+
+ auto texture_owner = buffer_renderer->texture_owner();
+ DCHECK(texture_owner);
+
+ auto buffer_available_cb =
+ base::BindOnce(&OnGpu::GetFrameInfoImpl, weak_factory_.GetWeakPtr(),
+ std::move(buffer_renderer), std::move(cb));
+ texture_owner->RunWhenBufferIsAvailable(std::move(buffer_available_cb));
+ }
+
private:
// Gets YCbCrInfo from last rendered frame.
base::Optional<gpu::VulkanYCbCrInfo> GetYCbCrInfo(
@@ -117,6 +132,7 @@ class FrameInfoHelperImpl : public FrameInfoHelper {
}
gpu::CommandBufferStub* stub_ = nullptr;
+ base::WeakPtrFactory<OnGpu> weak_factory_{this};
};
FrameInfo GetFrameInfoWithVisibleSize(const gfx::Size& visible_size) {
@@ -180,8 +196,8 @@ class FrameInfoHelperImpl : public FrameInfoHelper {
base::BindOnce(&FrameInfoHelperImpl::OnFrameInfoReady,
weak_factory_.GetWeakPtr()));
- on_gpu_.Post(FROM_HERE, &OnGpu::GetFrameInfo,
- std::move(request.buffer_renderer), std::move(cb));
+ on_gpu_.AsyncCall(&OnGpu::GetFrameInfo)
+ .WithArgs(std::move(request.buffer_renderer), std::move(cb));
      // We didn't complete this request quite yet, so we can't process the
      // queue any further.
break;
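The refactor above splits GetFrameInfo() so the actual size query is deferred
until the TextureOwner reports a free buffer, with the continuation bound
through a WeakPtr so a destroyed helper turns it into a no-op. A minimal
sketch of that deferral pattern; Handler is a hypothetical stand-in:

    #include "base/bind.h"
    #include "base/callback.h"
    #include "base/memory/weak_ptr.h"

    class Handler {
     public:
      // Queries buffer-dependent state; only safe once a buffer exists.
      void OnBufferAvailable() {}

      base::OnceClosure MakeDeferredCallback() {
        // If |this| dies before the buffer arrives, the call is dropped.
        return base::BindOnce(&Handler::OnBufferAvailable,
                              weak_factory_.GetWeakPtr());
      }

     private:
      base::WeakPtrFactory<Handler> weak_factory_{this};
    };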
diff --git a/chromium/media/gpu/android/frame_info_helper_unittest.cc b/chromium/media/gpu/android/frame_info_helper_unittest.cc
index 94dc6b5fa09..c992740d01d 100644
--- a/chromium/media/gpu/android/frame_info_helper_unittest.cc
+++ b/chromium/media/gpu/android/frame_info_helper_unittest.cc
@@ -12,6 +12,7 @@
using testing::_;
using testing::DoAll;
+using testing::Invoke;
using testing::Mock;
using testing::Return;
using testing::SetArgPointee;
@@ -245,4 +246,50 @@ TEST_F(FrameInfoHelperTest, FailedGetCodedSize) {
Mock::VerifyAndClearExpectations(texture_owner.get());
}
+TEST_F(FrameInfoHelperTest, TextureOwnerBufferNotAvailable) {
+ auto texture_owner = base::MakeRefCounted<NiceMock<gpu::MockTextureOwner>>(
+ 0, nullptr, nullptr, true);
+
+ // Return CodedSize when GetCodedSizeAndVisibleRect is called.
+ ON_CALL(*texture_owner, GetCodedSizeAndVisibleRect(_, _, _))
+ .WillByDefault(DoAll(SetArgPointee<1>(kTestCodedSize), Return(true)));
+
+ // Save buffer available callback, we will run it manually.
+ base::OnceClosure buffer_available_cb;
+ EXPECT_CALL(*texture_owner, RunWhenBufferIsAvailable(_))
+ .WillOnce(Invoke([&buffer_available_cb](base::OnceClosure cb) {
+ buffer_available_cb = std::move(cb);
+ }));
+
+  // Verify that GetCodedSizeAndVisibleRect is not called until the buffer is
+  // available.
+ EXPECT_CALL(*texture_owner, GetCodedSizeAndVisibleRect(_, _, _)).Times(0);
+
+  // Note that we can't use the helper above because the callback won't run
+  // until a buffer is available.
+ auto buffer_renderer = CreateBufferRenderer(kTestVisibleSize, texture_owner);
+ const auto* buffer_renderer_raw = buffer_renderer.get();
+ bool called = false;
+ auto callback = base::BindLambdaForTesting(
+ [&](std::unique_ptr<CodecOutputBufferRenderer> buffer_renderer,
+ FrameInfoHelper::FrameInfo info) {
+ ASSERT_EQ(buffer_renderer_raw, buffer_renderer.get());
+ called = true;
+ last_frame_info_ = info;
+ });
+ helper_->GetFrameInfo(std::move(buffer_renderer), callback);
+ base::RunLoop().RunUntilIdle();
+ ASSERT_TRUE(buffer_available_cb);
+ Mock::VerifyAndClearExpectations(texture_owner.get());
+
+  // When the buffer is available, we expect GetCodedSizeAndVisibleRect to be
+  // called and the result to be kTestCodedSize.
+ EXPECT_CALL(*texture_owner, GetCodedSizeAndVisibleRect(_, _, _)).Times(1);
+ std::move(buffer_available_cb).Run();
+ base::RunLoop().RunUntilIdle();
+ EXPECT_EQ(last_frame_info_.coded_size, kTestCodedSize);
+ ASSERT_TRUE(called);
+ Mock::VerifyAndClearExpectations(texture_owner.get());
+}
+
} // namespace media
diff --git a/chromium/media/gpu/android/maybe_render_early_manager.cc b/chromium/media/gpu/android/maybe_render_early_manager.cc
index 84c2b9c2963..3ba450a3f06 100644
--- a/chromium/media/gpu/android/maybe_render_early_manager.cc
+++ b/chromium/media/gpu/android/maybe_render_early_manager.cc
@@ -89,18 +89,18 @@ class MaybeRenderEarlyManagerImpl : public MaybeRenderEarlyManager {
// Give the image group to |gpu_impl_|. Note that we don't drop our ref to
// |image_group| on this thread. It can only be constructed here.
- gpu_impl_.Post(FROM_HERE, &GpuMaybeRenderEarlyImpl::SetCodecImageGroup,
- std::move(image_group));
+ gpu_impl_.AsyncCall(&GpuMaybeRenderEarlyImpl::SetCodecImageGroup)
+ .WithArgs(std::move(image_group));
}
void AddCodecImage(
scoped_refptr<CodecImageHolder> codec_image_holder) override {
- gpu_impl_.Post(FROM_HERE, &GpuMaybeRenderEarlyImpl::AddCodecImage,
- std::move(codec_image_holder));
+ gpu_impl_.AsyncCall(&GpuMaybeRenderEarlyImpl::AddCodecImage)
+ .WithArgs(std::move(codec_image_holder));
}
void MaybeRenderEarly() override {
- gpu_impl_.Post(FROM_HERE, &GpuMaybeRenderEarlyImpl::MaybeRenderEarly);
+ gpu_impl_.AsyncCall(&GpuMaybeRenderEarlyImpl::MaybeRenderEarly);
}
private:
diff --git a/chromium/media/gpu/android/media_codec_video_decoder.cc b/chromium/media/gpu/android/media_codec_video_decoder.cc
index 686357a8891..9069826e489 100644
--- a/chromium/media/gpu/android/media_codec_video_decoder.cc
+++ b/chromium/media/gpu/android/media_codec_video_decoder.cc
@@ -26,6 +26,7 @@
#include "media/base/media_switches.h"
#include "media/base/scoped_async_trace.h"
#include "media/base/status.h"
+#include "media/base/supported_video_decoder_config.h"
#include "media/base/video_codecs.h"
#include "media/base/video_decoder_config.h"
#include "media/base/video_frame.h"
@@ -33,7 +34,6 @@
#include "media/gpu/android/android_video_surface_chooser.h"
#include "media/gpu/android/codec_allocator.h"
#include "media/media_buildflags.h"
-#include "media/video/supported_video_decoder_config.h"
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
#include "media/base/android/extract_sps_and_pps.h"
@@ -601,7 +601,8 @@ void MediaCodecVideoDecoder::OnSurfaceDestroyed(AndroidOverlay* overlay) {
if (target_surface_bundle_ && target_surface_bundle_->overlay() == overlay)
target_surface_bundle_ = texture_owner_bundle_;
- // Transition the codec away from the overlay if necessary.
+ // Transition the codec away from the overlay if necessary. This must be
+ // complete before this function returns.
if (SurfaceTransitionPending())
TransitionToTargetSurface();
}
@@ -966,6 +967,16 @@ bool MediaCodecVideoDecoder::DequeueOutput() {
std::unique_ptr<ScopedAsyncTrace> async_trace =
ScopedAsyncTrace::CreateIfEnabled(
"MediaCodecVideoDecoder::CreateVideoFrame");
+ // Make sure that we're notified when this is rendered. Otherwise, if we're
+ // waiting for all output buffers to drain so that we can swap the output
+    // surface, we might not realize that we can continue. If we're using
+ // SurfaceControl overlays, then this isn't needed; there is never a surface
+ // transition anyway.
+ if (!is_surface_control_enabled_) {
+ output_buffer->set_render_cb(BindToCurrentLoop(
+ base::BindOnce(&MediaCodecVideoDecoder::StartTimerOrPumpCodec,
+ weak_factory_.GetWeakPtr())));
+ }
video_frame_factory_->CreateVideoFrame(
std::move(output_buffer), presentation_time,
GetNaturalSize(visible_rect, decoder_config_.GetPixelAspectRatio()),
@@ -1010,7 +1021,7 @@ void MediaCodecVideoDecoder::ForwardVideoFrame(
if (reset_generation == reset_generation_) {
// TODO(liberato): We might actually have a SW decoder. Consider setting
// this to false if so, especially for higher bitrates.
- frame->metadata()->power_efficient = true;
+ frame->metadata().power_efficient = true;
output_cb_.Run(std::move(frame));
}
}
@@ -1148,6 +1159,10 @@ std::string MediaCodecVideoDecoder::GetDisplayName() const {
return "MediaCodecVideoDecoder";
}
+VideoDecoderType MediaCodecVideoDecoder::GetDecoderType() const {
+ return VideoDecoderType::kMediaCodec;
+}
+
bool MediaCodecVideoDecoder::NeedsBitstreamConversion() const {
return true;
}
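The render callback installed above is wrapped in BindToCurrentLoop so that,
whichever thread ends up releasing the output buffer, the decoder is poked
back on its own sequence. A hedged sketch of that wrapper pattern; Decoder
here is a stand-in class, not the real MediaCodecVideoDecoder:

    #include "base/bind.h"
    #include "base/callback.h"
    #include "base/memory/weak_ptr.h"
    #include "media/base/bind_to_current_loop.h"

    class Decoder {
     public:
      base::OnceClosure MakeRenderCb() {
        // The closure may be run on any thread; BindToCurrentLoop re-posts
        // it so OnRendered() always executes on this sequence.
        return media::BindToCurrentLoop(base::BindOnce(
            &Decoder::OnRendered, weak_factory_.GetWeakPtr()));
      }

     private:
      void OnRendered() {}
      base::WeakPtrFactory<Decoder> weak_factory_{this};
    };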
diff --git a/chromium/media/gpu/android/media_codec_video_decoder.h b/chromium/media/gpu/android/media_codec_video_decoder.h
index 8c984951c64..d3e3f21057f 100644
--- a/chromium/media/gpu/android/media_codec_video_decoder.h
+++ b/chromium/media/gpu/android/media_codec_video_decoder.h
@@ -80,6 +80,7 @@ class MEDIA_GPU_EXPORT MediaCodecVideoDecoder final : public VideoDecoder {
// VideoDecoder implementation:
std::string GetDisplayName() const override;
+ VideoDecoderType GetDecoderType() const override;
void Initialize(const VideoDecoderConfig& config,
bool low_delay,
CdmContext* cdm_context,
diff --git a/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc b/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc
index 0fc5de42bdf..be0e415b85c 100644
--- a/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc
+++ b/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc
@@ -20,14 +20,15 @@
#include "media/base/async_destroy_video_decoder.h"
#include "media/base/decoder_buffer.h"
#include "media/base/media_util.h"
+#include "media/base/supported_video_decoder_config.h"
#include "media/base/test_helpers.h"
+#include "media/base/video_codecs.h"
#include "media/base/video_frame.h"
#include "media/gpu/android/android_video_surface_chooser_impl.h"
#include "media/gpu/android/fake_codec_allocator.h"
#include "media/gpu/android/mock_android_video_surface_chooser.h"
#include "media/gpu/android/mock_device_info.h"
#include "media/gpu/android/video_frame_factory.h"
-#include "media/video/supported_video_decoder_config.h"
#include "testing/gtest/include/gtest/gtest.h"
using base::test::RunCallback;
@@ -917,7 +918,7 @@ TEST_P(MediaCodecVideoDecoderTest, VideoFramesArePowerEfficient) {
base::RunLoop().RunUntilIdle();
EXPECT_TRUE(!!most_recent_frame_);
- EXPECT_TRUE(most_recent_frame_->metadata()->power_efficient);
+ EXPECT_TRUE(most_recent_frame_->metadata().power_efficient);
}
TEST_P(MediaCodecVideoDecoderH264Test, CsdIsIncludedInCodecConfig) {
@@ -996,6 +997,8 @@ static std::vector<VideoCodec> GetTestList() {
test_codecs.push_back(kCodecVP8);
if (MediaCodecUtil::IsVp9DecoderAvailable())
test_codecs.push_back(kCodecVP9);
+ if (MediaCodecUtil::IsAv1DecoderAvailable())
+ test_codecs.push_back(kCodecAV1);
return test_codecs;
}
@@ -1013,6 +1016,20 @@ static std::vector<VideoCodec> GetVp8IfAvailable() {
: std::vector<VideoCodec>();
}
+// TODO(https://crbug.com/1179801): Uncomment once MediaCodecVideoDecoderVp9Test
+// is fixed.
+// static std::vector<VideoCodec> GetVp9IfAvailable() {
+// return MediaCodecUtil::IsVp9DecoderAvailable()
+// ? std::vector<VideoCodec>(1, kCodecVP9)
+// : std::vector<VideoCodec>();
+// }
+
+static std::vector<VideoCodec> GetAv1IfAvailable() {
+ return MediaCodecUtil::IsAv1DecoderAvailable()
+ ? std::vector<VideoCodec>(1, kCodecAV1)
+ : std::vector<VideoCodec>();
+}
+
INSTANTIATE_TEST_SUITE_P(MediaCodecVideoDecoderTest,
MediaCodecVideoDecoderTest,
testing::ValuesIn(GetTestList()));
@@ -1027,4 +1044,20 @@ INSTANTIATE_TEST_SUITE_P(MediaCodecVideoDecoderVp8Test,
MediaCodecVideoDecoderVp8Test,
testing::ValuesIn(GetVp8IfAvailable()));
+// TODO(https://crbug.com/1179801): Uncomment once MediaCodecVideoDecoderVp9Test
+// is fixed.
+// INSTANTIATE_TEST_SUITE_P(MediaCodecVideoDecoderVp9Test,
+// MediaCodecVideoDecoderVp9Test,
+// testing::ValuesIn(GetVp9IfAvailable()));
+
+INSTANTIATE_TEST_SUITE_P(MediaCodecVideoDecoderAV1Test,
+ MediaCodecVideoDecoderAV1Test,
+ testing::ValuesIn(GetAv1IfAvailable()));
+
+// TODO(https://crbug.com/1179801): Remove this annotation once
+// MediaCodecVideoDecoderVp9Test is fixed.
+GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(MediaCodecVideoDecoderVp9Test);
+// This test suite is empty on some OSes.
+GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(MediaCodecVideoDecoderAV1Test);
+
} // namespace media
diff --git a/chromium/media/gpu/android/pooled_shared_image_video_provider.cc b/chromium/media/gpu/android/pooled_shared_image_video_provider.cc
index f46a8ca000f..a4b10ff30a7 100644
--- a/chromium/media/gpu/android/pooled_shared_image_video_provider.cc
+++ b/chromium/media/gpu/android/pooled_shared_image_video_provider.cc
@@ -122,11 +122,11 @@ void PooledSharedImageVideoProvider::OnImageReturned(
const gpu::SyncToken& sync_token) {
// An image has been returned to us. Wait for |sync_token| and then send it
// to ProcessFreePooledImage to re-use / pool / delete.
- gpu_helper_.Post(FROM_HERE, &GpuHelper::OnImageReturned, sync_token,
- pooled_image->record.codec_image_holder,
- BindToCurrentLoop(base::BindOnce(
- &PooledSharedImageVideoProvider::ProcessFreePooledImage,
- weak_factory_.GetWeakPtr(), pooled_image)));
+ gpu_helper_.AsyncCall(&GpuHelper::OnImageReturned)
+ .WithArgs(sync_token, pooled_image->record.codec_image_holder,
+ BindToCurrentLoop(base::BindOnce(
+ &PooledSharedImageVideoProvider::ProcessFreePooledImage,
+ weak_factory_.GetWeakPtr(), pooled_image)));
}
void PooledSharedImageVideoProvider::ProcessFreePooledImage(
diff --git a/chromium/media/gpu/android/video_frame_factory_impl.cc b/chromium/media/gpu/android/video_frame_factory_impl.cc
index 6531d808624..8be7a01fcb7 100644
--- a/chromium/media/gpu/android/video_frame_factory_impl.cc
+++ b/chromium/media/gpu/android/video_frame_factory_impl.cc
@@ -301,7 +301,7 @@ void VideoFrameFactoryImpl::CreateVideoFrame_OnImageReady(
std::move(output_cb).Run(nullptr);
return;
}
- frame->metadata()->copy_mode = copy_mode;
+ frame->metadata().copy_mode = copy_mode;
const bool is_surface_control =
overlay_mode == OverlayMode::kSurfaceControlSecure ||
overlay_mode == OverlayMode::kSurfaceControlInsecure;
@@ -319,9 +319,9 @@ void VideoFrameFactoryImpl::CreateVideoFrame_OnImageReady(
allow_overlay = !is_texture_owner_backed || wants_promotion_hints;
}
- frame->metadata()->allow_overlay = allow_overlay;
- frame->metadata()->wants_promotion_hint = wants_promotion_hints;
- frame->metadata()->texture_owner = is_texture_owner_backed;
+ frame->metadata().allow_overlay = allow_overlay;
+ frame->metadata().wants_promotion_hint = wants_promotion_hints;
+ frame->metadata().texture_owner = is_texture_owner_backed;
// TODO(liberato): if this is run via being dropped, then it would be nice
// to find that out rather than treating the image as unused. If the renderer
diff --git a/chromium/media/gpu/args.gni b/chromium/media/gpu/args.gni
index e61e9567d85..33c49cd2938 100644
--- a/chromium/media/gpu/args.gni
+++ b/chromium/media/gpu/args.gni
@@ -21,8 +21,8 @@ declare_args() {
# is typically the case on x86-based ChromeOS devices.
# VA-API should also be compiled by default on x11-using linux devices
# using x86/x64.
- use_vaapi =
- is_linux && use_x11 && (current_cpu == "x86" || current_cpu == "x64")
+ use_vaapi = (is_linux || is_chromeos_lacros) && use_x11 &&
+ (target_cpu == "x86" || target_cpu == "x64")
# Indicates if ChromeOS protected media support exists. This is used
# to enable the CDM daemon in Chrome OS as well as support for
@@ -35,5 +35,5 @@ declare_args() {
declare_args() {
# VA-API also allows decoding of images, but we don't want to use this
# outside of chromeos, even if video decoding is enabled.
- use_vaapi_image_codecs = use_vaapi && is_ash
+ use_vaapi_image_codecs = use_vaapi && is_chromeos_ash
}
diff --git a/chromium/media/gpu/av1_decoder.cc b/chromium/media/gpu/av1_decoder.cc
new file mode 100644
index 00000000000..657202357c7
--- /dev/null
+++ b/chromium/media/gpu/av1_decoder.cc
@@ -0,0 +1,496 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/av1_decoder.h"
+
+#include <bitset>
+
+#include "base/callback_helpers.h"
+#include "base/logging.h"
+#include "base/memory/ptr_util.h"
+#include "base/stl_util.h"
+#include "media/base/limits.h"
+#include "media/gpu/av1_picture.h"
+#include "third_party/libgav1/src/src/decoder_state.h"
+#include "third_party/libgav1/src/src/gav1/status_code.h"
+#include "third_party/libgav1/src/src/utils/constants.h"
+
+namespace media {
+namespace {
+// (Section 6.4.1):
+//
+// - "An operating point specifies which spatial and temporal layers should be
+// decoded."
+//
+// - "The order of operating points indicates the preferred order for producing
+// an output: a decoder should select the earliest operating point in the list
+// that meets its decoding capabilities as expressed by the level associated
+// with each operating point."
+//
+// For simplicity, we always select operating point 0 and will validate that it
+// doesn't have scalability information.
+constexpr unsigned int kDefaultOperatingPoint = 0;
+
+// Conversion function from libgav1 profiles to media::VideoCodecProfile.
+VideoCodecProfile AV1ProfileToVideoCodecProfile(
+ libgav1::BitstreamProfile profile) {
+ switch (profile) {
+ case libgav1::kProfile0:
+ return AV1PROFILE_PROFILE_MAIN;
+ case libgav1::kProfile1:
+ return AV1PROFILE_PROFILE_HIGH;
+ case libgav1::kProfile2:
+ return AV1PROFILE_PROFILE_PRO;
+ default:
+ // ObuParser::ParseSequenceHeader() validates the profile.
+ NOTREACHED() << "Invalid profile: " << base::strict_cast<int>(profile);
+ return AV1PROFILE_PROFILE_MAIN;
+ }
+}
+
+// Returns true iff the sequence has spatial or temporal scalability information
+// for the selected operating point.
+bool SequenceUsesScalability(int operating_point_idc) {
+ return operating_point_idc != 0;
+}
+
+bool IsYUV420Sequence(const libgav1::ColorConfig& color_config) {
+ return color_config.subsampling_x == 1u && color_config.subsampling_y == 1u &&
+ !color_config.is_monochrome;
+}
+
+bool IsValidBitDepth(uint8_t bit_depth, VideoCodecProfile profile) {
+ // Spec 6.4.1.
+ switch (profile) {
+ case AV1PROFILE_PROFILE_MAIN:
+ case AV1PROFILE_PROFILE_HIGH:
+ return bit_depth == 8u || bit_depth == 10u;
+ case AV1PROFILE_PROFILE_PRO:
+ return bit_depth == 8u || bit_depth == 10u || bit_depth == 12u;
+ default:
+ NOTREACHED();
+ return false;
+ }
+}
+} // namespace
+
+AV1Decoder::AV1Decoder(std::unique_ptr<AV1Accelerator> accelerator,
+ VideoCodecProfile profile,
+ const VideoColorSpace& container_color_space)
+ : buffer_pool_(std::make_unique<libgav1::BufferPool>(
+ /*on_frame_buffer_size_changed=*/nullptr,
+ /*get_frame_buffer=*/nullptr,
+ /*release_frame_buffer=*/nullptr,
+ /*callback_private_data=*/nullptr)),
+ state_(std::make_unique<libgav1::DecoderState>()),
+ accelerator_(std::move(accelerator)),
+ profile_(profile),
+ container_color_space_(container_color_space) {
+ ref_frames_.fill(nullptr);
+}
+
+AV1Decoder::~AV1Decoder() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ // |buffer_pool_| checks that all the allocated frames are released in its
+ // dtor. Explicitly destruct |state_| before |buffer_pool_| to release frames
+ // in |reference_frame| in |state_|.
+ state_.reset();
+}
+
+bool AV1Decoder::Flush() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DVLOG(2) << "Decoder flush";
+ Reset();
+ return true;
+}
+
+void AV1Decoder::Reset() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ ClearCurrentFrame();
+
+ // We must reset the |current_sequence_header_| to ensure we don't try to
+ // decode frames using an incorrect sequence header. If the first
+ // DecoderBuffer after the reset doesn't contain a sequence header, we'll just
+ // skip it and will keep skipping until we get a sequence header.
+ current_sequence_header_.reset();
+ stream_id_ = 0;
+ stream_ = nullptr;
+ stream_size_ = 0;
+ on_error_ = false;
+
+ state_ = std::make_unique<libgav1::DecoderState>();
+ ClearReferenceFrames();
+ parser_.reset();
+
+ buffer_pool_ = std::make_unique<libgav1::BufferPool>(
+ /*on_frame_buffer_size_changed=*/nullptr,
+ /*get_frame_buffer=*/nullptr,
+ /*release_frame_buffer=*/nullptr,
+ /*callback_private_data=*/nullptr);
+}
+
+void AV1Decoder::SetStream(int32_t id, const DecoderBuffer& decoder_buffer) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ stream_id_ = id;
+ stream_ = decoder_buffer.data();
+ stream_size_ = decoder_buffer.data_size();
+ ClearCurrentFrame();
+
+ parser_ = base::WrapUnique(new (std::nothrow) libgav1::ObuParser(
+ decoder_buffer.data(), decoder_buffer.data_size(), kDefaultOperatingPoint,
+ buffer_pool_.get(), state_.get()));
+ if (!parser_) {
+ on_error_ = true;
+ return;
+ }
+
+ if (current_sequence_header_)
+ parser_->set_sequence_header(*current_sequence_header_);
+}
+
+void AV1Decoder::ClearCurrentFrame() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ current_frame_.reset();
+ current_frame_header_.reset();
+}
+
+AcceleratedVideoDecoder::DecodeResult AV1Decoder::Decode() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ if (on_error_)
+ return kDecodeError;
+ auto result = DecodeInternal();
+ on_error_ = result == kDecodeError;
+ return result;
+}
+
+AcceleratedVideoDecoder::DecodeResult AV1Decoder::DecodeInternal() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ if (!parser_) {
+ DLOG(ERROR) << "Decode() is called before SetStream()";
+ return kDecodeError;
+ }
+ while (parser_->HasData() || current_frame_header_) {
+ base::ScopedClosureRunner clear_current_frame(
+ base::BindOnce(&AV1Decoder::ClearCurrentFrame, base::Unretained(this)));
+ if (!current_frame_header_) {
+ libgav1::StatusCode status_code = parser_->ParseOneFrame(&current_frame_);
+ if (status_code != libgav1::kStatusOk) {
+ DLOG(WARNING) << "Failed to parse OBU: "
+ << libgav1::GetErrorString(status_code);
+ return kDecodeError;
+ }
+ if (!current_frame_) {
+ DLOG(WARNING) << "No frame found. Skipping the current stream";
+ continue;
+ }
+
+ current_frame_header_ = parser_->frame_header();
+ // Detects if a new coded video sequence is starting.
+ if (parser_->sequence_header_changed()) {
+ // TODO(b/171853869): Remove this check once libgav1::ObuParser does
+ // this check.
+ if (current_frame_header_->frame_type != libgav1::kFrameKey ||
+ !current_frame_header_->show_frame ||
+ current_frame_header_->show_existing_frame ||
+ current_frame_->temporal_id() != 0) {
+ // Section 7.5.
+ DVLOG(1)
+ << "The first frame successive to sequence header OBU must be a "
+ << "keyframe with show_frame=1, show_existing_frame=0 and "
+ << "temporal_id=0";
+ return kDecodeError;
+ }
+ if (SequenceUsesScalability(
+ parser_->sequence_header()
+ .operating_point_idc[kDefaultOperatingPoint])) {
+ DVLOG(3) << "Either temporal or spatial layer decoding is not "
+ << "supported";
+ return kDecodeError;
+ }
+
+ current_sequence_header_ = parser_->sequence_header();
+ if (!IsYUV420Sequence(current_sequence_header_->color_config)) {
+ DVLOG(1) << "Only YUV 4:2:0 is supported";
+ return kDecodeError;
+ }
+
+ const VideoCodecProfile new_profile =
+ AV1ProfileToVideoCodecProfile(current_sequence_header_->profile);
+ const uint8_t new_bit_depth = base::checked_cast<uint8_t>(
+ current_sequence_header_->color_config.bitdepth);
+ if (!IsValidBitDepth(new_bit_depth, new_profile)) {
+ DVLOG(1) << "Invalid bit depth="
+ << base::strict_cast<int>(new_bit_depth)
+ << ", profile=" << GetProfileName(new_profile);
+ return kDecodeError;
+ }
+
+ const gfx::Size new_frame_size(
+ base::strict_cast<int>(current_sequence_header_->max_frame_width),
+ base::strict_cast<int>(current_sequence_header_->max_frame_height));
+ gfx::Rect new_visible_rect(
+ base::strict_cast<int>(current_frame_header_->render_width),
+ base::strict_cast<int>(current_frame_header_->render_height));
+ DCHECK(!new_frame_size.IsEmpty());
+ if (!gfx::Rect(new_frame_size).Contains(new_visible_rect)) {
+ DVLOG(1) << "Render size exceeds picture size. render size: "
+ << new_visible_rect.ToString()
+ << ", picture size: " << new_frame_size.ToString();
+ new_visible_rect = gfx::Rect(new_frame_size);
+ }
+
+ ClearReferenceFrames();
+        // Issues kConfigChange only if the dimensions, profile, or bit depth
+        // has changed.
+ if (frame_size_ != new_frame_size ||
+ visible_rect_ != new_visible_rect || profile_ != new_profile ||
+ bit_depth_ != new_bit_depth) {
+ frame_size_ = new_frame_size;
+ visible_rect_ = new_visible_rect;
+ profile_ = new_profile;
+ bit_depth_ = new_bit_depth;
+ clear_current_frame.ReplaceClosure(base::DoNothing());
+ return kConfigChange;
+ }
+ }
+ }
+
+ if (!current_sequence_header_) {
+      // Decoding is not possible because we haven't received a sequence
+      // header. This occurs when seeking a video.
+ DVLOG(3) << "Discarded the current frame because no sequence header has "
+ << "been found yet";
+ continue;
+ }
+
+ DCHECK(current_frame_header_);
+ const auto& frame_header = *current_frame_header_;
+ if (frame_header.show_existing_frame) {
+ const size_t frame_to_show =
+ base::checked_cast<size_t>(frame_header.frame_to_show);
+ DCHECK_LE(0u, frame_to_show);
+ DCHECK_LT(frame_to_show, ref_frames_.size());
+ if (!CheckAndCleanUpReferenceFrames()) {
+ DLOG(ERROR) << "The states of reference frames are different between "
+ << "|ref_frames_| and |state_|";
+ return kDecodeError;
+ }
+
+ auto pic = ref_frames_[frame_to_show];
+ CHECK(pic);
+ pic = pic->Duplicate();
+ if (!pic) {
+ DVLOG(1) << "Failed duplication";
+ return kDecodeError;
+ }
+
+ pic->set_bitstream_id(stream_id_);
+ if (!accelerator_->OutputPicture(*pic)) {
+ return kDecodeError;
+ }
+
+ // libgav1::ObuParser sets |current_frame_| to the frame to show while
+ // |current_frame_header_| is the frame header of the currently parsed
+ // frame. If |current_frame_| is a keyframe, then refresh_frame_flags must
+ // be 0xff. Otherwise, refresh_frame_flags must be 0x00 (Section 5.9.2).
+ DCHECK(current_frame_->frame_type() == libgav1::kFrameKey ||
+ current_frame_header_->refresh_frame_flags == 0x00);
+ DCHECK(current_frame_->frame_type() != libgav1::kFrameKey ||
+ current_frame_header_->refresh_frame_flags == 0xff);
+ UpdateReferenceFrames(std::move(pic));
+ continue;
+ }
+
+ if (parser_->tile_buffers().empty()) {
+ // The last call to ParseOneFrame() didn't actually have any tile groups.
+ // This could happen in rare cases (for example, if there is a Metadata
+ // OBU after the TileGroup OBU). Ignore this case.
+ continue;
+ }
+
+ const gfx::Size current_frame_size(
+ base::strict_cast<int>(frame_header.width),
+ base::strict_cast<int>(frame_header.height));
+ if (current_frame_size != frame_size_) {
+ // TODO(hiroh): This must be handled in decoding spatial layer.
+ DVLOG(1) << "Resolution change in the middle of video sequence (i.e."
+ << " between sequence headers) is not supported";
+ return kDecodeError;
+ }
+ if (current_frame_size.width() !=
+ base::strict_cast<int>(frame_header.upscaled_width)) {
+ DVLOG(1) << "Super resolution is not supported";
+ return kDecodeError;
+ }
+ const gfx::Rect current_visible_rect(
+ base::strict_cast<int>(frame_header.render_width),
+ base::strict_cast<int>(frame_header.render_height));
+ if (current_visible_rect != visible_rect_) {
+ // TODO(andrescj): Handle the visible rectangle change in the middle of
+ // video sequence.
+ DVLOG(1) << "Visible rectangle change in the middle of video sequence"
+ << "(i.e. between sequence headers) is not supported";
+ return kDecodeError;
+ }
+
+ DCHECK(current_sequence_header_->film_grain_params_present ||
+ !frame_header.film_grain_params.apply_grain);
+ auto pic = accelerator_->CreateAV1Picture(
+ frame_header.film_grain_params.apply_grain);
+ if (!pic) {
+ clear_current_frame.ReplaceClosure(base::DoNothing());
+ return kRanOutOfSurfaces;
+ }
+
+ pic->set_visible_rect(current_visible_rect);
+ pic->set_bitstream_id(stream_id_);
+
+ // For AV1, prefer the frame color space over the config.
+ const auto& cc = current_sequence_header_->color_config;
+ const auto cs = VideoColorSpace(
+ cc.color_primary, cc.transfer_characteristics, cc.matrix_coefficients,
+ cc.color_range == libgav1::kColorRangeStudio
+ ? gfx::ColorSpace::RangeID::LIMITED
+ : gfx::ColorSpace::RangeID::FULL);
+ if (cs.IsSpecified())
+ pic->set_colorspace(cs);
+ else if (container_color_space_.IsSpecified())
+ pic->set_colorspace(container_color_space_);
+
+ pic->frame_header = frame_header;
+ // TODO(hiroh): Set decrypt config.
+ if (!DecodeAndOutputPicture(std::move(pic), parser_->tile_buffers()))
+ return kDecodeError;
+ }
+ return kRanOutOfStreamData;
+}
+
+void AV1Decoder::UpdateReferenceFrames(scoped_refptr<AV1Picture> pic) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(state_);
+ DCHECK(current_frame_header_);
+ const uint8_t refresh_frame_flags =
+ current_frame_header_->refresh_frame_flags;
+ const std::bitset<libgav1::kNumReferenceFrameTypes> update_reference_frame(
+ refresh_frame_flags);
+ for (size_t i = 0; i < libgav1::kNumReferenceFrameTypes; ++i) {
+ if (update_reference_frame[i])
+ ref_frames_[i] = pic;
+ }
+ state_->UpdateReferenceFrames(current_frame_,
+ base::strict_cast<int>(refresh_frame_flags));
+}
+
+void AV1Decoder::ClearReferenceFrames() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(state_);
+ ref_frames_.fill(nullptr);
+ // If AV1Decoder has decided to clear the reference frames, then ObuParser
+ // must have also decided to do so.
+ DCHECK_EQ(base::STLCount(state_->reference_frame, nullptr),
+ static_cast<int>(state_->reference_frame.size()));
+}
+
+bool AV1Decoder::CheckAndCleanUpReferenceFrames() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(state_);
+ DCHECK(current_frame_header_);
+ for (size_t i = 0; i < libgav1::kNumReferenceFrameTypes; ++i) {
+ if (state_->reference_frame[i] && !ref_frames_[i])
+ return false;
+ if (!state_->reference_frame[i] && ref_frames_[i])
+ ref_frames_[i].reset();
+ }
+
+ // If we get here, we know |ref_frames_| includes all and only those frames
+ // that can be currently used as reference frames. Now we'll assert that for
+ // non-intra frames, all the necessary reference frames are in |ref_frames_|.
+ // For intra frames, we don't need this assertion because they shouldn't
+ // depend on reference frames.
+ if (!libgav1::IsIntraFrame(current_frame_header_->frame_type)) {
+ for (size_t i = 0; i < libgav1::kNumInterReferenceFrameTypes; ++i) {
+ const auto ref_frame_index =
+ current_frame_header_->reference_frame_index[i];
+
+ // Unless an error occurred in libgav1, |ref_frame_index| should be valid,
+ // and since CheckAndCleanUpReferenceFrames() only gets called if parsing
+ // succeeded, we can assert that validity.
+ CHECK_GE(ref_frame_index, 0);
+ CHECK_LT(ref_frame_index, libgav1::kNumReferenceFrameTypes);
+ CHECK(ref_frames_[ref_frame_index]);
+ }
+ }
+
+ // If we get here, we know that all the reference frames needed by the current
+ // frame are in |ref_frames_|.
+ return true;
+}
+
+bool AV1Decoder::DecodeAndOutputPicture(
+ scoped_refptr<AV1Picture> pic,
+ const libgav1::Vector<libgav1::TileBuffer>& tile_buffers) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(pic);
+ DCHECK(current_sequence_header_);
+ DCHECK(stream_);
+ DCHECK_GT(stream_size_, 0u);
+ if (!CheckAndCleanUpReferenceFrames()) {
+ DLOG(ERROR) << "The states of reference frames are different between "
+ << "|ref_frames_| and |state_|";
+ return false;
+ }
+ if (!accelerator_->SubmitDecode(*pic, *current_sequence_header_, ref_frames_,
+ tile_buffers,
+ base::make_span(stream_, stream_size_))) {
+ return false;
+ }
+
+ if (pic->frame_header.show_frame && !accelerator_->OutputPicture(*pic))
+ return false;
+
+ // |current_frame_header_->refresh_frame_flags| should be 0xff if the frame is
+ // either a SWITCH_FRAME or a visible KEY_FRAME (Spec 5.9.2).
+ DCHECK(!(current_frame_header_->frame_type == libgav1::kFrameSwitch ||
+ (current_frame_header_->frame_type == libgav1::kFrameKey &&
+ current_frame_header_->show_frame)) ||
+ current_frame_header_->refresh_frame_flags == 0xff);
+ UpdateReferenceFrames(std::move(pic));
+ return true;
+}
+
+gfx::Size AV1Decoder::GetPicSize() const {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ // TODO(hiroh): It should be safer to align this by 64 or 128 (depending on
+  // use_128x128_superblock) so that a driver doesn't touch memory outside
+  // the buffer.
+ return frame_size_;
+}
+
+gfx::Rect AV1Decoder::GetVisibleRect() const {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ return visible_rect_;
+}
+
+VideoCodecProfile AV1Decoder::GetProfile() const {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ return profile_;
+}
+
+uint8_t AV1Decoder::GetBitDepth() const {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ return bit_depth_;
+}
+
+size_t AV1Decoder::GetRequiredNumOfPictures() const {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ constexpr size_t kPicsInPipeline = limits::kMaxVideoFrames + 1;
+ DCHECK(current_sequence_header_);
+ return (kPicsInPipeline + GetNumReferenceFrames()) *
+ (1 + current_sequence_header_->film_grain_params_present);
+}
+
+size_t AV1Decoder::GetNumReferenceFrames() const {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ return libgav1::kNumReferenceFrameTypes;
+}
+} // namespace media
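A standalone sketch of the reference-frame update rule implemented by
UpdateReferenceFrames() in the new file above: every set bit in
refresh_frame_flags slots the new picture into the matching reference
position. Plain C++, with the picture type simplified to shared_ptr<int>:

    #include <array>
    #include <bitset>
    #include <cstddef>
    #include <cstdint>
    #include <memory>

    constexpr size_t kNumRefFrameTypes = 8;  // libgav1::kNumReferenceFrameTypes.

    void UpdateRefs(
        std::array<std::shared_ptr<int>, kNumRefFrameTypes>& ref_frames,
        std::shared_ptr<int> pic,
        uint8_t refresh_frame_flags) {
      const std::bitset<kNumRefFrameTypes> update(refresh_frame_flags);
      for (size_t i = 0; i < kNumRefFrameTypes; ++i) {
        if (update[i])
          ref_frames[i] = pic;  // All flagged slots share the same picture.
      }
    }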
diff --git a/chromium/media/gpu/av1_decoder.h b/chromium/media/gpu/av1_decoder.h
new file mode 100644
index 00000000000..02bee3a1ec2
--- /dev/null
+++ b/chromium/media/gpu/av1_decoder.h
@@ -0,0 +1,160 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_AV1_DECODER_H_
+#define MEDIA_GPU_AV1_DECODER_H_
+
+#include <array>
+#include <memory>
+
+#include "base/containers/span.h"
+#include "base/macros.h"
+#include "base/sequence_checker.h"
+#include "media/base/video_codecs.h"
+#include "media/base/video_color_space.h"
+#include "media/gpu/accelerated_video_decoder.h"
+#include "media/gpu/media_gpu_export.h"
+#include "third_party/libgav1/src/src/utils/constants.h"
+
+// For libgav1::RefCountedBufferPtr.
+#include "third_party/libgav1/src/src/buffer_pool.h"
+// For libgav1::ObuSequenceHeader. base::Optional demands that
+// ObuSequenceHeader fulfill std::is_trivially_constructible if it is
+// forward-declared, but ObuSequenceHeader doesn't meet that requirement.
+#include "third_party/libgav1/src/src/obu_parser.h"
+
+namespace libgav1 {
+struct DecoderState;
+struct ObuFrameHeader;
+template <typename T>
+class Vector;
+} // namespace libgav1
+
+namespace media {
+class AV1Picture;
+using AV1ReferenceFrameVector =
+ std::array<scoped_refptr<AV1Picture>, libgav1::kNumReferenceFrameTypes>;
+
+// Clients of this class are expected to pass an AV1 OBU stream and are expected
+// to provide an implementation of AV1Accelerator for offloading final steps
+// of the decoding process.
+//
+// This class must be created, called and destroyed on a single thread, and
+// does nothing internally on any other thread.
+class MEDIA_GPU_EXPORT AV1Decoder : public AcceleratedVideoDecoder {
+ public:
+ class MEDIA_GPU_EXPORT AV1Accelerator {
+ public:
+ AV1Accelerator() = default;
+ virtual ~AV1Accelerator() = default;
+ AV1Accelerator(const AV1Accelerator&) = delete;
+ AV1Accelerator& operator=(const AV1Accelerator&) = delete;
+
+ // Creates an AV1Picture that the AV1Decoder can use to store some of the
+ // information needed to request accelerated decoding. This picture is later
+ // passed when calling SubmitDecode() so that the AV1Accelerator can submit
+ // the decode request to the driver. It may also be stored for use as
+ // reference to decode other pictures.
+ // When a picture is no longer needed by the decoder, it will just drop
+ // its reference to it, and it may do so at any time.
+ // Note that this may return nullptr if the accelerator is not able to
+ // provide any new pictures at the given time. The decoder must handle this
+ // case and treat it as normal, returning kRanOutOfSurfaces from Decode().
+ virtual scoped_refptr<AV1Picture> CreateAV1Picture(bool apply_grain) = 0;
+
+ // Submits |pic| to the driver for accelerated decoding. The following
+ // parameters are also passed:
+ // - |sequence_header|: the current OBU sequence header.
+ // - |ref_frames|: the pictures used as reference for decoding |pic|.
+ // - |tile_buffers|: tile information.
+ // - |data|: the entire data of the DecoderBuffer set by
+ // AV1Decoder::SetStream().
+    // Note that returning from this method does not mean that the decode
+    // process is finished. However, the caller may drop its references to
+    // |pic| and |ref_frames| immediately, and |data| does not need to remain
+    // valid after this method returns.
+ // Returns true when successful, false otherwise.
+ virtual bool SubmitDecode(
+ const AV1Picture& pic,
+ const libgav1::ObuSequenceHeader& sequence_header,
+ const AV1ReferenceFrameVector& ref_frames,
+ const libgav1::Vector<libgav1::TileBuffer>& tile_buffers,
+ base::span<const uint8_t> data) = 0;
+
+ // Schedules output (display) of |pic|.
+ // Note that returning from this method does not mean that |pic| has already
+ // been outputted (displayed), but guarantees that all pictures will be
+ // outputted in the same order as this method was called for them, and that
+ // they are decoded before outputting (assuming SubmitDecode() has been
+ // called for them beforehand).
+ // Returns true when successful, false otherwise.
+ virtual bool OutputPicture(const AV1Picture& pic) = 0;
+ };
+
+ AV1Decoder(std::unique_ptr<AV1Accelerator> accelerator,
+ VideoCodecProfile profile,
+ const VideoColorSpace& container_color_space = VideoColorSpace());
+ ~AV1Decoder() override;
+ AV1Decoder(const AV1Decoder&) = delete;
+ AV1Decoder& operator=(const AV1Decoder&) = delete;
+
+ // AcceleratedVideoDecoder implementation.
+ void SetStream(int32_t id, const DecoderBuffer& decoder_buffer) override;
+ bool Flush() override WARN_UNUSED_RESULT;
+ void Reset() override;
+ DecodeResult Decode() override WARN_UNUSED_RESULT;
+ gfx::Size GetPicSize() const override;
+ gfx::Rect GetVisibleRect() const override;
+ VideoCodecProfile GetProfile() const override;
+ uint8_t GetBitDepth() const override;
+ size_t GetRequiredNumOfPictures() const override;
+ size_t GetNumReferenceFrames() const override;
+
+ private:
+ friend class AV1DecoderTest;
+
+ bool DecodeAndOutputPicture(
+ scoped_refptr<AV1Picture> pic,
+ const libgav1::Vector<libgav1::TileBuffer>& tile_buffers);
+ void UpdateReferenceFrames(scoped_refptr<AV1Picture> pic);
+ void ClearReferenceFrames();
+ // Checks that |ref_frames_| is consistent with libgav1's reference frame
+ // state (returns false if not) and cleans old reference frames from
+ // |ref_frames_| as needed. Also asserts that all reference frames needed by
+ // |current_frame_header_| are in |ref_frames_|. This method should be called
+ // prior to using |ref_frames_| (which includes calling
+  // |accelerator_|->SubmitDecode()).
+ bool CheckAndCleanUpReferenceFrames();
+ void ClearCurrentFrame();
+ DecodeResult DecodeInternal();
+
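+  // Set to true upon an unrecoverable decoding error; from then on, Decode()
+  // keeps returning kDecodeError until Reset() is called.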
+ bool on_error_ = false;
+
+ std::unique_ptr<libgav1::BufferPool> buffer_pool_;
+ std::unique_ptr<libgav1::DecoderState> state_;
+ std::unique_ptr<libgav1::ObuParser> parser_;
+
+ const std::unique_ptr<AV1Accelerator> accelerator_;
+ AV1ReferenceFrameVector ref_frames_;
+
+ base::Optional<libgav1::ObuSequenceHeader> current_sequence_header_;
+ base::Optional<libgav1::ObuFrameHeader> current_frame_header_;
+ libgav1::RefCountedBufferPtr current_frame_;
+
+ gfx::Rect visible_rect_;
+ gfx::Size frame_size_;
+ VideoCodecProfile profile_;
+ VideoColorSpace container_color_space_;
+ uint8_t bit_depth_ = 0;
+
+ int32_t stream_id_ = 0;
+ const uint8_t* stream_ = nullptr;
+ size_t stream_size_ = 0;
+
+ SEQUENCE_CHECKER(sequence_checker_);
+};
+
+} // namespace media
+
+#endif // MEDIA_GPU_AV1_DECODER_H_
diff --git a/chromium/media/gpu/av1_decoder_fuzzertest.cc b/chromium/media/gpu/av1_decoder_fuzzertest.cc
new file mode 100644
index 00000000000..dba060d0d18
--- /dev/null
+++ b/chromium/media/gpu/av1_decoder_fuzzertest.cc
@@ -0,0 +1,74 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stddef.h>
+
+#include <fuzzer/FuzzedDataProvider.h>
+
+#include "base/logging.h"
+#include "base/macros.h"
+#include "media/base/decoder_buffer.h"
+#include "media/base/video_codecs.h"
+#include "media/gpu/av1_decoder.h"
+#include "media/gpu/av1_picture.h"
+
+namespace {
+
+class FakeAV1Accelerator : public media::AV1Decoder::AV1Accelerator {
+ public:
+ FakeAV1Accelerator() = default;
+ ~FakeAV1Accelerator() override = default;
+ FakeAV1Accelerator(const FakeAV1Accelerator&) = delete;
+ FakeAV1Accelerator& operator=(const FakeAV1Accelerator&) = delete;
+
+ // media::AV1Decoder::AV1Accelerator implementation.
+ scoped_refptr<media::AV1Picture> CreateAV1Picture(bool apply_grain) override {
+ return base::MakeRefCounted<media::AV1Picture>();
+ }
+ bool SubmitDecode(const media::AV1Picture& pic,
+ const libgav1::ObuSequenceHeader& sequence_header,
+ const media::AV1ReferenceFrameVector& ref_frames,
+ const libgav1::Vector<libgav1::TileBuffer>& tile_buffers,
+ base::span<const uint8_t> data) override {
+ return true;
+ }
+ bool OutputPicture(const media::AV1Picture& pic) override { return true; }
+};
+
+} // namespace
+
+// Entry point for LibFuzzer.
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ FuzzedDataProvider fuzzed_data_provider(data, size);
+ media::AV1Decoder decoder(std::make_unique<FakeAV1Accelerator>(),
+ media::AV1PROFILE_PROFILE_MAIN);
+
+ // Split the input in two: we'll create a DecoderBuffer from each half. This
+ // allows us to Decode(), Reset(), and Decode() again for more coverage.
+ for (int i = 0; i < 2; ++i) {
+ size_t size_to_consume = i == 0 ? size / 2 : (size - size / 2);
+ std::vector<uint8_t> decoder_buffer_data =
+ fuzzed_data_provider.ConsumeBytes<uint8_t>(size_to_consume);
+ if (decoder_buffer_data.empty())
+ continue;
+    // The buffer pointed to by |decoder_buffer| can be destroyed at the end
+    // of each iteration because Reset() is expected to ensure that the
+    // current DecoderBuffer won't be needed after that.
+ scoped_refptr<media::DecoderBuffer> decoder_buffer =
+ media::DecoderBuffer::CopyFrom(decoder_buffer_data.data(),
+ decoder_buffer_data.size());
+ decoder.SetStream(i, *decoder_buffer);
+
+    // Decode() should consume all the data unless it returns kConfigChange,
+    // in which case it needs to be called again.
+ while (true) {
+ if (decoder.Decode() != media::AcceleratedVideoDecoder::kConfigChange)
+ break;
+ }
+ decoder.Reset();
+ }
+ ignore_result(decoder.Flush());
+
+ return 0;
+}
diff --git a/chromium/media/gpu/av1_decoder_unittest.cc b/chromium/media/gpu/av1_decoder_unittest.cc
new file mode 100644
index 00000000000..41adee33c24
--- /dev/null
+++ b/chromium/media/gpu/av1_decoder_unittest.cc
@@ -0,0 +1,753 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/av1_decoder.h"
+
+#include <string.h>
+
+#include <string>
+#include <vector>
+
+#include "base/files/file_util.h"
+#include "base/logging.h"
+#include "base/numerics/safe_conversions.h"
+#include "media/base/decoder_buffer.h"
+#include "media/base/test_data_util.h"
+#include "media/ffmpeg/ffmpeg_common.h"
+#include "media/filters/ffmpeg_demuxer.h"
+#include "media/filters/in_memory_url_protocol.h"
+#include "media/filters/ivf_parser.h"
+#include "media/gpu/av1_picture.h"
+#include "media/media_buildflags.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "third_party/libgav1/src/src/obu_parser.h"
+#include "third_party/libgav1/src/src/utils/constants.h"
+#include "third_party/libgav1/src/src/utils/types.h"
+
+using ::testing::_;
+using ::testing::DoAll;
+using ::testing::Return;
+using ::testing::SaveArg;
+
+namespace media {
+namespace {
+
+class FakeAV1Picture : public AV1Picture {
+ public:
+ FakeAV1Picture() = default;
+
+ protected:
+ ~FakeAV1Picture() override = default;
+
+ private:
+ scoped_refptr<AV1Picture> CreateDuplicate() override {
+ return base::MakeRefCounted<FakeAV1Picture>();
+ }
+};
+
+bool IsYUV420(int8_t subsampling_x, int8_t subsampling_y, bool is_monochrome) {
+ return subsampling_x == 1 && subsampling_y == 1 && !is_monochrome;
+}
+
+MATCHER_P(SameAV1PictureInstance, av1_picture, "") {
+ return &arg == av1_picture.get();
+}
+
+MATCHER_P2(MatchesFrameSizeAndRenderSize, frame_size, render_size, "") {
+ const auto& frame_header = arg.frame_header;
+ return base::strict_cast<int>(frame_header.width) == frame_size.width() &&
+ base::strict_cast<int>(frame_header.height) == frame_size.height() &&
+ base::strict_cast<int>(frame_header.render_width) ==
+ render_size.width() &&
+ base::strict_cast<int>(frame_header.render_height) ==
+ render_size.height();
+}
+
+MATCHER_P4(MatchesFrameHeader,
+ frame_size,
+ render_size,
+ show_existing_frame,
+ show_frame,
+ "") {
+ const auto& frame_header = arg.frame_header;
+ return base::strict_cast<int>(frame_header.width) == frame_size.width() &&
+ base::strict_cast<int>(frame_header.height) == frame_size.height() &&
+ base::strict_cast<int>(frame_header.render_width) ==
+ render_size.width() &&
+ base::strict_cast<int>(frame_header.render_height) ==
+ render_size.height() &&
+ frame_header.show_existing_frame == show_existing_frame &&
+ frame_header.show_frame == show_frame;
+}
+
+MATCHER_P4(MatchesYUV420SequenceHeader,
+ profile,
+ bitdepth,
+ max_frame_size,
+ film_grain_params_present,
+ "") {
+ return arg.profile == profile && arg.color_config.bitdepth == bitdepth &&
+ base::strict_cast<int>(arg.max_frame_width) ==
+ max_frame_size.width() &&
+ base::strict_cast<int>(arg.max_frame_height) ==
+ max_frame_size.height() &&
+ arg.film_grain_params_present == film_grain_params_present &&
+ IsYUV420(arg.color_config.subsampling_x,
+ arg.color_config.subsampling_y,
+ arg.color_config.is_monochrome);
+}
+
+MATCHER(NonEmptyTileBuffers, "") {
+ return !arg.empty();
+}
+
+MATCHER_P(MatchesFrameData, decoder_buffer, "") {
+ return arg.data() == decoder_buffer->data() &&
+ arg.size() == decoder_buffer->data_size();
+}
+
+class MockAV1Accelerator : public AV1Decoder::AV1Accelerator {
+ public:
+ MockAV1Accelerator() = default;
+ ~MockAV1Accelerator() override = default;
+
+ MOCK_METHOD1(CreateAV1Picture, scoped_refptr<AV1Picture>(bool));
+ MOCK_METHOD5(SubmitDecode,
+ bool(const AV1Picture&,
+ const libgav1::ObuSequenceHeader&,
+ const AV1ReferenceFrameVector&,
+ const libgav1::Vector<libgav1::TileBuffer>&,
+ base::span<const uint8_t>));
+ MOCK_METHOD1(OutputPicture, bool(const AV1Picture&));
+};
+} // namespace
+
+class AV1DecoderTest : public ::testing::Test {
+ public:
+ using DecodeResult = AcceleratedVideoDecoder::DecodeResult;
+
+ AV1DecoderTest() = default;
+ ~AV1DecoderTest() override = default;
+ void SetUp() override;
+ std::vector<DecodeResult> Decode(scoped_refptr<DecoderBuffer> buffer);
+ const libgav1::DecoderState* GetDecoderState() const;
+ AV1ReferenceFrameVector& GetReferenceFrames() const;
+ void Reset();
+ scoped_refptr<DecoderBuffer> ReadDecoderBuffer(const std::string& fname);
+ std::vector<scoped_refptr<DecoderBuffer>> ReadIVF(const std::string& fname);
+ std::vector<scoped_refptr<DecoderBuffer>> ReadWebm(const std::string& fname);
+
+ protected:
+ base::FilePath GetTestFilePath(const std::string& fname) {
+ base::FilePath file_path(
+ base::FilePath(base::FilePath::kCurrentDirectory).AppendASCII(fname));
+ if (base::PathExists(file_path)) {
+ return file_path;
+ }
+ return GetTestDataFilePath(fname);
+ }
+
+ // Owned by |decoder_|.
+ MockAV1Accelerator* mock_accelerator_;
+
+ std::unique_ptr<AV1Decoder> decoder_;
+ int32_t bitstream_id_ = 0;
+};
+
+void AV1DecoderTest::SetUp() {
+ auto accelerator = std::make_unique<MockAV1Accelerator>();
+ mock_accelerator_ = accelerator.get();
+ decoder_ = std::make_unique<AV1Decoder>(std::move(accelerator),
+ VIDEO_CODEC_PROFILE_UNKNOWN);
+}
+
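+// Feeds |buffer| to the decoder and repeatedly calls Decode(), collecting all
+// results until the decoder either errors out or runs out of stream data.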
+std::vector<AcceleratedVideoDecoder::DecodeResult> AV1DecoderTest::Decode(
+ scoped_refptr<DecoderBuffer> buffer) {
+ decoder_->SetStream(bitstream_id_++, *buffer);
+
+ std::vector<DecodeResult> results;
+ DecodeResult res;
+ do {
+ res = decoder_->Decode();
+ results.push_back(res);
+ } while (res != DecodeResult::kDecodeError &&
+ res != DecodeResult::kRanOutOfStreamData);
+ return results;
+}
+
+const libgav1::DecoderState* AV1DecoderTest::GetDecoderState() const {
+ return decoder_->state_.get();
+}
+
+AV1ReferenceFrameVector& AV1DecoderTest::GetReferenceFrames() const {
+ return decoder_->ref_frames_;
+}
+
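+// Checks that the decoder is mid-stream, then calls AV1Decoder::Reset() and
+// verifies that it drops the parsing, reference frame, and stream state while
+// keeping the accelerator.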
+void AV1DecoderTest::Reset() {
+ EXPECT_NE(decoder_->state_->current_frame_id, -1);
+ EXPECT_TRUE(decoder_->parser_);
+ EXPECT_EQ(decoder_->accelerator_.get(), mock_accelerator_);
+ EXPECT_LT(base::checked_cast<AV1ReferenceFrameVector::size_type>(
+ base::STLCount(decoder_->ref_frames_, nullptr)),
+ decoder_->ref_frames_.size());
+ EXPECT_FALSE(decoder_->current_frame_header_);
+ EXPECT_FALSE(decoder_->current_frame_);
+ EXPECT_NE(decoder_->stream_id_, 0);
+ EXPECT_TRUE(decoder_->stream_);
+ EXPECT_GT(decoder_->stream_size_, 0u);
+
+ decoder_->Reset();
+ EXPECT_EQ(decoder_->state_->current_frame_id, -1);
+ EXPECT_FALSE(decoder_->parser_);
+ EXPECT_EQ(decoder_->accelerator_.get(), mock_accelerator_);
+ EXPECT_EQ(base::checked_cast<AV1ReferenceFrameVector::size_type>(
+ base::STLCount(decoder_->ref_frames_, nullptr)),
+ decoder_->ref_frames_.size());
+ EXPECT_FALSE(decoder_->current_frame_header_);
+ EXPECT_FALSE(decoder_->current_frame_);
+ EXPECT_EQ(decoder_->stream_id_, 0);
+ EXPECT_FALSE(decoder_->stream_);
+ EXPECT_EQ(decoder_->stream_size_, 0u);
+}
+
+scoped_refptr<DecoderBuffer> AV1DecoderTest::ReadDecoderBuffer(
+ const std::string& fname) {
+ auto input_file = GetTestFilePath(fname);
+ std::string bitstream;
+
+ EXPECT_TRUE(base::ReadFileToString(input_file, &bitstream));
+ auto buffer = DecoderBuffer::CopyFrom(
+ reinterpret_cast<const uint8_t*>(bitstream.data()), bitstream.size());
+ EXPECT_TRUE(!!buffer);
+ return buffer;
+}
+
+std::vector<scoped_refptr<DecoderBuffer>> AV1DecoderTest::ReadIVF(
+ const std::string& fname) {
+ std::string ivf_data;
+ auto input_file = GetTestFilePath(fname);
+ EXPECT_TRUE(base::ReadFileToString(input_file, &ivf_data));
+
+ IvfParser ivf_parser;
+ IvfFileHeader ivf_header{};
+ EXPECT_TRUE(
+ ivf_parser.Initialize(reinterpret_cast<const uint8_t*>(ivf_data.data()),
+ ivf_data.size(), &ivf_header));
+ EXPECT_EQ(ivf_header.fourcc, /*AV01=*/0x31305641u);
+
+ std::vector<scoped_refptr<DecoderBuffer>> buffers;
+ IvfFrameHeader ivf_frame_header{};
+ const uint8_t* data;
+ while (ivf_parser.ParseNextFrame(&ivf_frame_header, &data)) {
+ buffers.push_back(DecoderBuffer::CopyFrom(
+ reinterpret_cast<const uint8_t*>(data), ivf_frame_header.frame_size));
+ }
+ return buffers;
+}
+
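+// Demuxes |fname| with FFmpeg and returns a DecoderBuffer for each packet of
+// the first AV1 video stream found in the file.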
+std::vector<scoped_refptr<DecoderBuffer>> AV1DecoderTest::ReadWebm(
+ const std::string& fname) {
+ std::string webm_data;
+ auto input_file = GetTestFilePath(fname);
+ EXPECT_TRUE(base::ReadFileToString(input_file, &webm_data));
+
+ InMemoryUrlProtocol protocol(
+ reinterpret_cast<const uint8_t*>(webm_data.data()), webm_data.size(),
+ false);
+ FFmpegGlue glue(&protocol);
+ LOG_ASSERT(glue.OpenContext());
+ int stream_index = -1;
+ for (unsigned int i = 0; i < glue.format_context()->nb_streams; ++i) {
+ const AVStream* stream = glue.format_context()->streams[i];
+ const AVCodecParameters* codec_parameters = stream->codecpar;
+ const AVMediaType codec_type = codec_parameters->codec_type;
+ const AVCodecID codec_id = codec_parameters->codec_id;
+ if (codec_type == AVMEDIA_TYPE_VIDEO && codec_id == AV_CODEC_ID_AV1) {
+ stream_index = i;
+ break;
+ }
+ }
+ EXPECT_NE(stream_index, -1) << "No AV1 data found in " << input_file;
+
+ std::vector<scoped_refptr<DecoderBuffer>> buffers;
+ AVPacket packet{};
+ while (av_read_frame(glue.format_context(), &packet) >= 0) {
+ if (packet.stream_index == stream_index)
+ buffers.push_back(DecoderBuffer::CopyFrom(packet.data, packet.size));
+ av_packet_unref(&packet);
+ }
+ return buffers;
+}
+
+TEST_F(AV1DecoderTest, DecodeInvalidOBU) {
+  const std::string kInvalidData = "ThisIsInvalidData";
+ auto kInvalidBuffer = DecoderBuffer::CopyFrom(
+ reinterpret_cast<const uint8_t*>(kInvalidData.data()),
+ kInvalidData.size());
+ std::vector<DecodeResult> results = Decode(kInvalidBuffer);
+ std::vector<DecodeResult> expected = {DecodeResult::kDecodeError};
+ EXPECT_EQ(results, expected);
+}
+
+TEST_F(AV1DecoderTest, DecodeEmptyOBU) {
+ auto kEmptyBuffer = base::MakeRefCounted<DecoderBuffer>(0);
+ std::vector<DecodeResult> results = Decode(kEmptyBuffer);
+ std::vector<DecodeResult> expected = {DecodeResult::kRanOutOfStreamData};
+ EXPECT_EQ(results, expected);
+}
+
+TEST_F(AV1DecoderTest, DecodeOneIFrame) {
+ constexpr gfx::Size kFrameSize(320, 240);
+ constexpr gfx::Size kRenderSize(320, 240);
+ constexpr auto kProfile = libgav1::BitstreamProfile::kProfile0;
+ const std::string kIFrame("av1-I-frame-320x240");
+ scoped_refptr<DecoderBuffer> i_frame_buffer = ReadDecoderBuffer(kIFrame);
+ ASSERT_TRUE(!!i_frame_buffer);
+ auto av1_picture = base::MakeRefCounted<AV1Picture>();
+ ::testing::InSequence s;
+ EXPECT_CALL(*mock_accelerator_, CreateAV1Picture(/*apply_grain=*/false))
+ .WillOnce(Return(av1_picture));
+ EXPECT_CALL(
+ *mock_accelerator_,
+ SubmitDecode(
+ MatchesFrameHeader(kFrameSize, kRenderSize,
+ /*show_existing_frame=*/false,
+ /*show_frame=*/true),
+ MatchesYUV420SequenceHeader(kProfile, /*bitdepth=*/8, kFrameSize,
+ /*film_grain_params_present=*/false),
+ _, NonEmptyTileBuffers(), MatchesFrameData(i_frame_buffer)))
+ .WillOnce(Return(true));
+ EXPECT_CALL(*mock_accelerator_,
+ OutputPicture(SameAV1PictureInstance(av1_picture)))
+ .WillOnce(Return(true));
+ std::vector<DecodeResult> results = Decode(i_frame_buffer);
+ std::vector<DecodeResult> expected = {DecodeResult::kConfigChange,
+ DecodeResult::kRanOutOfStreamData};
+ EXPECT_EQ(results, expected);
+}
+
+TEST_F(AV1DecoderTest, DecodeSimpleStream) {
+ constexpr gfx::Size kFrameSize(320, 240);
+ constexpr gfx::Size kRenderSize(320, 240);
+ constexpr auto kProfile = libgav1::BitstreamProfile::kProfile0;
+ const std::string kSimpleStream("bear-av1.webm");
+ std::vector<scoped_refptr<DecoderBuffer>> buffers = ReadWebm(kSimpleStream);
+ ASSERT_FALSE(buffers.empty());
+ std::vector<DecodeResult> expected = {DecodeResult::kConfigChange};
+ std::vector<DecodeResult> results;
+ for (auto buffer : buffers) {
+ ::testing::InSequence sequence;
+ auto av1_picture = base::MakeRefCounted<AV1Picture>();
+ EXPECT_CALL(*mock_accelerator_, CreateAV1Picture(/*apply_grain=*/false))
+ .WillOnce(Return(av1_picture));
+ EXPECT_CALL(
+ *mock_accelerator_,
+ SubmitDecode(
+ MatchesFrameHeader(kFrameSize, kRenderSize,
+ /*show_existing_frame=*/false,
+ /*show_frame=*/true),
+ MatchesYUV420SequenceHeader(kProfile, /*bitdepth=*/8, kFrameSize,
+ /*film_grain_params_present=*/false),
+ _, NonEmptyTileBuffers(), MatchesFrameData(buffer)))
+ .WillOnce(Return(true));
+ EXPECT_CALL(*mock_accelerator_,
+ OutputPicture(SameAV1PictureInstance(av1_picture)))
+ .WillOnce(Return(true));
+ for (DecodeResult r : Decode(buffer))
+ results.push_back(r);
+ expected.push_back(DecodeResult::kRanOutOfStreamData);
+ testing::Mock::VerifyAndClearExpectations(mock_accelerator_);
+ }
+ EXPECT_EQ(results, expected);
+}
+
+TEST_F(AV1DecoderTest, DecodeShowExistingPictureStream) {
+ constexpr gfx::Size kFrameSize(208, 144);
+ constexpr gfx::Size kRenderSize(208, 144);
+ constexpr auto kProfile = libgav1::BitstreamProfile::kProfile0;
+ constexpr size_t kDecodedFrames = 10;
+ constexpr size_t kOutputFrames = 10;
+ const std::string kShowExistingFrameStream("av1-show_existing_frame.ivf");
+ std::vector<scoped_refptr<DecoderBuffer>> buffers =
+ ReadIVF(kShowExistingFrameStream);
+ ASSERT_FALSE(buffers.empty());
+
+ // TODO(hiroh): Test what's unique about the show_existing_frame path.
+ std::vector<DecodeResult> expected = {DecodeResult::kConfigChange};
+ std::vector<DecodeResult> results;
+ EXPECT_CALL(*mock_accelerator_, CreateAV1Picture(/*apply_grain=*/false))
+ .Times(kDecodedFrames)
+ .WillRepeatedly(Return(base::MakeRefCounted<FakeAV1Picture>()));
+ EXPECT_CALL(
+ *mock_accelerator_,
+ SubmitDecode(
+ MatchesFrameSizeAndRenderSize(kFrameSize, kRenderSize),
+ MatchesYUV420SequenceHeader(kProfile, /*bitdepth=*/8, kFrameSize,
+ /*film_grain_params_present=*/false),
+ _, NonEmptyTileBuffers(), _))
+ .Times(kDecodedFrames)
+ .WillRepeatedly(Return(true));
+ EXPECT_CALL(*mock_accelerator_, OutputPicture(_))
+ .Times(kOutputFrames)
+ .WillRepeatedly(Return(true));
+
+ for (auto buffer : buffers) {
+ for (DecodeResult r : Decode(buffer))
+ results.push_back(r);
+ expected.push_back(DecodeResult::kRanOutOfStreamData);
+ }
+ EXPECT_EQ(results, expected);
+}
+
+TEST_F(AV1DecoderTest, Decode10bitStream) {
+ const std::string k10bitStream("bear-av1-320x180-10bit.webm");
+ std::vector<scoped_refptr<DecoderBuffer>> buffers = ReadWebm(k10bitStream);
+ ASSERT_FALSE(buffers.empty());
+ constexpr gfx::Size kFrameSize(320, 180);
+ constexpr gfx::Size kRenderSize(320, 180);
+ constexpr auto kProfile = libgav1::BitstreamProfile::kProfile0;
+ std::vector<DecodeResult> expected = {DecodeResult::kConfigChange};
+ std::vector<DecodeResult> results;
+ for (auto buffer : buffers) {
+ ::testing::InSequence sequence;
+ auto av1_picture = base::MakeRefCounted<AV1Picture>();
+ EXPECT_CALL(*mock_accelerator_, CreateAV1Picture(/*apply_grain=*/false))
+ .WillOnce(Return(av1_picture));
+ EXPECT_CALL(
+ *mock_accelerator_,
+ SubmitDecode(
+ MatchesFrameHeader(kFrameSize, kRenderSize,
+ /*show_existing_frame=*/false,
+ /*show_frame=*/true),
+ MatchesYUV420SequenceHeader(kProfile, /*bitdepth=*/10, kFrameSize,
+ /*film_grain_params_present=*/false),
+ _, NonEmptyTileBuffers(), MatchesFrameData(buffer)))
+ .WillOnce(Return(true));
+ EXPECT_CALL(*mock_accelerator_,
+ OutputPicture(SameAV1PictureInstance(av1_picture)))
+ .WillOnce(Return(true));
+ for (DecodeResult r : Decode(buffer))
+ results.push_back(r);
+ expected.push_back(DecodeResult::kRanOutOfStreamData);
+ testing::Mock::VerifyAndClearExpectations(mock_accelerator_);
+ }
+ EXPECT_EQ(results, expected);
+}
+
+TEST_F(AV1DecoderTest, DecodeSVCStream) {
+ const std::string kSVCStream("av1-svc-L2T2.ivf");
+ std::vector<scoped_refptr<DecoderBuffer>> buffers = ReadIVF(kSVCStream);
+ ASSERT_FALSE(buffers.empty());
+ std::vector<DecodeResult> expected = {DecodeResult::kDecodeError};
+ EXPECT_EQ(Decode(buffers[0]), expected);
+ // Once AV1Decoder gets into an error state, Decode() returns kDecodeError
+ // until Reset().
+ EXPECT_EQ(Decode(buffers[1]), expected);
+}
+
+TEST_F(AV1DecoderTest, DenyDecodeNonYUV420) {
+ const std::string kYUV444Stream("blackwhite_yuv444p-frame.av1.ivf");
+ std::vector<scoped_refptr<DecoderBuffer>> buffers = ReadIVF(kYUV444Stream);
+ ASSERT_EQ(buffers.size(), 1u);
+ std::vector<DecodeResult> expected = {DecodeResult::kDecodeError};
+ EXPECT_EQ(Decode(buffers[0]), expected);
+ // Once AV1Decoder gets into an error state, Decode() returns kDecodeError
+ // until Reset().
+ EXPECT_EQ(Decode(buffers[0]), expected);
+}
+
+TEST_F(AV1DecoderTest, DecodeFilmGrain) {
+ // Note: This video also contains show_existing_frame.
+ const std::string kFilmGrainStream("av1-film_grain.ivf");
+ std::vector<scoped_refptr<DecoderBuffer>> buffers = ReadIVF(kFilmGrainStream);
+ ASSERT_FALSE(buffers.empty());
+ constexpr size_t kDecodedFrames = 11;
+ constexpr size_t kOutputFrames = 10;
+ constexpr gfx::Size kFrameSize(352, 288);
+ constexpr gfx::Size kRenderSize(352, 288);
+ constexpr auto kProfile = libgav1::BitstreamProfile::kProfile0;
+ std::vector<DecodeResult> expected = {DecodeResult::kConfigChange};
+ std::vector<DecodeResult> results;
+
+  // TODO(hiroh): Test that CreateAV1Picture() is called with the right
+  // parameter, which depends on the frame.
+ EXPECT_CALL(*mock_accelerator_, CreateAV1Picture(_))
+ .Times(kDecodedFrames)
+ .WillRepeatedly(Return(base::MakeRefCounted<FakeAV1Picture>()));
+ EXPECT_CALL(
+ *mock_accelerator_,
+ SubmitDecode(
+ MatchesFrameSizeAndRenderSize(kFrameSize, kRenderSize),
+ MatchesYUV420SequenceHeader(kProfile, /*bitdepth=*/8, kFrameSize,
+ /*film_grain_params_present=*/true),
+ _, NonEmptyTileBuffers(), _))
+ .Times(kDecodedFrames)
+ .WillRepeatedly(Return(true));
+ EXPECT_CALL(*mock_accelerator_, OutputPicture(_))
+ .Times(kOutputFrames)
+ .WillRepeatedly(Return(true));
+
+ for (auto buffer : buffers) {
+ for (DecodeResult r : Decode(buffer))
+ results.push_back(r);
+ expected.push_back(DecodeResult::kRanOutOfStreamData);
+ }
+ EXPECT_EQ(results, expected);
+}
+
+// TODO(b/175895249): Test in isolation each of the conditions that trigger a
+// kConfigChange event.
+TEST_F(AV1DecoderTest, ConfigChange) {
+ constexpr auto kProfile = libgav1::BitstreamProfile::kProfile0;
+ constexpr auto kMediaProfile = VideoCodecProfile::AV1PROFILE_PROFILE_MAIN;
+ const std::string kSimpleStreams[] = {"bear-av1.webm",
+ "bear-av1-480x360.webm"};
+ constexpr gfx::Size kFrameSizes[] = {{320, 240}, {480, 360}};
+ constexpr gfx::Size kRenderSizes[] = {{320, 240}, {480, 360}};
+ std::vector<DecodeResult> expected;
+ std::vector<DecodeResult> results;
+ for (size_t i = 0; i < base::size(kSimpleStreams); ++i) {
+ std::vector<scoped_refptr<DecoderBuffer>> buffers =
+ ReadWebm(kSimpleStreams[i]);
+ ASSERT_FALSE(buffers.empty());
+ expected.push_back(DecodeResult::kConfigChange);
+ for (auto buffer : buffers) {
+ ::testing::InSequence sequence;
+ auto av1_picture = base::MakeRefCounted<AV1Picture>();
+ EXPECT_CALL(*mock_accelerator_, CreateAV1Picture(/*apply_grain=*/false))
+ .WillOnce(Return(av1_picture));
+ EXPECT_CALL(
+ *mock_accelerator_,
+ SubmitDecode(MatchesFrameHeader(kFrameSizes[i], kRenderSizes[i],
+ /*show_existing_frame=*/false,
+ /*show_frame=*/true),
+ MatchesYUV420SequenceHeader(
+ kProfile, /*bitdepth=*/8, kFrameSizes[i],
+ /*film_grain_params_present=*/false),
+ _, NonEmptyTileBuffers(), MatchesFrameData(buffer)))
+ .WillOnce(Return(true));
+ EXPECT_CALL(*mock_accelerator_,
+ OutputPicture(SameAV1PictureInstance(av1_picture)))
+ .WillOnce(Return(true));
+ for (DecodeResult r : Decode(buffer))
+ results.push_back(r);
+ expected.push_back(DecodeResult::kRanOutOfStreamData);
+ EXPECT_EQ(decoder_->GetProfile(), kMediaProfile);
+ EXPECT_EQ(decoder_->GetPicSize(), kFrameSizes[i]);
+ EXPECT_EQ(decoder_->GetVisibleRect(), gfx::Rect(kRenderSizes[i]));
+ EXPECT_EQ(decoder_->GetBitDepth(), 8u);
+ testing::Mock::VerifyAndClearExpectations(mock_accelerator_);
+ }
+ }
+ EXPECT_EQ(results, expected);
+}
+
+TEST_F(AV1DecoderTest, Reset) {
+ constexpr gfx::Size kFrameSize(320, 240);
+ constexpr gfx::Size kRenderSize(320, 240);
+ constexpr auto kProfile = libgav1::BitstreamProfile::kProfile0;
+ constexpr auto kMediaProfile = VideoCodecProfile::AV1PROFILE_PROFILE_MAIN;
+ constexpr uint8_t kBitDepth = 8u;
+ const std::string kSimpleStream("bear-av1.webm");
+ std::vector<DecodeResult> expected;
+ std::vector<DecodeResult> results;
+
+ std::vector<scoped_refptr<DecoderBuffer>> buffers = ReadWebm(kSimpleStream);
+ ASSERT_FALSE(buffers.empty());
+ expected.push_back(DecodeResult::kConfigChange);
+ for (int k = 0; k < 2; k++) {
+ for (auto buffer : buffers) {
+ ::testing::InSequence sequence;
+ auto av1_picture = base::MakeRefCounted<AV1Picture>();
+ EXPECT_CALL(*mock_accelerator_, CreateAV1Picture(/*apply_grain=*/false))
+ .WillOnce(Return(av1_picture));
+ EXPECT_CALL(
+ *mock_accelerator_,
+ SubmitDecode(
+ MatchesFrameHeader(kFrameSize, kRenderSize,
+ /*show_existing_frame=*/false,
+ /*show_frame=*/true),
+ MatchesYUV420SequenceHeader(kProfile, /*bitdepth=*/8, kFrameSize,
+ /*film_grain_params_present=*/false),
+ _, NonEmptyTileBuffers(), MatchesFrameData(buffer)))
+ .WillOnce(Return(true));
+ EXPECT_CALL(*mock_accelerator_,
+ OutputPicture(SameAV1PictureInstance(av1_picture)))
+ .WillOnce(Return(true));
+ for (DecodeResult r : Decode(buffer))
+ results.push_back(r);
+ expected.push_back(DecodeResult::kRanOutOfStreamData);
+ EXPECT_EQ(decoder_->GetProfile(), kMediaProfile);
+ EXPECT_EQ(decoder_->GetPicSize(), kFrameSize);
+ EXPECT_EQ(decoder_->GetVisibleRect(), gfx::Rect(kRenderSize));
+ EXPECT_EQ(decoder_->GetBitDepth(), kBitDepth);
+ testing::Mock::VerifyAndClearExpectations(mock_accelerator_);
+ }
+
+ Reset();
+    // Ensures Reset() doesn't clear the stored stream state.
+ EXPECT_EQ(decoder_->GetProfile(), kMediaProfile);
+ EXPECT_EQ(decoder_->GetPicSize(), kFrameSize);
+ EXPECT_EQ(decoder_->GetVisibleRect(), gfx::Rect(kRenderSize));
+ EXPECT_EQ(decoder_->GetBitDepth(), kBitDepth);
+ }
+ EXPECT_EQ(results, expected);
+}
+
+TEST_F(AV1DecoderTest, ResetAndConfigChange) {
+ constexpr auto kProfile = libgav1::BitstreamProfile::kProfile0;
+ constexpr auto kMediaProfile = VideoCodecProfile::AV1PROFILE_PROFILE_MAIN;
+ const std::string kSimpleStreams[] = {"bear-av1.webm",
+ "bear-av1-480x360.webm"};
+ constexpr gfx::Size kFrameSizes[] = {{320, 240}, {480, 360}};
+ constexpr gfx::Size kRenderSizes[] = {{320, 240}, {480, 360}};
+ constexpr uint8_t kBitDepth = 8u;
+ std::vector<DecodeResult> expected;
+ std::vector<DecodeResult> results;
+
+ for (size_t i = 0; i < base::size(kSimpleStreams); ++i) {
+ std::vector<scoped_refptr<DecoderBuffer>> buffers =
+ ReadWebm(kSimpleStreams[i]);
+ ASSERT_FALSE(buffers.empty());
+ expected.push_back(DecodeResult::kConfigChange);
+ for (auto buffer : buffers) {
+ ::testing::InSequence sequence;
+ auto av1_picture = base::MakeRefCounted<AV1Picture>();
+ EXPECT_CALL(*mock_accelerator_, CreateAV1Picture(/*apply_grain=*/false))
+ .WillOnce(Return(av1_picture));
+ EXPECT_CALL(
+ *mock_accelerator_,
+ SubmitDecode(MatchesFrameHeader(kFrameSizes[i], kRenderSizes[i],
+ /*show_existing_frame=*/false,
+ /*show_frame=*/true),
+ MatchesYUV420SequenceHeader(
+ kProfile, /*bitdepth=*/8, kFrameSizes[i],
+ /*film_grain_params_present=*/false),
+ _, NonEmptyTileBuffers(), MatchesFrameData(buffer)))
+ .WillOnce(Return(true));
+ EXPECT_CALL(*mock_accelerator_,
+ OutputPicture(SameAV1PictureInstance(av1_picture)))
+ .WillOnce(Return(true));
+ for (DecodeResult r : Decode(buffer))
+ results.push_back(r);
+ expected.push_back(DecodeResult::kRanOutOfStreamData);
+ EXPECT_EQ(decoder_->GetProfile(), kMediaProfile);
+ EXPECT_EQ(decoder_->GetPicSize(), kFrameSizes[i]);
+ EXPECT_EQ(decoder_->GetVisibleRect(), gfx::Rect(kRenderSizes[i]));
+ EXPECT_EQ(decoder_->GetBitDepth(), kBitDepth);
+ testing::Mock::VerifyAndClearExpectations(mock_accelerator_);
+ }
+
+ Reset();
+    // Ensures Reset() doesn't clear the stored stream state.
+ EXPECT_EQ(decoder_->GetProfile(), kMediaProfile);
+ EXPECT_EQ(decoder_->GetPicSize(), kFrameSizes[i]);
+ EXPECT_EQ(decoder_->GetVisibleRect(), gfx::Rect(kRenderSizes[i]));
+ EXPECT_EQ(decoder_->GetBitDepth(), kBitDepth);
+ }
+ EXPECT_EQ(results, expected);
+}
+
+// This test ensures that the AV1Decoder fails gracefully if, for some reason,
+// the reference frame state tracked by AV1Decoder becomes inconsistent with
+// the state tracked by libgav1.
+TEST_F(AV1DecoderTest, InconsistentReferenceFrameState) {
+ const std::string kSimpleStream("bear-av1.webm");
+ std::vector<scoped_refptr<DecoderBuffer>> buffers = ReadWebm(kSimpleStream);
+ ASSERT_GE(buffers.size(), 2u);
+
+ // In this test stream, the first frame is an intra frame and the second one
+ // is not. Let's start by decoding the first frame and inspecting the
+ // reference frame state.
+ {
+ ::testing::InSequence sequence;
+ auto av1_picture = base::MakeRefCounted<AV1Picture>();
+ EXPECT_CALL(*mock_accelerator_, CreateAV1Picture(/*apply_grain=*/false))
+ .WillOnce(Return(av1_picture));
+
+ AV1ReferenceFrameVector ref_frames;
+ EXPECT_CALL(*mock_accelerator_,
+ SubmitDecode(SameAV1PictureInstance(av1_picture), _, _, _, _))
+ .WillOnce(DoAll(SaveArg<2>(&ref_frames), Return(true)));
+ EXPECT_CALL(*mock_accelerator_,
+ OutputPicture(SameAV1PictureInstance(av1_picture)))
+ .WillOnce(Return(true));
+
+ // Before decoding, let's make sure that libgav1 doesn't think any reference
+ // frames are valid.
+ const libgav1::DecoderState* decoder_state = GetDecoderState();
+ ASSERT_TRUE(decoder_state);
+ EXPECT_EQ(base::STLCount(decoder_state->reference_frame, nullptr),
+ base::checked_cast<long>(decoder_state->reference_frame.size()));
+
+ // And to be consistent, AV1Decoder should not be tracking any reference
+ // frames yet.
+ const AV1ReferenceFrameVector& internal_ref_frames = GetReferenceFrames();
+ EXPECT_EQ(base::STLCount(internal_ref_frames, nullptr),
+ base::checked_cast<long>(internal_ref_frames.size()));
+
+ // Now try to decode one frame and make sure that the frame is intra.
+ std::vector<DecodeResult> expected = {DecodeResult::kConfigChange,
+ DecodeResult::kRanOutOfStreamData};
+ std::vector<DecodeResult> results = Decode(buffers[0]);
+ EXPECT_EQ(results, expected);
+ EXPECT_TRUE(libgav1::IsIntraFrame(av1_picture->frame_header.frame_type));
+
+ // SubmitDecode() should have received the reference frames before they were
+ // updated. That means that it should have received no reference frames
+ // since this SubmitDecode() refers to the first frame.
+ EXPECT_EQ(base::STLCount(ref_frames, nullptr),
+ base::checked_cast<long>(ref_frames.size()));
+
+ // Now let's inspect the current state of things (which is after the
+ // reference frames have been updated): libgav1 should have decided that all
+ // reference frames are valid.
+ ASSERT_TRUE(decoder_state);
+ EXPECT_EQ(base::STLCount(decoder_state->reference_frame, nullptr), 0);
+
+ // And to be consistent, all the reference frames tracked by the AV1Decoder
+ // should also be valid and they should be pointing to the only AV1Picture
+ // so far.
+ EXPECT_TRUE(
+ std::all_of(internal_ref_frames.begin(), internal_ref_frames.end(),
+ [&av1_picture](const scoped_refptr<AV1Picture>& ref_frame) {
+ return ref_frame.get() == av1_picture.get();
+ }));
+ testing::Mock::VerifyAndClearExpectations(mock_accelerator_);
+ }
+
+ // Now we will purposefully mess up the reference frame state tracked by the
+ // AV1Decoder by removing one of the reference frames. This should cause the
+ // decode of the second frame to fail because the AV1Decoder should detect the
+ // inconsistency.
+ GetReferenceFrames()[1] = nullptr;
+ auto av1_picture = base::MakeRefCounted<AV1Picture>();
+ EXPECT_CALL(*mock_accelerator_, CreateAV1Picture(/*apply_grain=*/false))
+ .WillOnce(Return(av1_picture));
+ std::vector<DecodeResult> expected = {DecodeResult::kDecodeError};
+ std::vector<DecodeResult> results = Decode(buffers[1]);
+ EXPECT_EQ(results, expected);
+
+ // Just for rigor, let's check the state at the moment of failure. First, the
+ // current frame should be an inter frame (and its header should have been
+ // stored in the AV1Picture).
+ EXPECT_EQ(av1_picture->frame_header.frame_type, libgav1::kFrameInter);
+
+ // Next, let's check the reference frames that frame needs.
+ for (int8_t i = 0; i < libgav1::kNumInterReferenceFrameTypes; ++i)
+ EXPECT_EQ(av1_picture->frame_header.reference_frame_index[i], i);
+
+ // Finally, let's check that libgav1 thought that all the reference frames
+ // were valid.
+ const libgav1::DecoderState* decoder_state = GetDecoderState();
+ ASSERT_TRUE(decoder_state);
+ EXPECT_EQ(base::STLCount(decoder_state->reference_frame, nullptr), 0);
+}
+
+// TODO(hiroh): Add more tests: reference frame tracking, render size change,
+// profile change, bit depth change, render size different than the frame size,
+// visible rectangle change in the middle of video sequence, reset while waiting
+// for buffers, flushing.
+} // namespace media
diff --git a/chromium/media/gpu/av1_picture.cc b/chromium/media/gpu/av1_picture.cc
new file mode 100644
index 00000000000..393f1072d51
--- /dev/null
+++ b/chromium/media/gpu/av1_picture.cc
@@ -0,0 +1,30 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/av1_picture.h"
+#include <memory>
+
+namespace media {
+AV1Picture::AV1Picture() = default;
+AV1Picture::~AV1Picture() = default;
+
+scoped_refptr<AV1Picture> AV1Picture::Duplicate() {
+ scoped_refptr<AV1Picture> dup_pic = CreateDuplicate();
+ if (!dup_pic)
+ return nullptr;
+
+  // Copy the members of AV1Picture and CodecPicture.
+  // A proper bitstream id is set in AV1Decoder.
+  // Note that decrypt_config_ is not used here, so it is not copied.
+ dup_pic->frame_header = frame_header;
+ dup_pic->set_bitstream_id(bitstream_id());
+ dup_pic->set_visible_rect(visible_rect());
+ dup_pic->set_colorspace(get_colorspace());
+ return dup_pic;
+}
+
+scoped_refptr<AV1Picture> AV1Picture::CreateDuplicate() {
+ return nullptr;
+}
+} // namespace media
diff --git a/chromium/media/gpu/av1_picture.h b/chromium/media/gpu/av1_picture.h
new file mode 100644
index 00000000000..505c9cd4670
--- /dev/null
+++ b/chromium/media/gpu/av1_picture.h
@@ -0,0 +1,38 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_AV1_PICTURE_H_
+#define MEDIA_GPU_AV1_PICTURE_H_
+
+#include <memory>
+
+#include "media/gpu/codec_picture.h"
+#include "media/gpu/media_gpu_export.h"
+#include "third_party/libgav1/src/src/utils/types.h"
+
+namespace media {
+// AV1Picture carries the parsed frame header needed for decoding an AV1 frame.
+// It also owns the decoded frame itself.
+class MEDIA_GPU_EXPORT AV1Picture : public CodecPicture {
+ public:
+ AV1Picture();
+ AV1Picture(const AV1Picture&) = delete;
+ AV1Picture& operator=(const AV1Picture&) = delete;
+
+  // Creates a duplicate instance and copies the data to it. This is used to
+  // support the AV1 show_existing_frame feature. Returns a scoped_refptr
+  // pointing to the duplicate instance, or nullptr on failure.
+ scoped_refptr<AV1Picture> Duplicate();
+
+ libgav1::ObuFrameHeader frame_header = {};
+
+ protected:
+ ~AV1Picture() override;
+
+ private:
+  // Creates a duplicate instance. The default implementation returns nullptr.
+ virtual scoped_refptr<AV1Picture> CreateDuplicate();
+};
+} // namespace media
+#endif // MEDIA_GPU_AV1_PICTURE_H_
diff --git a/chromium/media/gpu/chromeos/chromeos_video_decoder_factory.cc b/chromium/media/gpu/chromeos/chromeos_video_decoder_factory.cc
index 8a2d1933ab6..04b8f52b440 100644
--- a/chromium/media/gpu/chromeos/chromeos_video_decoder_factory.cc
+++ b/chromium/media/gpu/chromeos/chromeos_video_decoder_factory.cc
@@ -27,14 +27,17 @@ namespace {
// Gets a list of the available functions for creating VideoDecoders.
VideoDecoderPipeline::CreateDecoderFunctions GetCreateDecoderFunctions() {
+  // Usually only one of USE_VAAPI or USE_V4L2_CODEC is defined on ChromeOS,
+  // except for Chromeboxes with companion video acceleration chips, which
+  // have both. In those cases, prefer the V4L2 creation function.
constexpr VideoDecoderPipeline::CreateDecoderFunction kCreateVDFuncs[] = {
-#if BUILDFLAG(USE_VAAPI)
- &VaapiVideoDecoder::Create,
-#endif // BUILDFLAG(USE_VAAPI)
-
#if BUILDFLAG(USE_V4L2_CODEC)
&V4L2VideoDecoder::Create,
#endif // BUILDFLAG(USE_V4L2_CODEC)
+
+#if BUILDFLAG(USE_VAAPI)
+ &VaapiVideoDecoder::Create,
+#endif // BUILDFLAG(USE_VAAPI)
};
return VideoDecoderPipeline::CreateDecoderFunctions(
diff --git a/chromium/media/gpu/chromeos/chromeos_video_decoder_factory.h b/chromium/media/gpu/chromeos/chromeos_video_decoder_factory.h
index b2ff2b7b85a..dc4992dae6a 100644
--- a/chromium/media/gpu/chromeos/chromeos_video_decoder_factory.h
+++ b/chromium/media/gpu/chromeos/chromeos_video_decoder_factory.h
@@ -9,8 +9,8 @@
#include "base/memory/scoped_refptr.h"
#include "media/base/media_log.h"
+#include "media/base/supported_video_decoder_config.h"
#include "media/gpu/media_gpu_export.h"
-#include "media/video/supported_video_decoder_config.h"
namespace base {
class SequencedTaskRunner;
diff --git a/chromium/media/gpu/chromeos/fourcc.cc b/chromium/media/gpu/chromeos/fourcc.cc
index fb179e65bb1..af87c3fd762 100644
--- a/chromium/media/gpu/chromeos/fourcc.cc
+++ b/chromium/media/gpu/chromeos/fourcc.cc
@@ -98,6 +98,7 @@ base::Optional<Fourcc> Fourcc::FromVideoPixelFormat(
case PIXEL_FORMAT_Y16:
case PIXEL_FORMAT_XR30:
case PIXEL_FORMAT_XB30:
+ case PIXEL_FORMAT_RGBAF16:
case PIXEL_FORMAT_UNKNOWN:
break;
}
@@ -139,6 +140,7 @@ base::Optional<Fourcc> Fourcc::FromVideoPixelFormat(
case PIXEL_FORMAT_XR30:
case PIXEL_FORMAT_XB30:
case PIXEL_FORMAT_BGRA:
+ case PIXEL_FORMAT_RGBAF16:
case PIXEL_FORMAT_UNKNOWN:
break;
}
diff --git a/chromium/media/gpu/chromeos/gpu_memory_buffer_video_frame_mapper.cc b/chromium/media/gpu/chromeos/gpu_memory_buffer_video_frame_mapper.cc
index 62978d30410..0954dd8fda5 100644
--- a/chromium/media/gpu/chromeos/gpu_memory_buffer_video_frame_mapper.cc
+++ b/chromium/media/gpu/chromeos/gpu_memory_buffer_video_frame_mapper.cc
@@ -69,8 +69,13 @@ scoped_refptr<VideoFrame> GpuMemoryBufferVideoFrameMapper::Map(
video_frame->timestamp());
}
- if (!mapped_frame)
+ if (!mapped_frame) {
+ gmb->Unmap();
return nullptr;
+ }
+
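+  // Carry over the color space and metadata from the original frame.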
+ mapped_frame->set_color_space(video_frame->ColorSpace());
+ mapped_frame->metadata().MergeMetadataFrom(video_frame->metadata());
// Pass |video_frame| so that it outlives |mapped_frame| and the mapped buffer
// is unmapped on destruction.
diff --git a/chromium/media/gpu/chromeos/image_processor_test.cc b/chromium/media/gpu/chromeos/image_processor_test.cc
index c13ee59f2d3..193c9206ed1 100644
--- a/chromium/media/gpu/chromeos/image_processor_test.cc
+++ b/chromium/media/gpu/chromeos/image_processor_test.cc
@@ -223,7 +223,7 @@ TEST_P(ImageProcessorParamTest, ConvertOneTime_MemToMem) {
EXPECT_TRUE(ip_client->WaitForFrameProcessors());
}
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
// We don't yet have a function to create Dmabuf-backed VideoFrames on any
// platform except ChromeOS, so the MemToDmabuf test is limited to ChromeOS.
TEST_P(ImageProcessorParamTest, ConvertOneTime_DmabufToMem) {
@@ -287,7 +287,7 @@ TEST_P(ImageProcessorParamTest, ConvertOneTime_GmbToGmb) {
EXPECT_EQ(ip_client->GetNumOfProcessedImages(), 1u);
EXPECT_TRUE(ip_client->WaitForFrameProcessors());
}
-#endif // BUILDFLAG(IS_ASH)
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
// BGRA -> NV12
// I420 -> NV12
@@ -335,7 +335,7 @@ INSTANTIATE_TEST_SUITE_P(
std::make_tuple(kNV12Image180, kNV12Image90),
std::make_tuple(kNV12Image180, kNV12Image)));
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
// TODO(hiroh): Add more tests.
// MEM->DMABUF (V4L2VideoEncodeAccelerator),
#endif
diff --git a/chromium/media/gpu/chromeos/libyuv_image_processor_backend.cc b/chromium/media/gpu/chromeos/libyuv_image_processor_backend.cc
index a9de6bc113d..1c7566c7dcf 100644
--- a/chromium/media/gpu/chromeos/libyuv_image_processor_backend.cc
+++ b/chromium/media/gpu/chromeos/libyuv_image_processor_backend.cc
@@ -19,60 +19,6 @@ namespace media {
namespace {
-// TODO(https://bugs.chromium.org/p/libyuv/issues/detail?id=838): Remove
-// this once libyuv implements NV12Scale and use the libyuv::NV12Scale().
-// This is copy-pasted from
-// third_party/webrtc/common_video/libyuv/include/webrtc_libyuv.h.
-void NV12Scale(uint8_t* tmp_buffer,
- const uint8_t* src_y,
- int src_stride_y,
- const uint8_t* src_uv,
- int src_stride_uv,
- int src_width,
- int src_height,
- uint8_t* dst_y,
- int dst_stride_y,
- uint8_t* dst_uv,
- int dst_stride_uv,
- int dst_width,
- int dst_height) {
- const int src_chroma_width = (src_width + 1) / 2;
- const int src_chroma_height = (src_height + 1) / 2;
-
- if (src_width == dst_width && src_height == dst_height) {
- // No scaling.
- libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, src_width,
- src_height);
- libyuv::CopyPlane(src_uv, src_stride_uv, dst_uv, dst_stride_uv,
- src_chroma_width * 2, src_chroma_height);
- return;
- }
-
- // Scaling.
- // Allocate temporary memory for spitting UV planes and scaling them.
- const int dst_chroma_width = (dst_width + 1) / 2;
- const int dst_chroma_height = (dst_height + 1) / 2;
-
- uint8_t* const src_u = tmp_buffer;
- uint8_t* const src_v = src_u + src_chroma_width * src_chroma_height;
- uint8_t* const dst_u = src_v + src_chroma_width * src_chroma_height;
- uint8_t* const dst_v = dst_u + dst_chroma_width * dst_chroma_height;
-
- // Split source UV plane into separate U and V plane using the temporary data.
- libyuv::SplitUVPlane(src_uv, src_stride_uv, src_u, src_chroma_width, src_v,
- src_chroma_width, src_chroma_width, src_chroma_height);
-
- // Scale the planes.
- libyuv::I420Scale(
- src_y, src_stride_y, src_u, src_chroma_width, src_v, src_chroma_width,
- src_width, src_height, dst_y, dst_stride_y, dst_u, dst_chroma_width,
- dst_v, dst_chroma_width, dst_width, dst_height, libyuv::kFilterBox);
-
- // Merge the UV planes into the destination.
- libyuv::MergeUVPlane(dst_u, dst_chroma_width, dst_v, dst_chroma_width, dst_uv,
- dst_stride_uv, dst_chroma_width, dst_chroma_height);
-}
-
// TODO(https://bugs.chromium.org/p/libyuv/issues/detail?id=840): Remove
// this once libyuv implements NV12Rotate() and use the libyuv::NV12Rotate().
bool NV12Rotate(uint8_t* tmp_buffer,
@@ -254,19 +200,6 @@ std::unique_ptr<ImageProcessorBackend> LibYUVImageProcessorBackend::Create(
<< output_config.visible_rect.ToString();
return nullptr;
}
- // Down-scaling support only.
- // This restriction is to simplify |intermediate_frame_| creation. It is
- // used as |tmp_buffer| in NV12Scale().
- // TODO(hiroh): Remove this restriction once libyuv:NV12Scale() is arrived.
- if (!gfx::Rect(input_config.visible_rect.size())
- .Contains(gfx::Rect(output_config.visible_rect.size())) &&
- relative_rotation == VIDEO_ROTATION_0) {
- VLOGF(2) << "Down-scaling support only, input_config.visible_rect="
- << input_config.visible_rect.ToString()
- << ", output_config.visible_rect="
- << output_config.visible_rect.ToString();
- return nullptr;
- }
}
scoped_refptr<VideoFrame> intermediate_frame;
@@ -429,19 +362,19 @@ int LibYUVImageProcessorBackend::DoConversion(const VideoFrame* const input,
}
// Scaling mode.
- // The size of |tmp_buffer| of NV12Scale() should be
- // input_visible_rect().GetArea() / 2 +
- // output_visible_rect().GetArea() / 2. Although |intermediate_frame_|
- // is much larger than the required size, we use the frame to simplify
- // the code.
- NV12Scale(intermediate_frame_->data(0),
- input->visible_data(VideoFrame::kYPlane),
- input->stride(VideoFrame::kYPlane),
- input->visible_data(VideoFrame::kUPlane),
- input->stride(VideoFrame::kUPlane),
- input->visible_rect().width(), input->visible_rect().height(),
- Y_UV_DATA(output), output->visible_rect().width(),
- output->visible_rect().height());
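+      // libyuv::NV12Scale() scales the Y plane and the interleaved UV plane
+      // directly, so the temporary buffer previously used to split and merge
+      // the UV plane is no longer needed.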
+ libyuv::NV12Scale(input->visible_data(VideoFrame::kYPlane),
+ input->stride(VideoFrame::kYPlane),
+ input->visible_data(VideoFrame::kUVPlane),
+ input->stride(VideoFrame::kUVPlane),
+ input->visible_rect().width(),
+ input->visible_rect().height(),
+ output->visible_data(VideoFrame::kYPlane),
+ output->stride(VideoFrame::kYPlane),
+ output->visible_data(VideoFrame::kUVPlane),
+ output->stride(VideoFrame::kUVPlane),
+ output->visible_rect().width(),
+ output->visible_rect().height(),
+ libyuv::kFilterBilinear);
return 0;
default:
VLOGF(1) << "Unexpected input format: " << input->format();
diff --git a/chromium/media/gpu/chromeos/mailbox_video_frame_converter.cc b/chromium/media/gpu/chromeos/mailbox_video_frame_converter.cc
index 515146c48ba..c7ce5363d4b 100644
--- a/chromium/media/gpu/chromeos/mailbox_video_frame_converter.cc
+++ b/chromium/media/gpu/chromeos/mailbox_video_frame_converter.cc
@@ -237,8 +237,8 @@ void MailboxVideoFrameConverter::WrapMailboxAndVideoFrameAndOutput(
GetRectSizeFromOrigin(frame->visible_rect()), frame->visible_rect(),
frame->natural_size(), frame->timestamp());
mailbox_frame->set_color_space(frame->ColorSpace());
- mailbox_frame->set_metadata(*(frame->metadata()));
- mailbox_frame->metadata()->read_lock_fences_enabled = true;
+ mailbox_frame->set_metadata(frame->metadata());
+ mailbox_frame->metadata().read_lock_fences_enabled = true;
output_cb_.Run(mailbox_frame);
}
diff --git a/chromium/media/gpu/chromeos/platform_video_frame_pool.cc b/chromium/media/gpu/chromeos/platform_video_frame_pool.cc
index 5c1cdd7792c..401aa08aa93 100644
--- a/chromium/media/gpu/chromeos/platform_video_frame_pool.cc
+++ b/chromium/media/gpu/chromeos/platform_video_frame_pool.cc
@@ -267,7 +267,7 @@ void PlatformVideoFramePool::OnFrameReleased(
if (IsSameFormat_Locked(origin_frame->format(), origin_frame->coded_size(),
origin_frame->visible_rect(),
- origin_frame->metadata()->hw_protected)) {
+ origin_frame->metadata().hw_protected)) {
InsertFreeFrame_Locked(std::move(origin_frame));
}
diff --git a/chromium/media/gpu/chromeos/platform_video_frame_utils.cc b/chromium/media/gpu/chromeos/platform_video_frame_utils.cc
index ce31a4e5454..24dd4216171 100644
--- a/chromium/media/gpu/chromeos/platform_video_frame_utils.cc
+++ b/chromium/media/gpu/chromeos/platform_video_frame_utils.cc
@@ -138,8 +138,7 @@ gfx::GpuMemoryBufferHandle AllocateGpuMemoryBufferHandle(
gfx::BufferUsage buffer_usage,
base::ScopedClosureRunner& destroy_cb) {
DCHECK(factory ||
- buffer_usage ==
- gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE);
+ buffer_usage == gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE);
gfx::GpuMemoryBufferHandle gmb_handle;
auto buffer_format = VideoPixelFormatToGfxBufferFormat(pixel_format);
if (!buffer_format)
diff --git a/chromium/media/gpu/chromeos/platform_video_frame_utils.h b/chromium/media/gpu/chromeos/platform_video_frame_utils.h
index e01a9818bd6..f1bd0d85aef 100644
--- a/chromium/media/gpu/chromeos/platform_video_frame_utils.h
+++ b/chromium/media/gpu/chromeos/platform_video_frame_utils.h
@@ -27,7 +27,7 @@ namespace media {
// GpuMemoryBuffer and it must outlive the returned VideoFrame. If it's null,
// the buffer is allocated using the render node (this is intended to be used
// only for the internals of video encoding when the usage is
-// SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE). It's safe to call this function
+// VEA_READ_CAMERA_AND_CPU_READ_WRITE). It's safe to call this function
// concurrently from multiple threads (as long as either
// |gpu_memory_buffer_factory| is thread-safe or nullptr).
MEDIA_GPU_EXPORT scoped_refptr<VideoFrame> CreateGpuMemoryBufferVideoFrame(
@@ -45,7 +45,7 @@ MEDIA_GPU_EXPORT scoped_refptr<VideoFrame> CreateGpuMemoryBufferVideoFrame(
// video frame's storage and it must outlive the returned VideoFrame. If it's
// null, the buffer is allocated using the render node (this is intended to be
// used only for the internals of video encoding when the usage is
-// SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE). It's safe to call this function
+// VEA_READ_CAMERA_AND_CPU_READ_WRITE). It's safe to call this function
// concurrently from multiple threads (as long as either
// |gpu_memory_buffer_factory| is thread-safe or nullptr).
MEDIA_GPU_EXPORT scoped_refptr<VideoFrame> CreatePlatformVideoFrame(
@@ -63,7 +63,7 @@ MEDIA_GPU_EXPORT scoped_refptr<VideoFrame> CreatePlatformVideoFrame(
// If |gpu_memory_buffer_factory| is not null, it's used to allocate the
// video frame's storage. If it's null, the storage is allocated using the
// render node (this is intended to be used only for the internals of video
-// encoding when the usage is SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE). It's
+// encoding when the usage is VEA_READ_CAMERA_AND_CPU_READ_WRITE). It's
// safe to call this function concurrently from multiple threads (as long as
// either |gpu_memory_buffer_factory| is thread-safe or nullptr).
MEDIA_GPU_EXPORT base::Optional<VideoFrameLayout> GetPlatformVideoFrameLayout(
diff --git a/chromium/media/gpu/chromeos/platform_video_frame_utils_unittest.cc b/chromium/media/gpu/chromeos/platform_video_frame_utils_unittest.cc
index e9152486be3..ba698cb838d 100644
--- a/chromium/media/gpu/chromeos/platform_video_frame_utils_unittest.cc
+++ b/chromium/media/gpu/chromeos/platform_video_frame_utils_unittest.cc
@@ -107,6 +107,13 @@ class FakeGpuMemoryBufferFactory : public gpu::GpuMemoryBufferFactory {
gpu_memory_buffers_[client_id].erase(id);
}
+ bool FillSharedMemoryRegionWithBufferContents(
+ gfx::GpuMemoryBufferHandle buffer_handle,
+ base::UnsafeSharedMemoryRegion shared_memory) override {
+ NOTIMPLEMENTED();
+ return false;
+ }
+
// Type-checking downcast routine.
gpu::ImageFactory* AsImageFactory() override {
NOTIMPLEMENTED();
@@ -159,7 +166,7 @@ TEST(PlatformVideoFrameUtilsTest, CreateVideoFrame) {
constexpr gfx::Size kNaturalSize(kCodedSize);
constexpr auto kTimeStamp = base::TimeDelta::FromMilliseconds(1234);
constexpr gfx::BufferUsage kBufferUsage =
- gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE;
+ gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE;
auto gpu_memory_buffer_factory =
std::make_unique<FakeGpuMemoryBufferFactory>();
diff --git a/chromium/media/gpu/chromeos/vd_video_decode_accelerator.cc b/chromium/media/gpu/chromeos/vd_video_decode_accelerator.cc
index 4b5ca97fd64..e582898f58c 100644
--- a/chromium/media/gpu/chromeos/vd_video_decode_accelerator.cc
+++ b/chromium/media/gpu/chromeos/vd_video_decode_accelerator.cc
@@ -408,10 +408,9 @@ base::Optional<Picture> VdVideoDecodeAccelerator::GetPicture(
}
int32_t picture_buffer_id = it->second;
int32_t bitstream_id = FakeTimestampToBitstreamId(frame.timestamp());
- bool allow_overlay = frame.metadata()->allow_overlay;
return base::make_optional(Picture(picture_buffer_id, bitstream_id,
frame.visible_rect(), frame.ColorSpace(),
- allow_overlay));
+ frame.metadata().allow_overlay));
}
// static
diff --git a/chromium/media/gpu/chromeos/video_decoder_pipeline.cc b/chromium/media/gpu/chromeos/video_decoder_pipeline.cc
index 8ff5ac44366..aed6668f16e 100644
--- a/chromium/media/gpu/chromeos/video_decoder_pipeline.cc
+++ b/chromium/media/gpu/chromeos/video_decoder_pipeline.cc
@@ -157,10 +157,14 @@ void VideoDecoderPipeline::DestroyAsync(
std::string VideoDecoderPipeline::GetDisplayName() const {
DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
-
return "VideoDecoderPipeline";
}
+VideoDecoderType VideoDecoderPipeline::GetDecoderType() const {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
+ return VideoDecoderType::kChromeOs;
+}
+
bool VideoDecoderPipeline::IsPlatformDecoder() const {
DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
@@ -190,7 +194,7 @@ void VideoDecoderPipeline::Initialize(const VideoDecoderConfig& config,
CdmContext* cdm_context,
InitCB init_cb,
const OutputCB& output_cb,
- const WaitingCB& /* waiting_cb */) {
+ const WaitingCB& waiting_cb) {
DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
VLOGF(2) << "config: " << config.AsHumanReadableString();
@@ -218,18 +222,21 @@ void VideoDecoderPipeline::Initialize(const VideoDecoderConfig& config,
}
#endif // !BUILDFLAG(USE_CHROMEOS_PROTECTED_MEDIA)
- needs_bitstream_conversion_ = (config.codec() == kCodecH264);
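+  // H.264 and HEVC bitstreams may be packaged as AVCC/HVCC and then need
+  // conversion (e.g. to Annex B) by the client before decoding.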
+ needs_bitstream_conversion_ =
+ (config.codec() == kCodecH264) || (config.codec() == kCodecHEVC);
decoder_task_runner_->PostTask(
- FROM_HERE, base::BindOnce(&VideoDecoderPipeline::InitializeTask,
- decoder_weak_this_, config, cdm_context,
- std::move(init_cb), std::move(output_cb)));
+ FROM_HERE,
+ base::BindOnce(&VideoDecoderPipeline::InitializeTask, decoder_weak_this_,
+ config, cdm_context, std::move(init_cb),
+ std::move(output_cb), std::move(waiting_cb)));
}
void VideoDecoderPipeline::InitializeTask(const VideoDecoderConfig& config,
CdmContext* cdm_context,
InitCB init_cb,
- const OutputCB& output_cb) {
+ const OutputCB& output_cb,
+ const WaitingCB& waiting_cb) {
DVLOGF(3);
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
DCHECK(!init_cb_);
@@ -242,19 +249,22 @@ void VideoDecoderPipeline::InitializeTask(const VideoDecoderConfig& config,
// resolution. Subsequent initializations are marked by |decoder_| already
// existing.
if (!decoder_) {
- CreateAndInitializeVD(config, cdm_context, Status());
+ CreateAndInitializeVD(config, cdm_context, std::move(waiting_cb), Status());
} else {
decoder_->Initialize(
config, cdm_context,
base::BindOnce(&VideoDecoderPipeline::OnInitializeDone,
- decoder_weak_this_, config, cdm_context, Status()),
+ decoder_weak_this_, config, cdm_context, waiting_cb,
+ Status()),
base::BindRepeating(&VideoDecoderPipeline::OnFrameDecoded,
- decoder_weak_this_));
+ decoder_weak_this_),
+ waiting_cb);
}
}
void VideoDecoderPipeline::CreateAndInitializeVD(VideoDecoderConfig config,
CdmContext* cdm_context,
+ const WaitingCB& waiting_cb,
Status parent_error) {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
DCHECK(init_cb_);
@@ -280,7 +290,7 @@ void VideoDecoderPipeline::CreateAndInitializeVD(VideoDecoderConfig config,
DVLOGF(2) << "|decoder_| creation failed, trying again with the next "
"available create function.";
return CreateAndInitializeVD(
- config, cdm_context,
+ config, cdm_context, std::move(waiting_cb),
AppendOrForwardStatus(parent_error,
StatusCode::kDecoderFailedCreation));
}
@@ -288,14 +298,16 @@ void VideoDecoderPipeline::CreateAndInitializeVD(VideoDecoderConfig config,
decoder_->Initialize(
config, cdm_context,
base::BindOnce(&VideoDecoderPipeline::OnInitializeDone,
- decoder_weak_this_, config, cdm_context,
+ decoder_weak_this_, config, cdm_context, waiting_cb,
std::move(parent_error)),
base::BindRepeating(&VideoDecoderPipeline::OnFrameDecoded,
- decoder_weak_this_));
+ decoder_weak_this_),
+ waiting_cb);
}
void VideoDecoderPipeline::OnInitializeDone(VideoDecoderConfig config,
CdmContext* cdm_context,
+ const WaitingCB& waiting_cb,
Status parent_error,
Status status) {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
@@ -314,7 +326,7 @@ void VideoDecoderPipeline::OnInitializeDone(VideoDecoderConfig config,
DVLOGF(3) << "|decoder_| initialization failed, trying again with the next "
"available create function.";
decoder_ = nullptr;
- CreateAndInitializeVD(config, cdm_context,
+ CreateAndInitializeVD(config, cdm_context, waiting_cb,
AppendOrForwardStatus(parent_error, std::move(status)));
}
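Taken together, CreateAndInitializeVD() and OnInitializeDone() implement a fallback chain: the candidate create functions are tried in order, and each creation or initialization failure is appended to an accumulating Status before the next candidate runs. A self-contained sketch of that control flow using std types only (the real code is asynchronous, sequence-checked, and now also threads waiting_cb through; this deliberately omits all of that):

#include <functional>
#include <iostream>
#include <string>
#include <vector>

// Stand-in for media::Status: an accumulating chain of error causes, in the
// spirit of AppendOrForwardStatus().
using Status = std::vector<std::string>;

struct Decoder {
  bool Initialize() const { return ok; }
  bool ok = false;
};

// Try each create function in order; on failure, record the cause and fall
// through to the next candidate, mirroring CreateAndInitializeVD() /
// OnInitializeDone().
bool CreateAndInitialize(const std::vector<std::function<Decoder*()>>& funcs,
                         size_t index, Status& error_chain) {
  if (index == funcs.size())
    return false;  // All candidates exhausted: report the whole chain.
  Decoder* decoder = funcs[index]();
  if (!decoder) {
    error_chain.push_back("kDecoderFailedCreation");
    return CreateAndInitialize(funcs, index + 1, error_chain);
  }
  if (!decoder->Initialize()) {
    error_chain.push_back("kDecoderFailedInitialization");
    return CreateAndInitialize(funcs, index + 1, error_chain);
  }
  return true;
}

int main() {
  static Decoder working{true};
  const std::vector<std::function<Decoder*()>> funcs = {
      [] { return static_cast<Decoder*>(nullptr); },  // First backend fails.
      [] { return &working; }};                       // Second succeeds.
  Status chain;
  const bool ok = CreateAndInitialize(funcs, 0, chain);
  std::cout << (ok ? "ok" : "fail") << " after " << chain.size()
            << " recorded failure(s)\n";  // "ok after 1 recorded failure(s)"
}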
@@ -431,9 +443,9 @@ void VideoDecoderPipeline::OnFrameConverted(scoped_refptr<VideoFrame> frame) {
}
// Flag that the video frame is capable of being put in an overlay.
- frame->metadata()->allow_overlay = true;
+ frame->metadata().allow_overlay = true;
// Flag that the video frame was decoded in a power efficient way.
- frame->metadata()->power_efficient = true;
+ frame->metadata().power_efficient = true;
// MojoVideoDecoderService expects the |output_cb_| to be called on the client
// task runner, even though media::VideoDecoder states frames should be output
diff --git a/chromium/media/gpu/chromeos/video_decoder_pipeline.h b/chromium/media/gpu/chromeos/video_decoder_pipeline.h
index d286ae2e6b1..c658efbe54a 100644
--- a/chromium/media/gpu/chromeos/video_decoder_pipeline.h
+++ b/chromium/media/gpu/chromeos/video_decoder_pipeline.h
@@ -87,7 +87,8 @@ class MEDIA_GPU_EXPORT DecoderInterface {
virtual void Initialize(const VideoDecoderConfig& config,
CdmContext* cdm_context,
InitCB init_cb,
- const OutputCB& output_cb) = 0;
+ const OutputCB& output_cb,
+ const WaitingCB& waiting_cb) = 0;
// Requests a |buffer| to be decoded. The decode result will be returned via
// |decode_cb|.
@@ -147,6 +148,7 @@ class MEDIA_GPU_EXPORT VideoDecoderPipeline : public VideoDecoder,
static void DestroyAsync(std::unique_ptr<VideoDecoderPipeline>);
// VideoDecoder implementation
+ VideoDecoderType GetDecoderType() const override;
std::string GetDisplayName() const override;
bool IsPlatformDecoder() const override;
int GetMaxDecodeRequests() const override;
@@ -183,15 +185,18 @@ class MEDIA_GPU_EXPORT VideoDecoderPipeline : public VideoDecoder,
void InitializeTask(const VideoDecoderConfig& config,
CdmContext* cdm_context,
InitCB init_cb,
- const OutputCB& output_cb);
+ const OutputCB& output_cb,
+ const WaitingCB& waiting_cb);
void ResetTask(base::OnceClosure closure);
void DecodeTask(scoped_refptr<DecoderBuffer> buffer, DecodeCB decode_cb);
void CreateAndInitializeVD(VideoDecoderConfig config,
CdmContext* cdm_context,
+ const WaitingCB& waiting_cb,
Status parent_error);
void OnInitializeDone(VideoDecoderConfig config,
CdmContext* cdm_context,
+ const WaitingCB& waiting_cb,
Status parent_error,
Status status);
diff --git a/chromium/media/gpu/chromeos/video_decoder_pipeline_unittest.cc b/chromium/media/gpu/chromeos/video_decoder_pipeline_unittest.cc
index f46ef9ea434..bb8136f68c8 100644
--- a/chromium/media/gpu/chromeos/video_decoder_pipeline_unittest.cc
+++ b/chromium/media/gpu/chromeos/video_decoder_pipeline_unittest.cc
@@ -56,9 +56,12 @@ class MockDecoder : public DecoderInterface {
base::WeakPtr<DecoderInterface::Client>(nullptr)) {}
~MockDecoder() override = default;
- MOCK_METHOD4(
- Initialize,
- void(const VideoDecoderConfig&, CdmContext*, InitCB, const OutputCB&));
+ MOCK_METHOD5(Initialize,
+ void(const VideoDecoderConfig&,
+ CdmContext*,
+ InitCB,
+ const OutputCB&,
+ const WaitingCB&));
MOCK_METHOD2(Decode, void(scoped_refptr<DecoderBuffer>, DecodeCB));
MOCK_METHOD1(Reset, void(base::OnceClosure));
MOCK_METHOD0(ApplyResolutionChange, void());
@@ -128,7 +131,7 @@ class VideoDecoderPipelineTest
scoped_refptr<base::SequencedTaskRunner> /* decoder_task_runner */,
base::WeakPtr<DecoderInterface::Client> /* client */) {
std::unique_ptr<MockDecoder> decoder(new MockDecoder());
- EXPECT_CALL(*decoder, Initialize(_, _, _, _))
+ EXPECT_CALL(*decoder, Initialize(_, _, _, _, _))
.WillOnce(::testing::WithArgs<2>([](VideoDecoder::InitCB init_cb) {
std::move(init_cb).Run(OkStatus());
}));
@@ -140,7 +143,7 @@ class VideoDecoderPipelineTest
scoped_refptr<base::SequencedTaskRunner> /* decoder_task_runner */,
base::WeakPtr<DecoderInterface::Client> /* client */) {
std::unique_ptr<MockDecoder> decoder(new MockDecoder());
- EXPECT_CALL(*decoder, Initialize(_, _, _, _))
+ EXPECT_CALL(*decoder, Initialize(_, _, _, _, _))
.WillOnce(::testing::WithArgs<2>([](VideoDecoder::InitCB init_cb) {
std::move(init_cb).Run(StatusCode::kDecoderFailedInitialization);
}));
@@ -163,9 +166,8 @@ TEST_P(VideoDecoderPipelineTest, Initialize) {
SetCreateDecoderFunctions(GetParam().create_decoder_functions);
base::RunLoop run_loop;
- base::Closure quit_closure = run_loop.QuitClosure();
EXPECT_CALL(*this, OnInit(MatchesStatusCode(GetParam().status_code)))
- .WillOnce(RunClosure(quit_closure));
+ .WillOnce(RunClosure(run_loop.QuitClosure()));
InitializeDecoder();
run_loop.Run();
diff --git a/chromium/media/gpu/decode_surface_handler.h b/chromium/media/gpu/decode_surface_handler.h
index 4183f31f083..35a7586406a 100644
--- a/chromium/media/gpu/decode_surface_handler.h
+++ b/chromium/media/gpu/decode_surface_handler.h
@@ -6,10 +6,7 @@
#define MEDIA_GPU_DECODE_SURFACE_HANDLER_H_
#include "base/memory/scoped_refptr.h"
-
-namespace gfx {
-class Rect;
-}
+#include "ui/gfx/geometry/rect.h"
namespace media {
@@ -25,15 +22,37 @@ class DecodeSurfaceHandler {
DecodeSurfaceHandler() = default;
virtual ~DecodeSurfaceHandler() = default;
- // Returns a T for decoding into, if available, or nullptr.
+ // Returns a T for decoding into and for output, if available, or nullptr.
virtual scoped_refptr<T> CreateSurface() = 0;
+ // Used by implementations that scale the video between decode and output. In
+ // those cases, the CreateSurface() call will be used for allocating the
+ // output surfaces and CreateDecodeSurface() will be used for decoding
+ // surfaces. This mode can be detected by calling IsScalingDecode().
+ virtual scoped_refptr<T> CreateDecodeSurface() { return nullptr; }
+
+ // Returns true if there are separate surfaces for decoding and output due to
+ // a scaling operation being performed between the two.
+ virtual bool IsScalingDecode() { return false; }
+
+  // Returns the visible rect relative to the output surface when in scaling
+  // mode. |decode_visible_rect| should be passed in, together with
+  // |output_picture_size| for validation. The returned rect only differs
+  // from |decode_visible_rect| when IsScalingDecode() is true.
+ virtual const gfx::Rect GetOutputVisibleRect(
+ const gfx::Rect& decode_visible_rect,
+ const gfx::Size& output_picture_size) {
+ CHECK(gfx::Rect(output_picture_size).Contains(decode_visible_rect));
+ return decode_visible_rect;
+ }
+
// Called by the client to indicate that |dec_surface| is ready to be
- // outputted. This can actually be called before decode is finished in
- // hardware; this method must guarantee that |dec_surface|s are processed in
- // the same order as SurfaceReady is called. (On Intel, this order doesn't
- // need to be explicitly maintained since the driver will enforce it, together
- // with any necessary dependencies).
+ // outputted. |dec_surface| must be obtained from CreateSurface() and NOT from
+ // CreateDecodeSurface(). This can actually be called before decode is
+ // finished in hardware; this method must guarantee that |dec_surface|s are
+ // processed in the same order as SurfaceReady() is called. (On Intel, this
+ // order doesn't need to be explicitly maintained since the driver will
+ // enforce it, together with any necessary dependencies).
virtual void SurfaceReady(scoped_refptr<T> dec_surface,
int32_t bitstream_id,
const gfx::Rect& visible_rect,
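For a handler that really does scale between decode and output, GetOutputVisibleRect() must map the decode-time visible rect into the output surface's coordinate space. A hedged sketch of such an override, with plain structs standing in for gfx::Rect and gfx::Size and an assumed fixed 1/2 downscale (the actual implementations live in the platform decode handlers, not in this header):

#include <cassert>
#include <iostream>

// Plain stand-ins for gfx::Size and gfx::Rect.
struct Size { int width = 0; int height = 0; };
struct Rect { int x = 0; int y = 0; int width = 0; int height = 0; };

// Illustrative only: a handler that downscales by half between decode and
// output reports scaling mode and halves the visible rect accordingly.
struct ScalingSurfaceHandler {
  bool IsScalingDecode() const { return true; }

  Rect GetOutputVisibleRect(const Rect& decode_visible_rect,
                            const Size& output_picture_size) const {
    Rect out{decode_visible_rect.x / 2, decode_visible_rect.y / 2,
             decode_visible_rect.width / 2, decode_visible_rect.height / 2};
    // Mirrors the base-class CHECK: the result must fit the output surface.
    assert(out.x + out.width <= output_picture_size.width);
    assert(out.y + out.height <= output_picture_size.height);
    return out;
  }
};

int main() {
  ScalingSurfaceHandler handler;
  const Rect out =
      handler.GetOutputVisibleRect({0, 0, 1920, 1080}, {960, 540});
  std::cout << out.width << "x" << out.height << "\n";  // 960x540
}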
diff --git a/chromium/media/gpu/gpu_video_decode_accelerator_factory.cc b/chromium/media/gpu/gpu_video_decode_accelerator_factory.cc
index 37cf3a0e089..7f4e0eb6ed8 100644
--- a/chromium/media/gpu/gpu_video_decode_accelerator_factory.cc
+++ b/chromium/media/gpu/gpu_video_decode_accelerator_factory.cc
@@ -142,13 +142,17 @@ GpuVideoDecodeAcceleratorFactory::CreateVDA(
#if defined(OS_WIN)
&GpuVideoDecodeAcceleratorFactory::CreateDXVAVDA,
#endif
-#if BUILDFLAG(USE_VAAPI)
- &GpuVideoDecodeAcceleratorFactory::CreateVaapiVDA,
-#endif
+  // Usually only one of USE_VAAPI or USE_V4L2_CODEC is defined on ChromeOS,
+  // except on Chromeboxes with companion video acceleration chips, which have
+  // both. In those cases, prefer the V4L2 creation function.
#if BUILDFLAG(USE_V4L2_CODEC)
&GpuVideoDecodeAcceleratorFactory::CreateV4L2VDA,
&GpuVideoDecodeAcceleratorFactory::CreateV4L2SVDA,
#endif
+#if BUILDFLAG(USE_VAAPI)
+ &GpuVideoDecodeAcceleratorFactory::CreateVaapiVDA,
+#endif
+
#if defined(OS_MAC)
&GpuVideoDecodeAcceleratorFactory::CreateVTVDA,
#endif
diff --git a/chromium/media/gpu/gpu_video_decode_accelerator_helpers.h b/chromium/media/gpu/gpu_video_decode_accelerator_helpers.h
index 6f7bfb80e01..9cb4c981c27 100644
--- a/chromium/media/gpu/gpu_video_decode_accelerator_helpers.h
+++ b/chromium/media/gpu/gpu_video_decode_accelerator_helpers.h
@@ -7,8 +7,8 @@
#include "base/callback.h"
#include "base/memory/weak_ptr.h"
+#include "media/base/supported_video_decoder_config.h"
#include "media/gpu/media_gpu_export.h"
-#include "media/video/supported_video_decoder_config.h"
#include "media/video/video_decode_accelerator.h"
namespace gl {
diff --git a/chromium/media/gpu/gpu_video_encode_accelerator_factory.cc b/chromium/media/gpu/gpu_video_encode_accelerator_factory.cc
index 8d665bb0320..3ddcd3cf273 100644
--- a/chromium/media/gpu/gpu_video_encode_accelerator_factory.cc
+++ b/chromium/media/gpu/gpu_video_encode_accelerator_factory.cc
@@ -5,10 +5,12 @@
#include "media/gpu/gpu_video_encode_accelerator_factory.h"
#include "base/bind.h"
+#include "base/feature_list.h"
#include "base/memory/ptr_util.h"
#include "build/build_config.h"
#include "gpu/config/gpu_driver_bug_workarounds.h"
#include "gpu/config/gpu_preferences.h"
+#include "media/base/media_switches.h"
#include "media/gpu/buildflags.h"
#include "media/gpu/gpu_video_accelerator_util.h"
#include "media/gpu/macros.h"
@@ -23,8 +25,6 @@
#include "media/gpu/mac/vt_video_encode_accelerator_mac.h"
#endif
#if defined(OS_WIN)
-#include "base/feature_list.h"
-#include "media/base/media_switches.h"
#include "media/gpu/windows/media_foundation_video_encode_accelerator_win.h"
#endif
#if BUILDFLAG(USE_VAAPI)
@@ -93,8 +93,13 @@ std::vector<VEAFactoryFunction> GetVEAFactoryFunctions(
return vea_factory_functions;
#if BUILDFLAG(USE_VAAPI)
+#if defined(OS_LINUX)
+ if (base::FeatureList::IsEnabled(kVaapiVideoEncodeLinux))
+ vea_factory_functions.push_back(base::BindRepeating(&CreateVaapiVEA));
+#else
vea_factory_functions.push_back(base::BindRepeating(&CreateVaapiVEA));
#endif
+#endif
#if BUILDFLAG(USE_V4L2_CODEC)
vea_factory_functions.push_back(base::BindRepeating(&CreateV4L2VEA));
#endif
@@ -185,6 +190,13 @@ GpuVideoEncodeAcceleratorFactory::GetSupportedProfiles(
});
}
+ if (gpu_workarounds.disable_accelerated_h264_encode) {
+ base::EraseIf(profiles, [](const auto& vea_profile) {
+ return vea_profile.profile >= H264PROFILE_MIN &&
+ vea_profile.profile <= H264PROFILE_MAX;
+ });
+ }
+
return profiles;
}
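The new workaround entry removes every H.264 profile with a range test, the same erase-if idiom used for the other codec workarounds above it. In plain C++ the pattern looks like this, with std::erase_if standing in for base::EraseIf and an illustrative (not Chromium's real) profile enum:

#include <iostream>
#include <vector>

// Illustrative profile enum; Chromium's real VideoCodecProfile is larger,
// with H264PROFILE_MIN/MAX bracketing the H.264 entries.
enum VideoCodecProfile {
  H264PROFILE_BASELINE,
  H264PROFILE_MAIN,
  H264PROFILE_HIGH,
  VP9PROFILE_PROFILE0,
  H264PROFILE_MIN = H264PROFILE_BASELINE,
  H264PROFILE_MAX = H264PROFILE_HIGH,
};

struct SupportedProfile {
  VideoCodecProfile profile;
};

int main() {
  std::vector<SupportedProfile> profiles = {
      {H264PROFILE_BASELINE}, {H264PROFILE_HIGH}, {VP9PROFILE_PROFILE0}};
  // base::EraseIf(container, pred) behaves like C++20 std::erase_if.
  std::erase_if(profiles, [](const SupportedProfile& p) {
    return p.profile >= H264PROFILE_MIN && p.profile <= H264PROFILE_MAX;
  });
  std::cout << profiles.size() << " profile(s) remain\n";  // 1
}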
diff --git a/chromium/media/gpu/h264_decoder.cc b/chromium/media/gpu/h264_decoder.cc
index f679f986eba..2fab1f0e13b 100644
--- a/chromium/media/gpu/h264_decoder.cc
+++ b/chromium/media/gpu/h264_decoder.cc
@@ -17,6 +17,73 @@
#include "media/video/h264_level_limits.h"
namespace media {
+namespace {
+
+bool ParseBitDepth(const H264SPS& sps, uint8_t& bit_depth) {
+ // Spec 7.4.2.1.1
+ if (sps.bit_depth_luma_minus8 != sps.bit_depth_chroma_minus8) {
+ DVLOG(1) << "H264Decoder doesn't support different bit depths between luma"
+              << " and chroma, bit_depth_luma_minus8="
+ << sps.bit_depth_luma_minus8
+ << ", bit_depth_chroma_minus8=" << sps.bit_depth_chroma_minus8;
+ return false;
+ }
+ DCHECK_GE(sps.bit_depth_luma_minus8, 0);
+ DCHECK_LE(sps.bit_depth_luma_minus8, 6);
+ switch (sps.bit_depth_luma_minus8) {
+ case 0:
+ bit_depth = 8u;
+ break;
+ case 2:
+ bit_depth = 10u;
+ break;
+ case 4:
+ bit_depth = 12u;
+ break;
+ case 6:
+ bit_depth = 14u;
+ break;
+ default:
+ DVLOG(1) << "Invalid bit depth: "
+ << base::checked_cast<int>(sps.bit_depth_luma_minus8 + 8);
+ return false;
+ }
+ return true;
+}
+
+bool IsValidBitDepth(uint8_t bit_depth, VideoCodecProfile profile) {
+ // Spec A.2.
+ switch (profile) {
+ case H264PROFILE_BASELINE:
+ case H264PROFILE_MAIN:
+ case H264PROFILE_EXTENDED:
+ case H264PROFILE_HIGH:
+ return bit_depth == 8u;
+ case H264PROFILE_HIGH10PROFILE:
+ case H264PROFILE_HIGH422PROFILE:
+ return bit_depth == 8u || bit_depth == 10u;
+ case H264PROFILE_HIGH444PREDICTIVEPROFILE:
+ return bit_depth == 8u || bit_depth == 10u || bit_depth == 12u ||
+ bit_depth == 14u;
+ case H264PROFILE_SCALABLEBASELINE:
+ case H264PROFILE_SCALABLEHIGH:
+ // Spec G.10.1.
+ return bit_depth == 8u;
+ case H264PROFILE_STEREOHIGH:
+ case H264PROFILE_MULTIVIEWHIGH:
+ // Spec H.10.1.1 and H.10.1.2.
+ return bit_depth == 8u;
+ default:
+ NOTREACHED();
+ return false;
+ }
+}
+
+bool IsYUV420Sequence(const H264SPS& sps) {
+ // Spec 6.2
+ return sps.chroma_format_idc == 1;
+}
+} // namespace
H264Decoder::H264Accelerator::H264Accelerator() = default;
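Spec 7.4.2.1.1 defines the luma bit depth as 8 + bit_depth_luma_minus8; ParseBitDepth() above additionally restricts the offset to the even values the decoder handles (8, 10, 12, or 14 bits), and IsValidBitDepth() then applies the per-profile limits of Annex A.2. A tiny standalone version of the mapping for the common cases:

#include <iostream>
#include <optional>

// Mirrors ParseBitDepth(): only the even bit_depth_luma_minus8 offsets,
// i.e. 8/10/12/14-bit streams, are accepted.
std::optional<int> BitDepthFromSps(int bit_depth_luma_minus8) {
  switch (bit_depth_luma_minus8) {
    case 0: return 8;
    case 2: return 10;
    case 4: return 12;
    case 6: return 14;
    default: return std::nullopt;
  }
}

int main() {
  // A High 10 stream signals bit_depth_luma_minus8 = 2, i.e. 10 bits, which
  // IsValidBitDepth() accepts for H264PROFILE_HIGH10PROFILE but rejects for
  // H264PROFILE_BASELINE.
  if (const auto depth = BitDepthFromSps(2))
    std::cout << "bit depth = " << *depth << "\n";  // bit depth = 10
}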
@@ -28,6 +95,16 @@ H264Decoder::H264Accelerator::Status H264Decoder::H264Accelerator::SetStream(
return H264Decoder::H264Accelerator::Status::kNotSupported;
}
+H264Decoder::H264Accelerator::Status
+H264Decoder::H264Accelerator::ParseEncryptedSliceHeader(
+ const std::vector<base::span<const uint8_t>>& data,
+ const std::vector<SubsampleEntry>& subsamples,
+ const std::vector<uint8_t>& sps_nalu_data,
+ const std::vector<uint8_t>& pps_nalu_data,
+ H264SliceHeader* slice_header_out) {
+ return H264Decoder::H264Accelerator::Status::kNotSupported;
+}
+
H264Decoder::H264Decoder(std::unique_ptr<H264Accelerator> accelerator,
VideoCodecProfile profile,
const VideoColorSpace& container_color_space)
@@ -41,8 +118,6 @@ H264Decoder::H264Decoder(std::unique_ptr<H264Accelerator> accelerator,
profile_(profile),
accelerator_(std::move(accelerator)) {
DCHECK(accelerator_);
- decoder_buffer_is_complete_frame_ =
- base::FeatureList::IsEnabled(media::kH264DecoderBufferIsCompleteFrame);
Reset();
}
@@ -74,14 +149,22 @@ void H264Decoder::Reset() {
accelerator_->Reset();
last_output_poc_ = std::numeric_limits<int>::min();
+ encrypted_sei_nalus_.clear();
+ sei_subsamples_.clear();
+
+ recovery_frame_num_.reset();
+ recovery_frame_cnt_.reset();
+
// If we are in kDecoding, we can resume without processing an SPS.
+  // The state becomes kDecoding again either (1) at the first IDR slice or
+  // (2) at the first slice after the recovery point SEI.
if (state_ == kDecoding)
state_ = kAfterReset;
}
-void H264Decoder::PrepareRefPicLists(const H264SliceHeader* slice_hdr) {
- ConstructReferencePicListsP(slice_hdr);
- ConstructReferencePicListsB(slice_hdr);
+void H264Decoder::PrepareRefPicLists() {
+ ConstructReferencePicListsP();
+ ConstructReferencePicListsB();
}
bool H264Decoder::ModifyReferencePicLists(const H264SliceHeader* slice_hdr,
@@ -360,8 +443,7 @@ struct LongTermPicNumAscCompare {
}
};
-void H264Decoder::ConstructReferencePicListsP(
- const H264SliceHeader* slice_hdr) {
+void H264Decoder::ConstructReferencePicListsP() {
// RefPicList0 (8.2.4.2.1) [[1] [2]], where:
// [1] shortterm ref pics sorted by descending pic_num,
// [2] longterm ref pics by ascending long_term_pic_num.
@@ -395,8 +477,7 @@ struct POCDescCompare {
}
};
-void H264Decoder::ConstructReferencePicListsB(
- const H264SliceHeader* slice_hdr) {
+void H264Decoder::ConstructReferencePicListsB() {
// RefPicList0 (8.2.4.2.3) [[1] [2] [3]], where:
// [1] shortterm ref pics with POC < curr_pic's POC sorted by descending POC,
// [2] shortterm ref pics with POC > curr_pic's POC by ascending POC,
@@ -726,7 +807,7 @@ H264Decoder::H264Accelerator::Status H264Decoder::StartNewFrame(
return H264Accelerator::Status::kFail;
UpdatePicNums(frame_num);
- PrepareRefPicLists(slice_hdr);
+ PrepareRefPicLists();
return accelerator_->SubmitFrameMetadata(sps, pps, dpb_, ref_pic_list_p0_,
ref_pic_list_b0_, ref_pic_list_b1_,
@@ -922,6 +1003,14 @@ bool H264Decoder::FinishPicture(scoped_refptr<H264Picture> pic) {
DVLOG(4) << "Finishing picture frame_num: " << pic->frame_num
<< ", entries in DPB: " << dpb_.size();
+ if (recovery_frame_cnt_) {
+    // This is the first picture after the recovery point SEI message. Compute
+    // the frame_num of the first frame that should be output (Spec D.2.8).
+ recovery_frame_num_ =
+ (*recovery_frame_cnt_ + pic->frame_num) % max_frame_num_;
+    DVLOG(3) << "recovery_frame_num_=" << *recovery_frame_num_;
+ recovery_frame_cnt_.reset();
+ }
// The ownership of pic will either be transferred to DPB - if the picture is
// still needed (for output and/or reference) - or we will release it
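The recovery arithmetic follows spec D.2.8: recovery_frame_num_ is recovery_frame_cnt frames past the picture that carried the SEI, modulo max_frame_num, and FinishPicture() below then withholds output until a candidate with that frame_num appears. A worked standalone example of the wrap-around case:

#include <iostream>

int main() {
  // Assume max_frame_num = 16 (log2_max_frame_num_minus4 = 0).
  const int max_frame_num = 16;
  // The recovery point SEI arrives on the picture with frame_num = 14 and
  // promises correct content recovery_frame_cnt = 4 frames later.
  const int sei_frame_num = 14;
  const int recovery_frame_cnt = 4;
  const int recovery_frame_num =
      (recovery_frame_cnt + sei_frame_num) % max_frame_num;  // (4+14)%16 = 2
  std::cout << "output resumes at frame_num " << recovery_frame_num << "\n";
  // FinishPicture() suppresses OutputPic() for output candidates whose
  // frame_num differs from recovery_frame_num, so the corrupted frames
  // decoded in between are never displayed.
}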
@@ -955,8 +1044,15 @@ bool H264Decoder::FinishPicture(scoped_refptr<H264Picture> pic) {
DVLOG_IF(1, num_remaining <= max_num_reorder_frames_)
<< "Invalid stream: max_num_reorder_frames not preserved";
- if (!OutputPic(*output_candidate))
- return false;
+    // If we are decoding ahead to reach an SEI recovery point, skip
+    // outputting all pictures before it, to avoid outputting corrupted
+    // frames.
+    if (!recovery_frame_num_ ||
+        (*output_candidate)->frame_num == *recovery_frame_num_) {
+ recovery_frame_num_ = base::nullopt;
+ if (!OutputPic(*output_candidate))
+ return false;
+ }
if (!(*output_candidate)->ref) {
// Current picture hasn't been inserted into DPB yet, so don't remove it
@@ -1086,18 +1182,33 @@ bool H264Decoder::ProcessSPS(int sps_id, bool* need_new_buffers) {
DVLOG(1) << "Invalid DPB size: " << max_dpb_size;
return false;
}
+ if (!IsYUV420Sequence(*sps)) {
+ DVLOG(1) << "Only YUV 4:2:0 is supported";
+ return false;
+ }
VideoCodecProfile new_profile =
H264Parser::ProfileIDCToVideoCodecProfile(sps->profile_idc);
+ uint8_t new_bit_depth = 0;
+ if (!ParseBitDepth(*sps, new_bit_depth))
+ return false;
+ if (!IsValidBitDepth(new_bit_depth, new_profile)) {
+ DVLOG(1) << "Invalid bit depth=" << base::strict_cast<int>(new_bit_depth)
+ << ", profile=" << GetProfileName(new_profile);
+ return false;
+ }
+
if (pic_size_ != new_pic_size || dpb_.max_num_pics() != max_dpb_size ||
- profile_ != new_profile) {
+ profile_ != new_profile || bit_depth_ != new_bit_depth) {
if (!Flush())
return false;
DVLOG(1) << "Codec profile: " << GetProfileName(new_profile)
<< ", level: " << level << ", DPB size: " << max_dpb_size
- << ", Picture size: " << new_pic_size.ToString();
+ << ", Picture size: " << new_pic_size.ToString()
+ << ", bit depth: " << base::strict_cast<int>(new_bit_depth);
*need_new_buffers = true;
profile_ = new_profile;
+ bit_depth_ = new_bit_depth;
pic_size_ = new_pic_size;
dpb_.set_max_num_pics(max_dpb_size);
}
@@ -1168,6 +1279,22 @@ bool H264Decoder::HandleFrameNumGap(int frame_num) {
return true;
}
+H264Decoder::H264Accelerator::Status H264Decoder::ProcessEncryptedSliceHeader(
+ const std::vector<SubsampleEntry>& subsamples) {
+ DCHECK(curr_nalu_);
+ DCHECK(curr_slice_hdr_);
+  std::vector<base::span<const uint8_t>> spans(encrypted_sei_nalus_.begin(),
+                                               encrypted_sei_nalus_.end());
+  spans.emplace_back(curr_nalu_->data, curr_nalu_->size);
+  std::vector<SubsampleEntry> all_subsamples(sei_subsamples_.begin(),
+                                             sei_subsamples_.end());
+  all_subsamples.insert(all_subsamples.end(), subsamples.begin(),
+                        subsamples.end());
+ return accelerator_->ParseEncryptedSliceHeader(spans, all_subsamples,
+ last_sps_nalu_, last_pps_nalu_,
+ curr_slice_hdr_.get());
+}
+
H264Decoder::H264Accelerator::Status H264Decoder::PreprocessCurrentSlice() {
const H264SliceHeader* slice_hdr = curr_slice_hdr_.get();
DCHECK(slice_hdr);
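The subsample bookkeeping in ProcessEncryptedSliceHeader() matters because CENC decryption is stateful across a sample: the accelerator must see the saved SEI NALUs and the slice NALU as one contiguous clear/cipher pattern, or it would start partway into a cipher block. A reduced sketch of gluing the two subsample lists together, with a plain struct standing in for media::SubsampleEntry (the byte counts are invented for illustration):

#include <cstdint>
#include <iostream>
#include <vector>

// Stand-in for media::SubsampleEntry: a run of clear bytes followed by a
// run of encrypted bytes.
struct SubsampleEntry {
  uint32_t clear_bytes = 0;
  uint32_t cypher_bytes = 0;
};

int main() {
  // One saved encrypted SEI NALU (1 clear NALU-type byte, 31 encrypted),
  // followed by the fully encrypted slice NALU (1 clear byte, 499
  // encrypted).
  const std::vector<SubsampleEntry> sei_subsamples = {{1, 31}};
  const std::vector<SubsampleEntry> slice_subsamples = {{1, 499}};

  std::vector<SubsampleEntry> all = sei_subsamples;
  all.insert(all.end(), slice_subsamples.begin(), slice_subsamples.end());

  uint32_t total = 0;
  for (const auto& s : all)
    total += s.clear_bytes + s.cypher_bytes;
  std::cout << all.size() << " subsamples covering " << total << " bytes\n";
}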
@@ -1215,8 +1342,13 @@ H264Decoder::H264Accelerator::Status H264Decoder::ProcessCurrentSlice() {
max_pic_num_ = 2 * max_frame_num_;
H264Picture::Vector ref_pic_list0, ref_pic_list1;
- if (!ModifyReferencePicLists(slice_hdr, &ref_pic_list0, &ref_pic_list1))
+ // If we are using full sample encryption then we do not have the information
+ // we need to update the ref pic lists here, but that's OK because the
+ // accelerator doesn't actually need to submit them in this case.
+ if (!slice_hdr->full_sample_encryption &&
+ !ModifyReferencePicLists(slice_hdr, &ref_pic_list0, &ref_pic_list1)) {
return H264Accelerator::Status::kFail;
+ }
const H264PPS* pps = parser_.GetPPS(curr_pps_id_);
if (!pps)
@@ -1263,6 +1395,8 @@ void H264Decoder::SetStream(int32_t id, const DecoderBuffer& decoder_buffer) {
current_stream_ = ptr;
current_stream_size_ = size;
current_stream_has_been_changed_ = true;
+ encrypted_sei_nalus_.clear();
+ sei_subsamples_.clear();
if (decrypt_config) {
parser_.SetEncryptedStream(ptr, size, decrypt_config->subsamples());
current_decrypt_config_ = decrypt_config->Clone();
@@ -1309,9 +1443,7 @@ H264Decoder::DecodeResult H264Decoder::Decode() {
curr_nalu_.reset(new H264NALU());
par_res = parser_.AdvanceToNextNALU(curr_nalu_.get());
if (par_res == H264Parser::kEOStream) {
- if (decoder_buffer_is_complete_frame_)
- CHECK_ACCELERATOR_RESULT(FinishPrevFrameIfPresent());
-
+ CHECK_ACCELERATOR_RESULT(FinishPrevFrameIfPresent());
return kRanOutOfStreamData;
} else if (par_res != H264Parser::kOk) {
SET_ERROR_AND_RETURN();
@@ -1322,8 +1454,9 @@ H264Decoder::DecodeResult H264Decoder::Decode() {
switch (curr_nalu_->nal_unit_type) {
case H264NALU::kNonIDRSlice:
- // We can't resume from a non-IDR slice.
- if (state_ == kError || state_ == kAfterReset)
+        // We can't resume from a non-IDR slice unless recovery-point-SEI
+        // processing is in progress.
+ if (state_ == kError || (state_ == kAfterReset && !recovery_frame_cnt_))
break;
FALLTHROUGH;
@@ -1344,11 +1477,32 @@ H264Decoder::DecodeResult H264Decoder::Decode() {
// additional key has been provided, for example), then the remaining
// steps will be executed.
if (!curr_slice_hdr_) {
- curr_slice_hdr_.reset(new H264SliceHeader());
- par_res =
- parser_.ParseSliceHeader(*curr_nalu_, curr_slice_hdr_.get());
- if (par_res != H264Parser::kOk)
- SET_ERROR_AND_RETURN();
+ curr_slice_hdr_ = std::make_unique<H264SliceHeader>();
+ state_ = kParseSliceHeader;
+ }
+
+ if (state_ == kParseSliceHeader) {
+ // Check if the slice header is encrypted.
+ bool parsed_header = false;
+ if (current_decrypt_config_) {
+ const std::vector<SubsampleEntry>& subsamples =
+ parser_.GetCurrentSubsamples();
+          // With full sample encryption there is only a single clear byte
+          // (the NALU type information); the rest is encrypted.
+ if (!subsamples.empty() && subsamples[0].clear_bytes == 1) {
+ CHECK_ACCELERATOR_RESULT(ProcessEncryptedSliceHeader(subsamples));
+ parsed_header = true;
+ curr_slice_hdr_->pic_parameter_set_id = last_parsed_pps_id_;
+ encrypted_sei_nalus_.clear();
+ sei_subsamples_.clear();
+ }
+ }
+ if (!parsed_header) {
+ par_res =
+ parser_.ParseSliceHeader(*curr_nalu_, curr_slice_hdr_.get());
+ if (par_res != H264Parser::kOk)
+ SET_ERROR_AND_RETURN();
+ }
state_ = kTryPreprocessCurrentSlice;
}
@@ -1398,6 +1552,8 @@ H264Decoder::DecodeResult H264Decoder::Decode() {
if (!ProcessSPS(sps_id, &need_new_buffers))
SET_ERROR_AND_RETURN();
+ last_sps_nalu_.assign(curr_nalu_->data,
+ curr_nalu_->data + curr_nalu_->size);
if (state_ == kNeedStreamMetadata)
state_ = kAfterReset;
@@ -1414,13 +1570,13 @@ H264Decoder::DecodeResult H264Decoder::Decode() {
}
case H264NALU::kPPS: {
- int pps_id;
-
CHECK_ACCELERATOR_RESULT(FinishPrevFrameIfPresent());
- par_res = parser_.ParsePPS(&pps_id);
+ par_res = parser_.ParsePPS(&last_parsed_pps_id_);
if (par_res != H264Parser::kOk)
SET_ERROR_AND_RETURN();
+ last_pps_nalu_.assign(curr_nalu_->data,
+ curr_nalu_->data + curr_nalu_->size);
break;
}
@@ -1433,6 +1589,47 @@ H264Decoder::DecodeResult H264Decoder::Decode() {
CHECK_ACCELERATOR_RESULT(FinishPrevFrameIfPresent());
break;
+ case H264NALU::kSEIMessage:
+ if (current_decrypt_config_) {
+ // If there are encrypted SEI NALUs as part of CENCv1, then we also
+          // If there are encrypted SEI NALUs as part of CENCv1, we also need
+          // to save them and send them to the accelerator; otherwise it
+          // would start decrypting the sample partway into a cipher block.
+ parser_.GetCurrentSubsamples();
+ if (!subsamples.empty()) {
+ encrypted_sei_nalus_.emplace_back(curr_nalu_->data,
+ curr_nalu_->size);
+ DCHECK_EQ(1u, subsamples.size());
+ sei_subsamples_.push_back(subsamples[0]);
+ }
+ }
+ if (state_ == kAfterReset && !recovery_frame_cnt_ &&
+ !recovery_frame_num_) {
+          // If we are after reset, we can also resume from an SEI recovery point
+ // (spec D.2.8) if one is present. However, if we are already in the
+ // process of handling one, skip any subsequent ones until we are done
+ // processing.
+ H264SEIMessage sei{};
+ if (parser_.ParseSEI(&sei) != H264Parser::kOk)
+ SET_ERROR_AND_RETURN();
+
+ if (sei.type == H264SEIMessage::kSEIRecoveryPoint) {
+ recovery_frame_cnt_ = sei.recovery_point.recovery_frame_cnt;
+ if (0 > *recovery_frame_cnt_ ||
+ *recovery_frame_cnt_ >= max_frame_num_) {
+ DVLOG(1) << "Invalid recovery_frame_cnt=" << *recovery_frame_cnt_
+ << " (it must be [0, max_frame_num_-1="
+ << max_frame_num_ - 1 << "])";
+ SET_ERROR_AND_RETURN();
+ }
+ DVLOG(3) << "Recovery point SEI is found, recovery_frame_cnt_="
+ << *recovery_frame_cnt_;
+ break;
+ }
+ }
+
+ FALLTHROUGH;
default:
DVLOG(4) << "Skipping NALU type: " << curr_nalu_->nal_unit_type;
break;
@@ -1455,6 +1652,10 @@ VideoCodecProfile H264Decoder::GetProfile() const {
return profile_;
}
+uint8_t H264Decoder::GetBitDepth() const {
+ return bit_depth_;
+}
+
size_t H264Decoder::GetRequiredNumOfPictures() const {
constexpr size_t kPicsInPipeline = limits::kMaxVideoFrames + 1;
return GetNumReferenceFrames() + kPicsInPipeline;
diff --git a/chromium/media/gpu/h264_decoder.h b/chromium/media/gpu/h264_decoder.h
index b010f307e8a..d01ab1544ea 100644
--- a/chromium/media/gpu/h264_decoder.h
+++ b/chromium/media/gpu/h264_decoder.h
@@ -90,6 +90,23 @@ class MEDIA_GPU_EXPORT H264Decoder : public AcceleratedVideoDecoder {
const H264Picture::Vector& ref_pic_listb1,
scoped_refptr<H264Picture> pic) = 0;
+ // Used for handling CENCv1 streams where the entire slice header, except
+ // for the NALU type byte, is encrypted. |data| represents the encrypted
+ // ranges which will include any SEI NALUs along with the encrypted slice
+  // NALU. |subsamples| describes the encrypted ranges; each entry should
+  // have a single clear byte (the NALU type byte) with the rest encrypted.
+  // |sps_nalu_data| and |pps_nalu_data| are the SPS and PPS NALUs,
+  // respectively.
+ // |slice_header_out| should have its fields filled in upon successful
+ // return. Returns kOk if successful, kFail if there are errors, or
+ // kTryAgain if the accelerator needs additional data before being able to
+ // proceed.
+ virtual Status ParseEncryptedSliceHeader(
+ const std::vector<base::span<const uint8_t>>& data,
+ const std::vector<SubsampleEntry>& subsamples,
+ const std::vector<uint8_t>& sps_nalu_data,
+ const std::vector<uint8_t>& pps_nalu_data,
+ H264SliceHeader* slice_header_out);
+
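A conforming accelerator can return kTryAgain from this hook to pause until the CDM delivers the key; the decoder then re-invokes only this parse step on the next Decode() call (the ParseEncryptedSliceHeaderRetry test further down exercises exactly that). A minimal sketch of such an override, hedged: it is not standalone, and a real implementation would hand |data| and |subsamples| to the secure decryptor rather than poll a flag.

// Sketch only; the types are the media ones declared above.
class MyH264Accelerator : public H264Decoder::H264Accelerator {
 public:
  Status ParseEncryptedSliceHeader(
      const std::vector<base::span<const uint8_t>>& data,
      const std::vector<SubsampleEntry>& subsamples,
      const std::vector<uint8_t>& sps_nalu_data,
      const std::vector<uint8_t>& pps_nalu_data,
      H264SliceHeader* slice_header_out) override {
    if (!key_available_)
      return Status::kTryAgain;  // Decoder will retry this step later.
    // ... decrypt |data| per |subsamples|, parse into |slice_header_out| ...
    return Status::kOk;
  }

 private:
  bool key_available_ = false;  // Hypothetical; set once the CDM has the key.
};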
// Submit one slice for the current frame, passing the current |pps| and
// |pic| (same as in SubmitFrameMetadata()), the parsed header for the
// current slice in |slice_hdr|, and the reordered |ref_pic_listX|,
@@ -159,6 +176,7 @@ class MEDIA_GPU_EXPORT H264Decoder : public AcceleratedVideoDecoder {
gfx::Size GetPicSize() const override;
gfx::Rect GetVisibleRect() const override;
VideoCodecProfile GetProfile() const override;
+ uint8_t GetBitDepth() const override;
size_t GetRequiredNumOfPictures() const override;
size_t GetNumReferenceFrames() const override;
@@ -188,6 +206,7 @@ class MEDIA_GPU_EXPORT H264Decoder : public AcceleratedVideoDecoder {
// retryable error) is returned. The next time Decode() is called the call
// that previously failed will be retried and execution continues from
// there (if possible).
+ kParseSliceHeader,
kTryPreprocessCurrentSlice,
kEnsurePicture,
kTryNewFrame,
@@ -198,6 +217,12 @@ class MEDIA_GPU_EXPORT H264Decoder : public AcceleratedVideoDecoder {
// Process H264 stream structures.
bool ProcessSPS(int sps_id, bool* need_new_buffers);
+
+ // Processes a CENCv1 encrypted slice header and fills in |curr_slice_hdr_|
+ // with the relevant parsed fields.
+ H264Accelerator::Status ProcessEncryptedSliceHeader(
+ const std::vector<SubsampleEntry>& subsamples);
+
// Process current slice header to discover if we need to start a new picture,
// finishing up the current one.
H264Accelerator::Status PreprocessCurrentSlice();
@@ -222,7 +247,7 @@ class MEDIA_GPU_EXPORT H264Decoder : public AcceleratedVideoDecoder {
bool UpdateMaxNumReorderFrames(const H264SPS* sps);
// Prepare reference picture lists for the current frame.
- void PrepareRefPicLists(const H264SliceHeader* slice_hdr);
+ void PrepareRefPicLists();
// Prepare reference picture lists for the given slice.
bool ModifyReferencePicLists(const H264SliceHeader* slice_hdr,
H264Picture::Vector* ref_pic_list0,
@@ -230,8 +255,8 @@ class MEDIA_GPU_EXPORT H264Decoder : public AcceleratedVideoDecoder {
// Construct initial reference picture lists for use in decoding of
// P and B pictures (see 8.2.4 in spec).
- void ConstructReferencePicListsP(const H264SliceHeader* slice_hdr);
- void ConstructReferencePicListsB(const H264SliceHeader* slice_hdr);
+ void ConstructReferencePicListsP();
+ void ConstructReferencePicListsB();
// Helper functions for reference list construction, per spec.
int PicNumF(const H264Picture& pic);
@@ -289,13 +314,6 @@ class MEDIA_GPU_EXPORT H264Decoder : public AcceleratedVideoDecoder {
// The colorspace for the h264 container.
const VideoColorSpace container_color_space_;
- // If |decoder_buffer_is_complete_frame_| is true, then it is assumed that
- // each DecoderBuffer contains a complete frame of video. After processing a
- // DecoderBuffer, if there is a pending frame, it will be sent to the
- // |accelerator_| immediately rather than waiting for a bitstream indication
- // for the next frame, EOS or an AUD.
- bool decoder_buffer_is_complete_frame_ = false;
-
// Parser in use.
H264Parser parser_;
@@ -346,10 +364,33 @@ class MEDIA_GPU_EXPORT H264Decoder : public AcceleratedVideoDecoder {
int curr_sps_id_;
int curr_pps_id_;
+  // ID of the last PPS that was parsed. Used for full sample encryption,
+  // which assumes streaming content that does not switch between different
+  // PPSes (each appears once in the container for the stream).
+ int last_parsed_pps_id_;
+
+ // Copies of the last SPS and PPS NALUs, used for full sample encryption.
+ std::vector<uint8_t> last_sps_nalu_;
+ std::vector<uint8_t> last_pps_nalu_;
+
// Current NALU and slice header being processed.
std::unique_ptr<H264NALU> curr_nalu_;
std::unique_ptr<H264SliceHeader> curr_slice_hdr_;
+  // Encrypted SEI NALUs preceding a fully encrypted slice NALU. Those that
+  // are part of a single sample must be saved so they can all be decrypted
+  // together.
+ std::vector<base::span<const uint8_t>> encrypted_sei_nalus_;
+ std::vector<SubsampleEntry> sei_subsamples_;
+
+  // These are base::nullopt unless a recovery point SEI message is seen
+  // after Reset().
+  // The frame_num, in output order, of the first frame whose content is
+  // correct.
+ base::Optional<int> recovery_frame_num_;
+  // The value from the recovery point SEI message used to compute
+  // |recovery_frame_num_| later.
+ base::Optional<int> recovery_frame_cnt_;
+
// Output picture size.
gfx::Size pic_size_;
// Output visible cropping rect.
@@ -357,6 +398,8 @@ class MEDIA_GPU_EXPORT H264Decoder : public AcceleratedVideoDecoder {
// Profile of input bitstream.
VideoCodecProfile profile_;
+ // Bit depth of input bitstream.
+ uint8_t bit_depth_ = 0;
// PicOrderCount of the previously outputted frame.
int last_output_poc_;
diff --git a/chromium/media/gpu/h264_decoder_unittest.cc b/chromium/media/gpu/h264_decoder_unittest.cc
index 3c969de02e6..6b1c4d7a0e0 100644
--- a/chromium/media/gpu/h264_decoder_unittest.cc
+++ b/chromium/media/gpu/h264_decoder_unittest.cc
@@ -30,7 +30,6 @@ using ::testing::MatcherInterface;
using ::testing::MatchResultListener;
using ::testing::Mock;
using ::testing::Return;
-using ::testing::WithArg;
namespace media {
namespace {
@@ -43,6 +42,11 @@ const std::string kHighFrame0 = "bear-320x192-high-frame-0.h264";
const std::string kHighFrame1 = "bear-320x192-high-frame-1.h264";
const std::string kHighFrame2 = "bear-320x192-high-frame-2.h264";
const std::string kHighFrame3 = "bear-320x192-high-frame-3.h264";
+const std::string k10BitFrame0 = "bear-320x180-10bit-frame-0.h264";
+const std::string k10BitFrame1 = "bear-320x180-10bit-frame-1.h264";
+const std::string k10BitFrame2 = "bear-320x180-10bit-frame-2.h264";
+const std::string k10BitFrame3 = "bear-320x180-10bit-frame-3.h264";
+const std::string kYUV444Frame = "blackwhite_yuv444p-frame.h264";
// Checks whether the decrypt config in the picture matches the decrypt config
// passed to this matcher.
@@ -61,52 +65,67 @@ MATCHER(SubsampleSizeMatches, "Verify subsample sizes match buffer size") {
return subsample_total_size == buffer_size;
}
-// Given a H264NALU (arg0), compute the slice header and store a copy in
-// both |arg1| and |slice_header|. This assumes that the NALU comes from
-// kBaselineFrame0.
-ACTION_P(ComputeSliceHeader, slice_header) {
- const H264NALU& slice_nalu = arg0;
- // |arg1| and |slice_header| are H264SliceHeader*.
-
- // Ideally we could just parse |slice_nalu|, but the parser needs additional
- // data (like SPS and PPS entries) which we don't have. So this simulates
- // parsing of |slice_nalu| by simply setting the appropriate fields
-
- // Zero out |slice_header| so there is no need to set a lot of default values.
- std::memset(slice_header, 0, sizeof(H264SliceHeader));
-
- // Extract the values directly from the H264NALU provided.
- slice_header->idr_pic_flag = (slice_nalu.nal_unit_type == 5);
- slice_header->nal_ref_idc = slice_nalu.nal_ref_idc;
- slice_header->nalu_data = slice_nalu.data;
- slice_header->nalu_size = slice_nalu.size;
-
- // Don't want to duplicate all the work of H264Parser.ParseSliceHeader(),
- // so the following were determined by looking at the slice header after
- // H264_Parser.ParseSliceHeader() was called on kBaselineFrame0.
- slice_header->header_bit_size = 0x24;
- slice_header->slice_type = 7;
- slice_header->slice_qp_delta = 8;
- slice_header->dec_ref_pic_marking_bit_size = 2u;
-
- // Now that we have created our local copy of the slice header, copy it into
- // |arg1| and return success.
- std::memcpy(arg1, slice_header, sizeof(H264SliceHeader));
+// Emulates encrypted slice header parsing. Since the data isn't actually
+// encrypted, we can do this by simply parsing it.
+H264Decoder::H264Accelerator::Status ParseSliceHeader(
+ const std::vector<base::span<const uint8_t>>& data,
+ const std::vector<SubsampleEntry>& subsamples,
+ const std::vector<uint8_t>& sps_nalu_data,
+ const std::vector<uint8_t>& pps_nalu_data,
+ H264SliceHeader* slice_hdr_out) {
+ EXPECT_TRUE(!sps_nalu_data.empty());
+ EXPECT_TRUE(!pps_nalu_data.empty());
+ // Construct the bitstream for parsing.
+ std::vector<uint8_t> full_data;
+ const std::vector<uint8_t> start_code = {0u, 0u, 1u};
+ full_data.insert(full_data.end(), start_code.begin(), start_code.end());
+ full_data.insert(full_data.end(), sps_nalu_data.begin(), sps_nalu_data.end());
+ full_data.insert(full_data.end(), start_code.begin(), start_code.end());
+ full_data.insert(full_data.end(), pps_nalu_data.begin(), pps_nalu_data.end());
+ for (const auto& span : data) {
+ full_data.insert(full_data.end(), start_code.begin(), start_code.end());
+ full_data.insert(full_data.end(), span.begin(), span.end());
+ }
+ H264Parser parser;
+ parser.SetStream(full_data.data(), full_data.size());
+ while (true) {
+ H264NALU nalu;
+ H264Parser::Result res = parser.AdvanceToNextNALU(&nalu);
+ if (res == H264Parser::kEOStream)
+ break;
+ EXPECT_EQ(H264Parser::kOk, res);
+ switch (nalu.nal_unit_type) {
+ case H264NALU::kSPS:
+ int sps_id;
+ EXPECT_EQ(H264Parser::kOk, parser.ParseSPS(&sps_id));
+ break;
+ case H264NALU::kPPS:
+ int pps_id;
+ EXPECT_EQ(H264Parser::kOk, parser.ParsePPS(&pps_id));
+ break;
+ case H264NALU::kIDRSlice: // fallthrough
+ case H264NALU::kNonIDRSlice:
+ EXPECT_EQ(H264Parser::kOk,
+ parser.ParseSliceHeader(nalu, slice_hdr_out));
+ slice_hdr_out->full_sample_encryption = true;
+ break;
+ }
+ }
return H264Decoder::H264Accelerator::Status::kOk;
}
-// Compare 2 H264SliceHeader objects for equality.
-MATCHER_P(SliceHeaderMatches, slice_header, "Verify H264SliceHeader objects") {
- // Rather than match pointers, the contents must be the same.
- return std::memcmp(arg, slice_header, sizeof(H264SliceHeader)) == 0;
-}
-
class MockH264Accelerator : public H264Decoder::H264Accelerator {
public:
MockH264Accelerator() = default;
MOCK_METHOD0(CreateH264Picture, scoped_refptr<H264Picture>());
MOCK_METHOD1(SubmitDecode, Status(scoped_refptr<H264Picture> pic));
+ MOCK_METHOD5(ParseEncryptedSliceHeader,
+ Status(const std::vector<base::span<const uint8_t>>& data,
+ const std::vector<SubsampleEntry>& subsamples,
+ const std::vector<uint8_t>& sps_nalu_data,
+ const std::vector<uint8_t>& pps_nalu_data,
+ H264SliceHeader* slice_hdr_out));
MOCK_METHOD7(SubmitFrameMetadata,
Status(const H264SPS* sps,
const H264PPS* pps,
@@ -146,8 +165,11 @@ class H264DecoderTest : public ::testing::Test {
// Keeps decoding the input bitstream set at |SetInputFrameFiles| until the
// decoder has consumed all bitstreams or returned from
- // |H264Decoder::Decode|. Returns the same result as |H264Decoder::Decode|.
- AcceleratedVideoDecoder::DecodeResult Decode();
+  // |H264Decoder::Decode|. If |full_sample_encryption| is true, this sets a
+  // DecryptConfig on the DecoderBuffer indicating that all but the first
+  // byte are encrypted. Returns the same result as |H264Decoder::Decode|.
+ AcceleratedVideoDecoder::DecodeResult Decode(
+ bool full_sample_encryption = false);
protected:
std::unique_ptr<H264Decoder> decoder_;
@@ -188,7 +210,8 @@ void H264DecoderTest::SetInputFrameFiles(
input_frame_files_.push(f);
}
-AcceleratedVideoDecoder::DecodeResult H264DecoderTest::Decode() {
+AcceleratedVideoDecoder::DecodeResult H264DecoderTest::Decode(
+ bool full_sample_encryption) {
while (true) {
auto result = decoder_->Decode();
int32_t bitstream_id = 0;
@@ -200,6 +223,15 @@ AcceleratedVideoDecoder::DecodeResult H264DecoderTest::Decode() {
CHECK(base::ReadFileToString(input_file, &bitstream_));
decoder_buffer_ = DecoderBuffer::CopyFrom(
reinterpret_cast<const uint8_t*>(bitstream_.data()), bitstream_.size());
+ if (full_sample_encryption) {
+      // This is only used in two tests, each with the same data, where the
+      // offset to the byte after the slice header's NALU type is 669.
+ constexpr int kOffsetToSliceHeader = 669;
+ decoder_buffer_->set_decrypt_config(DecryptConfig::CreateCencConfig(
+ "kFakeKeyId", std::string(DecryptConfig::kDecryptionKeySize, 'x'),
+ {SubsampleEntry(kOffsetToSliceHeader,
+ bitstream_.size() - kOffsetToSliceHeader)}));
+ }
EXPECT_NE(decoder_buffer_.get(), nullptr);
decoder_->SetStream(bitstream_id++, *decoder_buffer_);
}
@@ -237,6 +269,7 @@ TEST_F(H264DecoderTest, DecodeSingleFrame) {
ASSERT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
EXPECT_EQ(gfx::Size(320, 192), decoder_->GetPicSize());
EXPECT_EQ(H264PROFILE_BASELINE, decoder_->GetProfile());
+ EXPECT_EQ(8u, decoder_->GetBitDepth());
EXPECT_LE(9u, decoder_->GetRequiredNumOfPictures());
EXPECT_CALL(*accelerator_, CreateH264Picture()).WillOnce(Return(nullptr));
@@ -255,11 +288,34 @@ TEST_F(H264DecoderTest, DecodeSingleFrame) {
ASSERT_TRUE(decoder_->Flush());
}
+// This is for CENCv1 full sample encryption.
+TEST_F(H264DecoderTest, DecodeSingleEncryptedFrame) {
+ SetInputFrameFiles({kBaselineFrame0});
+ ASSERT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode(true));
+ EXPECT_EQ(gfx::Size(320, 192), decoder_->GetPicSize());
+ EXPECT_EQ(H264PROFILE_BASELINE, decoder_->GetProfile());
+ EXPECT_LE(9u, decoder_->GetRequiredNumOfPictures());
+
+ {
+ InSequence sequence;
+ EXPECT_CALL(*accelerator_, ParseEncryptedSliceHeader(_, _, _, _, _))
+ .WillOnce(Invoke(&ParseSliceHeader));
+ EXPECT_CALL(*accelerator_, CreateH264Picture());
+ EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _, _, _));
+ EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _));
+ EXPECT_CALL(*accelerator_, SubmitDecode(_));
+ EXPECT_CALL(*accelerator_, OutputPicture(_));
+ }
+ ASSERT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, Decode());
+ ASSERT_TRUE(decoder_->Flush());
+}
+
TEST_F(H264DecoderTest, SkipNonIDRFrames) {
SetInputFrameFiles({kBaselineFrame1, kBaselineFrame2, kBaselineFrame0});
ASSERT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
EXPECT_EQ(gfx::Size(320, 192), decoder_->GetPicSize());
EXPECT_EQ(H264PROFILE_BASELINE, decoder_->GetProfile());
+ EXPECT_EQ(8u, decoder_->GetBitDepth());
EXPECT_LE(9u, decoder_->GetRequiredNumOfPictures());
{
InSequence sequence;
@@ -280,6 +336,7 @@ TEST_F(H264DecoderTest, DecodeProfileBaseline) {
ASSERT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
EXPECT_EQ(gfx::Size(320, 192), decoder_->GetPicSize());
EXPECT_EQ(H264PROFILE_BASELINE, decoder_->GetProfile());
+ EXPECT_EQ(8u, decoder_->GetBitDepth());
EXPECT_LE(9u, decoder_->GetRequiredNumOfPictures());
EXPECT_CALL(*accelerator_, CreateH264Picture()).Times(4);
@@ -305,18 +362,38 @@ TEST_F(H264DecoderTest, DecodeProfileBaseline) {
ASSERT_TRUE(decoder_->Flush());
}
-// TODO(jkardatzke): Remove this test if we keep the flag for DecoderBuffers
-// are complete frames because this code path will never get called.
-TEST_F(H264DecoderTest, DISABLED_OutputPictureFailureCausesFlushToFail) {
- // Provide one frame so that Decode() will not try to output a frame, so
- // Flush() will.
- SetInputFrameFiles({
- kBaselineFrame0,
- });
+TEST_F(H264DecoderTest, Decode10BitStream) {
+ SetInputFrameFiles({k10BitFrame0, k10BitFrame1, k10BitFrame2, k10BitFrame3});
ASSERT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
- EXPECT_CALL(*accelerator_, OutputPicture(_)).WillRepeatedly(Return(false));
+ EXPECT_EQ(gfx::Size(320, 192), decoder_->GetPicSize());
+ EXPECT_EQ(gfx::Rect(320, 180), decoder_->GetVisibleRect());
+ EXPECT_EQ(H264PROFILE_HIGH10PROFILE, decoder_->GetProfile());
+ EXPECT_EQ(10u, decoder_->GetBitDepth());
+ EXPECT_LE(14u, decoder_->GetRequiredNumOfPictures());
+
+ // One picture will be kept in the DPB for reordering. The second picture
+ // should be outputted after feeding the third and fourth frames.
+ EXPECT_CALL(*accelerator_, CreateH264Picture()).Times(4);
+ EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _, _, _)).Times(4);
+ EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _)).Times(4);
+
+ Expectation decode_poc0, decode_poc2, decode_poc4, decode_poc6;
+ {
+ InSequence decode_order;
+ decode_poc0 = EXPECT_CALL(*accelerator_, SubmitDecode(WithPoc(0)));
+ decode_poc6 = EXPECT_CALL(*accelerator_, SubmitDecode(WithPoc(6)));
+ decode_poc2 = EXPECT_CALL(*accelerator_, SubmitDecode(WithPoc(2)));
+ decode_poc4 = EXPECT_CALL(*accelerator_, SubmitDecode(WithPoc(4)));
+ }
+ {
+ InSequence display_order;
+ EXPECT_CALL(*accelerator_, OutputPicture(WithPoc(0))).After(decode_poc0);
+ EXPECT_CALL(*accelerator_, OutputPicture(WithPoc(2))).After(decode_poc2);
+ EXPECT_CALL(*accelerator_, OutputPicture(WithPoc(4))).After(decode_poc4);
+ EXPECT_CALL(*accelerator_, OutputPicture(WithPoc(6))).After(decode_poc6);
+ }
ASSERT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, Decode());
- ASSERT_FALSE(decoder_->Flush());
+ ASSERT_TRUE(decoder_->Flush());
}
TEST_F(H264DecoderTest, OutputPictureFailureCausesDecodeToFail) {
@@ -335,10 +412,11 @@ TEST_F(H264DecoderTest, DecodeProfileHigh) {
ASSERT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
EXPECT_EQ(gfx::Size(320, 192), decoder_->GetPicSize());
EXPECT_EQ(H264PROFILE_HIGH, decoder_->GetProfile());
+ EXPECT_EQ(8u, decoder_->GetBitDepth());
EXPECT_LE(16u, decoder_->GetRequiredNumOfPictures());
- // Two pictures will be kept in DPB for reordering. The first picture should
- // be outputted after feeding the third frame.
+ // Two pictures will be kept in the DPB for reordering. The first picture
+ // should be outputted after feeding the third frame.
EXPECT_CALL(*accelerator_, CreateH264Picture()).Times(4);
EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _, _, _)).Times(4);
EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _)).Times(4);
@@ -362,6 +440,12 @@ TEST_F(H264DecoderTest, DecodeProfileHigh) {
ASSERT_TRUE(decoder_->Flush());
}
+TEST_F(H264DecoderTest, DenyDecodeNonYUV420) {
+ // YUV444 frame causes kDecodeError.
+ SetInputFrameFiles({kYUV444Frame});
+ ASSERT_EQ(AcceleratedVideoDecoder::kDecodeError, Decode());
+}
+
TEST_F(H264DecoderTest, SwitchBaselineToHigh) {
SetInputFrameFiles({
kBaselineFrame0, kHighFrame0, kHighFrame1, kHighFrame2, kHighFrame3,
@@ -369,6 +453,7 @@ TEST_F(H264DecoderTest, SwitchBaselineToHigh) {
ASSERT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
EXPECT_EQ(gfx::Size(320, 192), decoder_->GetPicSize());
EXPECT_EQ(H264PROFILE_BASELINE, decoder_->GetProfile());
+ EXPECT_EQ(8u, decoder_->GetBitDepth());
EXPECT_LE(9u, decoder_->GetRequiredNumOfPictures());
{
@@ -382,6 +467,7 @@ TEST_F(H264DecoderTest, SwitchBaselineToHigh) {
ASSERT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
EXPECT_EQ(gfx::Size(320, 192), decoder_->GetPicSize());
EXPECT_EQ(H264PROFILE_HIGH, decoder_->GetProfile());
+ EXPECT_EQ(8u, decoder_->GetBitDepth());
EXPECT_LE(16u, decoder_->GetRequiredNumOfPictures());
ASSERT_TRUE(Mock::VerifyAndClearExpectations(&*accelerator_));
@@ -417,6 +503,7 @@ TEST_F(H264DecoderTest, SwitchHighToBaseline) {
ASSERT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
EXPECT_EQ(gfx::Size(320, 192), decoder_->GetPicSize());
EXPECT_EQ(H264PROFILE_HIGH, decoder_->GetProfile());
+ EXPECT_EQ(8u, decoder_->GetBitDepth());
EXPECT_LE(16u, decoder_->GetRequiredNumOfPictures());
{
@@ -430,6 +517,7 @@ TEST_F(H264DecoderTest, SwitchHighToBaseline) {
ASSERT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
EXPECT_EQ(gfx::Size(320, 192), decoder_->GetPicSize());
EXPECT_EQ(H264PROFILE_BASELINE, decoder_->GetProfile());
+ EXPECT_EQ(8u, decoder_->GetBitDepth());
EXPECT_LE(9u, decoder_->GetRequiredNumOfPictures());
ASSERT_TRUE(Mock::VerifyAndClearExpectations(&*accelerator_));
@@ -457,6 +545,25 @@ TEST_F(H264DecoderTest, SwitchHighToBaseline) {
ASSERT_TRUE(decoder_->Flush());
}
+TEST_F(H264DecoderTest, SwitchYUV420ToNonYUV420) {
+ SetInputFrameFiles({kBaselineFrame0, kYUV444Frame});
+ // The first frame, YUV420, is decoded with no error.
+ ASSERT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
+ EXPECT_EQ(gfx::Size(320, 192), decoder_->GetPicSize());
+ EXPECT_EQ(H264PROFILE_BASELINE, decoder_->GetProfile());
+ EXPECT_LE(9u, decoder_->GetRequiredNumOfPictures());
+ {
+ InSequence sequence;
+ EXPECT_CALL(*accelerator_, CreateH264Picture());
+ EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _, _, _));
+ EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _));
+ EXPECT_CALL(*accelerator_, SubmitDecode(_));
+ EXPECT_CALL(*accelerator_, OutputPicture(WithPoc(0)));
+ }
+ // The second frame, YUV444, causes kDecodeError.
+ ASSERT_EQ(AcceleratedVideoDecoder::kDecodeError, Decode());
+}
+
// Verify that the decryption config is passed to the accelerator.
TEST_F(H264DecoderTest, SetEncryptedStream) {
std::string bitstream;
@@ -488,15 +595,50 @@ TEST_F(H264DecoderTest, SetEncryptedStream) {
decoder_->SetStream(0, *buffer);
EXPECT_EQ(AcceleratedVideoDecoder::kConfigChange, decoder_->Decode());
EXPECT_EQ(H264PROFILE_BASELINE, decoder_->GetProfile());
+ EXPECT_EQ(8u, decoder_->GetBitDepth());
EXPECT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, decoder_->Decode());
EXPECT_TRUE(decoder_->Flush());
}
+TEST_F(H264DecoderTest, ParseEncryptedSliceHeaderRetry) {
+ SetInputFrameFiles({kBaselineFrame0});
+ ASSERT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode(true));
+ EXPECT_EQ(gfx::Size(320, 192), decoder_->GetPicSize());
+ EXPECT_EQ(H264PROFILE_BASELINE, decoder_->GetProfile());
+ EXPECT_LE(9u, decoder_->GetRequiredNumOfPictures());
+
+ EXPECT_CALL(*accelerator_, ParseEncryptedSliceHeader(_, _, _, _, _))
+ .WillOnce(Return(H264Decoder::H264Accelerator::Status::kTryAgain));
+ ASSERT_EQ(AcceleratedVideoDecoder::kTryAgain, Decode(true));
+
+ // Try again, assuming key still not set. Only ParseEncryptedSliceHeader()
+ // should be called again.
+ EXPECT_CALL(*accelerator_, ParseEncryptedSliceHeader(_, _, _, _, _))
+ .WillOnce(Return(H264Decoder::H264Accelerator::Status::kTryAgain));
+ ASSERT_EQ(AcceleratedVideoDecoder::kTryAgain, Decode(true));
+
+ // Assume key has been provided now, next call to Decode() should proceed.
+ {
+ InSequence sequence;
+ EXPECT_CALL(*accelerator_, ParseEncryptedSliceHeader(_, _, _, _, _))
+ .WillOnce(Invoke(&ParseSliceHeader));
+ EXPECT_CALL(*accelerator_, CreateH264Picture());
+ EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _, _, _));
+ EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _));
+ EXPECT_CALL(*accelerator_, SubmitDecode(WithPoc(0)));
+ EXPECT_CALL(*accelerator_, OutputPicture(WithPoc(0)));
+ }
+ ASSERT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, Decode(true));
+
+ ASSERT_TRUE(decoder_->Flush());
+}
+
TEST_F(H264DecoderTest, SubmitFrameMetadataRetry) {
SetInputFrameFiles({kBaselineFrame0});
ASSERT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
EXPECT_EQ(gfx::Size(320, 192), decoder_->GetPicSize());
EXPECT_EQ(H264PROFILE_BASELINE, decoder_->GetProfile());
+ EXPECT_EQ(8u, decoder_->GetBitDepth());
EXPECT_LE(9u, decoder_->GetRequiredNumOfPictures());
{
@@ -532,6 +674,7 @@ TEST_F(H264DecoderTest, SubmitSliceRetry) {
ASSERT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
EXPECT_EQ(gfx::Size(320, 192), decoder_->GetPicSize());
EXPECT_EQ(H264PROFILE_BASELINE, decoder_->GetProfile());
+ EXPECT_EQ(8u, decoder_->GetBitDepth());
EXPECT_LE(9u, decoder_->GetRequiredNumOfPictures());
{
@@ -567,6 +710,7 @@ TEST_F(H264DecoderTest, SubmitDecodeRetry) {
ASSERT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
EXPECT_EQ(gfx::Size(320, 192), decoder_->GetPicSize());
EXPECT_EQ(H264PROFILE_BASELINE, decoder_->GetProfile());
+ EXPECT_EQ(8u, decoder_->GetBitDepth());
EXPECT_LE(9u, decoder_->GetRequiredNumOfPictures());
{
@@ -615,6 +759,7 @@ TEST_F(H264DecoderTest, SetStreamRetry) {
ASSERT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
EXPECT_EQ(gfx::Size(320, 192), decoder_->GetPicSize());
EXPECT_EQ(H264PROFILE_BASELINE, decoder_->GetProfile());
+ EXPECT_EQ(8u, decoder_->GetBitDepth());
EXPECT_LE(9u, decoder_->GetRequiredNumOfPictures());
{
diff --git a/chromium/media/gpu/h265_decoder.cc b/chromium/media/gpu/h265_decoder.cc
index 4c44016a0c6..64d5106de33 100644
--- a/chromium/media/gpu/h265_decoder.cc
+++ b/chromium/media/gpu/h265_decoder.cc
@@ -5,6 +5,7 @@
#include <algorithm>
#include "base/logging.h"
+#include "base/notreached.h"
#include "media/base/limits.h"
#include "media/gpu/h265_decoder.h"
@@ -19,6 +20,35 @@ struct POCAscCompare {
}
};
+bool ParseBitDepth(const H265SPS& sps, uint8_t& bit_depth) {
+ // Spec 7.4.3.2.1
+ if (sps.bit_depth_y != sps.bit_depth_c) {
+    DVLOG(1) << "Different bit depths among planes are not supported";
+ return false;
+ }
+ bit_depth = base::checked_cast<uint8_t>(sps.bit_depth_y);
+ return true;
+}
+
+bool IsValidBitDepth(uint8_t bit_depth, VideoCodecProfile profile) {
+ // Spec A.3.
+ switch (profile) {
+ case HEVCPROFILE_MAIN:
+ return bit_depth == 8u;
+ case HEVCPROFILE_MAIN10:
+ return bit_depth == 8u || bit_depth == 10u;
+ case HEVCPROFILE_MAIN_STILL_PICTURE:
+ return bit_depth == 8u;
+ default:
+ NOTREACHED();
+ return false;
+ }
+}
+
+bool IsYUV420Sequence(const H265SPS& sps) {
+ // Spec 6.2
+ return sps.chroma_format_idc == 1;
+}
} // namespace
H265Decoder::H265Accelerator::H265Accelerator() = default;
@@ -185,16 +215,8 @@ H265Decoder::DecodeResult H265Decoder::Decode() {
case H265NALU::CRA_NUT:
if (!curr_slice_hdr_) {
curr_slice_hdr_.reset(new H265SliceHeader());
- if (last_slice_hdr_) {
- // This is a multi-slice picture, so we should copy all of the prior
- // slice header data to the new slice and use those as the default
- // values that don't have syntax elements present.
- memcpy(curr_slice_hdr_.get(), last_slice_hdr_.get(),
- sizeof(H265SliceHeader));
- last_slice_hdr_.reset();
- }
- par_res =
- parser_.ParseSliceHeader(*curr_nalu_, curr_slice_hdr_.get());
+ par_res = parser_.ParseSliceHeader(*curr_nalu_, curr_slice_hdr_.get(),
+ last_slice_hdr_.get());
if (par_res == H265Parser::kMissingParameterSet) {
// We may still be able to recover if we skip until we find the
// SPS/PPS.
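The memcpy-based inheritance of the prior slice header is replaced by passing it straight into the parser, which uses it as the source of defaults for syntax elements a dependent slice does not re-code. A reduced standalone model of that API shape (names are illustrative; the real signature lives in h265_parser.h):

#include <iostream>

// Reduced stand-in for H265SliceHeader.
struct SliceHeader {
  int qp_delta = 0;
  bool first_slice_in_pic = true;
};

// The parser fills |out|; when |prior| is non-null this is a later slice of
// the same picture, and fields without coded syntax elements inherit their
// values from |prior|.
void ParseSliceHeader(SliceHeader* out, const SliceHeader* prior) {
  if (prior) {
    *out = *prior;  // Inherit defaults from the previous slice...
    out->first_slice_in_pic = false;
  }
  // ...then overwrite whatever the bitstream actually codes, e.g.:
  out->qp_delta = 2;
}

int main() {
  SliceHeader first, second;
  ParseSliceHeader(&first, /*prior=*/nullptr);
  ParseSliceHeader(&second, /*prior=*/&first);
  std::cout << second.first_slice_in_pic << "\n";  // 0
}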
@@ -318,6 +340,10 @@ VideoCodecProfile H265Decoder::GetProfile() const {
return profile_;
}
+uint8_t H265Decoder::GetBitDepth() const {
+ return bit_depth_;
+}
+
size_t H265Decoder::GetRequiredNumOfPictures() const {
constexpr size_t kPicsInPipeline = limits::kMaxVideoFrames + 1;
return GetNumReferenceFrames() + kPicsInPipeline;
@@ -348,6 +374,10 @@ bool H265Decoder::ProcessPPS(int pps_id, bool* need_new_buffers) {
DVLOG(2) << "New visible rect: " << new_visible_rect.ToString();
visible_rect_ = new_visible_rect;
}
+ if (!IsYUV420Sequence(*sps)) {
+ DVLOG(1) << "Only YUV 4:2:0 is supported";
+ return false;
+ }
// Equation 7-8
max_pic_order_cnt_lsb_ =
@@ -355,16 +385,25 @@ bool H265Decoder::ProcessPPS(int pps_id, bool* need_new_buffers) {
VideoCodecProfile new_profile = H265Parser::ProfileIDCToVideoCodecProfile(
sps->profile_tier_level.general_profile_idc);
-
+ uint8_t new_bit_depth = 0;
+ if (!ParseBitDepth(*sps, new_bit_depth))
+ return false;
+ if (!IsValidBitDepth(new_bit_depth, new_profile)) {
+ DVLOG(1) << "Invalid bit depth=" << base::strict_cast<int>(new_bit_depth)
+ << ", profile=" << GetProfileName(new_profile);
+ return false;
+ }
if (pic_size_ != new_pic_size || dpb_.max_num_pics() != sps->max_dpb_size ||
- profile_ != new_profile) {
+ profile_ != new_profile || bit_depth_ != new_bit_depth) {
if (!Flush())
return false;
DVLOG(1) << "Codec profile: " << GetProfileName(new_profile)
<< ", level(x30): " << sps->profile_tier_level.general_level_idc
<< ", DPB size: " << sps->max_dpb_size
- << ", Picture size: " << new_pic_size.ToString();
+ << ", Picture size: " << new_pic_size.ToString()
+ << ", bit_depth: " << base::strict_cast<int>(new_bit_depth);
profile_ = new_profile;
+ bit_depth_ = new_bit_depth;
pic_size_ = new_pic_size;
dpb_.set_max_num_pics(sps->max_dpb_size);
if (need_new_buffers)
@@ -486,41 +525,57 @@ bool H265Decoder::CalcRefPicPocs(const H265SPS* sps,
// Equation 8-5.
int i, j, k;
for (i = 0, j = 0, k = 0; i < curr_st_ref_pic_set.num_negative_pics; ++i) {
- if (curr_st_ref_pic_set.used_by_curr_pic_s0[i]) {
- poc_st_curr_before_[j++] =
- curr_pic_->pic_order_cnt_val_ + curr_st_ref_pic_set.delta_poc_s0[i];
- } else {
- poc_st_foll_[k++] =
- curr_pic_->pic_order_cnt_val_ + curr_st_ref_pic_set.delta_poc_s0[i];
+ base::CheckedNumeric<int> poc = curr_pic_->pic_order_cnt_val_;
+ poc += curr_st_ref_pic_set.delta_poc_s0[i];
+ if (!poc.IsValid()) {
+ DVLOG(1) << "Invalid POC";
+ return false;
}
+ if (curr_st_ref_pic_set.used_by_curr_pic_s0[i])
+ poc_st_curr_before_[j++] = poc.ValueOrDefault(0);
+ else
+ poc_st_foll_[k++] = poc.ValueOrDefault(0);
}
num_poc_st_curr_before_ = j;
for (i = 0, j = 0; i < curr_st_ref_pic_set.num_positive_pics; ++i) {
- if (curr_st_ref_pic_set.used_by_curr_pic_s1[i]) {
- poc_st_curr_after_[j++] =
- curr_pic_->pic_order_cnt_val_ + curr_st_ref_pic_set.delta_poc_s1[i];
- } else {
- poc_st_foll_[k++] =
- curr_pic_->pic_order_cnt_val_ + curr_st_ref_pic_set.delta_poc_s1[i];
+ base::CheckedNumeric<int> poc = curr_pic_->pic_order_cnt_val_;
+ poc += curr_st_ref_pic_set.delta_poc_s1[i];
+ if (!poc.IsValid()) {
+ DVLOG(1) << "Invalid POC";
+ return false;
}
+ if (curr_st_ref_pic_set.used_by_curr_pic_s1[i])
+ poc_st_curr_after_[j++] = poc.ValueOrDefault(0);
+ else
+ poc_st_foll_[k++] = poc.ValueOrDefault(0);
}
num_poc_st_curr_after_ = j;
num_poc_st_foll_ = k;
for (i = 0, j = 0, k = 0;
i < slice_hdr->num_long_term_sps + slice_hdr->num_long_term_pics; ++i) {
- int poc_lt = slice_hdr->poc_lsb_lt[i];
+ base::CheckedNumeric<int> poc_lt = slice_hdr->poc_lsb_lt[i];
if (slice_hdr->delta_poc_msb_present_flag[i]) {
- poc_lt +=
- curr_pic_->pic_order_cnt_val_ -
- (slice_hdr->delta_poc_msb_cycle_lt[i] * max_pic_order_cnt_lsb_) -
- (curr_pic_->pic_order_cnt_val_ & (max_pic_order_cnt_lsb_ - 1));
+ poc_lt += curr_pic_->pic_order_cnt_val_;
+ base::CheckedNumeric<int> poc_delta =
+ slice_hdr->delta_poc_msb_cycle_lt[i];
+ poc_delta *= max_pic_order_cnt_lsb_;
+ if (!poc_delta.IsValid()) {
+ DVLOG(1) << "Invalid POC";
+ return false;
+ }
+ poc_lt -= poc_delta.ValueOrDefault(0);
+ poc_lt -= curr_pic_->pic_order_cnt_val_ & (max_pic_order_cnt_lsb_ - 1);
+ }
+ if (!poc_lt.IsValid()) {
+ DVLOG(1) << "Invalid POC";
+ return false;
}
if (slice_hdr->used_by_curr_pic_lt[i]) {
- poc_lt_curr_[j] = poc_lt;
+ poc_lt_curr_[j] = poc_lt.ValueOrDefault(0);
curr_delta_poc_msb_present_flag_[j++] =
slice_hdr->delta_poc_msb_present_flag[i];
} else {
- poc_lt_foll_[k] = poc_lt;
+ poc_lt_foll_[k] = poc_lt.ValueOrDefault(0);
foll_delta_poc_msb_present_flag_[k++] =
slice_hdr->delta_poc_msb_present_flag[i];
}
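For context on the pattern used throughout CalcRefPicPocs(): base::CheckedNumeric (from base/numerics/checked_math.h) propagates an "invalid" flag through a chain of arithmetic instead of overflowing, so a single IsValid() check covers the whole computation and ValueOrDefault() gives a safe extraction. A minimal illustration:

    #include <limits>

    #include "base/numerics/checked_math.h"

    base::CheckedNumeric<int> poc = std::numeric_limits<int>::max();
    poc += 1;                           // Overflows: the chain is poisoned.
    bool ok = poc.IsValid();            // false
    int value = poc.ValueOrDefault(0);  // 0, instead of undefined behavior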
diff --git a/chromium/media/gpu/h265_decoder.h b/chromium/media/gpu/h265_decoder.h
index 820e136e8fe..99315f65cce 100644
--- a/chromium/media/gpu/h265_decoder.h
+++ b/chromium/media/gpu/h265_decoder.h
@@ -166,6 +166,7 @@ class MEDIA_GPU_EXPORT H265Decoder final : public AcceleratedVideoDecoder {
gfx::Size GetPicSize() const override;
gfx::Rect GetVisibleRect() const override;
VideoCodecProfile GetProfile() const override;
+ uint8_t GetBitDepth() const override;
size_t GetRequiredNumOfPictures() const override;
size_t GetNumReferenceFrames() const override;
@@ -320,10 +321,12 @@ class MEDIA_GPU_EXPORT H265Decoder final : public AcceleratedVideoDecoder {
// Profile of input bitstream.
VideoCodecProfile profile_;
+ // Bit depth of input bitstream.
+ uint8_t bit_depth_ = 0;
const std::unique_ptr<H265Accelerator> accelerator_;
};
} // namespace media
-#endif // MEDIA_GPU_H265_DECODER_H_
\ No newline at end of file
+#endif // MEDIA_GPU_H265_DECODER_H_
diff --git a/chromium/media/gpu/h265_decoder_unittest.cc b/chromium/media/gpu/h265_decoder_unittest.cc
index ffc0a85da5f..ad34fc42a13 100644
--- a/chromium/media/gpu/h265_decoder_unittest.cc
+++ b/chromium/media/gpu/h265_decoder_unittest.cc
@@ -37,6 +37,11 @@ constexpr char kFrame2[] = "bear-frame2.hevc";
constexpr char kFrame3[] = "bear-frame3.hevc";
constexpr char kFrame4[] = "bear-frame4.hevc";
constexpr char kFrame5[] = "bear-frame5.hevc";
+constexpr char k10BitFrame0[] = "bear-320x180-10bit-frame-0.hevc";
+constexpr char k10BitFrame1[] = "bear-320x180-10bit-frame-1.hevc";
+constexpr char k10BitFrame2[] = "bear-320x180-10bit-frame-2.hevc";
+constexpr char k10BitFrame3[] = "bear-320x180-10bit-frame-3.hevc";
+constexpr char kYUV444Frame[] = "blackwhite_yuv444p-frame.hevc";
// Checks whether the decrypt config in the picture matches the decrypt config
// passed to this matcher.
@@ -194,6 +199,7 @@ TEST_F(H265DecoderTest, DecodeSingleFrame) {
EXPECT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
EXPECT_EQ(gfx::Size(320, 184), decoder_->GetPicSize());
EXPECT_EQ(HEVCPROFILE_MAIN, decoder_->GetProfile());
+ EXPECT_EQ(8u, decoder_->GetBitDepth());
EXPECT_EQ(17u, decoder_->GetRequiredNumOfPictures());
// Also test running out of surfaces.
@@ -218,6 +224,7 @@ TEST_F(H265DecoderTest, SkipNonIDRFrames) {
EXPECT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
EXPECT_EQ(gfx::Size(320, 184), decoder_->GetPicSize());
EXPECT_EQ(HEVCPROFILE_MAIN, decoder_->GetProfile());
+ EXPECT_EQ(8u, decoder_->GetBitDepth());
EXPECT_EQ(17u, decoder_->GetRequiredNumOfPictures());
{
InSequence sequence;
@@ -237,6 +244,7 @@ TEST_F(H265DecoderTest, DecodeProfileMain) {
EXPECT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
EXPECT_EQ(gfx::Size(320, 184), decoder_->GetPicSize());
EXPECT_EQ(HEVCPROFILE_MAIN, decoder_->GetProfile());
+ EXPECT_EQ(8u, decoder_->GetBitDepth());
EXPECT_EQ(17u, decoder_->GetRequiredNumOfPictures());
EXPECT_CALL(*accelerator_, CreateH265Picture()).Times(6);
@@ -268,6 +276,47 @@ TEST_F(H265DecoderTest, DecodeProfileMain) {
EXPECT_TRUE(decoder_->Flush());
}
+TEST_F(H265DecoderTest, Decode10BitStream) {
+ SetInputFrameFiles({k10BitFrame0, k10BitFrame1, k10BitFrame2, k10BitFrame3});
+ EXPECT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
+ EXPECT_EQ(gfx::Size(320, 184), decoder_->GetPicSize());
+ EXPECT_EQ(gfx::Rect(320, 180), decoder_->GetVisibleRect());
+ EXPECT_EQ(HEVCPROFILE_MAIN10, decoder_->GetProfile());
+ EXPECT_EQ(10u, decoder_->GetBitDepth());
+ EXPECT_EQ(17u, decoder_->GetRequiredNumOfPictures());
+
+ EXPECT_CALL(*accelerator_, CreateH265Picture()).Times(4);
+ EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _)).Times(4);
+ EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _, _)).Times(4);
+
+ // Two pictures will be kept in the DPB for reordering. The second and third
+ // pictures should be output after the fourth frame is fed.
+ Expectation decode_poc0, decode_poc1, decode_poc2, decode_poc3;
+ {
+ InSequence decode_order;
+ decode_poc0 = EXPECT_CALL(*accelerator_, SubmitDecode(HasPoc(0)));
+ decode_poc3 = EXPECT_CALL(*accelerator_, SubmitDecode(HasPoc(3)));
+ decode_poc2 = EXPECT_CALL(*accelerator_, SubmitDecode(HasPoc(2)));
+ decode_poc1 = EXPECT_CALL(*accelerator_, SubmitDecode(HasPoc(1)));
+ }
+ {
+ InSequence display_order;
+ EXPECT_CALL(*accelerator_, OutputPicture(HasPoc(0))).After(decode_poc0);
+ EXPECT_CALL(*accelerator_, OutputPicture(HasPoc(1))).After(decode_poc1);
+ EXPECT_CALL(*accelerator_, OutputPicture(HasPoc(2))).After(decode_poc2);
+ EXPECT_CALL(*accelerator_, OutputPicture(HasPoc(3))).After(decode_poc3);
+ }
+
+ EXPECT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, Decode());
+ EXPECT_TRUE(decoder_->Flush());
+}
+
+TEST_F(H265DecoderTest, DenyDecodeNonYUV420) {
+ // YUV444 frame causes kDecodeError.
+ SetInputFrameFiles({kYUV444Frame});
+ ASSERT_EQ(AcceleratedVideoDecoder::kDecodeError, Decode());
+}
+
TEST_F(H265DecoderTest, OutputPictureFailureCausesDecodeToFail) {
// Provide enough data that Decode() will try to output a frame.
SetInputFrameFiles({kSpsPps, kFrame0, kFrame1, kFrame2, kFrame3});
@@ -314,6 +363,7 @@ TEST_F(H265DecoderTest, SetEncryptedStream) {
decoder_->SetStream(0, *buffer);
EXPECT_EQ(AcceleratedVideoDecoder::kConfigChange, decoder_->Decode());
EXPECT_EQ(HEVCPROFILE_MAIN, decoder_->GetProfile());
+ EXPECT_EQ(8u, decoder_->GetBitDepth());
EXPECT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, decoder_->Decode());
EXPECT_TRUE(decoder_->Flush());
}
@@ -323,6 +373,7 @@ TEST_F(H265DecoderTest, SubmitFrameMetadataRetry) {
EXPECT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
EXPECT_EQ(gfx::Size(320, 184), decoder_->GetPicSize());
EXPECT_EQ(HEVCPROFILE_MAIN, decoder_->GetProfile());
+ EXPECT_EQ(8u, decoder_->GetBitDepth());
EXPECT_EQ(17u, decoder_->GetRequiredNumOfPictures());
{
@@ -358,6 +409,7 @@ TEST_F(H265DecoderTest, SubmitSliceRetry) {
EXPECT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
EXPECT_EQ(gfx::Size(320, 184), decoder_->GetPicSize());
EXPECT_EQ(HEVCPROFILE_MAIN, decoder_->GetProfile());
+ EXPECT_EQ(8u, decoder_->GetBitDepth());
EXPECT_EQ(17u, decoder_->GetRequiredNumOfPictures());
{
@@ -393,6 +445,7 @@ TEST_F(H265DecoderTest, SubmitDecodeRetry) {
EXPECT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
EXPECT_EQ(gfx::Size(320, 184), decoder_->GetPicSize());
EXPECT_EQ(HEVCPROFILE_MAIN, decoder_->GetProfile());
+ EXPECT_EQ(8u, decoder_->GetBitDepth());
EXPECT_EQ(17u, decoder_->GetRequiredNumOfPictures());
{
@@ -442,6 +495,7 @@ TEST_F(H265DecoderTest, SetStreamRetry) {
EXPECT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode(false));
EXPECT_EQ(gfx::Size(320, 184), decoder_->GetPicSize());
EXPECT_EQ(HEVCPROFILE_MAIN, decoder_->GetProfile());
+ EXPECT_EQ(8u, decoder_->GetBitDepth());
EXPECT_EQ(17u, decoder_->GetRequiredNumOfPictures());
{
diff --git a/chromium/media/gpu/ipc/service/picture_buffer_manager.cc b/chromium/media/gpu/ipc/service/picture_buffer_manager.cc
index 8e32c714d86..2601f75d2f8 100644
--- a/chromium/media/gpu/ipc/service/picture_buffer_manager.cc
+++ b/chromium/media/gpu/ipc/service/picture_buffer_manager.cc
@@ -13,6 +13,7 @@
#include "base/logging.h"
#include "base/synchronization/lock.h"
#include "base/thread_annotations.h"
+#include "components/viz/common/resources/resource_format_utils.h"
#include "gpu/command_buffer/common/mailbox_holder.h"
#include "media/base/video_util.h"
@@ -75,7 +76,7 @@ class PictureBufferManagerImpl : public PictureBufferManager {
uint32_t planes,
gfx::Size texture_size,
uint32_t texture_target,
- bool use_shared_image) override {
+ VideoDecodeAccelerator::TextureAllocationMode mode) override {
DVLOG(2) << __func__;
DCHECK(gpu_task_runner_);
DCHECK(gpu_task_runner_->BelongsToCurrentThread());
@@ -83,9 +84,10 @@ class PictureBufferManagerImpl : public PictureBufferManager {
DCHECK(planes);
DCHECK_LE(planes, static_cast<uint32_t>(VideoFrame::kMaxPlanes));
- if (!use_shared_image) {
- // TODO(sandersd): Consider requiring that CreatePictureBuffers() is
- // called with the context current.
+ // TODO(sandersd): Consider requiring that CreatePictureBuffers() is
+ // called with the context current.
+ if (mode ==
+ VideoDecodeAccelerator::TextureAllocationMode::kAllocateGLTextures) {
if (!command_buffer_helper_->MakeContextCurrent()) {
DVLOG(1) << "Failed to make context current";
return std::vector<PictureBuffer>();
@@ -94,15 +96,26 @@ class PictureBufferManagerImpl : public PictureBufferManager {
std::vector<PictureBuffer> picture_buffers;
for (uint32_t i = 0; i < count; i++) {
- PictureBufferData picture_data = {pixel_format, texture_size,
- use_shared_image};
-
- if (!use_shared_image) {
+ PictureBufferData picture_data = {pixel_format, texture_size};
+ if (mode ==
+ VideoDecodeAccelerator::TextureAllocationMode::kAllocateGLTextures) {
for (uint32_t j = 0; j < planes; j++) {
+ // Use the plane size for texture-backed shared and non-shared images.
+ // Adjust the size by the subsampling factor.
+ const size_t width =
+ VideoFrame::Columns(j, pixel_format, texture_size.width());
+ const size_t height =
+ VideoFrame::Rows(j, pixel_format, texture_size.height());
+
+ picture_data.texture_sizes.emplace_back(width, height);
+
// Create a texture for this plane.
+ // When using shared images, the VDA might not require GL textures to
+ // exist.
+ // TODO(crbug.com/1011555): Do not allocate GL textures when unused.
GLuint service_id = command_buffer_helper_->CreateTexture(
- texture_target, GL_RGBA, texture_size.width(),
- texture_size.height(), GL_RGBA, GL_UNSIGNED_BYTE);
+ texture_target, GL_RGBA, width, height, GL_RGBA,
+ GL_UNSIGNED_BYTE);
DCHECK(service_id);
picture_data.service_ids.push_back(service_id);
@@ -132,8 +145,9 @@ class PictureBufferManagerImpl : public PictureBufferManager {
// TODO(sandersd): Refactor the bind image callback to use service IDs so
// that we can get rid of the client IDs altogether.
picture_buffers.emplace_back(
- picture_buffer_id, texture_size, picture_data.service_ids,
- picture_data.service_ids, texture_target, pixel_format);
+ picture_buffer_id, texture_size, picture_data.texture_sizes,
+ picture_data.service_ids, picture_data.service_ids, texture_target,
+ pixel_format);
}
return picture_buffers;
}
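To make the subsampling adjustment above concrete: VideoFrame::Columns()/Rows() divide the frame extent by the per-plane sample size, so for a hypothetical 4:2:0 buffer (e.g. PIXEL_FORMAT_NV12) with a 320x240 texture_size the loop would create per-plane textures of:

    // plane 0 (Y):  320 x 240
    // plane 1 (UV): 160 x 120   (2x2 chroma subsampling)

Previously every plane's texture was allocated at the full texture_size.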
@@ -216,12 +230,11 @@ class PictureBufferManagerImpl : public PictureBufferManager {
// If this |picture| has a SharedImage, then keep a reference to the
// SharedImage in |picture_buffer_data| and update the gpu::MailboxHolder.
- DCHECK_EQ(picture_buffer_data.use_shared_image,
- !!picture.scoped_shared_image());
- if (auto scoped_shared_image = picture.scoped_shared_image()) {
- picture_buffer_data.scoped_shared_image = scoped_shared_image;
- picture_buffer_data.mailbox_holders[0] =
- scoped_shared_image->GetMailboxHolder();
+ for (int i = 0; i < VideoFrame::kMaxPlanes; i++) {
+ auto image = picture.scoped_shared_image(i);
+ if (image)
+ picture_buffer_data.mailbox_holders[i] = image->GetMailboxHolder();
+ picture_buffer_data.scoped_shared_images[i] = std::move(image);
}
// Create and return a VideoFrame for the picture buffer.
@@ -234,12 +247,12 @@ class PictureBufferManagerImpl : public PictureBufferManager {
frame->set_color_space(picture.color_space());
- frame->metadata()->allow_overlay = picture.allow_overlay();
- frame->metadata()->read_lock_fences_enabled =
+ frame->metadata().allow_overlay = picture.allow_overlay();
+ frame->metadata().read_lock_fences_enabled =
picture.read_lock_fences_enabled();
// TODO(sandersd): Provide an API for VDAs to control this.
- frame->metadata()->power_efficient = true;
+ frame->metadata().power_efficient = true;
return frame;
}
@@ -313,7 +326,9 @@ class PictureBufferManagerImpl : public PictureBufferManager {
DCHECK(gpu_task_runner_->BelongsToCurrentThread());
std::vector<GLuint> service_ids;
- scoped_refptr<Picture::ScopedSharedImage> scoped_shared_image;
+ std::array<scoped_refptr<Picture::ScopedSharedImage>,
+ VideoFrame::kMaxPlanes>
+ scoped_shared_images;
{
base::AutoLock lock(picture_buffers_lock_);
const auto& it = picture_buffers_.find(picture_buffer_id);
@@ -321,15 +336,12 @@ class PictureBufferManagerImpl : public PictureBufferManager {
DCHECK(it->second.dismissed);
DCHECK(!it->second.IsInUse());
service_ids = std::move(it->second.service_ids);
- scoped_shared_image = std::move(it->second.scoped_shared_image);
+ scoped_shared_images = std::move(it->second.scoped_shared_images);
picture_buffers_.erase(it);
}
- // If this PictureBuffer is using a SharedImage, let it fall out of scope.
- if (scoped_shared_image) {
- DCHECK(service_ids.empty());
+ if (service_ids.empty())
return;
- }
if (!command_buffer_helper_->MakeContextCurrent())
return;
@@ -348,10 +360,12 @@ class PictureBufferManagerImpl : public PictureBufferManager {
struct PictureBufferData {
VideoPixelFormat pixel_format;
gfx::Size texture_size;
- bool use_shared_image = false;
std::vector<GLuint> service_ids;
gpu::MailboxHolder mailbox_holders[VideoFrame::kMaxPlanes];
- scoped_refptr<Picture::ScopedSharedImage> scoped_shared_image;
+ std::vector<gfx::Size> texture_sizes;
+ std::array<scoped_refptr<Picture::ScopedSharedImage>,
+ VideoFrame::kMaxPlanes>
+ scoped_shared_images;
bool dismissed = false;
// The same picture buffer can be output from the VDA multiple times
diff --git a/chromium/media/gpu/ipc/service/picture_buffer_manager.h b/chromium/media/gpu/ipc/service/picture_buffer_manager.h
index 2c9b76cc077..877cf04eab9 100644
--- a/chromium/media/gpu/ipc/service/picture_buffer_manager.h
+++ b/chromium/media/gpu/ipc/service/picture_buffer_manager.h
@@ -19,6 +19,7 @@
#include "media/base/video_types.h"
#include "media/gpu/command_buffer_helper.h"
#include "media/video/picture.h"
+#include "media/video/video_decode_accelerator.h"
#include "ui/gfx/geometry/rect.h"
#include "ui/gfx/geometry/size.h"
@@ -62,7 +63,6 @@ class PictureBufferManager
// |planes|: Number of image planes (textures) in the picture.
// |texture_size|: Size of textures to create.
// |texture_target|: Type of textures to create.
- // |use_shared_image|: True if the created buffers should use shared images.
//
// Must be called on the GPU thread.
//
@@ -79,7 +79,7 @@ class PictureBufferManager
uint32_t planes,
gfx::Size texture_size,
uint32_t texture_target,
- bool use_shared_image) = 0;
+ VideoDecodeAccelerator::TextureAllocationMode mode) = 0;
// Dismisses a picture buffer from the pool.
//
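The TextureAllocationMode type used in the new signature is nested in VideoDecodeAccelerator (hence the include added above) and its definition is not part of this diff; inferred from the call sites in this patch, it presumably has the shape:

    enum class TextureAllocationMode {
      kAllocateGLTextures,       // Legacy path: GL textures per plane.
      kDoNotAllocateGLTextures,  // SharedImage path: skip GL allocation.
    };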
diff --git a/chromium/media/gpu/ipc/service/picture_buffer_manager_unittest.cc b/chromium/media/gpu/ipc/service/picture_buffer_manager_unittest.cc
index d74bc70841e..d98288dfda6 100644
--- a/chromium/media/gpu/ipc/service/picture_buffer_manager_unittest.cc
+++ b/chromium/media/gpu/ipc/service/picture_buffer_manager_unittest.cc
@@ -12,6 +12,7 @@
#include "base/test/task_environment.h"
#include "media/base/simple_sync_token_client.h"
#include "media/gpu/test/fake_command_buffer_helper.h"
+#include "media/video/video_decode_accelerator.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace media {
@@ -39,14 +40,19 @@ class PictureBufferManagerImplTest : public testing::Test {
pbm_->Initialize(environment_.GetMainThreadTaskRunner(), cbh_);
}
- std::vector<PictureBuffer> CreateARGBPictureBuffers(uint32_t count) {
+ std::vector<PictureBuffer> CreateARGBPictureBuffers(
+ uint32_t count,
+ VideoDecodeAccelerator::TextureAllocationMode mode =
+ VideoDecodeAccelerator::TextureAllocationMode::kAllocateGLTextures) {
return pbm_->CreatePictureBuffers(count, PIXEL_FORMAT_ARGB, 1,
- gfx::Size(320, 240), GL_TEXTURE_2D,
- false /* use_shared_image */);
+ gfx::Size(320, 240), GL_TEXTURE_2D, mode);
}
- PictureBuffer CreateARGBPictureBuffer() {
- std::vector<PictureBuffer> picture_buffers = CreateARGBPictureBuffers(1);
+ PictureBuffer CreateARGBPictureBuffer(
+ VideoDecodeAccelerator::TextureAllocationMode mode =
+ VideoDecodeAccelerator::TextureAllocationMode::kAllocateGLTextures) {
+ std::vector<PictureBuffer> picture_buffers =
+ CreateARGBPictureBuffers(1, mode);
DCHECK_EQ(picture_buffers.size(), 1U);
return picture_buffers[0];
}
@@ -96,6 +102,17 @@ TEST_F(PictureBufferManagerImplTest, CreatePictureBuffer) {
EXPECT_TRUE(cbh_->HasTexture(pb.client_texture_ids()[0]));
}
+TEST_F(PictureBufferManagerImplTest, CreatePictureBuffer_SharedImage) {
+ Initialize();
+ PictureBuffer pb1 = CreateARGBPictureBuffer(
+ VideoDecodeAccelerator::TextureAllocationMode::kDoNotAllocateGLTextures);
+ EXPECT_EQ(pb1.client_texture_ids().size(), 0u);
+
+ PictureBuffer pb2 = CreateARGBPictureBuffer(
+ VideoDecodeAccelerator::TextureAllocationMode::kAllocateGLTextures);
+ EXPECT_TRUE(cbh_->HasTexture(pb2.client_texture_ids()[0]));
+}
+
TEST_F(PictureBufferManagerImplTest, CreatePictureBuffer_ContextLost) {
Initialize();
cbh_->ContextLost();
diff --git a/chromium/media/gpu/ipc/service/vda_video_decoder.cc b/chromium/media/gpu/ipc/service/vda_video_decoder.cc
index 4614f4c577d..d07b25930c3 100644
--- a/chromium/media/gpu/ipc/service/vda_video_decoder.cc
+++ b/chromium/media/gpu/ipc/service/vda_video_decoder.cc
@@ -210,10 +210,17 @@ VdaVideoDecoder::~VdaVideoDecoder() {
std::string VdaVideoDecoder::GetDisplayName() const {
DVLOG(3) << __func__;
DCHECK(parent_task_runner_->BelongsToCurrentThread());
-
return "VdaVideoDecoder";
}
+VideoDecoderType VdaVideoDecoder::GetDecoderType() const {
+ DVLOG(3) << __func__;
+ DCHECK(parent_task_runner_->BelongsToCurrentThread());
+ // TODO(tmathmeyer) query the accelerator for its implementation type and
+ // return that instead.
+ return VideoDecoderType::kVda;
+}
+
void VdaVideoDecoder::Initialize(const VideoDecoderConfig& config,
bool low_delay,
CdmContext* cdm_context,
@@ -279,7 +286,7 @@ void VdaVideoDecoder::Initialize(const VideoDecoderConfig& config,
// (https://crbug.com/929565). We should support reinitialization for profile
// changes. We limit this support as small as possible for safety.
const bool is_profile_change =
-#if BUILDFLAG(IS_ASH) && BUILDFLAG(USE_VAAPI)
+#if BUILDFLAG(IS_CHROMEOS_ASH) && BUILDFLAG(USE_VAAPI)
config_.profile() != config.profile();
#else
false;
@@ -544,7 +551,7 @@ void VdaVideoDecoder::ProvidePictureBuffersAsync(uint32_t count,
std::vector<PictureBuffer> picture_buffers =
picture_buffer_manager_->CreatePictureBuffers(
count, pixel_format, planes, texture_size, texture_target,
- vda_->SupportsSharedImagePictureBuffers());
+ vda_->GetSharedImageTextureAllocationMode());
if (picture_buffers.empty()) {
parent_task_runner_->PostTask(
FROM_HERE,
@@ -768,6 +775,10 @@ gpu::SharedImageStub* VdaVideoDecoder::GetSharedImageStub() const {
return command_buffer_helper_->GetSharedImageStub();
}
+CommandBufferHelper* VdaVideoDecoder::GetCommandBufferHelper() const {
+ return command_buffer_helper_.get();
+}
+
void VdaVideoDecoder::NotifyErrorOnParentThread(
VideoDecodeAccelerator::Error error) {
DVLOG(1) << __func__ << "(" << error << ")";
diff --git a/chromium/media/gpu/ipc/service/vda_video_decoder.h b/chromium/media/gpu/ipc/service/vda_video_decoder.h
index 37746cac31e..c314c29144a 100644
--- a/chromium/media/gpu/ipc/service/vda_video_decoder.h
+++ b/chromium/media/gpu/ipc/service/vda_video_decoder.h
@@ -77,6 +77,7 @@ class VdaVideoDecoder : public VideoDecoder,
static void DestroyAsync(std::unique_ptr<VdaVideoDecoder>);
// media::VideoDecoder implementation.
+ VideoDecoderType GetDecoderType() const override;
std::string GetDisplayName() const override;
void Initialize(const VideoDecoderConfig& config,
bool low_delay,
@@ -129,6 +130,7 @@ class VdaVideoDecoder : public VideoDecoder,
void NotifyResetDone() override;
void NotifyError(VideoDecodeAccelerator::Error error) override;
gpu::SharedImageStub* GetSharedImageStub() const override;
+ CommandBufferHelper* GetCommandBufferHelper() const override;
// Tasks and thread hopping.
static void CleanupOnGpuThread(std::unique_ptr<VdaVideoDecoder>);
diff --git a/chromium/media/gpu/ipc/service/vda_video_decoder_unittest.cc b/chromium/media/gpu/ipc/service/vda_video_decoder_unittest.cc
index 1d887b20494..cc50642650a 100644
--- a/chromium/media/gpu/ipc/service/vda_video_decoder_unittest.cc
+++ b/chromium/media/gpu/ipc/service/vda_video_decoder_unittest.cc
@@ -190,14 +190,14 @@ class VdaVideoDecoderTest : public testing::TestWithParam<bool> {
RunUntilIdle();
}
- scoped_refptr<VideoFrame> PictureReady_NoRunUntilIdle(
- int32_t bitstream_buffer_id,
- int32_t picture_buffer_id,
- gfx::Rect visible_rect = gfx::Rect(1920, 1080)) {
- scoped_refptr<VideoFrame> frame;
+ void PictureReady_NoRunUntilIdle(scoped_refptr<VideoFrame>* out_frame,
+ int32_t bitstream_buffer_id,
+ int32_t picture_buffer_id,
+ gfx::Rect visible_rect = gfx::Rect(1920,
+ 1080)) {
Picture picture(picture_buffer_id, bitstream_buffer_id, visible_rect,
gfx::ColorSpace::CreateSRGB(), true);
- EXPECT_CALL(output_cb_, Run(_)).WillOnce(SaveArg<0>(&frame));
+ EXPECT_CALL(output_cb_, Run(_)).WillOnce(SaveArg<0>(out_frame));
if (GetParam()) {
// TODO(sandersd): The first time a picture is output, VDAs will do so on
// the GPU thread (because GpuVideoDecodeAccelerator required that). Test
@@ -209,15 +209,15 @@ class VdaVideoDecoderTest : public testing::TestWithParam<bool> {
base::BindOnce(&VideoDecodeAccelerator::Client::PictureReady,
base::Unretained(client_), picture));
}
- return frame;
}
scoped_refptr<VideoFrame> PictureReady(
int32_t bitstream_buffer_id,
int32_t picture_buffer_id,
gfx::Rect visible_rect = gfx::Rect(1920, 1080)) {
- scoped_refptr<VideoFrame> frame = PictureReady_NoRunUntilIdle(
- bitstream_buffer_id, picture_buffer_id, visible_rect);
+ scoped_refptr<VideoFrame> frame;
+ PictureReady_NoRunUntilIdle(&frame, bitstream_buffer_id, picture_buffer_id,
+ visible_rect);
RunUntilIdle();
return frame;
}
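A plausible reason for switching PictureReady_NoRunUntilIdle() to an out-parameter: when GetParam() is true, PictureReady() is delivered via a posted task, so the SaveArg<0> expectation only fills the frame once the loop is pumped. Returning a local scoped_refptr by value would therefore hand back null in that configuration; writing through a caller-owned pointer lets the caller RunUntilIdle() first and then read the captured frame.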
@@ -411,8 +411,8 @@ TEST_P(VdaVideoDecoderTest, Decode_OutputAndDismiss) {
int32_t bitstream_id = Decode(base::TimeDelta());
NotifyEndOfBitstreamBuffer(bitstream_id);
int32_t picture_buffer_id = ProvidePictureBuffer();
- scoped_refptr<VideoFrame> frame =
- PictureReady_NoRunUntilIdle(bitstream_id, picture_buffer_id);
+ scoped_refptr<VideoFrame> frame;
+ PictureReady_NoRunUntilIdle(&frame, bitstream_id, picture_buffer_id);
DismissPictureBuffer(picture_buffer_id);
// Dropping the frame still requires a SyncPoint to wait on.
diff --git a/chromium/media/gpu/mac/BUILD.gn b/chromium/media/gpu/mac/BUILD.gn
index 54995e87ec4..b537122064d 100644
--- a/chromium/media/gpu/mac/BUILD.gn
+++ b/chromium/media/gpu/mac/BUILD.gn
@@ -6,19 +6,12 @@ import("//build/config/features.gni")
import("//build/config/ui.gni")
import("//media/gpu/args.gni")
import("//media/media_options.gni")
-import("//tools/generate_stubs/rules.gni")
import("//ui/gl/features.gni")
assert(is_mac)
import("//build/config/mac/mac_sdk.gni")
-generate_stubs("vt_beta_stubs") {
- extra_header = "vt_beta_stubs_header.fragment"
- sigs = [ "vt_beta.sig" ]
- output_name = "vt_beta_stubs"
-}
-
source_set("mac") {
defines = [ "MEDIA_GPU_IMPLEMENTATION" ]
visibility = [ "//media/gpu" ]
@@ -43,7 +36,6 @@ source_set("mac") {
"VideoToolbox.framework",
]
deps = [
- ":vt_beta_stubs",
"//base",
"//components/crash/core/common:crash_key",
"//gpu/command_buffer/service:gles2",
diff --git a/chromium/media/gpu/mac/vt_beta.h b/chromium/media/gpu/mac/vt_beta.h
deleted file mode 100644
index b5a46a2a8a0..00000000000
--- a/chromium/media/gpu/mac/vt_beta.h
+++ /dev/null
@@ -1,19 +0,0 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_GPU_MAC_VT_BETA_H_
-#define MEDIA_GPU_MAC_VT_BETA_H_
-
-// Dynamic library loader.
-#include "media/gpu/mac/vt_beta_stubs.h"
-
-// CoreMedia and VideoToolbox types.
-#include "media/gpu/mac/vt_beta_stubs_header.fragment"
-
-// CoreMedia and VideoToolbox functions.
-extern "C" {
-#include "media/gpu/mac/vt_beta.sig"
-} // extern "C"
-
-#endif // MEDIA_GPU_MAC_VT_BETA_H_
diff --git a/chromium/media/gpu/mac/vt_beta.sig b/chromium/media/gpu/mac/vt_beta.sig
deleted file mode 100644
index aa18e939d1d..00000000000
--- a/chromium/media/gpu/mac/vt_beta.sig
+++ /dev/null
@@ -1,6 +0,0 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// This function isn't available until we're using the macOS 11.0 SDK.
-void VTRegisterSupplementalVideoDecoderIfAvailable(CMVideoCodecType codecType);
diff --git a/chromium/media/gpu/mac/vt_beta_stubs_header.fragment b/chromium/media/gpu/mac/vt_beta_stubs_header.fragment
deleted file mode 100644
index 87928abf42b..00000000000
--- a/chromium/media/gpu/mac/vt_beta_stubs_header.fragment
+++ /dev/null
@@ -1,5 +0,0 @@
-// Copyright 2020 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <CoreMedia/CoreMedia.h>
diff --git a/chromium/media/gpu/mac/vt_config_util.h b/chromium/media/gpu/mac/vt_config_util.h
index ac3d6d0bd2d..d3b4a5e3e31 100644
--- a/chromium/media/gpu/mac/vt_config_util.h
+++ b/chromium/media/gpu/mac/vt_config_util.h
@@ -12,7 +12,6 @@
#include "media/base/video_codecs.h"
#include "media/base/video_color_space.h"
#include "media/gpu/media_gpu_export.h"
-#include "media/video/video_decode_accelerator.h"
#include "ui/gfx/hdr_metadata.h"
namespace media {
diff --git a/chromium/media/gpu/mac/vt_config_util.mm b/chromium/media/gpu/mac/vt_config_util.mm
index 4a0da1a0d6b..09d03a517ee 100644
--- a/chromium/media/gpu/mac/vt_config_util.mm
+++ b/chromium/media/gpu/mac/vt_config_util.mm
@@ -6,9 +6,8 @@
#import <Foundation/Foundation.h>
-#include <simd/simd.h>
-
#include "base/mac/foundation_util.h"
+#include "media/base/mac/color_space_util_mac.h"
namespace {
@@ -43,11 +42,7 @@ CFStringRef GetPrimaries(media::VideoColorSpace::PrimaryID primary_id) {
return kCMFormatDescriptionColorPrimaries_ITU_R_709_2;
case media::VideoColorSpace::PrimaryID::BT2020:
- if (@available(macos 10.11, *))
- return kCMFormatDescriptionColorPrimaries_ITU_R_2020;
- DLOG(WARNING) << "kCMFormatDescriptionColorPrimaries_ITU_R_2020 "
- "unsupported prior to 10.11";
- return nil;
+ return kCMFormatDescriptionColorPrimaries_ITU_R_2020;
case media::VideoColorSpace::PrimaryID::SMPTE170M:
case media::VideoColorSpace::PrimaryID::SMPTE240M:
@@ -57,18 +52,10 @@ CFStringRef GetPrimaries(media::VideoColorSpace::PrimaryID primary_id) {
return kCMFormatDescriptionColorPrimaries_EBU_3213;
case media::VideoColorSpace::PrimaryID::SMPTEST431_2:
- if (@available(macos 10.11, *))
- return kCMFormatDescriptionColorPrimaries_DCI_P3;
- DLOG(WARNING) << "kCMFormatDescriptionColorPrimaries_DCI_P3 unsupported "
- "prior to 10.11";
- return nil;
+ return kCMFormatDescriptionColorPrimaries_DCI_P3;
case media::VideoColorSpace::PrimaryID::SMPTEST432_1:
- if (@available(macos 10.11, *))
- return kCMFormatDescriptionColorPrimaries_P3_D65;
- DLOG(WARNING) << "kCMFormatDescriptionColorPrimaries_P3_D65 unsupported "
- "prior to 10.11";
- return nil;
+ return kCMFormatDescriptionColorPrimaries_P3_D65;
default:
DLOG(ERROR) << "Unsupported primary id: " << static_cast<int>(primary_id);
@@ -105,11 +92,7 @@ CFStringRef GetTransferFunction(
case media::VideoColorSpace::TransferID::BT2020_10:
case media::VideoColorSpace::TransferID::BT2020_12:
- if (@available(macos 10.11, *))
- return kCMFormatDescriptionTransferFunction_ITU_R_2020;
- DLOG(WARNING) << "kCMFormatDescriptionTransferFunction_ITU_R_2020 "
- "unsupported prior to 10.11";
- return nil;
+ return kCMFormatDescriptionTransferFunction_ITU_R_2020;
case media::VideoColorSpace::TransferID::SMPTEST2084:
if (@available(macos 10.13, *))
@@ -150,11 +133,7 @@ CFStringRef GetMatrix(media::VideoColorSpace::MatrixID matrix_id) {
return kCMFormatDescriptionYCbCrMatrix_ITU_R_709_2;
case media::VideoColorSpace::MatrixID::BT2020_NCL:
- if (@available(macos 10.11, *))
- return kCMFormatDescriptionYCbCrMatrix_ITU_R_2020;
- DLOG(WARNING) << "kCVImageBufferYCbCrMatrix_ITU_R_2020 "
- "unsupported prior to 10.11";
- return nil;
+ return kCMFormatDescriptionYCbCrMatrix_ITU_R_2020;
case media::VideoColorSpace::MatrixID::FCC:
case media::VideoColorSpace::MatrixID::SMPTE170M:
@@ -175,24 +154,10 @@ CFStringRef GetMatrix(media::VideoColorSpace::MatrixID matrix_id) {
void SetContentLightLevelInfo(const gfx::HDRMetadata& hdr_metadata,
NSMutableDictionary<NSString*, id>* extensions) {
if (@available(macos 10.13, *)) {
- // This is a SMPTEST2086 Content Light Level Information box.
- struct ContentLightLevelInfoSEI {
- uint16_t max_content_light_level;
- uint16_t max_frame_average_light_level;
- } __attribute__((packed, aligned(2)));
- static_assert(sizeof(ContentLightLevelInfoSEI) == 4, "Must be 4 bytes");
-
- // Values are stored in big-endian...
- ContentLightLevelInfoSEI sei;
- sei.max_content_light_level =
- __builtin_bswap16(hdr_metadata.max_content_light_level);
- sei.max_frame_average_light_level =
- __builtin_bswap16(hdr_metadata.max_frame_average_light_level);
-
- NSData* nsdata_sei = [NSData dataWithBytes:&sei length:4];
SetDictionaryValue(extensions,
kCMFormatDescriptionExtension_ContentLightLevelInfo,
- nsdata_sei);
+ base::mac::CFToNSCast(
+ media::GenerateContentLightLevelInfo(hdr_metadata)));
} else {
DLOG(WARNING) << "kCMFormatDescriptionExtension_ContentLightLevelInfo "
"unsupported prior to 10.13";
@@ -202,46 +167,10 @@ void SetContentLightLevelInfo(const gfx::HDRMetadata& hdr_metadata,
void SetMasteringMetadata(const gfx::HDRMetadata& hdr_metadata,
NSMutableDictionary<NSString*, id>* extensions) {
if (@available(macos 10.13, *)) {
- // This is a SMPTEST2086 Mastering Display Color Volume box.
- struct MasteringDisplayColorVolumeSEI {
- vector_ushort2 primaries[3]; // GBR
- vector_ushort2 white_point;
- uint32_t luminance_max;
- uint32_t luminance_min;
- } __attribute__((packed, aligned(4)));
- static_assert(sizeof(MasteringDisplayColorVolumeSEI) == 24,
- "Must be 24 bytes");
-
- // Make a copy which we can manipulate.
- auto md = hdr_metadata.mastering_metadata;
-
- constexpr float kColorCoordinateUpperBound = 50000.0f;
- md.primary_r.Scale(kColorCoordinateUpperBound);
- md.primary_g.Scale(kColorCoordinateUpperBound);
- md.primary_b.Scale(kColorCoordinateUpperBound);
- md.white_point.Scale(kColorCoordinateUpperBound);
-
- constexpr float kUnitOfMasteringLuminance = 10000.0f;
- md.luminance_max *= kUnitOfMasteringLuminance;
- md.luminance_min *= kUnitOfMasteringLuminance;
-
- // Values are stored in big-endian...
- MasteringDisplayColorVolumeSEI sei;
- sei.primaries[0].x = __builtin_bswap16(md.primary_g.x() + 0.5f);
- sei.primaries[0].y = __builtin_bswap16(md.primary_g.y() + 0.5f);
- sei.primaries[1].x = __builtin_bswap16(md.primary_b.x() + 0.5f);
- sei.primaries[1].y = __builtin_bswap16(md.primary_b.y() + 0.5f);
- sei.primaries[2].x = __builtin_bswap16(md.primary_r.x() + 0.5f);
- sei.primaries[2].y = __builtin_bswap16(md.primary_r.y() + 0.5f);
- sei.white_point.x = __builtin_bswap16(md.white_point.x() + 0.5f);
- sei.white_point.y = __builtin_bswap16(md.white_point.y() + 0.5f);
- sei.luminance_max = __builtin_bswap32(md.luminance_max + 0.5f);
- sei.luminance_min = __builtin_bswap32(md.luminance_min + 0.5f);
-
- NSData* nsdata_sei = [NSData dataWithBytes:&sei length:24];
SetDictionaryValue(
extensions, kCMFormatDescriptionExtension_MasteringDisplayColorVolume,
- nsdata_sei);
+ base::mac::CFToNSCast(
+ media::GenerateMasteringDisplayColorVolume(hdr_metadata)));
} else {
DLOG(WARNING) << "kCMFormatDescriptionExtension_"
"MasteringDisplayColorVolume unsupported prior to 10.13";
diff --git a/chromium/media/gpu/mac/vt_config_util_unittest.cc b/chromium/media/gpu/mac/vt_config_util_unittest.cc
index 6144e692b52..3c264e1aec8 100644
--- a/chromium/media/gpu/mac/vt_config_util_unittest.cc
+++ b/chromium/media/gpu/mac/vt_config_util_unittest.cc
@@ -363,12 +363,10 @@ TEST(VTConfigUtil, GetImageBufferColorSpace_BT2020_PQ) {
// When BT.2020 is unavailable the default should be BT.709.
if (base::mac::IsAtLeastOS10_13()) {
EXPECT_EQ(cs.ToGfxColorSpace(), image_buffer_cs);
- } else if (base::mac::IsAtLeastOS10_11()) {
- // 10.11 and 10.12 don't have HDR transfer functions.
+ } else {
+ // 10.12 doesn't have HDR transfer functions.
cs.transfer = VideoColorSpace::TransferID::BT709;
EXPECT_EQ(cs.ToGfxColorSpace(), image_buffer_cs);
- } else {
- EXPECT_EQ(gfx::ColorSpace::CreateREC709(), image_buffer_cs);
}
}
@@ -384,35 +382,29 @@ TEST(VTConfigUtil, GetImageBufferColorSpace_BT2020_HLG) {
// When BT.2020 is unavailable the default should be BT.709.
if (base::mac::IsAtLeastOS10_13()) {
EXPECT_EQ(cs.ToGfxColorSpace(), image_buffer_cs);
- } else if (base::mac::IsAtLeastOS10_11()) {
- // 10.11 and 10.12 don't have HDR transfer functions.
+ } else {
+ // 10.12 doesn't have HDR transfer functions.
cs.transfer = VideoColorSpace::TransferID::BT709;
EXPECT_EQ(cs.ToGfxColorSpace(), image_buffer_cs);
- } else {
- EXPECT_EQ(gfx::ColorSpace::CreateREC709(), image_buffer_cs);
}
}
TEST(VTConfigUtil, FormatDescriptionInvalid) {
- if (__builtin_available(macos 10.11, *)) {
- auto format_descriptor =
- CreateFormatDescription(CFSTR("Cows"), CFSTR("Go"), CFSTR("Moo"));
- ASSERT_TRUE(format_descriptor);
- auto cs = GetFormatDescriptionColorSpace(format_descriptor);
- EXPECT_EQ(gfx::ColorSpace::CreateREC709(), cs);
- }
+ auto format_descriptor =
+ CreateFormatDescription(CFSTR("Cows"), CFSTR("Go"), CFSTR("Moo"));
+ ASSERT_TRUE(format_descriptor);
+ auto cs = GetFormatDescriptionColorSpace(format_descriptor);
+ EXPECT_EQ(gfx::ColorSpace::CreateREC709(), cs);
}
TEST(VTConfigUtil, FormatDescriptionBT709) {
- if (__builtin_available(macos 10.11, *)) {
- auto format_descriptor = CreateFormatDescription(
- kCMFormatDescriptionColorPrimaries_ITU_R_709_2,
- kCMFormatDescriptionTransferFunction_ITU_R_709_2,
- kCMFormatDescriptionYCbCrMatrix_ITU_R_709_2);
- ASSERT_TRUE(format_descriptor);
- auto cs = GetFormatDescriptionColorSpace(format_descriptor);
- EXPECT_EQ(ToBT709_APPLE(gfx::ColorSpace::CreateREC709()), cs);
- }
+ auto format_descriptor =
+ CreateFormatDescription(kCMFormatDescriptionColorPrimaries_ITU_R_709_2,
+ kCMFormatDescriptionTransferFunction_ITU_R_709_2,
+ kCMFormatDescriptionYCbCrMatrix_ITU_R_709_2);
+ ASSERT_TRUE(format_descriptor);
+ auto cs = GetFormatDescriptionColorSpace(format_descriptor);
+ EXPECT_EQ(ToBT709_APPLE(gfx::ColorSpace::CreateREC709()), cs);
}
} // namespace media
diff --git a/chromium/media/gpu/mac/vt_video_decode_accelerator_mac.cc b/chromium/media/gpu/mac/vt_video_decode_accelerator_mac.cc
index 09e92feccf6..2340c462b28 100644
--- a/chromium/media/gpu/mac/vt_video_decode_accelerator_mac.cc
+++ b/chromium/media/gpu/mac/vt_video_decode_accelerator_mac.cc
@@ -46,7 +46,6 @@
#include "media/base/media_switches.h"
#include "media/filters/vp9_parser.h"
#include "media/gpu/mac/vp9_super_frame_bitstream_filter.h"
-#include "media/gpu/mac/vt_beta_stubs.h"
#include "media/gpu/mac/vt_config_util.h"
#include "ui/gfx/geometry/rect.h"
#include "ui/gl/gl_context.h"
@@ -318,20 +317,8 @@ bool InitializeVideoToolboxInternal() {
session.reset();
- if (base::mac::IsAtLeastOS11()) {
- // Until our target sdk version is 11.0 we need to dynamically link the
- // VTRegisterSupplementalVideoDecoderIfAvailable() symbol in.
- media_gpu_mac::StubPathMap paths;
- paths[media_gpu_mac::kModuleVt_beta].push_back(FILE_PATH_LITERAL(
- "/System/Library/Frameworks/VideoToolbox.framework/VideoToolbox"));
- if (!media_gpu_mac::InitializeStubs(paths))
- return true; // VP9 support is optional.
-
-// __builtin_available doesn't work for 11.0 yet; https://crbug.com/1115294
-#pragma clang diagnostic push
-#pragma clang diagnostic ignored "-Wunguarded-availability-new"
+ if (__builtin_available(macOS 11.0, *)) {
VTRegisterSupplementalVideoDecoderIfAvailable(kCMVideoCodecType_VP9);
-#pragma clang diagnostic pop
// Create a VP9 decoding session.
if (!CreateVideoToolboxSession(
@@ -881,6 +868,21 @@ void VTVideoDecodeAccelerator::DecodeTask(scoped_refptr<DecoderBuffer> buffer,
break;
}
+ case H264NALU::kSEIMessage: {
+ H264SEIMessage sei_msg;
+ result = parser_.ParseSEI(&sei_msg);
+ if (result == H264Parser::kOk &&
+ sei_msg.type == H264SEIMessage::kSEIRecoveryPoint &&
+ sei_msg.recovery_point.recovery_frame_cnt == 0) {
+ // We only support immediate recovery points. Supporting future points
+ // would require dropping |recovery_frame_cnt| frames when needed.
+ frame->has_recovery_point = true;
+ }
+ nalus.push_back(nalu);
+ data_size += kNALUHeaderLength + nalu.size;
+ break;
+ }
+
case H264NALU::kSliceDataA:
case H264NALU::kSliceDataB:
case H264NALU::kSliceDataC:
@@ -956,7 +958,7 @@ void VTVideoDecodeAccelerator::DecodeTask(scoped_refptr<DecoderBuffer> buffer,
}
}
- if (frame->is_idr)
+ if (frame->is_idr || frame->has_recovery_point)
waiting_for_idr_ = false;
// If no IDR has been seen yet, skip decoding. Note that Flash sends
@@ -1524,8 +1526,10 @@ bool VTVideoDecodeAccelerator::SendFrame(const Frame& frame) {
SFT_PLATFORM_ERROR);
}
gl_image->DisableInUseByWindowServer();
+
gfx::ColorSpace color_space = GetImageBufferColorSpace(frame.image);
gl_image->SetColorSpaceForYUVToRGBConversion(color_space);
+ gl_image->SetColorSpaceShallow(color_space);
scoped_refptr<Picture::ScopedSharedImage> scoped_shared_image;
if (picture_info->uses_shared_images) {
@@ -1706,6 +1710,12 @@ bool VTVideoDecodeAccelerator::SupportsSharedImagePictureBuffers() const {
return true;
}
+VideoDecodeAccelerator::TextureAllocationMode
+VTVideoDecodeAccelerator::GetSharedImageTextureAllocationMode() const {
+ return VideoDecodeAccelerator::TextureAllocationMode::
+ kDoNotAllocateGLTextures;
+}
+
// static
VideoDecodeAccelerator::SupportedProfiles
VTVideoDecodeAccelerator::GetSupportedProfiles(
@@ -1721,8 +1731,6 @@ VTVideoDecodeAccelerator::GetSupportedProfiles(
continue;
if (!base::mac::IsAtLeastOS11())
continue;
- if (!base::FeatureList::IsEnabled(kVideoToolboxVp9Decoding))
- continue;
if (__builtin_available(macOS 10.13, *)) {
if ((supported_profile == VP9PROFILE_PROFILE0 ||
supported_profile == VP9PROFILE_PROFILE2) &&
diff --git a/chromium/media/gpu/mac/vt_video_decode_accelerator_mac.h b/chromium/media/gpu/mac/vt_video_decode_accelerator_mac.h
index 9a9c95f7ae6..e196dc0a2b6 100644
--- a/chromium/media/gpu/mac/vt_video_decode_accelerator_mac.h
+++ b/chromium/media/gpu/mac/vt_video_decode_accelerator_mac.h
@@ -66,6 +66,7 @@ class VTVideoDecodeAccelerator : public VideoDecodeAccelerator,
const scoped_refptr<base::SingleThreadTaskRunner>& decode_task_runner)
override;
bool SupportsSharedImagePictureBuffers() const override;
+ TextureAllocationMode GetSharedImageTextureAllocationMode() const override;
// MemoryDumpProvider implementation.
bool OnMemoryDump(const base::trace_event::MemoryDumpArgs& args,
@@ -116,6 +117,7 @@ class VTVideoDecodeAccelerator : public VideoDecodeAccelerator,
// Slice header information.
bool has_slice = false;
bool is_idr = false;
+ bool has_recovery_point = false;
bool has_mmco5 = false;
int32_t pic_order_cnt = 0;
int32_t reorder_window = 0;
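On the recovery-point handling above: per the H.264 spec, a recovery point SEI with recovery_frame_cnt == 0 declares that decoded pictures are correct in content from the current access unit onward, so the decoder can clear waiting_for_idr_ for such frames exactly as it does for IDR frames. A non-zero count would mark a future frame as the recovery point and require dropping the intervening output, which this patch deliberately leaves unsupported.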
diff --git a/chromium/media/gpu/mac/vt_video_encode_accelerator_mac.cc b/chromium/media/gpu/mac/vt_video_encode_accelerator_mac.cc
index 40301bf2c28..5b7a020573d 100644
--- a/chromium/media/gpu/mac/vt_video_encode_accelerator_mac.cc
+++ b/chromium/media/gpu/mac/vt_video_encode_accelerator_mac.cc
@@ -287,7 +287,7 @@ void VTVideoEncodeAccelerator::EncodeTask(scoped_refptr<VideoFrame> frame,
force_keyframe ? kCFBooleanTrue : kCFBooleanFalse);
base::TimeTicks ref_time =
- frame->metadata()->reference_time.value_or(base::TimeTicks::Now());
+ frame->metadata().reference_time.value_or(base::TimeTicks::Now());
auto timestamp_cm =
CMTimeMake(frame->timestamp().InMicroseconds(), USEC_PER_SEC);
// Wrap information we'll need after the frame is encoded in a heap object.
diff --git a/chromium/media/gpu/test/BUILD.gn b/chromium/media/gpu/test/BUILD.gn
index 12a8e2a9d39..e3856feccd9 100644
--- a/chromium/media/gpu/test/BUILD.gn
+++ b/chromium/media/gpu/test/BUILD.gn
@@ -196,7 +196,11 @@ static_library("video_encoder_test_environment") {
":helpers",
":video_test_environment",
]
- deps = [ "//media/gpu" ]
+ deps = [
+ "//base:base",
+ "//build:chromeos_buildflags",
+ "//media/gpu",
+ ]
}
if (use_vaapi || use_v4l2_codec) {
@@ -219,7 +223,7 @@ if (use_vaapi || use_v4l2_codec) {
}
}
-if (is_ash) {
+if (is_chromeos_ash) {
static_library("local_gpu_memory_buffer_manager") {
testonly = true
sources = [
diff --git a/chromium/media/gpu/v4l2/BUILD.gn b/chromium/media/gpu/v4l2/BUILD.gn
index 7b5b8bfb21c..7f8bbcd7e5d 100644
--- a/chromium/media/gpu/v4l2/BUILD.gn
+++ b/chromium/media/gpu/v4l2/BUILD.gn
@@ -35,8 +35,8 @@ source_set("v4l2") {
"v4l2_device.h",
"v4l2_device_poller.cc",
"v4l2_device_poller.h",
- "v4l2_h264_accelerator.cc",
- "v4l2_h264_accelerator.h",
+ "v4l2_h264_accelerator_chromium.cc",
+ "v4l2_h264_accelerator_chromium.h",
"v4l2_h264_accelerator_legacy.cc",
"v4l2_h264_accelerator_legacy.h",
"v4l2_image_processor_backend.cc",
@@ -109,7 +109,7 @@ source_set("v4l2") {
deps += [ ":libv4l2_stubs" ]
}
- if (is_ash) {
+ if (is_chromeos_ash) {
sources += [
"v4l2_jpeg_encode_accelerator.cc",
"v4l2_jpeg_encode_accelerator.h",
diff --git a/chromium/media/gpu/v4l2/v4l2_device.cc b/chromium/media/gpu/v4l2/v4l2_device.cc
index bd2a23c8775..6b4fe768342 100644
--- a/chromium/media/gpu/v4l2/v4l2_device.cc
+++ b/chromium/media/gpu/v4l2/v4l2_device.cc
@@ -958,8 +958,7 @@ base::Optional<struct v4l2_format> V4L2Queue::SetFormat(uint32_t fourcc,
struct v4l2_format format = BuildV4L2Format(type_, fourcc, size, buffer_size);
if (device_->Ioctl(VIDIOC_S_FMT, &format) != 0 ||
format.fmt.pix_mp.pixelformat != fourcc) {
- VPQLOGF(2) << "Failed to set format (format_fourcc=0x" << std::hex << fourcc
- << ")";
+ VPQLOGF(2) << "Failed to set format fourcc: " << FourccToString(fourcc);
return base::nullopt;
}
@@ -973,8 +972,7 @@ base::Optional<struct v4l2_format> V4L2Queue::TryFormat(uint32_t fourcc,
struct v4l2_format format = BuildV4L2Format(type_, fourcc, size, buffer_size);
if (device_->Ioctl(VIDIOC_TRY_FMT, &format) != 0 ||
format.fmt.pix_mp.pixelformat != fourcc) {
- VPQLOGF(2) << "Tried format not supported (format_fourcc=0x" << std::hex
- << fourcc << ")";
+ VPQLOGF(2) << "Failed to try format fourcc: " << FourccToString(fourcc);
return base::nullopt;
}
@@ -1486,12 +1484,15 @@ uint32_t V4L2Device::VideoCodecProfileToV4L2PixFmt(VideoCodecProfile profile,
}
}
-// static
-VideoCodecProfile V4L2Device::V4L2ProfileToVideoCodecProfile(VideoCodec codec,
- uint32_t profile) {
+namespace {
+
+VideoCodecProfile V4L2ProfileToVideoCodecProfile(VideoCodec codec,
+ uint32_t v4l2_profile) {
switch (codec) {
case kCodecH264:
- switch (profile) {
+ switch (v4l2_profile) {
+ // H264 Stereo and Multiview High are not tested and their use is
+ // minuscule, so they are skipped.
case V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE:
case V4L2_MPEG_VIDEO_H264_PROFILE_CONSTRAINED_BASELINE:
return H264PROFILE_BASELINE;
@@ -1501,14 +1502,10 @@ VideoCodecProfile V4L2Device::V4L2ProfileToVideoCodecProfile(VideoCodec codec,
return H264PROFILE_EXTENDED;
case V4L2_MPEG_VIDEO_H264_PROFILE_HIGH:
return H264PROFILE_HIGH;
- case V4L2_MPEG_VIDEO_H264_PROFILE_STEREO_HIGH:
- return H264PROFILE_STEREOHIGH;
- case V4L2_MPEG_VIDEO_H264_PROFILE_MULTIVIEW_HIGH:
- return H264PROFILE_MULTIVIEWHIGH;
}
break;
case kCodecVP8:
- switch (profile) {
+ switch (v4l2_profile) {
case V4L2_MPEG_VIDEO_VP8_PROFILE_0:
case V4L2_MPEG_VIDEO_VP8_PROFILE_1:
case V4L2_MPEG_VIDEO_VP8_PROFILE_2:
@@ -1517,27 +1514,25 @@ VideoCodecProfile V4L2Device::V4L2ProfileToVideoCodecProfile(VideoCodec codec,
}
break;
case kCodecVP9:
- switch (profile) {
+ switch (v4l2_profile) {
+ // VP9 Profiles 1 and 3 are not tested and their use is minuscule,
+ // so they are skipped.
case V4L2_MPEG_VIDEO_VP9_PROFILE_0:
return VP9PROFILE_PROFILE0;
- case V4L2_MPEG_VIDEO_VP9_PROFILE_1:
- return VP9PROFILE_PROFILE1;
case V4L2_MPEG_VIDEO_VP9_PROFILE_2:
return VP9PROFILE_PROFILE2;
- case V4L2_MPEG_VIDEO_VP9_PROFILE_3:
- return VP9PROFILE_PROFILE3;
}
break;
default:
- VLOGF(2) << "Unknown codec: " << codec;
+ VLOGF(2) << "Unsupported codec: " << GetCodecName(codec);
}
- VLOGF(2) << "Unknown profile: " << profile;
+ VLOGF(2) << "Unsupported V4L2 profile: " << v4l2_profile;
return VIDEO_CODEC_PROFILE_UNKNOWN;
}
+} // namespace
+
std::vector<VideoCodecProfile> V4L2Device::V4L2PixFmtToVideoCodecProfiles(
- uint32_t pix_fmt,
- bool is_encoder) {
+ uint32_t pix_fmt) {
auto get_supported_profiles = [this](
VideoCodec codec,
std::vector<VideoCodecProfile>* profiles) {
@@ -1559,9 +1554,9 @@ std::vector<VideoCodecProfile> V4L2Device::V4L2PixFmtToVideoCodecProfiles(
v4l2_queryctrl query_ctrl;
memset(&query_ctrl, 0, sizeof(query_ctrl));
query_ctrl.id = query_id;
- if (Ioctl(VIDIOC_QUERYCTRL, &query_ctrl) != 0) {
+ if (Ioctl(VIDIOC_QUERYCTRL, &query_ctrl) != 0)
return false;
- }
+
v4l2_querymenu query_menu;
memset(&query_menu, 0, sizeof(query_menu));
query_menu.id = query_ctrl.id;
@@ -1570,7 +1565,7 @@ std::vector<VideoCodecProfile> V4L2Device::V4L2PixFmtToVideoCodecProfiles(
query_menu.index++) {
if (Ioctl(VIDIOC_QUERYMENU, &query_menu) == 0) {
const VideoCodecProfile profile =
- V4L2Device::V4L2ProfileToVideoCodecProfile(codec, query_menu.index);
+ V4L2ProfileToVideoCodecProfile(codec, query_menu.index);
if (profile != VIDEO_CODEC_PROFILE_UNKNOWN)
profiles->push_back(profile);
}
@@ -2088,7 +2083,7 @@ V4L2Device::EnumerateSupportedDecodeProfiles(const size_t num_formats,
&profile.max_resolution);
const auto video_codec_profiles =
- V4L2PixFmtToVideoCodecProfiles(pixelformat, false);
+ V4L2PixFmtToVideoCodecProfiles(pixelformat);
for (const auto& video_codec_profile : video_codec_profiles) {
profile.profile = video_codec_profile;
@@ -2119,7 +2114,7 @@ V4L2Device::EnumerateSupportedEncodeProfiles() {
&profile.max_resolution);
const auto video_codec_profiles =
- V4L2PixFmtToVideoCodecProfiles(pixelformat, true);
+ V4L2PixFmtToVideoCodecProfiles(pixelformat);
for (const auto& video_codec_profile : video_codec_profiles) {
profile.profile = video_codec_profile;
@@ -2172,7 +2167,9 @@ base::Optional<struct v4l2_event> V4L2Device::DequeueEvent() {
memset(&event, 0, sizeof(event));
if (Ioctl(VIDIOC_DQEVENT, &event) != 0) {
- VPLOGF(3) << "Failed to dequeue event";
+ // The ioctl will fail if there are no pending events. This is part of the
+ // normal flow, so keep this log level low.
+ VPLOGF(4) << "Failed to dequeue event";
return base::nullopt;
}
@@ -2227,7 +2224,19 @@ bool V4L2Device::SetExtCtrls(uint32_t ctrl_class,
if (request_ref)
request_ref->ApplyCtrls(&ext_ctrls);
- return Ioctl(VIDIOC_S_EXT_CTRLS, &ext_ctrls) == 0;
+ const int result = Ioctl(VIDIOC_S_EXT_CTRLS, &ext_ctrls);
+ if (result < 0) {
+ if (ext_ctrls.error_idx == ext_ctrls.count)
+ VPLOGF(1) << "VIDIOC_S_EXT_CTRLS: validation failed while trying to set "
+ "controls";
+ else
+ VPLOGF(1) << "VIDIOC_S_EXT_CTRLS: unable to set control (0x" << std::hex
+ << ctrls[ext_ctrls.error_idx].ctrl.id << ") at index ("
+ << ext_ctrls.error_idx << ") to 0x"
+ << ctrls[ext_ctrls.error_idx].ctrl.value;
+ }
+
+ return result == 0;
}
base::Optional<struct v4l2_ext_control> V4L2Device::GetCtrl(uint32_t ctrl_id) {
@@ -2249,6 +2258,31 @@ base::Optional<struct v4l2_ext_control> V4L2Device::GetCtrl(uint32_t ctrl_id) {
return ctrl;
}
+bool V4L2Device::SetGOPLength(uint32_t gop_length) {
+ if (!SetExtCtrls(V4L2_CTRL_CLASS_MPEG,
+ {V4L2ExtCtrl(V4L2_CID_MPEG_VIDEO_GOP_SIZE, gop_length)})) {
+ // Some platforms allow setting the GOP length to 0 as
+ // a way of turning off keyframe placement. If the platform
+ // does not support turning off periodic keyframe placement,
+ // set the GOP to the maximum supported value.
+ if (gop_length == 0) {
+ v4l2_query_ext_ctrl queryctrl;
+ memset(&queryctrl, 0, sizeof(queryctrl));
+
+ queryctrl.id = V4L2_CTRL_CLASS_MPEG | V4L2_CID_MPEG_VIDEO_GOP_SIZE;
+ if (Ioctl(VIDIOC_QUERY_EXT_CTRL, &queryctrl) == 0) {
+ VPLOGF(3) << "Unable to set GOP to 0, instead using max: "
+ << queryctrl.maximum;
+ return SetExtCtrls(
+ V4L2_CTRL_CLASS_MPEG,
+ {V4L2ExtCtrl(V4L2_CID_MPEG_VIDEO_GOP_SIZE, queryctrl.maximum)});
+ }
+ }
+ return false;
+ }
+ return true;
+}
+
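A hypothetical caller of the new helper, illustrating the fallback: a gop_length of 0 asks for no periodic keyframes, and on drivers that reject 0 the helper retries with the control's reported maximum.

    scoped_refptr<V4L2Device> device = V4L2Device::Create();
    // Request keyframes only on demand; falls back to the driver's
    // maximum GOP length if 0 is not accepted.
    if (!device->SetGOPLength(0))
      VLOGF(1) << "Driver does not support configuring the GOP length";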
class V4L2Request {
public:
// Apply the passed controls to the request.
diff --git a/chromium/media/gpu/v4l2/v4l2_device.h b/chromium/media/gpu/v4l2/v4l2_device.h
index 849d1cd75a0..a27c3a2f1c5 100644
--- a/chromium/media/gpu/v4l2/v4l2_device.h
+++ b/chromium/media/gpu/v4l2/v4l2_device.h
@@ -599,11 +599,8 @@ class MEDIA_GPU_EXPORT V4L2Device
// If there is no corresponding single- or multi-planar format, returns 0.
static uint32_t VideoCodecProfileToV4L2PixFmt(VideoCodecProfile profile,
bool slice_based);
- static VideoCodecProfile V4L2ProfileToVideoCodecProfile(VideoCodec codec,
- uint32_t profile);
std::vector<VideoCodecProfile> V4L2PixFmtToVideoCodecProfiles(
- uint32_t pix_fmt,
- bool is_encoder);
+ uint32_t pix_fmt);
static uint32_t V4L2PixFmtToDrmFormat(uint32_t format);
// Calculates the largest plane's allocation size requested by a V4L2 device.
static gfx::Size AllocatedSizeFromV4L2Format(
@@ -793,6 +790,9 @@ class MEDIA_GPU_EXPORT V4L2Device
// exposed by the device.
base::Optional<struct v4l2_ext_control> GetCtrl(uint32_t ctrl_id);
+ // Set the periodic keyframe placement (group-of-pictures length).
+ bool SetGOPLength(uint32_t gop_length);
+
protected:
friend class base::RefCountedThreadSafe<V4L2Device>;
V4L2Device();
diff --git a/chromium/media/gpu/v4l2/v4l2_h264_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_h264_accelerator_chromium.cc
index 5734f8bebd5..672cc7fb32a 100644
--- a/chromium/media/gpu/v4l2/v4l2_h264_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_h264_accelerator_chromium.cc
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/gpu/v4l2/v4l2_h264_accelerator.h"
+#include "media/gpu/v4l2/v4l2_h264_accelerator_chromium.h"
// TODO(987856): prevent legacy headers being included from videodev2.h until
// v4.14
@@ -49,7 +49,7 @@ class V4L2H264Picture : public H264Picture {
DISALLOW_COPY_AND_ASSIGN(V4L2H264Picture);
};
-V4L2H264Accelerator::V4L2H264Accelerator(
+V4L2ChromiumH264Accelerator::V4L2ChromiumH264Accelerator(
V4L2DecodeSurfaceHandler* surface_handler,
V4L2Device* device)
: num_slices_(0),
@@ -59,9 +59,9 @@ V4L2H264Accelerator::V4L2H264Accelerator(
DCHECK(surface_handler_);
}
-V4L2H264Accelerator::~V4L2H264Accelerator() {}
+V4L2ChromiumH264Accelerator::~V4L2ChromiumH264Accelerator() {}
-scoped_refptr<H264Picture> V4L2H264Accelerator::CreateH264Picture() {
+scoped_refptr<H264Picture> V4L2ChromiumH264Accelerator::CreateH264Picture() {
scoped_refptr<V4L2DecodeSurface> dec_surface =
surface_handler_->CreateSurface();
if (!dec_surface)
@@ -70,7 +70,7 @@ scoped_refptr<H264Picture> V4L2H264Accelerator::CreateH264Picture() {
return new V4L2H264Picture(dec_surface);
}
-void V4L2H264Accelerator::H264PictureListToDPBIndicesList(
+void V4L2ChromiumH264Accelerator::H264PictureListToDPBIndicesList(
const H264Picture::Vector& src_pic_list,
uint8_t dst_list[kDPBIndicesListSize]) {
size_t i;
@@ -83,7 +83,7 @@ void V4L2H264Accelerator::H264PictureListToDPBIndicesList(
dst_list[i++] = VIDEO_MAX_FRAME;
}
-void V4L2H264Accelerator::H264DPBToV4L2DPB(
+void V4L2ChromiumH264Accelerator::H264DPBToV4L2DPB(
const H264DPB& dpb,
std::vector<scoped_refptr<V4L2DecodeSurface>>* ref_surfaces) {
memset(priv_->v4l2_decode_param.dpb, 0, sizeof(priv_->v4l2_decode_param.dpb));
@@ -113,7 +113,7 @@ void V4L2H264Accelerator::H264DPBToV4L2DPB(
}
}
-H264Decoder::H264Accelerator::Status V4L2H264Accelerator::SubmitFrameMetadata(
+H264Decoder::H264Accelerator::Status V4L2ChromiumH264Accelerator::SubmitFrameMetadata(
const H264SPS* sps,
const H264PPS* pps,
const H264DPB& dpb,
@@ -298,7 +298,7 @@ H264Decoder::H264Accelerator::Status V4L2H264Accelerator::SubmitFrameMetadata(
return Status::kOk;
}
-H264Decoder::H264Accelerator::Status V4L2H264Accelerator::SubmitSlice(
+H264Decoder::H264Accelerator::Status V4L2ChromiumH264Accelerator::SubmitSlice(
const H264PPS* pps,
const H264SliceHeader* slice_hdr,
const H264Picture::Vector& ref_pic_list0,
@@ -423,7 +423,7 @@ H264Decoder::H264Accelerator::Status V4L2H264Accelerator::SubmitSlice(
: Status::kFail;
}
-H264Decoder::H264Accelerator::Status V4L2H264Accelerator::SubmitDecode(
+H264Decoder::H264Accelerator::Status V4L2ChromiumH264Accelerator::SubmitDecode(
scoped_refptr<H264Picture> pic) {
scoped_refptr<V4L2DecodeSurface> dec_surface =
H264PictureToV4L2DecodeSurface(pic.get());
@@ -467,7 +467,7 @@ H264Decoder::H264Accelerator::Status V4L2H264Accelerator::SubmitDecode(
return Status::kOk;
}
-bool V4L2H264Accelerator::OutputPicture(scoped_refptr<H264Picture> pic) {
+bool V4L2ChromiumH264Accelerator::OutputPicture(scoped_refptr<H264Picture> pic) {
// TODO(crbug.com/647725): Insert correct color space.
surface_handler_->SurfaceReady(H264PictureToV4L2DecodeSurface(pic.get()),
pic->bitstream_id(), pic->visible_rect(),
@@ -475,14 +475,14 @@ bool V4L2H264Accelerator::OutputPicture(scoped_refptr<H264Picture> pic) {
return true;
}
-void V4L2H264Accelerator::Reset() {
+void V4L2ChromiumH264Accelerator::Reset() {
num_slices_ = 0;
memset(&priv_->v4l2_decode_param, 0, sizeof(priv_->v4l2_decode_param));
memset(&priv_->v4l2_slice_params, 0, sizeof(priv_->v4l2_slice_params));
}
scoped_refptr<V4L2DecodeSurface>
-V4L2H264Accelerator::H264PictureToV4L2DecodeSurface(H264Picture* pic) {
+V4L2ChromiumH264Accelerator::H264PictureToV4L2DecodeSurface(H264Picture* pic) {
V4L2H264Picture* v4l2_pic = pic->AsV4L2H264Picture();
CHECK(v4l2_pic);
return v4l2_pic->dec_surface();
diff --git a/chromium/media/gpu/v4l2/v4l2_h264_accelerator.h b/chromium/media/gpu/v4l2/v4l2_h264_accelerator_chromium.h
index 74e54674730..0e0171f9766 100644
--- a/chromium/media/gpu/v4l2/v4l2_h264_accelerator.h
+++ b/chromium/media/gpu/v4l2/v4l2_h264_accelerator_chromium.h
@@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef MEDIA_GPU_V4L2_V4L2_H264_ACCELERATOR_H_
-#define MEDIA_GPU_V4L2_V4L2_H264_ACCELERATOR_H_
+#ifndef MEDIA_GPU_V4L2_V4L2_H264_ACCELERATOR_CHROMIUM_H_
+#define MEDIA_GPU_V4L2_V4L2_H264_ACCELERATOR_CHROMIUM_H_
#include <memory>
#include <vector>
@@ -20,13 +20,14 @@ class V4L2DecodeSurface;
class V4L2DecodeSurfaceHandler;
struct V4L2H264AcceleratorPrivate;
-class V4L2H264Accelerator : public H264Decoder::H264Accelerator {
+// H.264 accelerator supporting the old Chromium-only ABI with the kernel.
+class V4L2ChromiumH264Accelerator : public H264Decoder::H264Accelerator {
public:
using Status = H264Decoder::H264Accelerator::Status;
- explicit V4L2H264Accelerator(V4L2DecodeSurfaceHandler* surface_handler,
+ explicit V4L2ChromiumH264Accelerator(V4L2DecodeSurfaceHandler* surface_handler,
V4L2Device* device);
- ~V4L2H264Accelerator() override;
+ ~V4L2ChromiumH264Accelerator() override;
// H264Decoder::H264Accelerator implementation.
scoped_refptr<H264Picture> CreateH264Picture() override;
@@ -69,9 +70,9 @@ class V4L2H264Accelerator : public H264Decoder::H264Accelerator {
// outside of the compilation unit.
const std::unique_ptr<V4L2H264AcceleratorPrivate> priv_;
- DISALLOW_COPY_AND_ASSIGN(V4L2H264Accelerator);
+ DISALLOW_COPY_AND_ASSIGN(V4L2ChromiumH264Accelerator);
};
} // namespace media
-#endif // MEDIA_GPU_V4L2_V4L2_H264_ACCELERATOR_H_
+#endif // MEDIA_GPU_V4L2_V4L2_H264_ACCELERATOR_CHROMIUM_H_
diff --git a/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc
index 1df3dff74fc..3662fe0c222 100644
--- a/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc
@@ -40,7 +40,7 @@
#include "media/gpu/chromeos/platform_video_frame_utils.h"
#include "media/gpu/macros.h"
#include "media/gpu/v4l2/v4l2_decode_surface.h"
-#include "media/gpu/v4l2/v4l2_h264_accelerator.h"
+#include "media/gpu/v4l2/v4l2_h264_accelerator_chromium.h"
#include "media/gpu/v4l2/v4l2_h264_accelerator_legacy.h"
#include "media/gpu/v4l2/v4l2_image_processor_backend.h"
#include "media/gpu/v4l2/v4l2_vda_helpers.h"
@@ -295,7 +295,7 @@ bool V4L2SliceVideoDecodeAccelerator::Initialize(const Config& config,
if (video_profile_ >= H264PROFILE_MIN && video_profile_ <= H264PROFILE_MAX) {
if (supports_requests_) {
decoder_ = std::make_unique<H264Decoder>(
- std::make_unique<V4L2H264Accelerator>(this, device_.get()),
+ std::make_unique<V4L2ChromiumH264Accelerator>(this, device_.get()),
video_profile_);
} else {
decoder_ = std::make_unique<H264Decoder>(
@@ -1075,6 +1075,12 @@ void V4L2SliceVideoDecodeAccelerator::DecodeBufferTask() {
TRACE_EVENT_END0("media,gpu", "V4L2SVDA::DecodeBufferTask AVD::Decode");
switch (res) {
case AcceleratedVideoDecoder::kConfigChange:
+ if (decoder_->GetBitDepth() != 8u) {
+ LOG(ERROR) << "Unsupported bit depth: "
+ << base::strict_cast<int>(decoder_->GetBitDepth());
+ NOTIFY_ERROR(PLATFORM_FAILURE);
+ return;
+ }
if (!IsSupportedProfile(decoder_->GetProfile())) {
LOG(ERROR) << "Unsupported profile: " << decoder_->GetProfile();
NOTIFY_ERROR(PLATFORM_FAILURE);
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decoder.cc b/chromium/media/gpu/v4l2/v4l2_video_decoder.cc
index d921318877e..0f95090cd42 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decoder.cc
+++ b/chromium/media/gpu/v4l2/v4l2_video_decoder.cc
@@ -31,7 +31,7 @@ constexpr int k1080pArea = 1920 * 1088;
constexpr size_t kInputBufferMaxSizeFor1080p = 1024 * 1024;
// Input bitstream buffer size for up to 4k streams.
constexpr size_t kInputBufferMaxSizeFor4k = 4 * kInputBufferMaxSizeFor1080p;
-constexpr size_t kNumInputBuffers = 16;
+constexpr size_t kNumInputBuffers = 8;
// Input format V4L2 fourccs this class supports.
constexpr uint32_t kSupportedInputFourccs[] = {
@@ -47,6 +47,9 @@ constexpr size_t kDpbOutputBufferExtraCount = limits::kMaxVideoFrames + 1;
} // namespace
// static
+base::AtomicRefCount V4L2VideoDecoder::num_instances_(0);
+
+// static
std::unique_ptr<DecoderInterface> V4L2VideoDecoder::Create(
scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
base::WeakPtr<DecoderInterface::Client> client) {
@@ -80,6 +83,7 @@ V4L2VideoDecoder::V4L2VideoDecoder(
base::WeakPtr<DecoderInterface::Client> client,
scoped_refptr<V4L2Device> device)
: DecoderInterface(std::move(decoder_task_runner), std::move(client)),
+ can_use_decoder_(num_instances_.Increment() < kMaxNumOfInstances),
device_(std::move(device)),
weak_this_factory_(this) {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
@@ -110,17 +114,26 @@ V4L2VideoDecoder::~V4L2VideoDecoder() {
}
weak_this_factory_.InvalidateWeakPtrs();
+ num_instances_.Decrement();
}
void V4L2VideoDecoder::Initialize(const VideoDecoderConfig& config,
CdmContext* cdm_context,
InitCB init_cb,
- const OutputCB& output_cb) {
+ const OutputCB& output_cb,
+ const WaitingCB& /*waiting_cb*/) {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
DCHECK(config.IsValidConfig());
DCHECK(state_ == State::kUninitialized || state_ == State::kDecoding);
DVLOGF(3);
+ if (!can_use_decoder_) {
+ VLOGF(1) << "Reached maximum number of decoder instances ("
+ << kMaxNumOfInstances << ")";
+ std::move(init_cb).Run(StatusCode::kDecoderCreationFailed);
+ return;
+ }
+
if (cdm_context || config.is_encrypted()) {
VLOGF(1) << "V4L2 decoder does not support encrypted stream";
std::move(init_cb).Run(StatusCode::kEncryptedContentUnsupported);
@@ -152,35 +165,25 @@ void V4L2VideoDecoder::Initialize(const VideoDecoderConfig& config,
SetState(State::kUninitialized);
}
- // Open V4L2 device.
- VideoCodecProfile profile = config.profile();
- uint32_t input_format_fourcc_stateless =
- V4L2Device::VideoCodecProfileToV4L2PixFmt(profile, true);
- if (!input_format_fourcc_stateless ||
- !device_->Open(V4L2Device::Type::kDecoder,
- input_format_fourcc_stateless)) {
- VLOGF(1) << "Failed to open device for profile: " << profile
- << " fourcc: " << FourccToString(input_format_fourcc_stateless);
- input_format_fourcc_stateless = 0;
- } else {
- VLOGF(1) << "Found V4L2 device capable of stateless decoding for "
- << FourccToString(input_format_fourcc_stateless);
- }
-
- uint32_t input_format_fourcc_stateful =
- V4L2Device::VideoCodecProfileToV4L2PixFmt(profile, false);
- if (!input_format_fourcc_stateful ||
- !device_->Open(V4L2Device::Type::kDecoder,
- input_format_fourcc_stateful)) {
- VLOGF(1) << "Failed to open device for profile: " << profile
- << " fourcc: " << FourccToString(input_format_fourcc_stateful);
- input_format_fourcc_stateful = 0;
- } else {
- VLOGF(1) << "Found V4L2 device capable of stateful decoding for "
- << FourccToString(input_format_fourcc_stateful);
+ const VideoCodecProfile profile = config.profile();
+ constexpr bool kStateful = false;
+ constexpr bool kStateless = true;
+ base::Optional<std::pair<bool, uint32_t>> api_and_format;
+ // Try both kStateful and kStateless APIs via |fourcc| and select the first
+ // combination where Open()ing the |device_| works.
+ for (const auto api : {kStateful, kStateless}) {
+ const auto fourcc = V4L2Device::VideoCodecProfileToV4L2PixFmt(profile, api);
+ constexpr uint32_t kInvalidV4L2PixFmt = 0;
+ if (fourcc == kInvalidV4L2PixFmt ||
+ !device_->Open(V4L2Device::Type::kDecoder, fourcc)) {
+ continue;
+ }
+ api_and_format = std::make_pair(api, fourcc);
+ break;
}
- if (!input_format_fourcc_stateless && !input_format_fourcc_stateful) {
+ if (!api_and_format.has_value()) {
+ VLOGF(1) << "No V4L2 API found for profile: " << GetProfileName(profile);
std::move(init_cb).Run(StatusCode::kV4l2NoDecoder);
return;
}
@@ -206,19 +209,19 @@ void V4L2VideoDecoder::Initialize(const VideoDecoderConfig& config,
return;
}
- uint32_t input_format_fourcc;
- if (input_format_fourcc_stateful) {
+ const auto preferred_api_and_format = api_and_format.value();
+ const uint32_t input_format_fourcc = preferred_api_and_format.second;
+ if (preferred_api_and_format.first == kStateful) {
+ VLOGF(1) << "Using a stateful API for profile: " << GetProfileName(profile)
+ << " and fourcc: " << FourccToString(input_format_fourcc);
backend_ = std::make_unique<V4L2StatefulVideoDecoderBackend>(
this, device_, profile, decoder_task_runner_);
- input_format_fourcc = input_format_fourcc_stateful;
- } else if (input_format_fourcc_stateless) {
+ } else {
+ DCHECK_EQ(preferred_api_and_format.first, kStateless);
+ VLOGF(1) << "Using a stateless API for profile: " << GetProfileName(profile)
+ << " and fourcc: " << FourccToString(input_format_fourcc);
backend_ = std::make_unique<V4L2StatelessVideoDecoderBackend>(
this, device_, profile, decoder_task_runner_);
- input_format_fourcc = input_format_fourcc_stateless;
- } else {
- VLOGF(1) << "No backend capable of taking this profile.";
- std::move(init_cb).Run(StatusCode::kV4l2FailedResourceAllocation);
- return;
}
if (!backend_->Initialize()) {
@@ -227,7 +230,6 @@ void V4L2VideoDecoder::Initialize(const VideoDecoderConfig& config,
return;
}
- // Setup input format.
if (!SetupInputFormat(input_format_fourcc)) {
VLOGF(1) << "Failed to setup input format.";
std::move(init_cb).Run(StatusCode::kV4l2BadFormat);
@@ -361,6 +363,7 @@ bool V4L2VideoDecoder::SetupOutputFormat(const gfx::Size& size,
return false;
}
+ VLOGF(1) << "buffer modifier: " << std::hex << layout->modifier();
if (layout->modifier() &&
layout->modifier() != gfx::NativePixmapHandle::kNoModifier) {
base::Optional<struct v4l2_format> modifier_format =
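
The Initialize() rework above replaces two copy-pasted stateful/stateless probe blocks with one preference-ordered loop. A minimal standalone sketch of that selection pattern, with hypothetical ProfileToPixFmt()/TryOpen() stand-ins for the real V4L2Device calls and illustrative fourcc values:

    #include <cstdint>
    #include <optional>
    #include <utility>

    // Hypothetical stand-ins for V4L2Device::VideoCodecProfileToV4L2PixFmt()
    // and V4L2Device::Open(); the fourcc values are illustrative only.
    uint32_t ProfileToPixFmt(int /*profile*/, bool stateless) {
      return stateless ? 0x34363253u /* S264 */ : 0x34363248u /* H264 */;
    }
    bool TryOpen(uint32_t fourcc) { return fourcc != 0u; }

    // Probe the stateful API first, then the stateless one, and keep the
    // first (api, fourcc) pair for which the device opens.
    std::optional<std::pair<bool, uint32_t>> PickApiAndFormat(int profile) {
      constexpr bool kStateful = false;
      constexpr bool kStateless = true;
      for (const bool api : {kStateful, kStateless}) {
        const uint32_t fourcc = ProfileToPixFmt(profile, api);
        if (fourcc == 0u || !TryOpen(fourcc))
          continue;
        return std::make_pair(api, fourcc);
      }
      return std::nullopt;  // No usable V4L2 API for this profile.
    }

As in the patch, the loop order {kStateful, kStateless} makes the stateful API win when both are available.
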
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decoder.h b/chromium/media/gpu/v4l2/v4l2_video_decoder.h
index 0d98e0a52fe..37d12910c22 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decoder.h
+++ b/chromium/media/gpu/v4l2/v4l2_video_decoder.h
@@ -24,13 +24,13 @@
#include "base/threading/thread.h"
#include "base/time/time.h"
#include "media/base/cdm_context.h"
+#include "media/base/supported_video_decoder_config.h"
#include "media/base/video_types.h"
#include "media/gpu/chromeos/gpu_buffer_layout.h"
#include "media/gpu/chromeos/video_decoder_pipeline.h"
#include "media/gpu/media_gpu_export.h"
#include "media/gpu/v4l2/v4l2_device.h"
#include "media/gpu/v4l2/v4l2_video_decoder_backend.h"
-#include "media/video/supported_video_decoder_config.h"
#include "ui/gfx/geometry/size.h"
namespace media {
@@ -54,7 +54,8 @@ class MEDIA_GPU_EXPORT V4L2VideoDecoder
void Initialize(const VideoDecoderConfig& config,
CdmContext* cdm_context,
InitCB init_cb,
- const OutputCB& output_cb) override;
+ const OutputCB& output_cb,
+ const WaitingCB& waiting_cb) override;
void Reset(base::OnceClosure closure) override;
void Decode(scoped_refptr<DecoderBuffer> buffer, DecodeCB decode_cb) override;
void ApplyResolutionChange() override;
@@ -135,6 +136,16 @@ class MEDIA_GPU_EXPORT V4L2VideoDecoder
// Change the state and check the state transition is valid.
void SetState(State new_state);
+ // Pages with multiple V4L2VideoDecoder instances might run out of memory
+ // (e.g. b/170870476) or crash (e.g. crbug.com/1109312). To avoid that
+ // while the investigation goes on, limit the maximum number of
+ // simultaneous decoder instances for now. |num_instances_| tracks the
+ // number of simultaneous decoders. |can_use_decoder_| is true iff we
+ // haven't reached the maximum number of instances at the time this
+ // decoder is created.
+ static constexpr int kMaxNumOfInstances = 32;
+ static base::AtomicRefCount num_instances_;
+ const bool can_use_decoder_;
+
// The V4L2 backend, i.e. the part of the decoder that sends
// decoding jobs to the kernel.
std::unique_ptr<V4L2VideoDecoderBackend> backend_;
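
The instance cap described in the comment above works because base::AtomicRefCount::Increment() returns the pre-increment value, which the constructor initializer latches into |can_use_decoder_|. A minimal sketch of the same gating pattern using std::atomic<int> (class and member names hypothetical):

    #include <atomic>

    class LimitedDecoder {
     public:
      // fetch_add() returns the previous count, mirroring
      // base::AtomicRefCount::Increment(). The result is latched once at
      // construction time, so later destruction of other instances does not
      // retroactively enable this one.
      LimitedDecoder()
          : can_use_decoder_(num_instances_.fetch_add(1) < kMaxNumOfInstances) {}
      ~LimitedDecoder() { num_instances_.fetch_sub(1); }

      // Initialization must fail when the cap was already reached at
      // construction time.
      bool CanInitialize() const { return can_use_decoder_; }

     private:
      static constexpr int kMaxNumOfInstances = 32;
      static std::atomic<int> num_instances_;
      const bool can_use_decoder_;
    };

    std::atomic<int> LimitedDecoder::num_instances_{0};
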
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.cc b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.cc
index f571d83cd68..b7df2d0d611 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.cc
+++ b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.cc
@@ -518,7 +518,7 @@ void V4L2StatefulVideoDecoderBackend::OnStreamStopped(bool stop_input_queue) {
DVLOGF(3);
// If we are resetting, also reset the splitter.
- if (stop_input_queue)
+ if (frame_splitter_ && stop_input_queue)
frame_splitter_->Reset();
}
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.cc b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.cc
index d38a7fc4747..68d2bbd6c2e 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.cc
+++ b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.cc
@@ -12,6 +12,7 @@
#include "base/bind.h"
#include "base/callback_helpers.h"
+#include "base/numerics/safe_conversions.h"
#include "base/posix/eintr_wrapper.h"
#include "base/sequenced_task_runner.h"
#include "media/base/decode_status.h"
@@ -21,7 +22,7 @@
#include "media/gpu/chromeos/dmabuf_video_frame_pool.h"
#include "media/gpu/macros.h"
#include "media/gpu/v4l2/v4l2_device.h"
-#include "media/gpu/v4l2/v4l2_h264_accelerator.h"
+#include "media/gpu/v4l2/v4l2_h264_accelerator_chromium.h"
#include "media/gpu/v4l2/v4l2_h264_accelerator_legacy.h"
#include "media/gpu/v4l2/v4l2_vp8_accelerator.h"
#include "media/gpu/v4l2/v4l2_vp8_accelerator_legacy.h"
@@ -379,6 +380,12 @@ bool V4L2StatelessVideoDecoderBackend::PumpDecodeTask() {
while (true) {
switch (avd_->Decode()) {
case AcceleratedVideoDecoder::kConfigChange:
+ if (avd_->GetBitDepth() != 8u) {
+ VLOGF(2) << "Unsupported bit depth: "
+ << base::strict_cast<int>(avd_->GetBitDepth());
+ return false;
+ }
+
if (profile_ != avd_->GetProfile()) {
DVLOGF(3) << "Profile is changed: " << profile_ << " -> "
<< avd_->GetProfile();
@@ -639,7 +646,7 @@ bool V4L2StatelessVideoDecoderBackend::CreateAvd() {
if (profile_ >= H264PROFILE_MIN && profile_ <= H264PROFILE_MAX) {
if (input_queue_->SupportsRequests()) {
avd_ = std::make_unique<H264Decoder>(
- std::make_unique<V4L2H264Accelerator>(this, device_.get()), profile_);
+ std::make_unique<V4L2ChromiumH264Accelerator>(this, device_.get()),
+ profile_);
} else {
avd_ = std::make_unique<H264Decoder>(
std::make_unique<V4L2LegacyH264Accelerator>(this, device_.get()),
diff --git a/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc
index b7b8df614bd..fee5fbe8f22 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc
@@ -283,7 +283,7 @@ void V4L2VideoEncodeAccelerator::InitializeTask(const Config& config,
native_input_mode_ =
config.storage_type.value_or(Config::StorageType::kShmem) ==
- Config::StorageType::kDmabuf;
+ Config::StorageType::kGpuMemoryBuffer;
input_queue_ = device_->GetQueue(V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
output_queue_ = device_->GetQueue(V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
@@ -403,7 +403,7 @@ bool V4L2VideoEncodeAccelerator::CreateImageProcessor(
auto platform_layout = GetPlatformVideoFrameLayout(
/*gpu_memory_buffer_factory=*/nullptr, output_format, output_size,
- gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE);
+ gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE);
if (!platform_layout) {
VLOGF(1) << "Failed to get Platform VideoFrameLayout";
return false;
@@ -476,7 +476,7 @@ bool V4L2VideoEncodeAccelerator::AllocateImageProcessorOutputBuffers(
output_config.fourcc.ToVideoPixelFormat(), output_config.size,
output_config.visible_rect, output_config.visible_rect.size(),
base::TimeDelta(),
- gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE);
+ gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE);
break;
default:
VLOGF(1) << "Unsupported output storage type of image processor: "
@@ -509,7 +509,7 @@ bool V4L2VideoEncodeAccelerator::InitInputMemoryType(const Config& config) {
case Config::StorageType::kShmem:
input_memory_type_ = V4L2_MEMORY_USERPTR;
break;
- case Config::StorageType::kDmabuf:
+ case Config::StorageType::kGpuMemoryBuffer:
input_memory_type_ = V4L2_MEMORY_DMABUF;
break;
}
@@ -757,6 +757,13 @@ void V4L2VideoEncodeAccelerator::EncodeTask(scoped_refptr<VideoFrame> frame,
bool V4L2VideoEncodeAccelerator::ReconfigureFormatIfNeeded(
const VideoFrame& frame) {
DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);
+ if (input_buffer_map_.empty()) {
+ // Update |input_natural_size_| on the first VideoFrame.
+ // |input_natural_size_| is the dimension to be encoded (i.e.
+ // |encoder_input_visible_rect_.size()|), but can differ from it in the
+ // simulcast case.
+ input_natural_size_ = frame.natural_size();
+ }
if (!native_input_mode_) {
// frame.coded_size() must be the size specified in
@@ -768,14 +775,13 @@ bool V4L2VideoEncodeAccelerator::ReconfigureFormatIfNeeded(
// ReconfigureFormatIfNeeded() has been called with the first VideoFrame.
// We check here whether we need to (re)create the ImageProcessor because
// the visible rectangle of |frame| differs from the first VideoFrame's.
- // |frame.natural_size()| is the size to be encoded. It must be the same as
- // |encoder_input_visible_rect_.size()|, otherwise VEA client must recreate
- // VEA with the new encoder resolution.
- if (frame.natural_size() != encoder_input_visible_rect_.size()) {
+ // |frame.natural_size()| must stay unchanged during encoding within the
+ // same VideoEncodeAccelerator instance. If it changes, the client has to
+ // recreate the VideoEncodeAccelerator.
+ if (frame.natural_size() != input_natural_size_) {
VLOGF(1) << "Encoder resolution is changed during encoding"
<< ", frame.natural_size()=" << frame.natural_size().ToString()
- << ", encoder_input_visible_rect_="
- << encoder_input_visible_rect_.ToString();
+ << ", input_natural_size_=" << input_natural_size_.ToString();
return false;
}
if (frame.coded_size() == input_frame_size_) {
@@ -854,6 +860,7 @@ void V4L2VideoEncodeAccelerator::MaybeFlushImageProcessor() {
<< "|image_processor_|. Move the flush request to the encoder";
image_processor_input_queue_.pop();
encoder_input_queue_.emplace(nullptr, false);
+ Enqueue();
}
}
@@ -1466,15 +1473,13 @@ void V4L2VideoEncodeAccelerator::RequestEncodingParametersChangeTask(
uint32_t framerate) {
if (current_bitrate_ == bitrate && current_framerate_ == framerate)
return;
+ if (bitrate == 0 || framerate == 0)
+ return;
VLOGF(2) << "bitrate=" << bitrate << ", framerate=" << framerate;
DCHECK_CALLED_ON_VALID_SEQUENCE(encoder_sequence_checker_);
TRACE_EVENT2("media,gpu", "V4L2VEA::RequestEncodingParametersChangeTask",
"bitrate", bitrate, "framerate", framerate);
-
- DCHECK_GT(bitrate, 0u);
- DCHECK_GT(framerate, 0u);
-
if (current_bitrate_ != bitrate &&
!device_->SetExtCtrls(
V4L2_CTRL_CLASS_MPEG,
@@ -1642,7 +1647,7 @@ bool V4L2VideoEncodeAccelerator::SetFormats(VideoPixelFormat input_format,
auto input_layout = GetPlatformVideoFrameLayout(
/*gpu_memory_buffer_factory=*/nullptr, input_format,
encoder_input_visible_rect_.size(),
- gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE);
+ gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE);
if (!input_layout)
return false;
input_size = gfx::Size(input_layout->planes()[0].stride,
@@ -1761,11 +1766,12 @@ bool V4L2VideoEncodeAccelerator::InitControls(const Config& config) {
// Optional controls:
// - Enable macroblock-level bitrate control.
- // - Set GOP length, or default 0 to disable periodic key frames.
+
device_->SetExtCtrls(V4L2_CTRL_CLASS_MPEG,
- {V4L2ExtCtrl(V4L2_CID_MPEG_VIDEO_MB_RC_ENABLE, 1),
- V4L2ExtCtrl(V4L2_CID_MPEG_VIDEO_GOP_SIZE,
- config.gop_length.value_or(0))});
+ {V4L2ExtCtrl(V4L2_CID_MPEG_VIDEO_MB_RC_ENABLE, 1)});
+
+ // - Set GOP length, or default 0 to disable periodic key frames.
+ device_->SetGOPLength(config.gop_length.value_or(0));
return true;
}
diff --git a/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.h b/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.h
index 3c5405f10f7..ad8804f3b65 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.h
+++ b/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.h
@@ -258,6 +258,10 @@ class MEDIA_GPU_EXPORT V4L2VideoEncodeAccelerator
// time Encode() if the coded size is different from the expected one by VEA.
// For example, it happens in WebRTC simulcast case.
gfx::Size input_frame_size_;
+ // The natural_size() of the VideoFrames given to VEA::Encode(). This is
+ // always set on the first Encode() call. The natural_size() of every
+ // VideoFrame fed to VEA::Encode() must equal |input_natural_size_|.
+ gfx::Size input_natural_size_;
// Visible rectangle of the VideoFrame to be fed to the encoder driver; in
// other words, the visible rectangle that the output encoded bitstream
// buffers represent.
diff --git a/chromium/media/gpu/v4l2/v4l2_vp9_accelerator_legacy.cc b/chromium/media/gpu/v4l2/v4l2_vp9_accelerator_legacy.cc
index 0b742ffb0fb..30524f78385 100644
--- a/chromium/media/gpu/v4l2/v4l2_vp9_accelerator_legacy.cc
+++ b/chromium/media/gpu/v4l2/v4l2_vp9_accelerator_legacy.cc
@@ -18,6 +18,9 @@
#include "media/gpu/vp9_picture.h"
namespace media {
+
+using DecodeStatus = VP9Decoder::VP9Accelerator::Status;
+
namespace {
void FillV4L2VP9LoopFilterParams(
@@ -202,7 +205,7 @@ scoped_refptr<VP9Picture> V4L2LegacyVP9Accelerator::CreateVP9Picture() {
return new V4L2VP9Picture(std::move(dec_surface));
}
-bool V4L2LegacyVP9Accelerator::SubmitDecode(
+DecodeStatus V4L2LegacyVP9Accelerator::SubmitDecode(
scoped_refptr<VP9Picture> pic,
const Vp9SegmentationParams& segm_params,
const Vp9LoopFilterParams& lf_params,
@@ -294,7 +297,7 @@ bool V4L2LegacyVP9Accelerator::SubmitDecode(
for (size_t i = 0; i < base::size(frame_hdr->ref_frame_idx); ++i) {
uint8_t idx = frame_hdr->ref_frame_idx[i];
if (idx >= kVp9NumRefFrames)
- return false;
+ return DecodeStatus::kFail;
struct v4l2_vp9_reference_frame* v4l2_ref_frame =
&v4l2_decode_param.active_ref_frames[i];
@@ -351,7 +354,7 @@ bool V4L2LegacyVP9Accelerator::SubmitDecode(
dec_surface->PrepareSetCtrls(&ext_ctrls);
if (device_->Ioctl(VIDIOC_S_EXT_CTRLS, &ext_ctrls) != 0) {
VPLOGF(1) << "ioctl() failed: VIDIOC_S_EXT_CTRLS";
- return false;
+ return DecodeStatus::kFail;
}
dec_surface->SetReferenceSurfaces(ref_surfaces);
@@ -359,11 +362,11 @@ bool V4L2LegacyVP9Accelerator::SubmitDecode(
if (!surface_handler_->SubmitSlice(dec_surface.get(), frame_hdr->data,
frame_hdr->frame_size))
- return false;
+ return DecodeStatus::kFail;
DVLOGF(4) << "Submitting decode for surface: " << dec_surface->ToString();
surface_handler_->DecodeSurface(dec_surface);
- return true;
+ return DecodeStatus::kOk;
}
bool V4L2LegacyVP9Accelerator::OutputPicture(scoped_refptr<VP9Picture> pic) {
diff --git a/chromium/media/gpu/v4l2/v4l2_vp9_accelerator_legacy.h b/chromium/media/gpu/v4l2/v4l2_vp9_accelerator_legacy.h
index 27a351a6b46..9e01a272f86 100644
--- a/chromium/media/gpu/v4l2/v4l2_vp9_accelerator_legacy.h
+++ b/chromium/media/gpu/v4l2/v4l2_vp9_accelerator_legacy.h
@@ -28,11 +28,11 @@ class V4L2LegacyVP9Accelerator : public VP9Decoder::VP9Accelerator {
// VP9Decoder::VP9Accelerator implementation.
scoped_refptr<VP9Picture> CreateVP9Picture() override;
- bool SubmitDecode(scoped_refptr<VP9Picture> pic,
- const Vp9SegmentationParams& segm_params,
- const Vp9LoopFilterParams& lf_params,
- const Vp9ReferenceFrameVector& reference_frames,
- base::OnceClosure done_cb) override;
+ Status SubmitDecode(scoped_refptr<VP9Picture> pic,
+ const Vp9SegmentationParams& segm_params,
+ const Vp9LoopFilterParams& lf_params,
+ const Vp9ReferenceFrameVector& reference_frames,
+ base::OnceClosure done_cb) override;
bool OutputPicture(scoped_refptr<VP9Picture> pic) override;
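
SubmitDecode() in the legacy VP9 accelerator now returns the decoder's Status enum instead of bool, matching the H.264 accelerator interface. A minimal sketch of the conversion, assuming a hypothetical tri-state Status like the one aliased as DecodeStatus in the .cc above:

    // Hypothetical mirror of VP9Decoder::VP9Accelerator::Status; the extra
    // state lets callers distinguish a hard failure from a retryable one.
    enum class DecodeStatus { kOk, kFail, kTryAgain };

    // Before the patch every early exit was `return false` and the success
    // path `return true`; afterwards they map onto kFail and kOk.
    DecodeStatus Submit(bool ctrls_ok, bool slice_ok) {
      if (!ctrls_ok)
        return DecodeStatus::kFail;  // was: return false;
      if (!slice_ok)
        return DecodeStatus::kFail;  // was: return false;
      return DecodeStatus::kOk;      // was: return true;
    }
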
diff --git a/chromium/media/gpu/vaapi/BUILD.gn b/chromium/media/gpu/vaapi/BUILD.gn
index af599438385..e3c96871b5a 100644
--- a/chromium/media/gpu/vaapi/BUILD.gn
+++ b/chromium/media/gpu/vaapi/BUILD.gn
@@ -20,6 +20,9 @@ generate_stubs("libva_stubs") {
if (use_x11) {
sigs += [ "va_x11.sigs" ]
}
+ if (is_chromeos_ash) {
+ sigs += [ "va_prot.sigs" ]
+ }
sigs += [ "va_drm.sigs" ]
output_name = "va_stubs"
@@ -27,10 +30,13 @@ generate_stubs("libva_stubs") {
}
source_set("vaapi") {
+ assert(use_libgav1_parser)
defines = [ "MEDIA_GPU_IMPLEMENTATION" ]
sources = [
"accelerated_video_encoder.cc",
"accelerated_video_encoder.h",
+ "av1_vaapi_video_decoder_delegate.cc",
+ "av1_vaapi_video_decoder_delegate.h",
"h264_encoder.cc",
"h264_encoder.h",
"h264_vaapi_video_decoder_delegate.cc",
@@ -108,7 +114,7 @@ source_set("vaapi") {
"//ui/gl",
]
- if (is_ash) {
+ if (is_chromeos_ash) {
sources += [
"vaapi_jpeg_encode_accelerator.cc",
"vaapi_jpeg_encode_accelerator.h",
@@ -117,6 +123,7 @@ source_set("vaapi") {
]
deps += [
+ "//chromeos/components/cdm_factory_daemon:cdm_factory_daemon_gpu",
"//components/chromeos_camera:jpeg_encode_accelerator",
"//components/chromeos_camera:mjpeg_decode_accelerator",
]
@@ -169,6 +176,7 @@ source_set("common") {
]
public_deps = [
"//base",
+ "//build/config/linux/libdrm",
"//gpu",
"//media",
"//media/gpu:common",
@@ -215,6 +223,7 @@ source_set("unit_test") {
"vaapi_image_decode_accelerator_worker_unittest.cc",
"vaapi_video_decode_accelerator_unittest.cc",
"vaapi_video_encode_accelerator_unittest.cc",
+ "vaapi_wrapper_unittest.cc",
"vp9_encoder_unittest.cc",
"vp9_temporal_layers_unittest.cc",
]
@@ -252,7 +261,7 @@ source_set("vaapi_image_decoder_test_common") {
}
# TODO(https://crbug.com/1043007): remove is_chromeos.
-if (is_ash) {
+if (is_chromeos_ash) {
source_set("jpeg_decoder_unit_test") {
testonly = true
sources = [ "vaapi_jpeg_decoder_unittest.cc" ]
@@ -317,7 +326,7 @@ test("vaapi_unittest") {
]
# TODO(https://crbug.com/1043007): remove is_chromeos.
- if (is_ash) {
+ if (is_chromeos_ash) {
deps += [
":jpeg_decoder_unit_test",
":webp_decoder_unit_test",
@@ -336,6 +345,10 @@ executable("decode_test") {
sources = [
"test/decode.cc",
"test/macros.h",
+ "test/scoped_va_config.cc",
+ "test/scoped_va_config.h",
+ "test/scoped_va_context.cc",
+ "test/scoped_va_context.h",
"test/shared_va_surface.cc",
"test/shared_va_surface.h",
"test/vaapi_device.cc",
diff --git a/chromium/media/gpu/vaapi/DEPS b/chromium/media/gpu/vaapi/DEPS
index 3fad129406a..505064a3a9b 100644
--- a/chromium/media/gpu/vaapi/DEPS
+++ b/chromium/media/gpu/vaapi/DEPS
@@ -1,5 +1,13 @@
+include_rules = [
+ "+chromeos/components/cdm_factory_daemon",
+ "+third_party/libva_protected_content/va_protected_content.h",
+]
+
specific_include_rules = {
".*_unittest\.cc": [
"+third_party/libwebp",
],
+ "vaapi_wrapper.cc": [
+ "+third_party/minigbm/src/external/i915_drm.h",
+ ],
}
diff --git a/chromium/media/gpu/vaapi/OWNERS b/chromium/media/gpu/vaapi/OWNERS
index 96c1ecee549..93cfc79bb9a 100644
--- a/chromium/media/gpu/vaapi/OWNERS
+++ b/chromium/media/gpu/vaapi/OWNERS
@@ -1,10 +1,10 @@
-dstaessens@chromium.org
-kcwu@chromium.org
mcasas@chromium.org
-posciak@chromium.org
+andrescj@chromium.org
-# (M)JPEG related stuff
-per-file *jpeg*=andrescj@chromium.org
+# For protected-mode video decoding.
+jkardatzke@google.com
-# General VA-API decoding related stuff
-per-file *image_decoder*=andrescj@chromium.org
+# Legacy owners.
+dstaessens@chromium.org
+kcwu@chromium.org
+posciak@chromium.org
\ No newline at end of file
diff --git a/chromium/media/gpu/vaapi/av1_vaapi_video_decoder_delegate.cc b/chromium/media/gpu/vaapi/av1_vaapi_video_decoder_delegate.cc
new file mode 100644
index 00000000000..517237514fe
--- /dev/null
+++ b/chromium/media/gpu/vaapi/av1_vaapi_video_decoder_delegate.cc
@@ -0,0 +1,839 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/vaapi/av1_vaapi_video_decoder_delegate.h"
+
+#include <string.h>
+#include <va/va.h>
+#include <algorithm>
+#include <vector>
+
+#include "base/logging.h"
+#include "base/memory/scoped_refptr.h"
+#include "media/gpu/av1_picture.h"
+#include "media/gpu/decode_surface_handler.h"
+#include "media/gpu/vaapi/vaapi_common.h"
+#include "media/gpu/vaapi/vaapi_wrapper.h"
+#include "third_party/libgav1/src/src/obu_parser.h"
+#include "third_party/libgav1/src/src/utils/types.h"
+#include "third_party/libgav1/src/src/warp_prediction.h"
+
+namespace media {
+namespace {
+
+#define ARRAY_SIZE(ar) (sizeof(ar) / sizeof(ar[0]))
+#define STD_ARRAY_SIZE(ar) (std::tuple_size<decltype(ar)>::value)
+
+void FillSegmentInfo(VASegmentationStructAV1& va_seg_info,
+ const libgav1::Segmentation& segmentation) {
+ auto& va_seg_info_fields = va_seg_info.segment_info_fields.bits;
+ va_seg_info_fields.enabled = segmentation.enabled;
+ va_seg_info_fields.update_map = segmentation.update_map;
+ va_seg_info_fields.temporal_update = segmentation.temporal_update;
+ va_seg_info_fields.update_data = segmentation.update_data;
+
+ static_assert(libgav1::kMaxSegments == 8 && libgav1::kSegmentFeatureMax == 8,
+ "Invalid Segment array size");
+ static_assert(ARRAY_SIZE(segmentation.feature_data) == 8 &&
+ ARRAY_SIZE(segmentation.feature_data[0]) == 8 &&
+ ARRAY_SIZE(segmentation.feature_enabled) == 8 &&
+ ARRAY_SIZE(segmentation.feature_enabled[0]) == 8,
+ "Invalid segmentation array size");
+ static_assert(ARRAY_SIZE(va_seg_info.feature_data) == 8 &&
+ ARRAY_SIZE(va_seg_info.feature_data[0]) == 8 &&
+ ARRAY_SIZE(va_seg_info.feature_mask) == 8,
+ "Invalid feature array size");
+ for (size_t i = 0; i < libgav1::kMaxSegments; ++i) {
+ for (size_t j = 0; j < libgav1::kSegmentFeatureMax; ++j)
+ va_seg_info.feature_data[i][j] = segmentation.feature_data[i][j];
+ }
+ for (size_t i = 0; i < libgav1::kMaxSegments; ++i) {
+ uint8_t feature_mask = 0;
+ for (size_t j = 0; j < libgav1::kSegmentFeatureMax; ++j) {
+ if (segmentation.feature_enabled[i][j])
+ feature_mask |= 1 << j;
+ }
+ va_seg_info.feature_mask[i] = feature_mask;
+ }
+}
+
+void FillFilmGrainInfo(VAFilmGrainStructAV1& va_film_grain_info,
+ const libgav1::FilmGrainParams& film_grain_params) {
+ if (!film_grain_params.apply_grain)
+ return;
+
+#define COPY_FILM_GRAIN_FIELD(a) \
+ va_film_grain_info.film_grain_info_fields.bits.a = film_grain_params.a
+ COPY_FILM_GRAIN_FIELD(apply_grain);
+ COPY_FILM_GRAIN_FIELD(chroma_scaling_from_luma);
+ COPY_FILM_GRAIN_FIELD(grain_scale_shift);
+ COPY_FILM_GRAIN_FIELD(overlap_flag);
+ COPY_FILM_GRAIN_FIELD(clip_to_restricted_range);
+#undef COPY_FILM_GRAIN_FIELD
+ va_film_grain_info.film_grain_info_fields.bits.ar_coeff_lag =
+ film_grain_params.auto_regression_coeff_lag;
+ DCHECK_GE(film_grain_params.chroma_scaling, 8u);
+ DCHECK_GE(film_grain_params.auto_regression_shift, 6u);
+ va_film_grain_info.film_grain_info_fields.bits.grain_scaling_minus_8 =
+ film_grain_params.chroma_scaling - 8;
+ va_film_grain_info.film_grain_info_fields.bits.ar_coeff_shift_minus_6 =
+ film_grain_params.auto_regression_shift - 6;
+
+ constexpr size_t kFilmGrainPointYSize = 14;
+ constexpr size_t kFilmGrainPointUVSize = 10;
+ static_assert(
+ ARRAY_SIZE(va_film_grain_info.point_y_value) == kFilmGrainPointYSize &&
+ ARRAY_SIZE(va_film_grain_info.point_y_scaling) ==
+ kFilmGrainPointYSize &&
+ ARRAY_SIZE(va_film_grain_info.point_cb_value) ==
+ kFilmGrainPointUVSize &&
+ ARRAY_SIZE(va_film_grain_info.point_cb_scaling) ==
+ kFilmGrainPointUVSize &&
+ ARRAY_SIZE(va_film_grain_info.point_cr_value) ==
+ kFilmGrainPointUVSize &&
+ ARRAY_SIZE(va_film_grain_info.point_cr_scaling) ==
+ kFilmGrainPointUVSize &&
+ ARRAY_SIZE(film_grain_params.point_y_value) == kFilmGrainPointYSize &&
+ ARRAY_SIZE(film_grain_params.point_y_scaling) ==
+ kFilmGrainPointYSize &&
+ ARRAY_SIZE(film_grain_params.point_u_value) ==
+ kFilmGrainPointUVSize &&
+ ARRAY_SIZE(film_grain_params.point_u_scaling) ==
+ kFilmGrainPointUVSize &&
+ ARRAY_SIZE(film_grain_params.point_v_value) ==
+ kFilmGrainPointUVSize &&
+ ARRAY_SIZE(film_grain_params.point_v_scaling) ==
+ kFilmGrainPointUVSize,
+ "Invalid array size of film grain values");
+ DCHECK_LE(film_grain_params.num_y_points, kFilmGrainPointYSize);
+ DCHECK_LE(film_grain_params.num_u_points, kFilmGrainPointUVSize);
+ DCHECK_LE(film_grain_params.num_v_points, kFilmGrainPointUVSize);
+#define COPY_FILM_GRAIN_FIELD2(a, b) va_film_grain_info.a = film_grain_params.b
+#define COPY_FILM_GRAIN_FIELD3(a) COPY_FILM_GRAIN_FIELD2(a, a)
+ COPY_FILM_GRAIN_FIELD3(grain_seed);
+ COPY_FILM_GRAIN_FIELD3(num_y_points);
+ for (uint8_t i = 0; i < film_grain_params.num_y_points; ++i) {
+ COPY_FILM_GRAIN_FIELD3(point_y_value[i]);
+ COPY_FILM_GRAIN_FIELD3(point_y_scaling[i]);
+ }
+#undef COPY_FILM_GRAIN_FIELD3
+ COPY_FILM_GRAIN_FIELD2(num_cb_points, num_u_points);
+ for (uint8_t i = 0; i < film_grain_params.num_u_points; ++i) {
+ COPY_FILM_GRAIN_FIELD2(point_cb_value[i], point_u_value[i]);
+ COPY_FILM_GRAIN_FIELD2(point_cb_scaling[i], point_u_scaling[i]);
+ }
+ COPY_FILM_GRAIN_FIELD2(num_cr_points, num_v_points);
+ for (uint8_t i = 0; i < film_grain_params.num_v_points; ++i) {
+ COPY_FILM_GRAIN_FIELD2(point_cr_value[i], point_v_value[i]);
+ COPY_FILM_GRAIN_FIELD2(point_cr_scaling[i], point_v_scaling[i]);
+ }
+
+ constexpr size_t kAutoRegressionCoeffYSize = 24;
+ constexpr size_t kAutoRegressionCoeffUVSize = 25;
+ static_assert(
+ ARRAY_SIZE(va_film_grain_info.ar_coeffs_y) == kAutoRegressionCoeffYSize &&
+ ARRAY_SIZE(va_film_grain_info.ar_coeffs_cb) ==
+ kAutoRegressionCoeffUVSize &&
+ ARRAY_SIZE(va_film_grain_info.ar_coeffs_cr) ==
+ kAutoRegressionCoeffUVSize &&
+ ARRAY_SIZE(film_grain_params.auto_regression_coeff_y) ==
+ kAutoRegressionCoeffYSize &&
+ ARRAY_SIZE(film_grain_params.auto_regression_coeff_u) ==
+ kAutoRegressionCoeffUVSize &&
+ ARRAY_SIZE(film_grain_params.auto_regression_coeff_v) ==
+ kAutoRegressionCoeffUVSize,
+ "Invalid array size of auto-regressive coefficients");
+ const size_t num_pos_y = (film_grain_params.auto_regression_coeff_lag * 2) *
+ (film_grain_params.auto_regression_coeff_lag + 1);
+ const size_t num_pos_uv = num_pos_y + (film_grain_params.num_y_points > 0);
+ if (film_grain_params.num_y_points > 0) {
+ DCHECK_LE(num_pos_y, kAutoRegressionCoeffYSize);
+ for (size_t i = 0; i < num_pos_y; ++i)
+ COPY_FILM_GRAIN_FIELD2(ar_coeffs_y[i], auto_regression_coeff_y[i]);
+ }
+ if (film_grain_params.chroma_scaling_from_luma ||
+ film_grain_params.num_u_points > 0 ||
+ film_grain_params.num_v_points > 0) {
+ DCHECK_LE(num_pos_uv, kAutoRegressionCoeffUVSize);
+ for (size_t i = 0; i < num_pos_uv; ++i) {
+ if (film_grain_params.chroma_scaling_from_luma ||
+ film_grain_params.num_u_points > 0) {
+ COPY_FILM_GRAIN_FIELD2(ar_coeffs_cb[i], auto_regression_coeff_u[i]);
+ }
+ if (film_grain_params.chroma_scaling_from_luma ||
+ film_grain_params.num_v_points > 0) {
+ COPY_FILM_GRAIN_FIELD2(ar_coeffs_cr[i], auto_regression_coeff_v[i]);
+ }
+ }
+ }
+ if (film_grain_params.num_u_points > 0) {
+ COPY_FILM_GRAIN_FIELD2(cb_mult, u_multiplier + 128);
+ COPY_FILM_GRAIN_FIELD2(cb_luma_mult, u_luma_multiplier + 128);
+ COPY_FILM_GRAIN_FIELD2(cb_offset, u_offset + 256);
+ }
+ if (film_grain_params.num_v_points > 0) {
+ COPY_FILM_GRAIN_FIELD2(cr_mult, v_multiplier + 128);
+ COPY_FILM_GRAIN_FIELD2(cr_luma_mult, v_luma_multiplier + 128);
+ COPY_FILM_GRAIN_FIELD2(cr_offset, v_offset + 256);
+ }
+#undef COPY_FILM_GRAIN_FIELD2
+}
+
+void FillGlobalMotionInfo(
+ VAWarpedMotionParamsAV1 va_warped_motion[7],
+ const std::array<libgav1::GlobalMotion, libgav1::kNumReferenceFrameTypes>&
+ global_motion) {
+ // global_motion[0] (for kReferenceFrameIntra) is not used.
+ constexpr size_t kWarpedMotionSize = libgav1::kNumReferenceFrameTypes - 1;
+ for (size_t i = 0; i < kWarpedMotionSize; ++i) {
+ // Copy |global_motion| because SetupShear updates the affine variables of
+ // the |global_motion|.
+ auto gm = global_motion[i + 1];
+ switch (gm.type) {
+ case libgav1::kGlobalMotionTransformationTypeIdentity:
+ va_warped_motion[i].wmtype = VAAV1TransformationIdentity;
+ break;
+ case libgav1::kGlobalMotionTransformationTypeTranslation:
+ va_warped_motion[i].wmtype = VAAV1TransformationTranslation;
+ break;
+ case libgav1::kGlobalMotionTransformationTypeRotZoom:
+ va_warped_motion[i].wmtype = VAAV1TransformationRotzoom;
+ break;
+ case libgav1::kGlobalMotionTransformationTypeAffine:
+ va_warped_motion[i].wmtype = VAAV1TransformationAffine;
+ break;
+ default:
+ NOTREACHED() << "Invalid global motion transformation type, "
+ << va_warped_motion[i].wmtype;
+ }
+ static_assert(ARRAY_SIZE(va_warped_motion[i].wmmat) == 8 &&
+ ARRAY_SIZE(gm.params) == 6,
+ "Invalid size of warp motion parameters");
+ for (size_t j = 0; j < 6; ++j)
+ va_warped_motion[i].wmmat[j] = gm.params[j];
+ va_warped_motion[i].wmmat[6] = 0;
+ va_warped_motion[i].wmmat[7] = 0;
+ va_warped_motion[i].invalid = !libgav1::SetupShear(&gm);
+ }
+}
+
+bool FillTileInfo(VADecPictureParameterBufferAV1& va_pic_param,
+ const libgav1::TileInfo& tile_info) {
+ // Since the libgav1 decoder doesn't support decoding with tile lists (i.e.
+ // large scale tile decoding), libgav1::ObuParser doesn't parse the tile
+ // list, so we cannot acquire anchor_frames_num, anchor_frames_list,
+ // tile_count_minus_1 and output_frame_width/height_in_tiles_minus_1, and
+ // thus must set them and large_scale_tile to 0 or false. This is already
+ // done by the memset in SubmitDecode(). libgav1::ObuParser returns
+ // kStatusUnimplemented from ParseOneFrame(), so large scale tile decoding
+ // falls back to the AV1 software decoder.
+ // TODO(hiroh): Support the large scale tile decoding once libgav1::ObuParser
+ // supports it.
+ va_pic_param.tile_cols = base::checked_cast<uint8_t>(tile_info.tile_columns);
+ va_pic_param.tile_rows = base::checked_cast<uint8_t>(tile_info.tile_rows);
+
+ if (!tile_info.uniform_spacing) {
+ constexpr int kVaSizeOfTileWidthAndHeightArray = 63;
+ static_assert(
+ ARRAY_SIZE(tile_info.tile_column_width_in_superblocks) == 65 &&
+ ARRAY_SIZE(tile_info.tile_row_height_in_superblocks) == 65 &&
+ ARRAY_SIZE(va_pic_param.width_in_sbs_minus_1) ==
+ kVaSizeOfTileWidthAndHeightArray &&
+ ARRAY_SIZE(va_pic_param.height_in_sbs_minus_1) ==
+ kVaSizeOfTileWidthAndHeightArray,
+ "Invalid sizes of tile column widths and row heights");
+ const int tile_columns =
+ std::min(kVaSizeOfTileWidthAndHeightArray, tile_info.tile_columns);
+ for (int i = 0; i < tile_columns; i++) {
+ if (!base::CheckSub<int>(tile_info.tile_column_width_in_superblocks[i], 1)
+ .AssignIfValid(&va_pic_param.width_in_sbs_minus_1[i])) {
+ return false;
+ }
+ }
+ const int tile_rows =
+ std::min(kVaSizeOfTileWidthAndHeightArray, tile_info.tile_rows);
+ for (int i = 0; i < tile_rows; i++) {
+ if (!base::CheckSub<int>(tile_info.tile_row_height_in_superblocks[i], 1)
+ .AssignIfValid(&va_pic_param.height_in_sbs_minus_1[i])) {
+ return false;
+ }
+ }
+ }
+
+ va_pic_param.context_update_tile_id =
+ base::checked_cast<uint16_t>(tile_info.context_update_id);
+ return true;
+}
+
+void FillLoopFilterInfo(VADecPictureParameterBufferAV1& va_pic_param,
+ const libgav1::LoopFilter& loop_filter) {
+ static_assert(STD_ARRAY_SIZE(loop_filter.level) == libgav1::kFrameLfCount &&
+ libgav1::kFrameLfCount == 4 &&
+ ARRAY_SIZE(va_pic_param.filter_level) == 2,
+ "Invalid size of loop filter strength array");
+ va_pic_param.filter_level[0] =
+ base::checked_cast<uint8_t>(loop_filter.level[0]);
+ va_pic_param.filter_level[1] =
+ base::checked_cast<uint8_t>(loop_filter.level[1]);
+ va_pic_param.filter_level_u =
+ base::checked_cast<uint8_t>(loop_filter.level[2]);
+ va_pic_param.filter_level_v =
+ base::checked_cast<uint8_t>(loop_filter.level[3]);
+
+ va_pic_param.loop_filter_info_fields.bits.sharpness_level =
+ loop_filter.sharpness;
+ va_pic_param.loop_filter_info_fields.bits.mode_ref_delta_enabled =
+ loop_filter.delta_enabled;
+ va_pic_param.loop_filter_info_fields.bits.mode_ref_delta_update =
+ loop_filter.delta_update;
+
+ static_assert(libgav1::kNumReferenceFrameTypes == 8 &&
+ ARRAY_SIZE(va_pic_param.ref_deltas) ==
+ libgav1::kNumReferenceFrameTypes &&
+ STD_ARRAY_SIZE(loop_filter.ref_deltas) ==
+ libgav1::kNumReferenceFrameTypes,
+ "Invalid size of ref deltas array");
+ static_assert(libgav1::kLoopFilterMaxModeDeltas == 2 &&
+ ARRAY_SIZE(va_pic_param.mode_deltas) ==
+ libgav1::kLoopFilterMaxModeDeltas &&
+ STD_ARRAY_SIZE(loop_filter.mode_deltas) ==
+ libgav1::kLoopFilterMaxModeDeltas,
+ "Invalid size of mode deltas array");
+ for (size_t i = 0; i < libgav1::kNumReferenceFrameTypes; i++)
+ va_pic_param.ref_deltas[i] = loop_filter.ref_deltas[i];
+ for (size_t i = 0; i < libgav1::kLoopFilterMaxModeDeltas; i++)
+ va_pic_param.mode_deltas[i] = loop_filter.mode_deltas[i];
+}
+
+void FillQuantizationInfo(VADecPictureParameterBufferAV1& va_pic_param,
+ const libgav1::QuantizerParameters& quant_param) {
+ va_pic_param.base_qindex = quant_param.base_index;
+ static_assert(
+ libgav1::kPlaneY == 0 && libgav1::kPlaneU == 1 && libgav1::kPlaneV == 2,
+ "Invalid plane index");
+ static_assert(libgav1::kMaxPlanes == 3 &&
+ ARRAY_SIZE(quant_param.delta_dc) == libgav1::kMaxPlanes &&
+ ARRAY_SIZE(quant_param.delta_ac) == libgav1::kMaxPlanes,
+ "Invalid size of delta dc/ac array");
+ va_pic_param.y_dc_delta_q = quant_param.delta_dc[0];
+ va_pic_param.u_dc_delta_q = quant_param.delta_dc[1];
+ va_pic_param.v_dc_delta_q = quant_param.delta_dc[2];
+ // quant_param.delta_ac[0] is useless as it is always 0.
+ va_pic_param.u_ac_delta_q = quant_param.delta_ac[1];
+ va_pic_param.v_ac_delta_q = quant_param.delta_ac[2];
+
+ va_pic_param.qmatrix_fields.bits.using_qmatrix = quant_param.use_matrix;
+ if (!quant_param.use_matrix)
+ return;
+ static_assert(ARRAY_SIZE(quant_param.matrix_level) == libgav1::kMaxPlanes,
+ "Invalid size of matrix levels");
+ va_pic_param.qmatrix_fields.bits.qm_y =
+ base::checked_cast<uint16_t>(quant_param.matrix_level[0]);
+ va_pic_param.qmatrix_fields.bits.qm_u =
+ base::checked_cast<uint16_t>(quant_param.matrix_level[1]);
+ va_pic_param.qmatrix_fields.bits.qm_v =
+ base::checked_cast<uint16_t>(quant_param.matrix_level[2]);
+}
+
+void FillCdefInfo(VADecPictureParameterBufferAV1& va_pic_param,
+ const libgav1::Cdef& cdef,
+ uint8_t color_bitdepth) {
+ // Damping value parsed in libgav1 is from the spec + (bitdepth - 8).
+ // All the strength values parsed in libgav1 are from the spec and left
+ // shifted by (bitdepth - 8).
+ CHECK_GE(color_bitdepth, 8u);
+ const uint8_t coeff_shift = color_bitdepth - 8u;
+ va_pic_param.cdef_damping_minus_3 =
+ base::checked_cast<uint8_t>(cdef.damping - coeff_shift - 3u);
+
+ va_pic_param.cdef_bits = cdef.bits;
+ static_assert(
+ libgav1::kMaxCdefStrengths == 8 &&
+ ARRAY_SIZE(cdef.y_primary_strength) == libgav1::kMaxCdefStrengths &&
+ ARRAY_SIZE(cdef.y_secondary_strength) == libgav1::kMaxCdefStrengths &&
+ ARRAY_SIZE(cdef.uv_primary_strength) == libgav1::kMaxCdefStrengths &&
+ ARRAY_SIZE(cdef.uv_secondary_strength) ==
+ libgav1::kMaxCdefStrengths &&
+ ARRAY_SIZE(va_pic_param.cdef_y_strengths) ==
+ libgav1::kMaxCdefStrengths &&
+ ARRAY_SIZE(va_pic_param.cdef_uv_strengths) ==
+ libgav1::kMaxCdefStrengths,
+ "Invalid size of cdef strengths");
+ const size_t num_cdef_strengths = 1 << cdef.bits;
+ DCHECK_LE(num_cdef_strengths,
+ static_cast<size_t>(libgav1::kMaxCdefStrengths));
+ for (size_t i = 0; i < num_cdef_strengths; ++i) {
+ const uint8_t prim_strength = cdef.y_primary_strength[i] >> coeff_shift;
+ uint8_t sec_strength = cdef.y_secondary_strength[i] >> coeff_shift;
+ DCHECK_LE(sec_strength, 4u);
+ if (sec_strength == 4)
+ sec_strength--;
+ va_pic_param.cdef_y_strengths[i] =
+ ((prim_strength & 0xf) << 2) | (sec_strength & 0x03);
+ }
+
+ for (size_t i = 0; i < num_cdef_strengths; ++i) {
+ const uint8_t prim_strength = cdef.uv_primary_strength[i] >> coeff_shift;
+ uint8_t sec_strength = cdef.uv_secondary_strength[i] >> coeff_shift;
+ DCHECK_LE(sec_strength, 4u);
+ if (sec_strength == 4)
+ sec_strength--;
+ va_pic_param.cdef_uv_strengths[i] =
+ ((prim_strength & 0xf) << 2) | (sec_strength & 0x03);
+ }
+}
+
+void FillModeControlInfo(VADecPictureParameterBufferAV1& va_pic_param,
+ const libgav1::ObuFrameHeader& frame_header) {
+ auto& mode_control = va_pic_param.mode_control_fields.bits;
+ mode_control.delta_q_present_flag = frame_header.delta_q.present;
+ mode_control.log2_delta_q_res = frame_header.delta_q.scale;
+ mode_control.delta_lf_present_flag = frame_header.delta_lf.present;
+ mode_control.log2_delta_lf_res = frame_header.delta_lf.scale;
+ mode_control.delta_lf_multi = frame_header.delta_lf.multi;
+ DCHECK_LE(0u, frame_header.tx_mode);
+ DCHECK_LE(frame_header.tx_mode, 2u);
+ mode_control.tx_mode = frame_header.tx_mode;
+
+ mode_control.reference_select = frame_header.reference_mode_select;
+ mode_control.reduced_tx_set_used = frame_header.reduced_tx_set;
+ mode_control.skip_mode_present = frame_header.skip_mode_present;
+}
+
+void FillLoopRestorationInfo(VADecPictureParameterBufferAV1& va_pic_param,
+ const libgav1::LoopRestoration& loop_restoration) {
+ auto to_frame_restoration_type =
+ [](libgav1::LoopRestorationType lr_type) -> uint16_t {
+ // Spec. 6.10.15
+ switch (lr_type) {
+ case libgav1::LoopRestorationType::kLoopRestorationTypeNone:
+ return 0;
+ case libgav1::LoopRestorationType::kLoopRestorationTypeSwitchable:
+ return 3;
+ case libgav1::LoopRestorationType::kLoopRestorationTypeWiener:
+ return 1;
+ case libgav1::LoopRestorationType::kLoopRestorationTypeSgrProj:
+ return 2;
+ default:
+ NOTREACHED() << "Invalid restoration type"
+ << base::strict_cast<int>(lr_type);
+ return 0;
+ }
+ };
+ static_assert(
+ libgav1::kMaxPlanes == 3 &&
+ ARRAY_SIZE(loop_restoration.type) == libgav1::kMaxPlanes &&
+ ARRAY_SIZE(loop_restoration.unit_size_log2) == libgav1::kMaxPlanes,
+ "Invalid size of loop restoration values");
+ auto& va_loop_restoration = va_pic_param.loop_restoration_fields.bits;
+ va_loop_restoration.yframe_restoration_type =
+ to_frame_restoration_type(loop_restoration.type[0]);
+ va_loop_restoration.cbframe_restoration_type =
+ to_frame_restoration_type(loop_restoration.type[1]);
+ va_loop_restoration.crframe_restoration_type =
+ to_frame_restoration_type(loop_restoration.type[2]);
+
+ const size_t num_planes = libgav1::kMaxPlanes;
+ const bool use_loop_restoration =
+ std::find_if(std::begin(loop_restoration.type),
+ std::begin(loop_restoration.type) + num_planes,
+ [](const auto type) {
+ return type != libgav1::kLoopRestorationTypeNone;
+ }) != (loop_restoration.type + num_planes);
+ if (!use_loop_restoration)
+ return;
+ static_assert(libgav1::kPlaneY == 0u && libgav1::kPlaneU == 1u,
+ "Invalid plane index");
+ DCHECK_GE(loop_restoration.unit_size_log2[0], 6);
+ DCHECK_GE(loop_restoration.unit_size_log2[0],
+ loop_restoration.unit_size_log2[1]);
+ DCHECK_LE(
+ loop_restoration.unit_size_log2[0] - loop_restoration.unit_size_log2[1],
+ 1);
+ va_loop_restoration.lr_unit_shift = loop_restoration.unit_size_log2[0] - 6;
+ va_loop_restoration.lr_uv_shift =
+ loop_restoration.unit_size_log2[0] - loop_restoration.unit_size_log2[1];
+}
+
+bool FillAV1PictureParameter(const AV1Picture& pic,
+ const libgav1::ObuSequenceHeader& sequence_header,
+ const AV1ReferenceFrameVector& ref_frames,
+ VADecPictureParameterBufferAV1& va_pic_param) {
+ memset(&va_pic_param, 0, sizeof(VADecPictureParameterBufferAV1));
+ DCHECK_LE(base::strict_cast<uint8_t>(sequence_header.profile), 2u)
+ << "Unknown profile: " << base::strict_cast<int>(sequence_header.profile);
+ va_pic_param.profile = base::strict_cast<uint8_t>(sequence_header.profile);
+
+ if (sequence_header.enable_order_hint) {
+ DCHECK_GT(sequence_header.order_hint_bits, 0);
+ DCHECK_LE(sequence_header.order_hint_bits, 8);
+ va_pic_param.order_hint_bits_minus_1 = sequence_header.order_hint_bits - 1;
+ }
+
+ switch (sequence_header.color_config.bitdepth) {
+ case 8:
+ va_pic_param.bit_depth_idx = 0;
+ break;
+ case 10:
+ va_pic_param.bit_depth_idx = 1;
+ break;
+ case 12:
+ va_pic_param.bit_depth_idx = 2;
+ break;
+ default:
+ NOTREACHED() << "Unknown bit depth: "
+ << base::strict_cast<int>(
+ sequence_header.color_config.bitdepth);
+ }
+ switch (sequence_header.color_config.matrix_coefficients) {
+ case libgav1::kMatrixCoefficientsIdentity:
+ case libgav1::kMatrixCoefficientsBt709:
+ case libgav1::kMatrixCoefficientsUnspecified:
+ case libgav1::kMatrixCoefficientsFcc:
+ case libgav1::kMatrixCoefficientsBt470BG:
+ case libgav1::kMatrixCoefficientsBt601:
+ case libgav1::kMatrixCoefficientsSmpte240:
+ case libgav1::kMatrixCoefficientsSmpteYcgco:
+ case libgav1::kMatrixCoefficientsBt2020Ncl:
+ case libgav1::kMatrixCoefficientsBt2020Cl:
+ case libgav1::kMatrixCoefficientsSmpte2085:
+ case libgav1::kMatrixCoefficientsChromatNcl:
+ case libgav1::kMatrixCoefficientsChromatCl:
+ case libgav1::kMatrixCoefficientsIctcp:
+ va_pic_param.matrix_coefficients = base::checked_cast<uint8_t>(
+ sequence_header.color_config.matrix_coefficients);
+ break;
+ default:
+ DLOG(ERROR) << "Invalid matrix coefficients: "
+ << static_cast<int>(
+ sequence_header.color_config.matrix_coefficients);
+ return false;
+ }
+
+ DCHECK(!sequence_header.color_config.is_monochrome);
+#define COPY_SEQ_FIELD(a) \
+ va_pic_param.seq_info_fields.fields.a = sequence_header.a
+#define COPY_SEQ_FIELD2(a, b) va_pic_param.seq_info_fields.fields.a = b
+ COPY_SEQ_FIELD(still_picture);
+ COPY_SEQ_FIELD(use_128x128_superblock);
+ COPY_SEQ_FIELD(enable_filter_intra);
+ COPY_SEQ_FIELD(enable_intra_edge_filter);
+ COPY_SEQ_FIELD(enable_interintra_compound);
+ COPY_SEQ_FIELD(enable_masked_compound);
+ COPY_SEQ_FIELD(enable_dual_filter);
+ COPY_SEQ_FIELD(enable_order_hint);
+ COPY_SEQ_FIELD(enable_jnt_comp);
+ COPY_SEQ_FIELD(enable_cdef);
+ COPY_SEQ_FIELD2(mono_chrome, sequence_header.color_config.is_monochrome);
+ COPY_SEQ_FIELD2(subsampling_x, sequence_header.color_config.subsampling_x);
+ COPY_SEQ_FIELD2(subsampling_y, sequence_header.color_config.subsampling_y);
+ COPY_SEQ_FIELD(film_grain_params_present);
+#undef COPY_SEQ_FIELD
+ switch (sequence_header.color_config.color_range) {
+ case libgav1::kColorRangeStudio:
+ case libgav1::kColorRangeFull:
+ COPY_SEQ_FIELD2(color_range,
+ base::strict_cast<uint32_t>(
+ sequence_header.color_config.color_range));
+ break;
+ default:
+ NOTREACHED() << "Unknown color range: "
+ << static_cast<int>(
+ sequence_header.color_config.color_range);
+ }
+#undef COPY_SEQ_FIELD2
+
+ const libgav1::ObuFrameHeader& frame_header = pic.frame_header;
+ const auto* vaapi_pic = static_cast<const VaapiAV1Picture*>(&pic);
+ DCHECK(!!vaapi_pic->display_va_surface() &&
+ !!vaapi_pic->reconstruct_va_surface());
+ if (frame_header.film_grain_params.apply_grain) {
+ DCHECK_NE(vaapi_pic->display_va_surface()->id(),
+ vaapi_pic->reconstruct_va_surface()->id())
+ << "When using film grain synthesis, the display and reconstruct "
+ "surfaces"
+ << " should be different.";
+ va_pic_param.current_frame = vaapi_pic->reconstruct_va_surface()->id();
+ va_pic_param.current_display_picture =
+ vaapi_pic->display_va_surface()->id();
+ } else {
+ DCHECK_EQ(vaapi_pic->display_va_surface()->id(),
+ vaapi_pic->reconstruct_va_surface()->id())
+ << "When not using film grain synthesis, the display and reconstruct"
+ << " surfaces should be the same.";
+ va_pic_param.current_frame = vaapi_pic->display_va_surface()->id();
+ va_pic_param.current_display_picture = VA_INVALID_SURFACE;
+ }
+
+ if (!base::CheckSub<int32_t>(frame_header.width, 1)
+ .AssignIfValid(&va_pic_param.frame_width_minus1) ||
+ !base::CheckSub<int32_t>(frame_header.height, 1)
+ .AssignIfValid(&va_pic_param.frame_height_minus1)) {
+ DLOG(ERROR) << "Invalid frame width and height"
+ << ", width=" << frame_header.width
+ << ", height=" << frame_header.height;
+ return false;
+ }
+
+ static_assert(libgav1::kNumReferenceFrameTypes == 8 &&
+ ARRAY_SIZE(va_pic_param.ref_frame_map) ==
+ libgav1::kNumReferenceFrameTypes,
+ "Invalid size of reference frames");
+ static_assert(libgav1::kNumInterReferenceFrameTypes == 7 &&
+ ARRAY_SIZE(frame_header.reference_frame_index) ==
+ libgav1::kNumInterReferenceFrameTypes &&
+ ARRAY_SIZE(va_pic_param.ref_frame_idx) ==
+ libgav1::kNumInterReferenceFrameTypes,
+ "Invalid size of reference frame indices");
+ for (size_t i = 0; i < libgav1::kNumReferenceFrameTypes; ++i) {
+ const auto* ref_pic =
+ static_cast<const VaapiAV1Picture*>(ref_frames[i].get());
+ va_pic_param.ref_frame_map[i] =
+ ref_pic ? ref_pic->reconstruct_va_surface()->id() : VA_INVALID_SURFACE;
+ }
+
+ // |va_pic_param.ref_frame_idx| doesn't need to be filled in for intra frames
+ // (it can be left zero initialized).
+ if (!libgav1::IsIntraFrame(frame_header.frame_type)) {
+ for (size_t i = 0; i < libgav1::kNumInterReferenceFrameTypes; ++i) {
+ const int8_t index = frame_header.reference_frame_index[i];
+ CHECK_GE(index, 0);
+ CHECK_LT(index, libgav1::kNumReferenceFrameTypes);
+ // AV1Decoder::CheckAndCleanUpReferenceFrames() ensures that
+ // |ref_frames[index]| is valid for all the reference frames needed by the
+ // current frame.
+ DCHECK_NE(va_pic_param.ref_frame_map[index], VA_INVALID_SURFACE);
+ va_pic_param.ref_frame_idx[i] = base::checked_cast<uint8_t>(index);
+ }
+ }
+
+ va_pic_param.primary_ref_frame =
+ base::checked_cast<uint8_t>(frame_header.primary_reference_frame);
+ va_pic_param.order_hint = frame_header.order_hint;
+
+ FillSegmentInfo(va_pic_param.seg_info, frame_header.segmentation);
+ FillFilmGrainInfo(va_pic_param.film_grain_info,
+ frame_header.film_grain_params);
+
+ if (!FillTileInfo(va_pic_param, frame_header.tile_info))
+ return false;
+
+ if (frame_header.use_superres) {
+ DVLOG(2) << "Upscaling (use_superres=1) is not supported";
+ return false;
+ }
+ auto& va_pic_info_fields = va_pic_param.pic_info_fields.bits;
+ va_pic_info_fields.uniform_tile_spacing_flag =
+ frame_header.tile_info.uniform_spacing;
+#define COPY_PIC_FIELD(a) va_pic_info_fields.a = frame_header.a
+ COPY_PIC_FIELD(show_frame);
+ COPY_PIC_FIELD(showable_frame);
+ COPY_PIC_FIELD(error_resilient_mode);
+ COPY_PIC_FIELD(allow_screen_content_tools);
+ COPY_PIC_FIELD(force_integer_mv);
+ COPY_PIC_FIELD(allow_intrabc);
+ COPY_PIC_FIELD(use_superres);
+ COPY_PIC_FIELD(allow_high_precision_mv);
+ COPY_PIC_FIELD(is_motion_mode_switchable);
+ COPY_PIC_FIELD(use_ref_frame_mvs);
+ COPY_PIC_FIELD(allow_warped_motion);
+#undef COPY_PIC_FIELD
+ switch (frame_header.frame_type) {
+ case libgav1::FrameType::kFrameKey:
+ case libgav1::FrameType::kFrameInter:
+ case libgav1::FrameType::kFrameIntraOnly:
+ case libgav1::FrameType::kFrameSwitch:
+ va_pic_info_fields.frame_type =
+ base::strict_cast<uint32_t>(frame_header.frame_type);
+ break;
+ default:
+ NOTREACHED() << "Unknown frame type: "
+ << base::strict_cast<int>(frame_header.frame_type);
+ }
+ va_pic_info_fields.disable_cdf_update = !frame_header.enable_cdf_update;
+ va_pic_info_fields.disable_frame_end_update_cdf =
+ !frame_header.enable_frame_end_update_cdf;
+
+ static_assert(libgav1::kSuperResScaleNumerator == 8,
+ "Invalid libgav1::kSuperResScaleNumerator value");
+ CHECK_EQ(frame_header.superres_scale_denominator,
+ libgav1::kSuperResScaleNumerator);
+ va_pic_param.superres_scale_denominator =
+ frame_header.superres_scale_denominator;
+ DCHECK_LE(base::strict_cast<uint8_t>(frame_header.interpolation_filter), 4u)
+ << "Unknown interpolation filter: "
+ << base::strict_cast<int>(frame_header.interpolation_filter);
+ va_pic_param.interp_filter =
+ base::strict_cast<uint8_t>(frame_header.interpolation_filter);
+
+ FillQuantizationInfo(va_pic_param, frame_header.quantizer);
+ FillLoopFilterInfo(va_pic_param, frame_header.loop_filter);
+ FillModeControlInfo(va_pic_param, frame_header);
+ FillLoopRestorationInfo(va_pic_param, frame_header.loop_restoration);
+ FillGlobalMotionInfo(va_pic_param.wm, frame_header.global_motion);
+ FillCdefInfo(
+ va_pic_param, frame_header.cdef,
+ base::checked_cast<uint8_t>(sequence_header.color_config.bitdepth));
+ return true;
+}
+
+bool FillAV1SliceParameters(
+ const libgav1::Vector<libgav1::TileBuffer>& tile_buffers,
+ const size_t tile_columns,
+ base::span<const uint8_t> data,
+ std::vector<VASliceParameterBufferAV1>& va_slice_params) {
+ CHECK_GT(tile_columns, 0u);
+ const uint16_t num_tiles = base::checked_cast<uint16_t>(tile_buffers.size());
+ va_slice_params.resize(num_tiles);
+ for (uint16_t tile = 0; tile < num_tiles; ++tile) {
+ VASliceParameterBufferAV1& va_tile_param = va_slice_params[tile];
+ memset(&va_tile_param, 0, sizeof(VASliceParameterBufferAV1));
+ va_tile_param.slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+ va_tile_param.tile_row = tile / base::checked_cast<uint16_t>(tile_columns);
+ va_tile_param.tile_column =
+ tile % base::checked_cast<uint16_t>(tile_columns);
+ if (!base::CheckedNumeric<size_t>(tile_buffers[tile].size)
+ .AssignIfValid(&va_tile_param.slice_data_size)) {
+ return false;
+ }
+ CHECK(tile_buffers[tile].data >= data.data());
+ va_tile_param.slice_data_offset =
+ base::checked_cast<uint32_t>(tile_buffers[tile].data - data.data());
+ base::CheckedNumeric<uint32_t> safe_va_slice_data_end(
+ va_tile_param.slice_data_offset);
+ safe_va_slice_data_end += va_tile_param.slice_data_size;
+ size_t va_slice_data_end;
+ if (!safe_va_slice_data_end.AssignIfValid(&va_slice_data_end) ||
+ va_slice_data_end > data.size()) {
+ DLOG(ERROR) << "Invalid tile offset and size"
+ << ", offset=" << va_tile_param.slice_data_size
+ << ", size=" << va_tile_param.slice_data_offset
+ << ", entire data size=" << data.size();
+ return false;
+ }
+ }
+ return true;
+}
+} // namespace
+
+AV1VaapiVideoDecoderDelegate::AV1VaapiVideoDecoderDelegate(
+ DecodeSurfaceHandler<VASurface>* const vaapi_dec,
+ scoped_refptr<VaapiWrapper> vaapi_wrapper)
+ : VaapiVideoDecoderDelegate(vaapi_dec,
+ std::move(vaapi_wrapper),
+ base::DoNothing(),
+ nullptr) {}
+
+AV1VaapiVideoDecoderDelegate::~AV1VaapiVideoDecoderDelegate() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(!picture_params_);
+ DCHECK(slice_params_.empty());
+}
+
+scoped_refptr<AV1Picture> AV1VaapiVideoDecoderDelegate::CreateAV1Picture(
+ bool apply_grain) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ const auto display_va_surface = vaapi_dec_->CreateSurface();
+ if (!display_va_surface)
+ return nullptr;
+
+ auto reconstruct_va_surface = display_va_surface;
+ if (apply_grain) {
+ // TODO(hiroh): When no surface is available here, this returns nullptr and
+ // |display_va_surface| is released. Since the surface is back to the pool,
+ // VaapiVideoDecoder will detect that there are surfaces available and will
+ // start another decode task which means that CreateSurface() might fail
+ // again for |reconstruct_va_surface| since only one surface might have gone
+ // back to the pool (the one for |display_va_surface|). We should avoid this
+ // loop for the sake of efficiency.
+ reconstruct_va_surface = vaapi_dec_->CreateSurface();
+ if (!reconstruct_va_surface)
+ return nullptr;
+ }
+
+ return base::MakeRefCounted<VaapiAV1Picture>(
+ std::move(display_va_surface), std::move(reconstruct_va_surface));
+}
+
+bool AV1VaapiVideoDecoderDelegate::OutputPicture(const AV1Picture& pic) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ const auto* vaapi_pic = static_cast<const VaapiAV1Picture*>(&pic);
+ vaapi_dec_->SurfaceReady(vaapi_pic->display_va_surface(),
+ vaapi_pic->bitstream_id(), vaapi_pic->visible_rect(),
+ vaapi_pic->get_colorspace());
+ return true;
+}
+
+bool AV1VaapiVideoDecoderDelegate::SubmitDecode(
+ const AV1Picture& pic,
+ const libgav1::ObuSequenceHeader& seq_header,
+ const AV1ReferenceFrameVector& ref_frames,
+ const libgav1::Vector<libgav1::TileBuffer>& tile_buffers,
+ base::span<const uint8_t> data) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ // libgav1 ensures that tile_columns is >= 0 and <= MAX_TILE_COLS.
+ DCHECK_LE(0, pic.frame_header.tile_info.tile_columns);
+ DCHECK_LE(pic.frame_header.tile_info.tile_columns, libgav1::kMaxTileColumns);
+ const size_t tile_columns =
+ base::checked_cast<size_t>(pic.frame_header.tile_info.tile_columns);
+
+ VADecPictureParameterBufferAV1 pic_param;
+ std::vector<VASliceParameterBufferAV1> slice_params;
+ if (!FillAV1PictureParameter(pic, seq_header, ref_frames, pic_param) ||
+ !FillAV1SliceParameters(tile_buffers, tile_columns, data, slice_params)) {
+ return false;
+ }
+
+ if (!picture_params_) {
+ picture_params_ = vaapi_wrapper_->CreateVABuffer(
+ VAPictureParameterBufferType, sizeof(pic_param));
+ if (!picture_params_)
+ return false;
+ }
+ if (slice_params_.size() != slice_params.size()) {
+ while (slice_params_.size() < slice_params.size()) {
+ slice_params_.push_back(vaapi_wrapper_->CreateVABuffer(
+ VASliceParameterBufferType, sizeof(VASliceParameterBufferAV1)));
+ if (!slice_params_.back()) {
+ slice_params_.clear();
+ return false;
+ }
+ }
+ slice_params_.resize(slice_params.size());
+ slice_params_.shrink_to_fit();
+ }
+ // TODO(hiroh): Don't submit the entire coded data to the buffer. Instead,
+ // only pass the data starting from the tile list OBU to reduce the size of
+ // the VA buffer.
+ // Always re-create |encoded_data| because reusing the buffer causes horrific
+ // artifacts in decoded buffers. TODO(b/177028692): This seems to be a driver
+ // bug, fix it and reuse the buffer.
+ auto encoded_data =
+ vaapi_wrapper_->CreateVABuffer(VASliceDataBufferType, data.size_bytes());
+ if (!encoded_data)
+ return false;
+
+ std::vector<std::pair<VABufferID, VaapiWrapper::VABufferDescriptor>> buffers =
+ {{picture_params_->id(),
+ {picture_params_->type(), picture_params_->size(), &pic_param}},
+ {encoded_data->id(),
+ {encoded_data->type(), encoded_data->size(), data.data()}}};
+ for (size_t i = 0; i < slice_params.size(); ++i) {
+ buffers.push_back({slice_params_[i]->id(),
+ {slice_params_[i]->type(), slice_params_[i]->size(),
+ &slice_params[i]}});
+ }
+
+ const auto* vaapi_pic = static_cast<const VaapiAV1Picture*>(&pic);
+ return vaapi_wrapper_->MapAndCopyAndExecute(
+ vaapi_pic->reconstruct_va_surface()->id(), buffers);
+}
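
SubmitDecode() keeps |picture_params_| and |slice_params_| alive across frames and only reallocates when the slice count changes, while |encoded_data| is re-created each time to dodge the driver bug noted above. A sketch of the grow-or-trim cache pattern, with a hypothetical MakeBuffer() standing in for VaapiWrapper::CreateVABuffer():

    #include <cstddef>
    #include <memory>
    #include <vector>

    // Hypothetical stand-in; the real CreateVABuffer() may return null.
    std::unique_ptr<int> MakeBuffer() { return std::make_unique<int>(0); }

    bool ResizeBufferCache(std::vector<std::unique_ptr<int>>& cache,
                           size_t needed) {
      while (cache.size() < needed) {
        cache.push_back(MakeBuffer());
        if (!cache.back()) {
          cache.clear();  // Allocation failed; drop the whole cache.
          return false;
        }
      }
      cache.resize(needed);   // Trim any excess buffers...
      cache.shrink_to_fit();  // ...and release their capacity.
      return true;
    }
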
+
+void AV1VaapiVideoDecoderDelegate::OnVAContextDestructionSoon() {
+ // Destroy the member ScopedVABuffers below since they refer to a VAContextID
+ // that will be destroyed soon.
+ picture_params_.reset();
+ slice_params_.clear();
+}
+} // namespace media
diff --git a/chromium/media/gpu/vaapi/av1_vaapi_video_decoder_delegate.h b/chromium/media/gpu/vaapi/av1_vaapi_video_decoder_delegate.h
new file mode 100644
index 00000000000..c306f0c69f4
--- /dev/null
+++ b/chromium/media/gpu/vaapi/av1_vaapi_video_decoder_delegate.h
@@ -0,0 +1,44 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_VAAPI_AV1_VAAPI_VIDEO_DECODER_DELEGATE_H_
+#define MEDIA_GPU_VAAPI_AV1_VAAPI_VIDEO_DECODER_DELEGATE_H_
+
+#include <memory>
+#include <vector>
+
+#include "media/gpu/av1_decoder.h"
+#include "media/gpu/vaapi/vaapi_video_decoder_delegate.h"
+
+namespace media {
+class ScopedVABuffer;
+
+class AV1VaapiVideoDecoderDelegate : public AV1Decoder::AV1Accelerator,
+ public VaapiVideoDecoderDelegate {
+ public:
+ AV1VaapiVideoDecoderDelegate(DecodeSurfaceHandler<VASurface>* const vaapi_dec,
+ scoped_refptr<VaapiWrapper> vaapi_wrapper);
+ ~AV1VaapiVideoDecoderDelegate() override;
+ AV1VaapiVideoDecoderDelegate(const AV1VaapiVideoDecoderDelegate&) = delete;
+ AV1VaapiVideoDecoderDelegate& operator=(const AV1VaapiVideoDecoderDelegate&) =
+ delete;
+
+ // AV1Decoder::AV1Accelerator implementation.
+ scoped_refptr<AV1Picture> CreateAV1Picture(bool apply_grain) override;
+ bool SubmitDecode(const AV1Picture& pic,
+ const libgav1::ObuSequenceHeader& seq_header,
+ const AV1ReferenceFrameVector& ref_frames,
+ const libgav1::Vector<libgav1::TileBuffer>& tile_buffers,
+ base::span<const uint8_t> data) override;
+ bool OutputPicture(const AV1Picture& pic) override;
+
+ // VaapiVideoDecoderDelegate implementation.
+ void OnVAContextDestructionSoon() override;
+
+ private:
+ std::unique_ptr<ScopedVABuffer> picture_params_;
+ std::vector<std::unique_ptr<ScopedVABuffer>> slice_params_;
+};
+} // namespace media
+#endif // MEDIA_GPU_VAAPI_AV1_VAAPI_VIDEO_DECODER_DELEGATE_H_
diff --git a/chromium/media/gpu/vaapi/h264_vaapi_video_decoder_delegate.cc b/chromium/media/gpu/vaapi/h264_vaapi_video_decoder_delegate.cc
index e27f39b0756..fa99601e8da 100644
--- a/chromium/media/gpu/vaapi/h264_vaapi_video_decoder_delegate.cc
+++ b/chromium/media/gpu/vaapi/h264_vaapi_video_decoder_delegate.cc
@@ -6,8 +6,10 @@
#include <va/va.h>
+#include "base/memory/aligned_memory.h"
#include "base/stl_util.h"
#include "base/trace_event/trace_event.h"
+#include "media/base/cdm_context.h"
#include "media/gpu/decode_surface_handler.h"
#include "media/gpu/h264_dpb.h"
#include "media/gpu/macros.h"
@@ -34,12 +36,26 @@ static constexpr uint8_t kZigzagScan8x8[64] = {
35, 42, 49, 56, 57, 50, 43, 36, 29, 22, 15, 23, 30, 37, 44, 51,
58, 59, 52, 45, 38, 31, 39, 46, 53, 60, 61, 54, 47, 55, 62, 63};
+int GetSliceHeaderCounter() {
+ // Needs to be static so that if multiple decoders are active at once, they
+ // all receive unique values.
+ static base::AtomicSequenceNumber parsed_slice_hdr_counter;
+ return parsed_slice_hdr_counter.GetNext();
+}
+
} // namespace
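
GetSliceHeaderCounter() above wraps base::AtomicSequenceNumber; a plain C++ equivalent of the same idea (a sketch, not the Chromium implementation) is a function-local atomic counter:

    #include <atomic>

    int GetNextSliceHeaderId() {
      // One process-wide counter, so concurrently active decoder instances
      // still receive unique, monotonically increasing ids.
      static std::atomic<int> counter{0};
      return counter.fetch_add(1, std::memory_order_relaxed);
    }
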
H264VaapiVideoDecoderDelegate::H264VaapiVideoDecoderDelegate(
DecodeSurfaceHandler<VASurface>* const vaapi_dec,
- scoped_refptr<VaapiWrapper> vaapi_wrapper)
- : VaapiVideoDecoderDelegate(vaapi_dec, std::move(vaapi_wrapper)) {}
+ scoped_refptr<VaapiWrapper> vaapi_wrapper,
+ ProtectedSessionUpdateCB on_protected_session_update_cb,
+ CdmContext* cdm_context,
+ EncryptionScheme encryption_scheme)
+ : VaapiVideoDecoderDelegate(vaapi_dec,
+ std::move(vaapi_wrapper),
+ std::move(on_protected_session_update_cb),
+ cdm_context,
+ encryption_scheme) {}
H264VaapiVideoDecoderDelegate::~H264VaapiVideoDecoderDelegate() = default;
@@ -49,7 +65,15 @@ scoped_refptr<H264Picture> H264VaapiVideoDecoderDelegate::CreateH264Picture() {
if (!va_surface)
return nullptr;
- return new VaapiH264Picture(std::move(va_surface));
+ scoped_refptr<H264Picture> pic = new VaapiH264Picture(std::move(va_surface));
+ if (!vaapi_dec_->IsScalingDecode())
+ return pic;
+
+ // Set up the scaling buffer.
+ scoped_refptr<VASurface> scaled_surface = vaapi_dec_->CreateDecodeSurface();
+ CHECK(scaled_surface);
+ pic->AsVaapiH264Picture()->SetDecodeSurface(std::move(scaled_surface));
+ return pic;
}
// Fill |va_pic| with default/neutral values.
@@ -72,6 +96,10 @@ DecodeStatus H264VaapiVideoDecoderDelegate::SubmitFrameMetadata(
"H264VaapiVideoDecoderDelegate::SubmitFrameMetadata");
VAPictureParameterBufferH264 pic_param;
memset(&pic_param, 0, sizeof(pic_param));
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ memset(&crypto_params_, 0, sizeof(crypto_params_));
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+ full_sample_ = false;
#define FROM_SPS_TO_PP(a) pic_param.a = sps->a
#define FROM_SPS_TO_PP2(a, b) pic_param.b = sps->a
@@ -174,6 +202,173 @@ DecodeStatus H264VaapiVideoDecoderDelegate::SubmitFrameMetadata(
return success ? DecodeStatus::kOk : DecodeStatus::kFail;
}
+DecodeStatus H264VaapiVideoDecoderDelegate::ParseEncryptedSliceHeader(
+ const std::vector<base::span<const uint8_t>>& data,
+ const std::vector<SubsampleEntry>& subsamples,
+ const std::vector<uint8_t>& sps_nalu_data,
+ const std::vector<uint8_t>& pps_nalu_data,
+ H264SliceHeader* slice_header_out) {
+ DCHECK(slice_header_out);
+ DCHECK(!subsamples.empty());
+ DCHECK(!data.empty());
+
+ // Encrypted slice header parsing works by submitting the encryption
+ // parameters and the encrypted slice header; the vaEndPicture call then
+ // blocks while the driver decrypts and parses the header. We use
+ // VACencStatusBuf, which allows us to extract the slice header parameters
+ // of interest and return them to the caller.
+
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ VAEncryptionParameters crypto_params = {};
+ // Don't use the VAEncryptionSegmentInfo vector in the class since we do not
+ // need to hold this data across calls.
+ std::vector<VAEncryptionSegmentInfo> segment_info;
+ ProtectedSessionState state =
+ SetupDecryptDecode(true /* full sample */, data[0].size(), &crypto_params,
+ &segment_info, subsamples);
+ if (state == ProtectedSessionState::kFailed) {
+ LOG(ERROR) << "ParseEncryptedSliceHeader fails because we couldn't setup "
+ "the protected session";
+ return DecodeStatus::kFail;
+ } else if (state != ProtectedSessionState::kCreated) {
+ return DecodeStatus::kTryAgain;
+ }
+
+ // For encrypted header parsing, we need to also send the SPS and PPS. Both of
+ // those and the slice NALU need to be prefixed with the 0x000001 start code.
+ constexpr size_t kStartCodeSize = 3;
+ constexpr size_t kExtraDataBytes = 3 * kStartCodeSize;
+
+ // Adjust the first segment length and init length to compensate for inserting
+ // the SPS, PPS and 3 start codes.
+ size_t size_adjustment =
+ sps_nalu_data.size() + pps_nalu_data.size() + kExtraDataBytes;
+ size_t total_size = 0;
+ size_t offset_adjustment = 0;
+ for (auto& segment : segment_info) {
+ segment.segment_length += size_adjustment;
+ segment.init_byte_length += size_adjustment;
+ segment.segment_start_offset += offset_adjustment;
+ offset_adjustment += size_adjustment;
+ // Any additional segments are only adjusted by the start code size.
+ size_adjustment = kStartCodeSize;
+ total_size += segment.segment_length;
+ }
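
To make the adjustment concrete: with hypothetical sizes sps_nalu_data.size() == 20 and pps_nalu_data.size() == 8, kExtraDataBytes is 9, so the loop behaves as follows:

    // size_adjustment starts at 20 + 8 + 9 = 37.
    // Segment 0: segment_length += 37, init_byte_length += 37,
    //            segment_start_offset += 0; size_adjustment becomes 3.
    // Segment 1: segment_length += 3, segment_start_offset += 37.
    // Segment 2: segment_length += 3, segment_start_offset += 40.
    // Every later segment shifts by all bytes inserted before it but grows
    // only by its own 3-byte start code.
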
+
+ crypto_params.status_report_index = GetSliceHeaderCounter();
+
+ // This is based on a sample from Intel for how to use this API.
+ constexpr size_t kDecryptQuerySizeAndAlignment = 4096;
+ std::unique_ptr<void, base::AlignedFreeDeleter> surface_memory(
+ base::AlignedAlloc(kDecryptQuerySizeAndAlignment,
+ kDecryptQuerySizeAndAlignment));
+ constexpr size_t kVaQueryCencBufferSize = 2048;
+ auto back_buffer_mem = std::make_unique<uint8_t[]>(kVaQueryCencBufferSize);
+ VACencStatusBuf* status_buf =
+ reinterpret_cast<VACencStatusBuf*>(surface_memory.get());
+ status_buf->status = VA_ENCRYPTION_STATUS_INCOMPLETE;
+ status_buf->buf = back_buffer_mem.get();
+ status_buf->buf_size = kVaQueryCencBufferSize;
+
+ auto slice_param_buf = std::make_unique<VACencSliceParameterBufferH264>();
+ status_buf->slice_buf_type = VaCencSliceBufParamter;
+ status_buf->slice_buf_size = sizeof(VACencSliceParameterBufferH264);
+ status_buf->slice_buf = slice_param_buf.get();
+
+ constexpr int kCencStatusSurfaceDimension = 64;
+ auto buffer_ptr_alloc = std::make_unique<uintptr_t>();
+ uintptr_t* buffer_ptr = reinterpret_cast<uintptr_t*>(buffer_ptr_alloc.get());
+ buffer_ptr[0] = reinterpret_cast<uintptr_t>(surface_memory.get());
+
+ auto surface = vaapi_wrapper_->CreateVASurfaceForUserPtr(
+ gfx::Size(kCencStatusSurfaceDimension, kCencStatusSurfaceDimension),
+ buffer_ptr,
+ 3 * kCencStatusSurfaceDimension * kCencStatusSurfaceDimension);
+ if (!surface) {
+ DVLOG(1) << "Failed allocating surface for decrypt status";
+ return DecodeStatus::kFail;
+ }
+
+ // Assemble the 'slice data', which is the SPS, PPS, encrypted SEIs and
+ // encrypted slice data, each of which is also prefixed by the 0x000001 start
+ // code.
+ std::vector<uint8_t> full_data;
+ const std::vector<uint8_t> start_code = {0u, 0u, 1u};
+ full_data.reserve(total_size);
+ full_data.insert(full_data.end(), start_code.begin(), start_code.end());
+ full_data.insert(full_data.end(), sps_nalu_data.begin(), sps_nalu_data.end());
+ full_data.insert(full_data.end(), start_code.begin(), start_code.end());
+ full_data.insert(full_data.end(), pps_nalu_data.begin(), pps_nalu_data.end());
+ for (auto& nalu : data) {
+ full_data.insert(full_data.end(), start_code.begin(), start_code.end());
+ full_data.insert(full_data.end(), nalu.begin(), nalu.end());
+ }
+ if (!vaapi_wrapper_->SubmitBuffers({{VAEncryptionParameterBufferType,
+ sizeof(crypto_params), &crypto_params},
+ {VAProtectedSliceDataBufferType,
+ full_data.size(), full_data.data()}})) {
+ DVLOG(1) << "Failure submitting encrypted slice header buffers";
+ return DecodeStatus::kFail;
+ }
+ if (!vaapi_wrapper_->ExecuteAndDestroyPendingBuffers(surface->id())) {
+ LOG(ERROR) << "Failed executing for slice header decrypt";
+ return DecodeStatus::kFail;
+ }
+ if (status_buf->status != VA_ENCRYPTION_STATUS_SUCCESSFUL) {
+ LOG(ERROR) << "Failure status in encrypted header parsing: "
+ << static_cast<int>(status_buf->status);
+ return DecodeStatus::kFail;
+ }
+
+ // Read the parsed slice header data back and populate the structure with it.
+ slice_header_out->idr_pic_flag = !!slice_param_buf->idr_pic_flag;
+ slice_header_out->nal_ref_idc = slice_param_buf->nal_ref_idc;
+ // The last span in |data| will be the slice header NALU.
+ slice_header_out->nalu_data = data.back().data();
+ slice_header_out->nalu_size = data.back().size();
+ slice_header_out->slice_type = slice_param_buf->slice_type;
+ slice_header_out->frame_num = slice_param_buf->frame_number;
+ slice_header_out->idr_pic_id = slice_param_buf->idr_pic_id;
+ slice_header_out->pic_order_cnt_lsb = slice_param_buf->pic_order_cnt_lsb;
+ slice_header_out->delta_pic_order_cnt_bottom =
+ slice_param_buf->delta_pic_order_cnt_bottom;
+ slice_header_out->delta_pic_order_cnt0 =
+ slice_param_buf->delta_pic_order_cnt[0];
+ slice_header_out->delta_pic_order_cnt1 =
+ slice_param_buf->delta_pic_order_cnt[1];
+ slice_header_out->no_output_of_prior_pics_flag =
+ slice_param_buf->ref_pic_fields.bits.no_output_of_prior_pics_flag;
+ slice_header_out->long_term_reference_flag =
+ slice_param_buf->ref_pic_fields.bits.long_term_reference_flag;
+ slice_header_out->adaptive_ref_pic_marking_mode_flag =
+ slice_param_buf->ref_pic_fields.bits.adaptive_ref_pic_marking_mode_flag;
+ const size_t num_dec_ref_pics =
+ slice_param_buf->ref_pic_fields.bits.dec_ref_pic_marking_count;
+ if (num_dec_ref_pics > H264SliceHeader::kRefListSize) {
+ DVLOG(1) << "Invalid number of dec_ref_pics: " << num_dec_ref_pics;
+ return DecodeStatus::kFail;
+ }
+ for (size_t i = 0; i < num_dec_ref_pics; ++i) {
+ slice_header_out->ref_pic_marking[i].memory_mgmnt_control_operation =
+ slice_param_buf->memory_management_control_operation[i];
+ slice_header_out->ref_pic_marking[i].difference_of_pic_nums_minus1 =
+ slice_param_buf->difference_of_pic_nums_minus1[i];
+ slice_header_out->ref_pic_marking[i].long_term_pic_num =
+ slice_param_buf->long_term_pic_num[i];
+ slice_header_out->ref_pic_marking[i].long_term_frame_idx =
+ slice_param_buf->long_term_frame_idx[i];
+ slice_header_out->ref_pic_marking[i].max_long_term_frame_idx_plus1 =
+ slice_param_buf->max_long_term_frame_idx_plus1[i];
+ }
+ slice_header_out->full_sample_encryption = true;
+ slice_header_out->full_sample_index =
+ status_buf->status_report_index_feedback;
+ return DecodeStatus::kOk;
+#else // BUILDFLAG(IS_CHROMEOS_ASH)
+ return DecodeStatus::kFail;
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+}
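
The |full_data| assembly above is Annex-B style framing: every NALU handed to the driver is prefixed with the 0x000001 start code. The same step in isolation (names are illustrative):

    #include <cstdint>
    #include <iterator>
    #include <vector>

    std::vector<uint8_t> AssembleWithStartCodes(
        const std::vector<std::vector<uint8_t>>& nalus) {
      static const uint8_t kStartCode[] = {0u, 0u, 1u};
      std::vector<uint8_t> out;
      for (const auto& nalu : nalus) {
        out.insert(out.end(), std::begin(kStartCode), std::end(kStartCode));
        out.insert(out.end(), nalu.begin(), nalu.end());
      }
      return out;
    }
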
+
DecodeStatus H264VaapiVideoDecoderDelegate::SubmitSlice(
const H264PPS* pps,
const H264SliceHeader* slice_hdr,
@@ -185,6 +380,34 @@ DecodeStatus H264VaapiVideoDecoderDelegate::SubmitSlice(
const std::vector<SubsampleEntry>& subsamples) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
TRACE_EVENT0("media,gpu", "H264VaapiVideoDecoderDelegate::SubmitSlice");
+ if (slice_hdr->full_sample_encryption) {
+ // We do not need to submit all the slice data; instead, we just submit the
+ // index for what was already sent for parsing. The HW decoder already has
+ // the full slice data from when we decrypted the header.
+ full_sample_ = true;
+ VACencStatusParameters cenc_status = {};
+ cenc_status.status_report_index_feedback = slice_hdr->full_sample_index;
+ return vaapi_wrapper_->SubmitBuffer(VACencStatusParameterBufferType,
+ sizeof(VACencStatusParameters),
+ &cenc_status)
+ ? DecodeStatus::kOk
+ : DecodeStatus::kFail;
+ }
+
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ if (IsEncryptedSession()) {
+ const ProtectedSessionState state = SetupDecryptDecode(
+ /*full_sample=*/false, size, &crypto_params_, &encryption_segment_info_,
+ subsamples);
+ if (state == ProtectedSessionState::kFailed) {
+ LOG(ERROR) << "SubmitSlice fails because we couldn't setup the protected "
+ "session";
+ return DecodeStatus::kFail;
+ } else if (state != ProtectedSessionState::kCreated) {
+ return DecodeStatus::kTryAgain;
+ }
+ }
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
VASliceParameterBufferH264 slice_param;
memset(&slice_param, 0, sizeof(slice_param));
@@ -273,10 +496,11 @@ DecodeStatus H264VaapiVideoDecoderDelegate::SubmitSlice(
FillVAPicture(&slice_param.RefPicList1[i], ref_pic_list1[i]);
}
- const bool success = vaapi_wrapper_->SubmitBuffers(
- {{VASliceParameterBufferType, sizeof(slice_param), &slice_param},
- {VASliceDataBufferType, size, data}});
- return success ? DecodeStatus::kOk : DecodeStatus::kFail;
+ return vaapi_wrapper_->SubmitBuffers(
+ {{VASliceParameterBufferType, sizeof(slice_param), &slice_param},
+ {VASliceDataBufferType, size, data}})
+ ? DecodeStatus::kOk
+ : DecodeStatus::kFail;
}
DecodeStatus H264VaapiVideoDecoderDelegate::SubmitDecode(
@@ -284,8 +508,36 @@ DecodeStatus H264VaapiVideoDecoderDelegate::SubmitDecode(
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
TRACE_EVENT0("media,gpu", "H264VaapiVideoDecoderDelegate::SubmitDecode");
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ if (IsEncryptedSession() && !full_sample_ &&
+ !vaapi_wrapper_->SubmitBuffer(VAEncryptionParameterBufferType,
+ sizeof(crypto_params_), &crypto_params_)) {
+ return DecodeStatus::kFail;
+ }
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+ const VaapiH264Picture* vaapi_pic = pic->AsVaapiH264Picture();
+ CHECK(gfx::Rect(vaapi_pic->GetDecodeSize()).Contains(pic->visible_rect()));
+ VAProcPipelineParameterBuffer proc_buffer;
+ if (FillDecodeScalingIfNeeded(pic->visible_rect(),
+ vaapi_pic->GetVADecodeSurfaceID(),
+ vaapi_pic->va_surface(), &proc_buffer)) {
+ if (!vaapi_wrapper_->SubmitBuffer(VAProcPipelineParameterBufferType,
+ sizeof(proc_buffer), &proc_buffer)) {
+ DLOG(ERROR) << "Failed submitting proc buffer";
+ return DecodeStatus::kFail;
+ }
+ }
+
const bool success = vaapi_wrapper_->ExecuteAndDestroyPendingBuffers(
- pic->AsVaapiH264Picture()->va_surface()->id());
+ vaapi_pic->GetVADecodeSurfaceID());
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ encryption_segment_info_.clear();
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+ if (!success && NeedsProtectedSessionRecovery())
+ return DecodeStatus::kTryAgain;
+
+ if (success && IsEncryptedSession())
+ ProtectedDecodedSucceeded();
return success ? DecodeStatus::kOk : DecodeStatus::kFail;
}
@@ -294,17 +546,32 @@ bool H264VaapiVideoDecoderDelegate::OutputPicture(
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
const VaapiH264Picture* vaapi_pic = pic->AsVaapiH264Picture();
- vaapi_dec_->SurfaceReady(vaapi_pic->va_surface(), vaapi_pic->bitstream_id(),
- vaapi_pic->visible_rect(),
- vaapi_pic->get_colorspace());
+ vaapi_dec_->SurfaceReady(
+ vaapi_pic->va_surface(), vaapi_pic->bitstream_id(),
+ vaapi_dec_->GetOutputVisibleRect(vaapi_pic->visible_rect(),
+ vaapi_pic->va_surface()->size()),
+ vaapi_pic->get_colorspace());
return true;
}
void H264VaapiVideoDecoderDelegate::Reset() {
DETACH_FROM_SEQUENCE(sequence_checker_);
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ encryption_segment_info_.clear();
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
vaapi_wrapper_->DestroyPendingBuffers();
}
+DecodeStatus H264VaapiVideoDecoderDelegate::SetStream(
+ base::span<const uint8_t> /*stream*/,
+ const DecryptConfig* decrypt_config) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ if (!decrypt_config)
+ return Status::kOk;
+ return SetDecryptConfig(decrypt_config->Clone()) ? Status::kOk
+ : Status::kFail;
+}
+
void H264VaapiVideoDecoderDelegate::FillVAPicture(
VAPictureH264* va_pic,
scoped_refptr<H264Picture> pic) {
@@ -312,7 +579,7 @@ void H264VaapiVideoDecoderDelegate::FillVAPicture(
VASurfaceID va_surface_id = VA_INVALID_SURFACE;
if (!pic->nonexisting)
- va_surface_id = pic->AsVaapiH264Picture()->va_surface()->id();
+ va_surface_id = pic->AsVaapiH264Picture()->GetVADecodeSurfaceID();
va_pic->picture_id = va_surface_id;
va_pic->frame_idx = pic->frame_num;
diff --git a/chromium/media/gpu/vaapi/h264_vaapi_video_decoder_delegate.h b/chromium/media/gpu/vaapi/h264_vaapi_video_decoder_delegate.h
index d196678b314..29c8ed1102e 100644
--- a/chromium/media/gpu/vaapi/h264_vaapi_video_decoder_delegate.h
+++ b/chromium/media/gpu/vaapi/h264_vaapi_video_decoder_delegate.h
@@ -5,8 +5,10 @@
#ifndef MEDIA_GPU_VAAPI_H264_VAAPI_VIDEO_DECODER_DELEGATE_H_
#define MEDIA_GPU_VAAPI_H264_VAAPI_VIDEO_DECODER_DELEGATE_H_
+#include "base/atomic_sequence_num.h"
#include "base/memory/scoped_refptr.h"
#include "base/sequence_checker.h"
+#include "build/chromeos_buildflags.h"
#include "media/gpu/h264_decoder.h"
#include "media/gpu/vaapi/vaapi_video_decoder_delegate.h"
#include "media/video/h264_parser.h"
@@ -16,13 +18,19 @@ typedef struct _VAPictureH264 VAPictureH264;
namespace media {
+class CdmContext;
class H264Picture;
class H264VaapiVideoDecoderDelegate : public H264Decoder::H264Accelerator,
public VaapiVideoDecoderDelegate {
public:
- H264VaapiVideoDecoderDelegate(DecodeSurfaceHandler<VASurface>* vaapi_dec,
- scoped_refptr<VaapiWrapper> vaapi_wrapper);
+ H264VaapiVideoDecoderDelegate(
+ DecodeSurfaceHandler<VASurface>* vaapi_dec,
+ scoped_refptr<VaapiWrapper> vaapi_wrapper,
+ ProtectedSessionUpdateCB on_protected_session_update_cb =
+ base::DoNothing(),
+ CdmContext* cdm_context = nullptr,
+ EncryptionScheme encryption_scheme = EncryptionScheme::kUnencrypted);
~H264VaapiVideoDecoderDelegate() override;
// H264Decoder::H264Accelerator implementation.
@@ -34,6 +42,12 @@ class H264VaapiVideoDecoderDelegate : public H264Decoder::H264Accelerator,
const H264Picture::Vector& ref_pic_listb0,
const H264Picture::Vector& ref_pic_listb1,
scoped_refptr<H264Picture> pic) override;
+ Status ParseEncryptedSliceHeader(
+ const std::vector<base::span<const uint8_t>>& data,
+ const std::vector<SubsampleEntry>& subsamples,
+ const std::vector<uint8_t>& sps_nalu_data,
+ const std::vector<uint8_t>& pps_nalu_data,
+ H264SliceHeader* slice_header_out) override;
Status SubmitSlice(const H264PPS* pps,
const H264SliceHeader* slice_hdr,
const H264Picture::Vector& ref_pic_list0,
@@ -45,6 +59,8 @@ class H264VaapiVideoDecoderDelegate : public H264Decoder::H264Accelerator,
Status SubmitDecode(scoped_refptr<H264Picture> pic) override;
bool OutputPicture(scoped_refptr<H264Picture> pic) override;
void Reset() override;
+ Status SetStream(base::span<const uint8_t> stream,
+ const DecryptConfig* decrypt_config) override;
private:
void FillVAPicture(VAPictureH264* va_pic, scoped_refptr<H264Picture> pic);
@@ -52,6 +68,20 @@ class H264VaapiVideoDecoderDelegate : public H264Decoder::H264Accelerator,
VAPictureH264* va_pics,
int num_pics);
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ // We need to hold onto this memory here because it's referenced by the
+ // mapped buffer in libva across calls. It is filled in SubmitSlice() and
+ // stays alive until SubmitDecode() or Reset().
+ std::vector<VAEncryptionSegmentInfo> encryption_segment_info_;
+
+ // We need to retain this for the multi-slice case since that will aggregate
+ // the encryption details across all the slices.
+ VAEncryptionParameters crypto_params_;
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+
+ // We need to set this so we don't resubmit crypto params on decode.
+ bool full_sample_;
+
DISALLOW_COPY_AND_ASSIGN(H264VaapiVideoDecoderDelegate);
};
diff --git a/chromium/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.cc b/chromium/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.cc
index fca887e04ba..eef9044281f 100644
--- a/chromium/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.cc
+++ b/chromium/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.cc
@@ -6,13 +6,12 @@
#include "base/stl_util.h"
#include "build/chromeos_buildflags.h"
+#include "media/base/cdm_context.h"
#include "media/gpu/decode_surface_handler.h"
#include "media/gpu/macros.h"
#include "media/gpu/vaapi/vaapi_common.h"
#include "media/gpu/vaapi/vaapi_wrapper.h"
-#include "base/strings/string_number_conversions.h"
-
namespace media {
namespace {
@@ -38,8 +37,15 @@ using DecodeStatus = H265Decoder::H265Accelerator::Status;
H265VaapiVideoDecoderDelegate::H265VaapiVideoDecoderDelegate(
DecodeSurfaceHandler<VASurface>* const vaapi_dec,
- scoped_refptr<VaapiWrapper> vaapi_wrapper)
- : VaapiVideoDecoderDelegate(vaapi_dec, std::move(vaapi_wrapper)) {
+ scoped_refptr<VaapiWrapper> vaapi_wrapper,
+ ProtectedSessionUpdateCB on_protected_session_update_cb,
+ CdmContext* cdm_context,
+ EncryptionScheme encryption_scheme)
+ : VaapiVideoDecoderDelegate(vaapi_dec,
+ std::move(vaapi_wrapper),
+ std::move(on_protected_session_update_cb),
+ cdm_context,
+ encryption_scheme) {
ref_pic_list_pocs_.reserve(kMaxRefIdxActive);
}
@@ -51,7 +57,15 @@ scoped_refptr<H265Picture> H265VaapiVideoDecoderDelegate::CreateH265Picture() {
if (!va_surface)
return nullptr;
- return new VaapiH265Picture(std::move(va_surface));
+ scoped_refptr<H265Picture> pic = new VaapiH265Picture(std::move(va_surface));
+ if (!vaapi_dec_->IsScalingDecode())
+ return pic;
+
+ // Set up the scaling buffer.
+ scoped_refptr<VASurface> scaled_surface = vaapi_dec_->CreateDecodeSurface();
+ CHECK(scaled_surface);
+ pic->AsVaapiH265Picture()->SetDecodeSurface(std::move(scaled_surface));
+ return pic;
}
DecodeStatus H265VaapiVideoDecoderDelegate::SubmitFrameMetadata(
@@ -65,6 +79,9 @@ DecodeStatus H265VaapiVideoDecoderDelegate::SubmitFrameMetadata(
VAPictureParameterBufferHEVC pic_param;
memset(&pic_param, 0, sizeof(pic_param));
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ memset(&crypto_params_, 0, sizeof(crypto_params_));
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
int highest_tid = sps->sps_max_sub_layers_minus1;
#define FROM_SPS_TO_PP(a) pic_param.a = sps->a
@@ -289,6 +306,20 @@ DecodeStatus H265VaapiVideoDecoderDelegate::SubmitSlice(
return DecodeStatus::kFail;
}
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ if (IsEncryptedSession()) {
+ const ProtectedSessionState state =
+ SetupDecryptDecode(/*full_sample=*/false, size, &crypto_params_,
+ &encryption_segment_info_, subsamples);
+ if (state == ProtectedSessionState::kFailed) {
+ LOG(ERROR) << "SubmitSlice fails because we couldn't setup the protected "
+ "session";
+ return DecodeStatus::kFail;
+ } else if (state != ProtectedSessionState::kCreated) {
+ return DecodeStatus::kTryAgain;
+ }
+ }
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
memset(&slice_param_, 0, sizeof(slice_param_));
slice_param_.slice_data_size = slice_hdr->nalu_size;
@@ -411,7 +442,7 @@ DecodeStatus H265VaapiVideoDecoderDelegate::SubmitSlice(
#if BUILDFLAG(IS_CHROMEOS_ASH)
slice_param_.slice_data_num_emu_prevn_bytes =
slice_hdr->header_emulation_prevention_bytes;
-#endif
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
last_slice_data_ = data;
last_slice_size_ = size;
@@ -427,11 +458,40 @@ DecodeStatus H265VaapiVideoDecoderDelegate::SubmitDecode(
return DecodeStatus::kFail;
}
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ if (IsEncryptedSession() &&
+ !vaapi_wrapper_->SubmitBuffer(VAEncryptionParameterBufferType,
+ sizeof(crypto_params_), &crypto_params_)) {
+ return DecodeStatus::kFail;
+ }
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+
+ const VaapiH265Picture* vaapi_pic = pic->AsVaapiH265Picture();
+ CHECK(gfx::Rect(vaapi_pic->GetDecodeSize()).Contains(pic->visible_rect()));
+ VAProcPipelineParameterBuffer proc_buffer;
+ if (FillDecodeScalingIfNeeded(pic->visible_rect(),
+ vaapi_pic->GetVADecodeSurfaceID(),
+ vaapi_pic->va_surface(), &proc_buffer)) {
+ if (!vaapi_wrapper_->SubmitBuffer(VAProcPipelineParameterBufferType,
+ sizeof(proc_buffer), &proc_buffer)) {
+ DLOG(ERROR) << "Failed submitting proc buffer";
+ return DecodeStatus::kFail;
+ }
+ }
+
+ const bool success = vaapi_wrapper_->ExecuteAndDestroyPendingBuffers(
+ vaapi_pic->GetVADecodeSurfaceID());
ref_pic_list_pocs_.clear();
- return vaapi_wrapper_->ExecuteAndDestroyPendingBuffers(
- pic->AsVaapiH265Picture()->va_surface()->id())
- ? DecodeStatus::kOk
- : DecodeStatus::kFail;
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ encryption_segment_info_.clear();
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+ if (!success && NeedsProtectedSessionRecovery())
+ return DecodeStatus::kTryAgain;
+
+ if (success && IsEncryptedSession())
+ ProtectedDecodedSucceeded();
+
+ return success ? DecodeStatus::kOk : DecodeStatus::kFail;
}
bool H265VaapiVideoDecoderDelegate::OutputPicture(
@@ -439,9 +499,11 @@ bool H265VaapiVideoDecoderDelegate::OutputPicture(
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
const VaapiH265Picture* vaapi_pic = pic->AsVaapiH265Picture();
- vaapi_dec_->SurfaceReady(vaapi_pic->va_surface(), vaapi_pic->bitstream_id(),
- vaapi_pic->visible_rect(),
- vaapi_pic->get_colorspace());
+ vaapi_dec_->SurfaceReady(
+ vaapi_pic->va_surface(), vaapi_pic->bitstream_id(),
+ vaapi_dec_->GetOutputVisibleRect(vaapi_pic->visible_rect(),
+ vaapi_pic->va_surface()->size()),
+ vaapi_pic->get_colorspace());
return true;
}
@@ -449,14 +511,27 @@ void H265VaapiVideoDecoderDelegate::Reset() {
DETACH_FROM_SEQUENCE(sequence_checker_);
vaapi_wrapper_->DestroyPendingBuffers();
ref_pic_list_pocs_.clear();
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ encryption_segment_info_.clear();
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
last_slice_data_ = nullptr;
}
+DecodeStatus H265VaapiVideoDecoderDelegate::SetStream(
+ base::span<const uint8_t> /*stream*/,
+ const DecryptConfig* decrypt_config) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ if (!decrypt_config)
+ return Status::kOk;
+ return SetDecryptConfig(decrypt_config->Clone()) ? Status::kOk
+ : Status::kFail;
+}
+
void H265VaapiVideoDecoderDelegate::FillVAPicture(
VAPictureHEVC* va_pic,
scoped_refptr<H265Picture> pic) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- va_pic->picture_id = pic->AsVaapiH265Picture()->va_surface()->id();
+ va_pic->picture_id = pic->AsVaapiH265Picture()->GetVADecodeSurfaceID();
va_pic->pic_order_cnt = pic->pic_order_cnt_val_;
va_pic->flags = 0;
diff --git a/chromium/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.h b/chromium/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.h
index 6fd06e5e72e..f02871a1b22 100644
--- a/chromium/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.h
+++ b/chromium/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.h
@@ -8,6 +8,7 @@
#include <va/va.h>
#include "base/memory/scoped_refptr.h"
+#include "build/chromeos_buildflags.h"
#include "media/gpu/h265_decoder.h"
#include "media/gpu/h265_dpb.h"
#include "media/gpu/vaapi/vaapi_video_decoder_delegate.h"
@@ -18,13 +19,18 @@ typedef struct _VAPictureHEVC VAPictureHEVC;
namespace media {
+class CdmContext;
class H265Picture;
class H265VaapiVideoDecoderDelegate : public H265Decoder::H265Accelerator,
public VaapiVideoDecoderDelegate {
public:
- H265VaapiVideoDecoderDelegate(DecodeSurfaceHandler<VASurface>* vaapi_dec,
- scoped_refptr<VaapiWrapper> vaapi_wrapper);
+ H265VaapiVideoDecoderDelegate(
+ DecodeSurfaceHandler<VASurface>* vaapi_dec,
+ scoped_refptr<VaapiWrapper> vaapi_wrapper,
+ ProtectedSessionUpdateCB on_protected_session_update_cb,
+ CdmContext* cdm_context,
+ EncryptionScheme encryption_scheme);
H265VaapiVideoDecoderDelegate(const H265VaapiVideoDecoderDelegate&) = delete;
H265VaapiVideoDecoderDelegate& operator=(
@@ -51,6 +57,8 @@ class H265VaapiVideoDecoderDelegate : public H265Decoder::H265Accelerator,
Status SubmitDecode(scoped_refptr<H265Picture> pic) override;
bool OutputPicture(scoped_refptr<H265Picture> pic) override;
void Reset() override;
+ Status SetStream(base::span<const uint8_t> stream,
+ const DecryptConfig* decrypt_config) override;
private:
void FillVAPicture(VAPictureHEVC* va_pic, scoped_refptr<H265Picture> pic);
@@ -80,6 +88,17 @@ class H265VaapiVideoDecoderDelegate : public H265Decoder::H265Accelerator,
// |slice_param_| filled.
const uint8_t* last_slice_data_{nullptr};
size_t last_slice_size_{0};
+
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ // We need to hold onto this memory here because it's referenced by the
+ // mapped buffer in libva across calls. It is filled in SubmitSlice() and
+ // stays alive until SubmitDecode() or Reset().
+ std::vector<VAEncryptionSegmentInfo> encryption_segment_info_;
+
+ // We need to retain this for the multi-slice case since that will aggregate
+ // the encryption details across all the slices.
+ VAEncryptionParameters crypto_params_;
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
};
} // namespace media
diff --git a/chromium/media/gpu/vaapi/va_prot.sigs b/chromium/media/gpu/vaapi/va_prot.sigs
new file mode 100644
index 00000000000..dd13dafef1a
--- /dev/null
+++ b/chromium/media/gpu/vaapi/va_prot.sigs
@@ -0,0 +1,12 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+//------------------------------------------------
+// Functions from libva protected content interface used in chromium code.
+//------------------------------------------------
+VAStatus vaCreateProtectedSession(VADisplay dpy, VAConfigID config_id, VAProtectedSessionID *protected_session);
+VAStatus vaDestroyProtectedSession(VADisplay dpy, VAProtectedSessionID protected_session);
+VAStatus vaAttachProtectedSession(VADisplay dpy, VAContextID reserved, VAProtectedSessionID protected_session);
+VAStatus vaDetachProtectedSession(VADisplay dpy, VAContextID reserved);
+VAStatus vaProtectedSessionExecute(VADisplay dpy, VAProtectedSessionID protected_session, VABufferID buf_id); \ No newline at end of file
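
The entry points above imply a create/attach/execute/detach/destroy lifecycle. A hedged sketch of that sequence, using only the signatures declared in this file (obtaining a valid VADisplay, VAConfigID, VAContextID and VABufferID is assumed and out of scope):

    #include <va/va.h>
    #include "third_party/libva_protected_content/va_protected_content.h"

    VAStatus RunProtectedSession(VADisplay dpy, VAConfigID config_id,
                                 VAContextID ctx, VABufferID buf_id) {
      VAProtectedSessionID session;
      VAStatus status = vaCreateProtectedSession(dpy, config_id, &session);
      if (status != VA_STATUS_SUCCESS)
        return status;
      status = vaAttachProtectedSession(dpy, ctx, session);
      if (status == VA_STATUS_SUCCESS) {
        status = vaProtectedSessionExecute(dpy, session, buf_id);
        vaDetachProtectedSession(dpy, ctx);
      }
      vaDestroyProtectedSession(dpy, session);
      return status;
    }
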
diff --git a/chromium/media/gpu/vaapi/vaapi_common.cc b/chromium/media/gpu/vaapi/vaapi_common.cc
index 415f84acf89..763c3919ff3 100644
--- a/chromium/media/gpu/vaapi/vaapi_common.cc
+++ b/chromium/media/gpu/vaapi/vaapi_common.cc
@@ -4,6 +4,8 @@
#include "media/gpu/vaapi/vaapi_common.h"
+#include "build/chromeos_buildflags.h"
+
namespace media {
VaapiH264Picture::VaapiH264Picture(scoped_refptr<VASurface> va_surface)
@@ -15,6 +17,11 @@ VaapiH264Picture* VaapiH264Picture::AsVaapiH264Picture() {
return this;
}
+void VaapiH264Picture::SetDecodeSurface(
+ scoped_refptr<VASurface> decode_va_surface) {
+ decode_va_surface_ = std::move(decode_va_surface);
+}
+
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
VaapiH265Picture::VaapiH265Picture(scoped_refptr<VASurface> va_surface)
: va_surface_(va_surface) {}
@@ -24,6 +31,12 @@ VaapiH265Picture::~VaapiH265Picture() = default;
VaapiH265Picture* VaapiH265Picture::AsVaapiH265Picture() {
return this;
}
+
+void VaapiH265Picture::SetDecodeSurface(
+ scoped_refptr<VASurface> decode_va_surface) {
+ decode_va_surface_ = std::move(decode_va_surface);
+}
+
#endif // BUILDFLAG(ENABLE_PLATFORM_HEVC)
VaapiVP8Picture::VaapiVP8Picture(scoped_refptr<VASurface> va_surface)
@@ -44,8 +57,26 @@ VaapiVP9Picture* VaapiVP9Picture::AsVaapiVP9Picture() {
return this;
}
+void VaapiVP9Picture::SetDecodeSurface(
+ scoped_refptr<VASurface> decode_va_surface) {
+ decode_va_surface_ = std::move(decode_va_surface);
+}
+
scoped_refptr<VP9Picture> VaapiVP9Picture::CreateDuplicate() {
return new VaapiVP9Picture(va_surface_);
}
+VaapiAV1Picture::VaapiAV1Picture(
+ scoped_refptr<VASurface> display_va_surface,
+ scoped_refptr<VASurface> reconstruct_va_surface)
+ : display_va_surface_(std::move(display_va_surface)),
+ reconstruct_va_surface_(std::move(reconstruct_va_surface)) {}
+
+VaapiAV1Picture::~VaapiAV1Picture() = default;
+
+scoped_refptr<AV1Picture> VaapiAV1Picture::CreateDuplicate() {
+ return base::MakeRefCounted<VaapiAV1Picture>(display_va_surface_,
+ reconstruct_va_surface_);
+}
+
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_common.h b/chromium/media/gpu/vaapi/vaapi_common.h
index 1c71ce6c813..1a530cb48b3 100644
--- a/chromium/media/gpu/vaapi/vaapi_common.h
+++ b/chromium/media/gpu/vaapi/vaapi_common.h
@@ -4,6 +4,8 @@
#ifndef MEDIA_GPU_VAAPI_VAAPI_COMMON_H_
#define MEDIA_GPU_VAAPI_VAAPI_COMMON_H_
+#include "build/chromeos_buildflags.h"
+#include "media/gpu/av1_picture.h"
#include "media/gpu/h264_dpb.h"
#include "media/gpu/vaapi/va_surface.h"
#include "media/gpu/vp8_picture.h"
@@ -28,12 +30,21 @@ class VaapiH264Picture : public H264Picture {
scoped_refptr<VASurface> va_surface() const { return va_surface_; }
VASurfaceID GetVASurfaceID() const { return va_surface_->id(); }
+ void SetDecodeSurface(scoped_refptr<VASurface> decode_va_surface);
+ VASurfaceID GetVADecodeSurfaceID() const {
+ return decode_va_surface_ ? decode_va_surface_->id() : GetVASurfaceID();
+ }
+ const gfx::Size& GetDecodeSize() const {
+ return decode_va_surface_ ? decode_va_surface_->size()
+ : va_surface_->size();
+ }
protected:
~VaapiH264Picture() override;
private:
scoped_refptr<VASurface> va_surface_;
+ scoped_refptr<VASurface> decode_va_surface_;
DISALLOW_COPY_AND_ASSIGN(VaapiH264Picture);
};
@@ -50,12 +61,21 @@ class VaapiH265Picture : public H265Picture {
scoped_refptr<VASurface> va_surface() const { return va_surface_; }
VASurfaceID GetVASurfaceID() const { return va_surface_->id(); }
+ void SetDecodeSurface(scoped_refptr<VASurface> decode_va_surface);
+ VASurfaceID GetVADecodeSurfaceID() const {
+ return decode_va_surface_ ? decode_va_surface_->id() : GetVASurfaceID();
+ }
+ const gfx::Size& GetDecodeSize() const {
+ return decode_va_surface_ ? decode_va_surface_->size()
+ : va_surface_->size();
+ }
protected:
~VaapiH265Picture() override;
private:
scoped_refptr<VASurface> va_surface_;
+ scoped_refptr<VASurface> decode_va_surface_;
};
#endif // BUILDFLAG(ENABLE_PLATFORM_HEVC)
@@ -85,6 +105,14 @@ class VaapiVP9Picture : public VP9Picture {
scoped_refptr<VASurface> va_surface() const { return va_surface_; }
VASurfaceID GetVASurfaceID() const { return va_surface_->id(); }
+ void SetDecodeSurface(scoped_refptr<VASurface> decode_va_surface);
+ VASurfaceID GetVADecodeSurfaceID() const {
+ return decode_va_surface_ ? decode_va_surface_->id() : GetVASurfaceID();
+ }
+ const gfx::Size& GetDecodeSize() const {
+ return decode_va_surface_ ? decode_va_surface_->size()
+ : va_surface_->size();
+ }
protected:
~VaapiVP9Picture() override;
@@ -93,10 +121,45 @@ class VaapiVP9Picture : public VP9Picture {
scoped_refptr<VP9Picture> CreateDuplicate() override;
scoped_refptr<VASurface> va_surface_;
+ scoped_refptr<VASurface> decode_va_surface_;
DISALLOW_COPY_AND_ASSIGN(VaapiVP9Picture);
};
+class VaapiAV1Picture : public AV1Picture {
+ public:
+ VaapiAV1Picture(scoped_refptr<VASurface> display_va_surface,
+ scoped_refptr<VASurface> reconstruct_va_surface);
+ VaapiAV1Picture(const VaapiAV1Picture&) = delete;
+ VaapiAV1Picture& operator=(const VaapiAV1Picture&) = delete;
+
+ const scoped_refptr<VASurface>& display_va_surface() const {
+ return display_va_surface_;
+ }
+ const scoped_refptr<VASurface>& reconstruct_va_surface() const {
+ return reconstruct_va_surface_;
+ }
+
+ protected:
+ ~VaapiAV1Picture() override;
+
+ private:
+ scoped_refptr<AV1Picture> CreateDuplicate() override;
+
+ // |display_va_surface_| refers to the final decoded frame, both when using
+ // film grain synthesis and when not using film grain.
+ // |reconstruct_va_surface_| is only useful when using film grain synthesis:
+ // it's the decoded frame prior to applying the film grain.
+ // When not using film grain synthesis, |reconstruct_va_surface_| is equal to
+ // |display_va_surface_|. This is necessary to simplify the reference frame
+ // code when filling the VA-API structures and to be able to always use
+ // reconstruct_va_surface() when calling ExecuteAndDestroyPendingBuffers()
+ // (the driver expects the reconstructed surface as the target in the case
+ // of film grain synthesis).
+ scoped_refptr<VASurface> display_va_surface_;
+ scoped_refptr<VASurface> reconstruct_va_surface_;
+};
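
Following the comment above, construction takes one of two shapes depending on film grain (a usage sketch; |display_surface| and |reconstruct_surface| are assumed to be valid scoped_refptr<VASurface> values):

    // No film grain synthesis: both members alias the same surface.
    auto pic = base::MakeRefCounted<VaapiAV1Picture>(display_surface,
                                                     display_surface);

    // Film grain synthesis: the driver decodes into |reconstruct_surface|
    // and writes the grain-applied frame to |display_surface|.
    auto grain_pic = base::MakeRefCounted<VaapiAV1Picture>(
        display_surface, reconstruct_surface);
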
+
} // namespace media
#endif // MEDIA_GPU_VAAPI_VAAPI_COMMON_H_
diff --git a/chromium/media/gpu/vaapi/vaapi_dmabuf_video_frame_mapper.cc b/chromium/media/gpu/vaapi/vaapi_dmabuf_video_frame_mapper.cc
index 6674b1e282f..fa533e05721 100644
--- a/chromium/media/gpu/vaapi/vaapi_dmabuf_video_frame_mapper.cc
+++ b/chromium/media/gpu/vaapi/vaapi_dmabuf_video_frame_mapper.cc
@@ -162,6 +162,7 @@ VaapiDmaBufVideoFrameMapper::VaapiDmaBufVideoFrameMapper(
: VideoFrameMapper(format),
vaapi_wrapper_(VaapiWrapper::Create(VaapiWrapper::kVideoProcess,
VAProfileNone,
+ EncryptionScheme::kUnencrypted,
base::DoNothing())) {}
VaapiDmaBufVideoFrameMapper::~VaapiDmaBufVideoFrameMapper() {}
diff --git a/chromium/media/gpu/vaapi/vaapi_image_decoder.cc b/chromium/media/gpu/vaapi/vaapi_image_decoder.cc
index 3c13981d14b..1c92c7b6fa8 100644
--- a/chromium/media/gpu/vaapi/vaapi_image_decoder.cc
+++ b/chromium/media/gpu/vaapi/vaapi_image_decoder.cc
@@ -29,7 +29,8 @@ VaapiImageDecoder::~VaapiImageDecoder() = default;
bool VaapiImageDecoder::Initialize(const ReportErrorToUMACB& error_uma_cb) {
vaapi_wrapper_ =
- VaapiWrapper::Create(VaapiWrapper::kDecode, va_profile_, error_uma_cb);
+ VaapiWrapper::Create(VaapiWrapper::kDecode, va_profile_,
+ EncryptionScheme::kUnencrypted, error_uma_cb);
return !!vaapi_wrapper_;
}
diff --git a/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc
index d94c070be88..cce592e9993 100644
--- a/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc
@@ -500,6 +500,7 @@ VaapiJpegEncodeAccelerator::Initialize(
client_ = client;
scoped_refptr<VaapiWrapper> vaapi_wrapper = VaapiWrapper::Create(
VaapiWrapper::kEncode, VAProfileJPEGBaseline,
+ EncryptionScheme::kUnencrypted,
base::BindRepeating(&ReportVaapiErrorToUMA,
"Media.VaapiJpegEncodeAccelerator.VAAPIError"));
@@ -510,6 +511,7 @@ VaapiJpegEncodeAccelerator::Initialize(
scoped_refptr<VaapiWrapper> vpp_vaapi_wrapper = VaapiWrapper::Create(
VaapiWrapper::kVideoProcess, VAProfileNone,
+ EncryptionScheme::kUnencrypted,
base::BindRepeating(&ReportVaapiErrorToUMA,
"Media.VaapiJpegEncodeAccelerator.Vpp.VAAPIError"));
if (!vpp_vaapi_wrapper) {
diff --git a/chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.cc
index 3bbac0d7a98..40ec7291eb1 100644
--- a/chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.cc
@@ -141,6 +141,7 @@ bool VaapiMjpegDecodeAccelerator::Initialize(
vpp_vaapi_wrapper_ = VaapiWrapper::Create(
VaapiWrapper::kVideoProcess, VAProfileNone,
+ EncryptionScheme::kUnencrypted,
base::BindRepeating(&ReportVaapiErrorToUMA,
"Media.VaapiMjpegDecodeAccelerator.Vpp.VAAPIError"));
if (!vpp_vaapi_wrapper_) {
diff --git a/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_angle.cc b/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_angle.cc
index 2f070bf61ed..9de0c93b442 100644
--- a/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_angle.cc
+++ b/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_angle.cc
@@ -8,6 +8,7 @@
#include "media/gpu/vaapi/vaapi_wrapper.h"
#include "ui/base/ui_base_features.h"
#include "ui/gfx/x/connection.h"
+#include "ui/gfx/x/future.h"
#include "ui/gfx/x/xproto.h"
#include "ui/gl/gl_bindings.h"
#include "ui/gl/gl_image_egl_pixmap.h"
@@ -25,7 +26,7 @@ x11::Pixmap CreatePixmap(const gfx::Size& size) {
auto root = connection->default_root();
uint8_t depth = 0;
- if (auto reply = connection->GetGeometry({root}).Sync())
+ if (auto reply = connection->GetGeometry(root).Sync())
depth = reply->depth;
else
return x11::Pixmap::None;
diff --git a/chromium/media/gpu/vaapi/vaapi_picture_tfp.cc b/chromium/media/gpu/vaapi/vaapi_picture_tfp.cc
index 7b604f19f33..3f7e221d8a0 100644
--- a/chromium/media/gpu/vaapi/vaapi_picture_tfp.cc
+++ b/chromium/media/gpu/vaapi/vaapi_picture_tfp.cc
@@ -8,6 +8,7 @@
#include "media/gpu/vaapi/vaapi_wrapper.h"
#include "ui/base/ui_base_features.h"
#include "ui/gfx/x/connection.h"
+#include "ui/gfx/x/future.h"
#include "ui/gl/gl_bindings.h"
#include "ui/gl/gl_image_glx.h"
#include "ui/gl/scoped_binders.h"
@@ -90,7 +91,7 @@ Status VaapiTFPPicture::Allocate(gfx::BufferFormat format) {
auto root = connection_->default_root();
uint8_t depth = 0;
- if (auto reply = connection_->GetGeometry({root}).Sync())
+ if (auto reply = connection_->GetGeometry(root).Sync())
depth = reply->depth;
else
return StatusCode::kVaapiNoPixmap;
diff --git a/chromium/media/gpu/vaapi/vaapi_unittest.cc b/chromium/media/gpu/vaapi/vaapi_unittest.cc
index 80ef0a94fbc..0332dfbe23a 100644
--- a/chromium/media/gpu/vaapi/vaapi_unittest.cc
+++ b/chromium/media/gpu/vaapi/vaapi_unittest.cc
@@ -19,11 +19,14 @@
#include "base/optional.h"
#include "base/process/launch.h"
#include "base/stl_util.h"
+#include "base/strings/pattern.h"
#include "base/strings/string_split.h"
#include "base/test/launcher/unit_test_launcher.h"
+#include "base/test/scoped_feature_list.h"
#include "base/test/test_suite.h"
#include "build/chromeos_buildflags.h"
#include "gpu/config/gpu_driver_bug_workarounds.h"
+#include "media/base/media_switches.h"
#include "media/gpu/vaapi/vaapi_wrapper.h"
#include "media/media_buildflags.h"
@@ -40,14 +43,10 @@ base::Optional<VAProfile> ConvertToVAProfile(VideoCodecProfile profile) {
{VP8PROFILE_ANY, VAProfileVP8Version0_3},
{VP9PROFILE_PROFILE0, VAProfileVP9Profile0},
{VP9PROFILE_PROFILE2, VAProfileVP9Profile2},
-#if BUILDFLAG(IS_ASH)
- // TODO(hiroh): Remove if-macro once libva for linux-chrome is upreved to
- // 2.9.0 or newer.
- // https://source.chromium.org/chromium/chromium/src/+/master:build/linux/sysroot_scripts/generated_package_lists/sid.amd64
{AV1PROFILE_PROFILE_MAIN, VAProfileAV1Profile0},
-#endif
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
{HEVCPROFILE_MAIN, VAProfileHEVCMain},
+ {HEVCPROFILE_MAIN10, VAProfileHEVCMain10},
#endif
};
auto it = kProfileMap.find(profile);
@@ -69,14 +68,10 @@ base::Optional<VAProfile> StringToVAProfile(const std::string& va_profile) {
{"VAProfileVP8Version0_3", VAProfileVP8Version0_3},
{"VAProfileVP9Profile0", VAProfileVP9Profile0},
{"VAProfileVP9Profile2", VAProfileVP9Profile2},
-#if BUILDFLAG(IS_ASH)
- // TODO(hiroh): Remove if-macro once libva for linux-chrome is upreved to
- // 2.9.0 or newer.
- // https://source.chromium.org/chromium/chromium/src/+/master:build/linux/sysroot_scripts/generated_package_lists/sid.amd64
{"VAProfileAV1Profile0", VAProfileAV1Profile0},
-#endif
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
{"VAProfileHEVCMain", VAProfileHEVCMain},
+ {"VAProfileHEVCMain10", VAProfileHEVCMain10},
#endif
};
@@ -101,12 +96,23 @@ base::Optional<VAEntrypoint> StringToVAEntrypoint(
? base::make_optional<VAEntrypoint>(it->second)
: base::nullopt;
}
+
+std::unique_ptr<base::test::ScopedFeatureList> CreateScopedFeatureList() {
+ auto scoped_feature_list = std::make_unique<base::test::ScopedFeatureList>();
+ scoped_feature_list->InitWithFeatures(
+ /*enabled_features=*/{media::kVaapiAV1Decoder},
+ /*disabled_features=*/{});
+ return scoped_feature_list;
+}
} // namespace
class VaapiTest : public testing::Test {
public:
- VaapiTest() = default;
+ VaapiTest() : scoped_feature_list_(CreateScopedFeatureList()) {}
~VaapiTest() override = default;
+
+ private:
+ std::unique_ptr<base::test::ScopedFeatureList> scoped_feature_list_;
};
std::map<VAProfile, std::vector<VAEntrypoint>> ParseVainfo(
@@ -245,14 +251,102 @@ TEST_F(VaapiTest, DefaultEntrypointIsSupported) {
}
}
}
+
+// Verifies that VaapiWrapper::CreateContext() will queue up a buffer to set the
+// encoder to its lowest quality setting if a given VAProfile and VAEntrypoint
+// claim to support configuring it.
+TEST_F(VaapiTest, LowQualityEncodingSetting) {
+ // This test only applies to low powered Intel processors.
+ constexpr int kPentiumAndLaterFamily = 0x06;
+ const base::CPU cpuid;
+ const bool is_core_y_processor =
+ base::MatchPattern(cpuid.cpu_brand(), "Intel(R) Core(TM) *Y CPU*");
+ const bool is_low_power_intel =
+ cpuid.family() == kPentiumAndLaterFamily &&
+ (base::Contains(cpuid.cpu_brand(), "Pentium") ||
+ base::Contains(cpuid.cpu_brand(), "Celeron") || is_core_y_processor);
+ if (!is_low_power_intel)
+ GTEST_SKIP() << "Not an Intel low power processor";
+
+ for (const auto& codec_mode :
+ {VaapiWrapper::kEncode,
+ VaapiWrapper::kEncodeConstantQuantizationParameter}) {
+ std::map<VAProfile, std::vector<VAEntrypoint>> configurations =
+ VaapiWrapper::GetSupportedConfigurationsForCodecModeForTesting(
+ codec_mode);
+
+ for (const auto& profile_and_entrypoints : configurations) {
+ const VAProfile va_profile = profile_and_entrypoints.first;
+ scoped_refptr<VaapiWrapper> wrapper = VaapiWrapper::Create(
+ VaapiWrapper::kEncode, va_profile, EncryptionScheme::kUnencrypted,
+ base::DoNothing());
+
+ // Depending on the GPU Gen, flags and policies, we may or may not utilize
+ // all entrypoints (e.g. we might always want VAEntrypointEncSliceLP if
+ // supported and enabled). Query VaapiWrapper's mandated entry point.
+ const VAEntrypoint entrypoint =
+ VaapiWrapper::GetDefaultVaEntryPoint(codec_mode, va_profile);
+ ASSERT_TRUE(base::Contains(profile_and_entrypoints.second, entrypoint));
+
+ VAConfigAttrib attrib{};
+ attrib.type = VAConfigAttribEncQualityRange;
+ {
+ base::AutoLock auto_lock(*wrapper->va_lock_);
+ VAStatus va_res = vaGetConfigAttributes(
+ wrapper->va_display_, va_profile, entrypoint, &attrib, 1);
+ ASSERT_EQ(va_res, VA_STATUS_SUCCESS);
+ }
+ const auto quality_level = attrib.value;
+ if (quality_level == VA_ATTRIB_NOT_SUPPORTED || quality_level <= 1u)
+ continue;
+ DLOG(INFO) << vaProfileStr(va_profile)
+ << " supports encoding quality setting, with max value "
+ << quality_level;
+
+ // If we get here it means the |va_profile| and |entrypoint| support
+ // the quality setting. We cannot inspect what the driver does with this
+ // number (it could ignore it), so instead just make sure there's an entry
+ // in |pending_va_buffers_| that, when mapped, looks correct. That buffer
+ // should be created by CreateContext().
+ ASSERT_TRUE(wrapper->CreateContext(gfx::Size(640, 368)));
+ ASSERT_EQ(wrapper->pending_va_buffers_.size(), 1u);
+ {
+ base::AutoLock auto_lock(*wrapper->va_lock_);
+ ScopedVABufferMapping mapping(wrapper->va_lock_, wrapper->va_display_,
+ wrapper->pending_va_buffers_.front());
+ ASSERT_TRUE(mapping.IsValid());
+
+ auto* const va_buffer =
+ reinterpret_cast<VAEncMiscParameterBuffer*>(mapping.data());
+ EXPECT_EQ(va_buffer->type, VAEncMiscParameterTypeQualityLevel);
+
+ auto* const enc_quality =
+ reinterpret_cast<VAEncMiscParameterBufferQualityLevel*>(
+ va_buffer->data);
+ EXPECT_EQ(enc_quality->quality_level, quality_level)
+ << vaProfileStr(va_profile) << " " << vaEntrypointStr(entrypoint);
+ }
+ }
+ }
+}
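
The probe at the heart of this test is standard libva. A standalone sketch of the same query (a valid |dpy|, |profile| and |entrypoint| are assumed; returns 0 when the driver exposes no quality range):

    #include <va/va.h>

    unsigned int QueryMaxQualityLevel(VADisplay dpy, VAProfile profile,
                                      VAEntrypoint entrypoint) {
      VAConfigAttrib attrib{};
      attrib.type = VAConfigAttribEncQualityRange;
      if (vaGetConfigAttributes(dpy, profile, entrypoint, &attrib, 1) !=
              VA_STATUS_SUCCESS ||
          attrib.value == VA_ATTRIB_NOT_SUPPORTED) {
        return 0;
      }
      return attrib.value;
    }
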
} // namespace media
int main(int argc, char** argv) {
base::TestSuite test_suite(argc, argv);
-
- // PreSandboxInitialization() loads and opens the driver, queries its
- // capabilities and fills in the VASupportedProfiles.
- media::VaapiWrapper::PreSandboxInitialization();
+ {
+ // Enables/disables features during PreSandboxInitialization(). The
+ // ScopedFeatureList must be destroyed right after that call because
+ // base::TestSuite::Run() creates its own ScopedFeatureList, and multiple
+ // concurrent ScopedFeatureLists are not allowed.
+ auto scoped_feature_list = media::CreateScopedFeatureList();
+ // PreSandboxInitialization() loads and opens the driver, queries its
+ // capabilities and fills in the VASupportedProfiles.
+ media::VaapiWrapper::PreSandboxInitialization();
+ }
return base::LaunchUnitTests(
argc, argv,
diff --git a/chromium/media/gpu/vaapi/vaapi_utils.cc b/chromium/media/gpu/vaapi/vaapi_utils.cc
index 450f0c852f7..7d2437069d8 100644
--- a/chromium/media/gpu/vaapi/vaapi_utils.cc
+++ b/chromium/media/gpu/vaapi/vaapi_utils.cc
@@ -11,10 +11,12 @@
#include "base/memory/ptr_util.h"
#include "base/numerics/ranges.h"
#include "base/synchronization/lock.h"
+#include "build/chromeos_buildflags.h"
#include "media/gpu/vaapi/vaapi_common.h"
#include "media/gpu/vaapi/vaapi_wrapper.h"
#include "media/gpu/vp8_picture.h"
#include "media/gpu/vp8_reference_frame_vector.h"
+#include "third_party/libva_protected_content/va_protected_content.h"
namespace media {
@@ -79,7 +81,7 @@ std::unique_ptr<ScopedVABuffer> ScopedVABuffer::Create(
DCHECK(lock);
DCHECK(va_display);
DCHECK_NE(va_context_id, VA_INVALID_ID);
- DCHECK_LT(va_buffer_type, VABufferTypeMax);
+ DCHECK(IsValidVABufferType(va_buffer_type));
DCHECK_NE(size, 0u);
lock->AssertAcquired();
unsigned int va_buffer_size;
@@ -347,4 +349,15 @@ void FillVP8DataStructures(const Vp8FrameHeader& frame_header,
for (size_t i = 0; i < frame_header.num_of_dct_partitions; ++i)
slice_param->partition_size[i + 1] = frame_header.dct_partition_sizes[i];
}
+
+bool IsValidVABufferType(VABufferType type) {
+ return type < VABufferTypeMax ||
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ // TODO(jkardatzke): Remove this once we update to libva 2.0.10 in
+ // ChromeOS.
+ type == VAEncryptionParameterBufferType ||
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+ type == VACencStatusParameterBufferType;
+}
+
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_utils.h b/chromium/media/gpu/vaapi/vaapi_utils.h
index 7420256ef2c..9bcee23ce96 100644
--- a/chromium/media/gpu/vaapi/vaapi_utils.h
+++ b/chromium/media/gpu/vaapi/vaapi_utils.h
@@ -181,6 +181,9 @@ void FillVP8DataStructures(const Vp8FrameHeader& frame_header,
VAProbabilityDataBufferVP8* prob_buf,
VAPictureParameterBufferVP8* pic_param,
VASliceParameterBufferVP8* slice_param);
+
+bool IsValidVABufferType(VABufferType type);
+
} // namespace media
#endif // MEDIA_GPU_VAAPI_VAAPI_UTILS_H_
diff --git a/chromium/media/gpu/vaapi/vaapi_utils_unittest.cc b/chromium/media/gpu/vaapi/vaapi_utils_unittest.cc
index 479ea44397a..834a2df937f 100644
--- a/chromium/media/gpu/vaapi/vaapi_utils_unittest.cc
+++ b/chromium/media/gpu/vaapi/vaapi_utils_unittest.cc
@@ -42,6 +42,7 @@ class VaapiUtilsTest : public testing::Test {
// Create a VaapiWrapper for testing.
vaapi_wrapper_ =
VaapiWrapper::Create(VaapiWrapper::kDecode, VAProfileJPEGBaseline,
+ EncryptionScheme::kUnencrypted,
base::BindRepeating([](VaapiFunctions function) {
LOG(FATAL) << "Oh noes! Decoder failed";
}));
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc
index 49950ea388a..9d9cb7a1713 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc
@@ -18,6 +18,7 @@
#include "base/macros.h"
#include "base/metrics/histogram_macros.h"
#include "base/numerics/ranges.h"
+#include "base/numerics/safe_conversions.h"
#include "base/stl_util.h"
#include "base/strings/string_util.h"
#include "base/synchronization/waitable_event.h"
@@ -204,7 +205,7 @@ bool VaapiVideoDecodeAccelerator::Initialize(const Config& config,
VLOGF(2) << "Initializing VAVDA, profile: " << GetProfileName(profile);
vaapi_wrapper_ = VaapiWrapper::CreateForVideoCodec(
- VaapiWrapper::kDecode, profile,
+ VaapiWrapper::kDecode, profile, EncryptionScheme::kUnencrypted,
base::BindRepeating(&ReportVaapiErrorToUMA,
"Media.VaapiVideoDecodeAccelerator.VAAPIError"));
@@ -468,6 +469,11 @@ void VaapiVideoDecodeAccelerator::DecodeTask() {
switch (res) {
case AcceleratedVideoDecoder::kConfigChange: {
+ const uint8_t bit_depth = decoder_->GetBitDepth();
+ RETURN_AND_NOTIFY_ON_FAILURE(
+ bit_depth == 8u,
+ "Unsupported bit depth: " << base::strict_cast<int>(bit_depth),
+ PLATFORM_FAILURE, );
// The visible rect should be a subset of the picture size. Otherwise,
// the encoded stream is bad.
const gfx::Size pic_size = decoder_->GetPicSize();
@@ -614,7 +620,7 @@ void VaapiVideoDecodeAccelerator::TryFinishSurfaceSetChange() {
if (profile_ != new_profile) {
profile_ = new_profile;
auto new_vaapi_wrapper = VaapiWrapper::CreateForVideoCodec(
- VaapiWrapper::kDecode, profile_,
+ VaapiWrapper::kDecode, profile_, EncryptionScheme::kUnencrypted,
base::BindRepeating(&ReportVaapiErrorToUMA,
"Media.VaapiVideoDecodeAccelerator.VAAPIError"));
RETURN_AND_NOTIFY_ON_FAILURE(new_vaapi_wrapper.get(),
@@ -709,6 +715,7 @@ void VaapiVideoDecodeAccelerator::AssignPictureBuffers(
if (!vpp_vaapi_wrapper_) {
vpp_vaapi_wrapper_ = VaapiWrapper::Create(
VaapiWrapper::kVideoProcess, VAProfileNone,
+ EncryptionScheme::kUnencrypted,
base::BindRepeating(
&ReportVaapiErrorToUMA,
"Media.VaapiVideoDecodeAccelerator.Vpp.VAAPIError"));
@@ -1193,12 +1200,12 @@ VaapiVideoDecodeAccelerator::GetSupportedProfiles(
const gpu::GpuDriverBugWorkarounds& workarounds) {
VideoDecodeAccelerator::SupportedProfiles profiles =
VaapiWrapper::GetSupportedDecodeProfiles(workarounds);
- // VaVDA never supported VP9 Profile 2 and AV1, but VaapiWrapper does. Filter
- // them out.
+ // VaVDA never supported VP9 Profile 2, AV1, or HEVC, but VaapiWrapper does.
+ // Filter them out.
base::EraseIf(profiles, [](const auto& profile) {
+ VideoCodec codec = VideoCodecProfileToVideoCodec(profile.profile);
return profile.profile == VP9PROFILE_PROFILE2 ||
- VideoCodecProfileToVideoCodec(profile.profile) ==
- VideoCodec::kCodecAV1;
+ codec == VideoCodec::kCodecAV1 || codec == VideoCodec::kCodecHEVC;
});
return profiles;
}
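
The hunk above filters unsupported codecs out of the profile list with
base::EraseIf(). A standalone C++20 sketch of the same erase-remove pattern,
using illustrative stand-in types rather than the Chromium ones:

#include <vector>

enum class Codec { kH264, kVP8, kVP9, kAV1, kHEVC };
struct SupportedProfile { Codec codec = Codec::kH264; };

// Drop the codecs the legacy VDA path never supported.
void FilterProfiles(std::vector<SupportedProfile>& profiles) {
  std::erase_if(profiles, [](const SupportedProfile& p) {
    return p.codec == Codec::kAV1 || p.codec == Codec::kHEVC;
  });
}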
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc
index 0b3517cdb27..50ec1b1da9c 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc
@@ -61,6 +61,7 @@ class MockAcceleratedVideoDecoder : public AcceleratedVideoDecoder {
MOCK_METHOD0(Decode, DecodeResult());
MOCK_CONST_METHOD0(GetPicSize, gfx::Size());
MOCK_CONST_METHOD0(GetProfile, VideoCodecProfile());
+ MOCK_CONST_METHOD0(GetBitDepth, uint8_t());
MOCK_CONST_METHOD0(GetVisibleRect, gfx::Rect());
MOCK_CONST_METHOD0(GetRequiredNumOfPictures, size_t());
MOCK_CONST_METHOD0(GetNumReferenceFrames, size_t());
@@ -169,8 +170,8 @@ class VaapiVideoDecodeAcceleratorTest : public TestWithParam<TestParams>,
vda_.decoder_thread_task_runner_ = decoder_thread_.task_runner();
- decoder_delegate_ =
- std::make_unique<VaapiVideoDecoderDelegate>(&vda_, mock_vaapi_wrapper_);
+ decoder_delegate_ = std::make_unique<VaapiVideoDecoderDelegate>(
+ &vda_, mock_vaapi_wrapper_, base::DoNothing(), nullptr);
// Plug in all the mocks and ourselves as the |client_|.
vda_.decoder_.reset(mock_decoder_);
@@ -213,9 +214,9 @@ class VaapiVideoDecodeAcceleratorTest : public TestWithParam<TestParams>,
// Reset epilogue, needed to get |vda_| worker thread out of its Wait().
void ResetSequence() {
base::RunLoop run_loop;
- base::Closure quit_closure = run_loop.QuitClosure();
EXPECT_CALL(*mock_decoder_, Reset());
- EXPECT_CALL(*this, NotifyResetDone()).WillOnce(RunClosure(quit_closure));
+ EXPECT_CALL(*this, NotifyResetDone())
+ .WillOnce(RunClosure(run_loop.QuitClosure()));
vda_.Reset();
run_loop.Run();
}
@@ -231,14 +232,13 @@ class VaapiVideoDecodeAcceleratorTest : public TestWithParam<TestParams>,
bool expect_dismiss_picture_buffers = false,
size_t num_picture_buffers_to_dismiss = 0) {
::testing::InSequence s;
- base::RunLoop run_loop;
- base::Closure quit_closure = run_loop.QuitClosure();
EXPECT_CALL(*mock_decoder_,
SetStream(_, IsExpectedDecoderBuffer(kInputSize, nullptr)))
.WillOnce(Return());
EXPECT_CALL(*mock_decoder_, Decode())
.WillOnce(Return(AcceleratedVideoDecoder::kConfigChange));
+ EXPECT_CALL(*mock_decoder_, GetBitDepth()).WillOnce(Return(8u));
EXPECT_CALL(*mock_decoder_, GetPicSize()).WillOnce(Return(picture_size));
EXPECT_CALL(*mock_decoder_, GetVisibleRect())
.WillOnce(Return(gfx::Rect(picture_size)));
@@ -262,10 +262,12 @@ class VaapiVideoDecodeAcceleratorTest : public TestWithParam<TestParams>,
? num_pictures - kNumReferenceFrames
: num_pictures;
+ base::RunLoop run_loop;
+
EXPECT_CALL(*this,
ProvidePictureBuffers(expected_num_picture_buffers_requested, _,
1, picture_size, _))
- .WillOnce(RunClosure(quit_closure));
+ .WillOnce(RunClosure(run_loop.QuitClosure()));
auto region = base::UnsafeSharedMemoryRegion::TakeHandleForSerialization(
in_shm_.Duplicate());
@@ -286,9 +288,6 @@ class VaapiVideoDecodeAcceleratorTest : public TestWithParam<TestParams>,
ASSERT_TRUE(vda_.curr_input_buffer_)
<< "QueueInputBuffer() should have been called";
- base::RunLoop run_loop;
- base::Closure quit_closure = run_loop.QuitClosure();
-
// |decode_using_client_picture_buffers| determines the concrete method for
// creation of context, surfaces and VaapiPictures.
if (GetParam().decode_using_client_picture_buffers) {
@@ -326,10 +325,12 @@ class VaapiVideoDecodeAcceleratorTest : public TestWithParam<TestParams>,
}
::testing::InSequence s;
+ base::RunLoop run_loop;
+
EXPECT_CALL(*mock_decoder_, Decode())
.WillOnce(Return(AcceleratedVideoDecoder::kRanOutOfStreamData));
EXPECT_CALL(*this, NotifyEndOfBitstreamBuffer(bitstream_id))
- .WillOnce(RunClosure(quit_closure));
+ .WillOnce(RunClosure(run_loop.QuitClosure()));
const auto tex_target = mock_vaapi_picture_factory_->GetGLTextureTarget();
int irrelevant_id = 2;
@@ -352,14 +353,13 @@ class VaapiVideoDecodeAcceleratorTest : public TestWithParam<TestParams>,
// because the Decode() is (almost) immediate.
void DecodeOneFrameFast(int32_t bitstream_id) {
base::RunLoop run_loop;
- base::Closure quit_closure = run_loop.QuitClosure();
EXPECT_CALL(*mock_decoder_,
SetStream(_, IsExpectedDecoderBuffer(kInputSize, nullptr)))
.WillOnce(Return());
EXPECT_CALL(*mock_decoder_, Decode())
.WillOnce(Return(AcceleratedVideoDecoder::kRanOutOfStreamData));
EXPECT_CALL(*this, NotifyEndOfBitstreamBuffer(bitstream_id))
- .WillOnce(RunClosure(quit_closure));
+ .WillOnce(RunClosure(run_loop.QuitClosure()));
auto region = base::UnsafeSharedMemoryRegion::TakeHandleForSerialization(
in_shm_.Duplicate());
@@ -423,7 +423,8 @@ TEST_P(VaapiVideoDecodeAcceleratorTest, SupportedPlatforms) {
}
// This test checks that QueueInputBuffer() fails when the state is kUninitialized.
-TEST_P(VaapiVideoDecodeAcceleratorTest, QueueInputBufferAndError) {
+TEST_P(VaapiVideoDecodeAcceleratorTest,
+ QueueInputBufferAndErrorWhenVDAUninitialized) {
SetVdaStateToUnitialized();
auto region = base::UnsafeSharedMemoryRegion::TakeHandleForSerialization(
@@ -442,14 +443,34 @@ TEST_P(VaapiVideoDecodeAcceleratorTest, QueueInputBufferAndDecodeError) {
BitstreamBuffer bitstream_buffer(kBitstreamId, std::move(region), kInputSize);
base::RunLoop run_loop;
- base::Closure quit_closure = run_loop.QuitClosure();
EXPECT_CALL(*mock_decoder_,
SetStream(_, IsExpectedDecoderBuffer(kInputSize, nullptr)))
.WillOnce(Return());
EXPECT_CALL(*mock_decoder_, Decode())
.WillOnce(Return(AcceleratedVideoDecoder::kDecodeError));
EXPECT_CALL(*this, NotifyError(VaapiVideoDecodeAccelerator::PLATFORM_FAILURE))
- .WillOnce(RunClosure(quit_closure));
+ .WillOnce(RunClosure(run_loop.QuitClosure()));
+
+ QueueInputBuffer(std::move(bitstream_buffer));
+ run_loop.Run();
+}
+
+TEST_P(VaapiVideoDecodeAcceleratorTest, QueueVP9Profile2AndError) {
+ if (GetParam().video_codec != VP9PROFILE_PROFILE2)
+ GTEST_SKIP() << "The test parameter is not VP9 Profile 2";
+
+ auto region = base::UnsafeSharedMemoryRegion::TakeHandleForSerialization(
+ in_shm_.Duplicate());
+ BitstreamBuffer bitstream_buffer(kBitstreamId, std::move(region), kInputSize);
+ base::RunLoop run_loop;
+ EXPECT_CALL(*mock_decoder_,
+ SetStream(_, IsExpectedDecoderBuffer(kInputSize, nullptr)))
+ .WillOnce(Return());
+ EXPECT_CALL(*mock_decoder_, Decode())
+ .WillOnce(Return(AcceleratedVideoDecoder::kConfigChange));
+ EXPECT_CALL(*mock_decoder_, GetBitDepth()).WillOnce(Return(10u));
+ EXPECT_CALL(*this, NotifyError(VaapiVideoDecodeAccelerator::PLATFORM_FAILURE))
+ .WillOnce(RunClosure(run_loop.QuitClosure()));
QueueInputBuffer(std::move(bitstream_buffer));
run_loop.Run();
@@ -457,6 +478,8 @@ TEST_P(VaapiVideoDecodeAcceleratorTest, QueueInputBufferAndDecodeError) {
// Verifies a single fast frame decode.
TEST_P(VaapiVideoDecodeAcceleratorTest, DecodeOneFrame) {
+ if (GetParam().video_codec == VP9PROFILE_PROFILE2)
+ GTEST_SKIP() << "Decoding profile 2 is not supported";
DecodeOneFrameFast(kBitstreamId);
ResetSequence();
@@ -466,6 +489,8 @@ TEST_P(VaapiVideoDecodeAcceleratorTest, DecodeOneFrame) {
// |vda_| asks for PictureBuffers, that we provide via AssignPictureBuffers().
TEST_P(VaapiVideoDecodeAcceleratorTest,
QueueInputBuffersAndAssignPictureBuffers) {
+ if (GetParam().video_codec == VP9PROFILE_PROFILE2)
+ GTEST_SKIP() << "Decoding profile 2 is not supported";
QueueInputBufferSequence(kNumPictures, kPictureSize, kBitstreamId);
AssignPictureBuffersSequence(kNumPictures, kPictureSize, kBitstreamId);
@@ -480,6 +505,8 @@ TEST_P(VaapiVideoDecodeAcceleratorTest,
// is purely ingress-wise, i.e. there's no decoded output checks.
TEST_P(VaapiVideoDecodeAcceleratorTest,
QueueInputBuffersAndAssignPictureBuffersAndReallocate) {
+ if (GetParam().video_codec == VP9PROFILE_PROFILE2)
+ GTEST_SKIP() << "Decoding profile 2 is not supported";
QueueInputBufferSequence(kNumPictures, kPictureSize, kBitstreamId);
AssignPictureBuffersSequence(kNumPictures, kPictureSize, kBitstreamId);
@@ -502,7 +529,9 @@ constexpr TestParams kTestCases[] = {
{H264PROFILE_MIN, true /* decode_using_client_picture_buffers */},
{VP8PROFILE_MIN, false /* decode_using_client_picture_buffers */},
{VP9PROFILE_MIN, false /* decode_using_client_picture_buffers */},
- {VP9PROFILE_MIN, true /* decode_using_client_picture_buffers */}};
+ {VP9PROFILE_MIN, true /* decode_using_client_picture_buffers */},
+ {VP9PROFILE_PROFILE2, false /* decode_using_client_picture_buffers */},
+};
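
Several of the tests above bail out early for the new VP9 Profile 2 parameter
via GTEST_SKIP(). A minimal self-contained sketch of that pattern for a
value-parameterized googletest suite (names are illustrative):

#include <gtest/gtest.h>

class ProfileTest : public ::testing::TestWithParam<int> {};

TEST_P(ProfileTest, DecodesOneFrame) {
  if (GetParam() == 2)  // stand-in for VP9PROFILE_PROFILE2
    GTEST_SKIP() << "Decoding profile 2 is not supported";
  SUCCEED();
}

INSTANTIATE_TEST_SUITE_P(All, ProfileTest, ::testing::Values(0, 1, 2));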
INSTANTIATE_TEST_SUITE_P(All,
VaapiVideoDecodeAcceleratorTest,
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decoder.cc b/chromium/media/gpu/vaapi/vaapi_video_decoder.cc
index 9672aaffed5..e04ae1a0328 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decoder.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_decoder.cc
@@ -9,17 +9,24 @@
#include "base/bind.h"
#include "base/callback_helpers.h"
+#include "base/command_line.h"
+#include "base/containers/fixed_flat_map.h"
#include "base/memory/ptr_util.h"
#include "base/metrics/histogram_macros.h"
+#include "base/stl_util.h"
#include "base/trace_event/trace_event.h"
+#include "build/chromeos_buildflags.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/format_utils.h"
+#include "media/base/media_switches.h"
#include "media/base/video_frame.h"
#include "media/base/video_util.h"
+#include "media/gpu/av1_decoder.h"
#include "media/gpu/chromeos/dmabuf_video_frame_pool.h"
#include "media/gpu/chromeos/platform_video_frame_utils.h"
#include "media/gpu/gpu_video_decode_accelerator_helpers.h"
#include "media/gpu/macros.h"
+#include "media/gpu/vaapi/av1_vaapi_video_decoder_delegate.h"
#include "media/gpu/vaapi/h264_vaapi_video_decoder_delegate.h"
#include "media/gpu/vaapi/va_surface.h"
#include "media/gpu/vaapi/vaapi_utils.h"
@@ -32,6 +39,10 @@
#include "media/gpu/vaapi/h265_vaapi_video_decoder_delegate.h"
#endif
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+#include "chromeos/components/cdm_factory_daemon/chromeos_cdm_factory.h"
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+
namespace media {
namespace {
@@ -39,21 +50,32 @@ namespace {
// Size of the timestamp cache, needs to be large enough for frame-reordering.
constexpr size_t kTimestampCacheSize = 128;
-// Returns the preferred VA_RT_FORMAT for the given |profile|.
-unsigned int GetVaFormatForVideoCodecProfile(VideoCodecProfile profile) {
- if (profile == VP9PROFILE_PROFILE2 || profile == VP9PROFILE_PROFILE3)
- return VA_RT_FORMAT_YUV420_10BPP;
- return VA_RT_FORMAT_YUV420;
-}
-
-gfx::BufferFormat GetBufferFormat(VideoCodecProfile profile) {
+base::Optional<VideoPixelFormat> GetPixelFormatForBitDepth(uint8_t bit_depth) {
+ constexpr auto kSupportedBitDepthAndGfxFormats = base::MakeFixedFlatMap<
+ uint8_t, gfx::BufferFormat>({
#if defined(USE_OZONE)
- if (profile == VP9PROFILE_PROFILE2 || profile == VP9PROFILE_PROFILE3)
- return gfx::BufferFormat::P010;
- return gfx::BufferFormat::YUV_420_BIPLANAR;
+ {8u, gfx::BufferFormat::YUV_420_BIPLANAR}, {10u, gfx::BufferFormat::P010},
#else
- return gfx::BufferFormat::RGBX_8888;
-#endif
+ {8u, gfx::BufferFormat::RGBX_8888},
+#endif // defined(USE_OZONE)
+ });
+ if (!base::Contains(kSupportedBitDepthAndGfxFormats, bit_depth)) {
+ VLOGF(1) << "Unsupported bit depth: " << base::strict_cast<int>(bit_depth);
+ return base::nullopt;
+ }
+ return GfxBufferFormatToVideoPixelFormat(
+ kSupportedBitDepthAndGfxFormats.at(bit_depth));
+}
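
GetPixelFormatForBitDepth() above maps 8-bit content to NV12
(YUV_420_BIPLANAR) and 10-bit to P010 on Ozone, and fails for anything else. A
standalone sketch of that lookup using std::optional in place of
base::Optional:

#include <cstdint>
#include <optional>

enum class PixelFormat { kNV12, kP010 };

std::optional<PixelFormat> FormatForBitDepth(uint8_t bit_depth) {
  switch (bit_depth) {
    case 8:  return PixelFormat::kNV12;
    case 10: return PixelFormat::kP010;
    default: return std::nullopt;  // unsupported bit depth
  }
}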
+
+inline int RoundDownToEven(int x) {
+ DCHECK_GE(x, 0);
+ return x - (x % 2);
+}
+
+inline int RoundUpToEven(int x) {
+ DCHECK_GE(x, 0);
+ CHECK_LT(x, std::numeric_limits<int>::max());
+ return x + (x % 2);
}
} // namespace
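
Both rounding helpers above leave even values unchanged and move odd values by
one. A quick standalone check of their behavior:

#include <cassert>

int RoundDownToEven(int x) { return x - (x % 2); }
int RoundUpToEven(int x) { return x + (x % 2); }

int main() {
  assert(RoundDownToEven(7) == 6 && RoundDownToEven(8) == 8);
  assert(RoundUpToEven(7) == 8 && RoundUpToEven(8) == 8);
  return 0;
}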
@@ -117,13 +139,16 @@ VaapiVideoDecoder::~VaapiVideoDecoder() {
decoder_delegate_->OnVAContextDestructionSoon();
// Destroy explicitly to DCHECK() that |vaapi_wrapper_| references are held
- // inside the accelerator in |decoder_|, by the |allocated_va_surfaces_| and
- // of course by this class. To clear |allocated_va_surfaces_| we have to first
- // DestroyContext().
+ // inside the accelerator in |decoder_|, by the |allocated_va_surfaces_|, by
+ // the |decode_surface_pool_for_scaling_| and of course by this class. To
+ // clear |allocated_va_surfaces_| and |decode_surface_pool_for_scaling_| we
+ // have to first DestroyContext().
decoder_ = nullptr;
if (vaapi_wrapper_) {
vaapi_wrapper_->DestroyContext();
allocated_va_surfaces_.clear();
+ while (!decode_surface_pool_for_scaling_.empty())
+ decode_surface_pool_for_scaling_.pop();
DCHECK(vaapi_wrapper_->HasOneRef());
vaapi_wrapper_ = nullptr;
@@ -133,11 +158,13 @@ VaapiVideoDecoder::~VaapiVideoDecoder() {
void VaapiVideoDecoder::Initialize(const VideoDecoderConfig& config,
CdmContext* cdm_context,
InitCB init_cb,
- const OutputCB& output_cb) {
+ const OutputCB& output_cb,
+ const WaitingCB& waiting_cb) {
DVLOGF(2) << config.AsHumanReadableString();
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(config.IsValidConfig());
- DCHECK(state_ == State::kUninitialized || state_ == State::kWaitingForInput);
+ DCHECK(state_ == State::kError || state_ == State::kUninitialized ||
+ state_ == State::kWaitingForInput);
// Reinitializing the decoder is allowed if there are no pending decodes.
if (current_decode_task_ || !decode_task_queue_.empty()) {
@@ -147,17 +174,6 @@ void VaapiVideoDecoder::Initialize(const VideoDecoderConfig& config,
return;
}
- if (cdm_context || config.is_encrypted()) {
- VLOGF(1) << "Vaapi decoder does not support encrypted stream";
- std::move(init_cb).Run(StatusCode::kEncryptedContentUnsupported);
- return;
- }
-
- // We expect the decoder to have released all output buffers (by the client
- // triggering a flush or reset), even if the
- // DecoderInterface API doesn't explicitly specify this.
- DCHECK(output_frames_.empty());
-
if (state_ != State::kUninitialized) {
DVLOGF(3) << "Reinitializing decoder";
@@ -167,20 +183,84 @@ void VaapiVideoDecoder::Initialize(const VideoDecoderConfig& config,
decoder_ = nullptr;
DCHECK(vaapi_wrapper_);
- // To clear |allocated_va_surfaces_| we have to first DestroyContext().
+ // To clear |allocated_va_surfaces_| and |decode_surface_pool_for_scaling_|
+ // we have to first DestroyContext().
vaapi_wrapper_->DestroyContext();
allocated_va_surfaces_.clear();
+ while (!decode_surface_pool_for_scaling_.empty())
+ decode_surface_pool_for_scaling_.pop();
+ decode_to_output_scale_factor_.reset();
DCHECK(vaapi_wrapper_->HasOneRef());
vaapi_wrapper_ = nullptr;
decoder_delegate_ = nullptr;
+
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ // |cdm_context_ref_| is reset after |decoder_| because we passed
+ // |cdm_context_ref_->GetCdmContext()| when creating the |decoder_|, so we
+ // don't want |decoder_| to have a dangling pointer. We also destroy
+ // |cdm_event_cb_registration_| before |cdm_context_ref_| so that the CDM
+ // still exists when the callback registration is destroyed.
+ cdm_event_cb_registration_ = nullptr;
+ cdm_context_ref_ = nullptr;
+#endif
+
SetState(State::kUninitialized);
}
+ DCHECK(!current_decode_task_);
+ DCHECK(decode_task_queue_.empty());
+
+ // Destroying the |decoder_| during re-initialization should release all
+ // output buffers (and there should be no output buffers to begin with if the
+ // decoder was previously uninitialized).
+ DCHECK(output_frames_.empty());
+
+ if (config.is_encrypted()) {
+#if !BUILDFLAG(IS_CHROMEOS_ASH)
+ SetState(State::kError);
+ std::move(init_cb).Run(StatusCode::kEncryptedContentUnsupported);
+ return;
+#else
+ if (!cdm_context || !cdm_context->GetChromeOsCdmContext()) {
+ LOG(ERROR) << "Cannot support encrypted stream w/out ChromeOsCdmContext";
+ SetState(State::kError);
+ std::move(init_cb).Run(StatusCode::kDecoderMissingCdmForEncryptedContent);
+ return;
+ }
+ if (config.codec() != kCodecH264 && config.codec() != kCodecVP9 &&
+ config.codec() != kCodecHEVC) {
+ VLOGF(1)
+ << "Vaapi decoder does not support this codec for encrypted content";
+ SetState(State::kError);
+ std::move(init_cb).Run(StatusCode::kEncryptedContentUnsupported);
+ return;
+ }
+ cdm_event_cb_registration_ = cdm_context->RegisterEventCB(
+ base::BindRepeating(&VaapiVideoDecoder::OnCdmContextEvent,
+ weak_this_factory_.GetWeakPtr()));
+ cdm_context_ref_ = cdm_context->GetChromeOsCdmContext()->GetCdmContextRef();
+#endif
+#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
+ } else if (config.codec() == kCodecHEVC &&
+ !base::CommandLine::ForCurrentProcess()->HasSwitch(
+ switches::kEnableClearHevcForTesting)) {
+ DVLOG(1) << "Clear HEVC content is not supported";
+ SetState(State::kError);
+ std::move(init_cb).Run(StatusCode::kClearContentUnsupported);
+ return;
+#endif
+ }
// Initialize VAAPI wrapper.
const VideoCodecProfile profile = config.profile();
vaapi_wrapper_ = VaapiWrapper::CreateForVideoCodec(
- VaapiWrapper::kDecode, profile,
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ !cdm_context_ref_ ? VaapiWrapper::kDecode
+ : VaapiWrapper::kDecodeProtected,
+#else
+ VaapiWrapper::kDecode,
+#endif
+ profile, config.encryption_scheme(),
base::BindRepeating(&ReportVaapiErrorToUMA,
"Media.VaapiVideoDecoder.VAAPIError"));
UMA_HISTOGRAM_BOOLEAN("Media.VaapiVideoDecoder.VaapiWrapperCreationSuccess",
@@ -188,14 +268,17 @@ void VaapiVideoDecoder::Initialize(const VideoDecoderConfig& config,
if (!vaapi_wrapper_.get()) {
VLOGF(1) << "Failed initializing VAAPI for profile "
<< GetProfileName(profile);
+ SetState(State::kError);
std::move(init_cb).Run(StatusCode::kDecoderUnsupportedProfile);
return;
}
profile_ = profile;
color_space_ = config.color_space_info();
+ encryption_scheme_ = config.encryption_scheme();
auto accel_status = CreateAcceleratedVideoDecoder();
if (!accel_status.is_ok()) {
+ SetState(State::kError);
std::move(init_cb).Run(std::move(accel_status));
return;
}
@@ -207,12 +290,24 @@ void VaapiVideoDecoder::Initialize(const VideoDecoderConfig& config,
pixel_aspect_ratio_ = config.GetPixelAspectRatio();
output_cb_ = std::move(output_cb);
+ waiting_cb_ = std::move(waiting_cb);
SetState(State::kWaitingForInput);
// Notify client initialization was successful.
std::move(init_cb).Run(OkStatus());
}
+void VaapiVideoDecoder::OnCdmContextEvent(CdmContext::Event event) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ if (event != CdmContext::Event::kHasAdditionalUsableKey)
+ return;
+
+ // Invoke the same callback we'd get for a protected session update: this
+ // event signals that new keys are available, so if we were waiting for a key
+ // we should fetch it again.
+ ProtectedSessionUpdate(true);
+}
+
void VaapiVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
DecodeCB decode_cb) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
@@ -316,8 +411,13 @@ void VaapiVideoDecoder::HandleDecodeTask() {
SetState(State::kError);
break;
case AcceleratedVideoDecoder::kTryAgain:
- LOG(ERROR) << "Encrypted streams not supported";
- SetState(State::kError);
+ DVLOG(1) << "Decoder going into the waiting for protected state";
+ DCHECK_NE(encryption_scheme_, EncryptionScheme::kUnencrypted);
+ SetState(State::kWaitingForProtected);
+ // If we have lost our protected HW session, it should be recoverable;
+ // indicate that the decoder state was lost so that it can be reloaded.
+ if (decoder_delegate_->HasInitiatedProtectedRecovery())
+ waiting_cb_.Run(WaitingReason::kDecoderStateLost);
break;
}
}
@@ -402,8 +502,56 @@ scoped_refptr<VASurface> VaapiVideoDecoder::CreateSurface() {
base::BindOnce(&VaapiVideoDecoder::ReleaseVideoFrame, weak_this_);
return new VASurface(surface_id, frame->layout().coded_size(),
- GetVaFormatForVideoCodecProfile(profile_),
- std::move(release_frame_cb));
+ va_surface->format(), std::move(release_frame_cb));
+}
+
+scoped_refptr<VASurface> VaapiVideoDecoder::CreateDecodeSurface() {
+ DVLOGF(4);
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK_EQ(state_, State::kDecoding);
+ DCHECK(current_decode_task_);
+
+ if (decode_surface_pool_for_scaling_.empty())
+ return nullptr;
+
+ // Get surface from pool.
+ std::unique_ptr<ScopedVASurface> surface =
+ std::move(decode_surface_pool_for_scaling_.front());
+ decode_surface_pool_for_scaling_.pop();
+ // Gather information about the surface to avoid use-after-move.
+ const VASurfaceID surface_id = surface->id();
+ const gfx::Size surface_size = surface->size();
+ const unsigned int surface_format = surface->format();
+ // Wrap the ScopedVASurface inside a VASurface indirectly.
+ VASurface::ReleaseCB release_decode_surface_cb =
+ base::BindOnce(&VaapiVideoDecoder::ReturnDecodeSurfaceToPool, weak_this_,
+ std::move(surface));
+ return new VASurface(surface_id, surface_size, surface_format,
+ std::move(release_decode_surface_cb));
+}
+
+bool VaapiVideoDecoder::IsScalingDecode() {
+ // If we're not doing scaled decoding, we shouldn't have any surfaces for
+ // that purpose.
+ DCHECK(!!decode_to_output_scale_factor_ ||
+ decode_surface_pool_for_scaling_.empty());
+ return !!decode_to_output_scale_factor_;
+}
+
+const gfx::Rect VaapiVideoDecoder::GetOutputVisibleRect(
+ const gfx::Rect& decode_visible_rect,
+ const gfx::Size& output_picture_size) {
+ if (!IsScalingDecode())
+ return decode_visible_rect;
+ DCHECK_LT(*decode_to_output_scale_factor_, 1.0f);
+ gfx::Rect output_rect =
+ ScaleToEnclosedRect(decode_visible_rect, *decode_to_output_scale_factor_);
+ // Make the dimensions even numbered to align with other requirements later in
+ // the pipeline.
+ output_rect.set_width(RoundDownToEven(output_rect.width()));
+ output_rect.set_height(RoundDownToEven(output_rect.height()));
+ CHECK(gfx::Rect(output_picture_size).Contains(output_rect));
+ return output_rect;
}
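
A standalone arithmetic check of GetOutputVisibleRect() above, assuming an
origin-anchored visible rect, for which gfx::ScaleToEnclosedRect() reduces to
flooring the scaled width and height:

#include <cassert>
#include <cmath>

struct Size { int w; int h; };

Size ScaleVisibleSize(Size visible, float factor) {
  // Floor as ScaleToEnclosedRect() would for an origin rect, then force
  // both dimensions even, as the rest of the pipeline requires.
  int w = static_cast<int>(std::floor(visible.w * factor));
  int h = static_cast<int>(std::floor(visible.h * factor));
  return {w - (w % 2), h - (h % 2)};
}

int main() {
  const Size out = ScaleVisibleSize({639, 479}, 0.5f);
  assert(out.w == 318 && out.h == 238);  // 319x239, floored down to even
  return 0;
}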
void VaapiVideoDecoder::SurfaceReady(scoped_refptr<VASurface> va_surface,
@@ -448,6 +596,14 @@ void VaapiVideoDecoder::SurfaceReady(scoped_refptr<VASurface> va_surface,
video_frame = std::move(wrapped_frame);
}
+ if (cdm_context_ref_) {
+ // For protected content we also set a validation ID in the VideoFrame
+ // metadata so we can check whether the protected surface is still valid
+ // once we reach the compositor stage.
+ uint32_t protected_instance_id = vaapi_wrapper_->GetProtectedInstanceID();
+ video_frame->metadata().hw_protected_validation_id = protected_instance_id;
+ }
+
const auto gfx_color_space = color_space.ToGfxColorSpace();
if (gfx_color_space.IsValid())
video_frame->set_color_space(gfx_color_space);
@@ -462,22 +618,42 @@ void VaapiVideoDecoder::ApplyResolutionChange() {
DCHECK(output_frames_.empty());
VLOGF(2);
- const gfx::Rect visible_rect = decoder_->GetVisibleRect();
- const gfx::Size natural_size =
- GetNaturalSize(visible_rect, pixel_aspect_ratio_);
- const gfx::Size pic_size = decoder_->GetPicSize();
+ if (cdm_context_ref_) {
+ // Get the screen resolutions so we can determine if we should pre-scale
+ // content during decoding to maximize use of overlay downscaling since
+ // protected content requires overlays currently.
+ // NOTE: This is done only for protected content, since the other
+ // requirements for overlay downscaling are tied to protected content.
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ chromeos::ChromeOsCdmFactory::GetScreenResolutions(BindToCurrentLoop(
+ base::BindOnce(&VaapiVideoDecoder::ApplyResolutionChangeWithScreenSizes,
+ weak_this_)));
+ return;
+#endif
+ }
+ ApplyResolutionChangeWithScreenSizes(std::vector<gfx::Size>());
+}
+
+void VaapiVideoDecoder::ApplyResolutionChangeWithScreenSizes(
+ const std::vector<gfx::Size>& screen_resolutions) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(state_ == State::kChangingResolution ||
+ state_ == State::kWaitingForInput || state_ == State::kResetting ||
+ state_ == State::kError);
+ DCHECK(output_frames_.empty());
+ VLOGF(2);
+ // If we are no longer in the resolution-change state, skip the change. Any
+ // other state can occur if something happened after the async call to get
+ // the screen sizes in ApplyResolutionChange(); in that case we will get
+ // another resolution change event once the decoder parses the resolution
+ // and notifies us again.
+ if (state_ != State::kChangingResolution)
+ return;
+
+ const uint8_t bit_depth = decoder_->GetBitDepth();
const base::Optional<VideoPixelFormat> format =
- GfxBufferFormatToVideoPixelFormat(
- GetBufferFormat(decoder_->GetProfile()));
- CHECK(format);
- auto format_fourcc = Fourcc::FromVideoPixelFormat(*format);
- CHECK(format_fourcc);
- // TODO(jkardatzke): Pass true for the last argument when we are in protected
- // mode.
- if (!frame_pool_->Initialize(
- *format_fourcc, pic_size, visible_rect, natural_size,
- decoder_->GetRequiredNumOfPictures(), /*use_protected=*/false)) {
- DLOG(WARNING) << "Failed Initialize()ing the frame pool.";
+ GetPixelFormatForBitDepth(bit_depth);
+ if (!format) {
SetState(State::kError);
return;
}
@@ -488,15 +664,136 @@ void VaapiVideoDecoder::ApplyResolutionChange() {
// All pending decode operations will be completed before triggering a
// resolution change, so we can safely DestroyContext() here; that, in turn,
- // allows for clearing the |allocated_va_surfaces_|.
+ // allows for clearing the |allocated_va_surfaces_| and the
+ // |decode_surface_pool_for_scaling_|.
vaapi_wrapper_->DestroyContext();
allocated_va_surfaces_.clear();
+ while (!decode_surface_pool_for_scaling_.empty())
+ decode_surface_pool_for_scaling_.pop();
+ decode_to_output_scale_factor_.reset();
+
+ gfx::Rect output_visible_rect = decoder_->GetVisibleRect();
+ gfx::Size output_pic_size = decoder_->GetPicSize();
+ if (output_pic_size.IsEmpty()) {
+ DLOG(ERROR) << "Empty picture size in decoder";
+ SetState(State::kError);
+ return;
+ }
+ const auto format_fourcc = Fourcc::FromVideoPixelFormat(*format);
+ CHECK(format_fourcc);
+ if (!screen_resolutions.empty()) {
+ // Ideally we would base this off visible size, but that can change
+ // midstream without forcing a config change, so we need to scale the
+ // overall decoded image and then apply that same relative scaling to the
+ // visible rect later.
+ CHECK(cdm_context_ref_);
+ gfx::Size max_desired_size;
+ const float pic_aspect =
+ static_cast<float>(output_pic_size.width()) / output_pic_size.height();
+ for (const auto& screen : screen_resolutions) {
+ if (screen.IsEmpty())
+ continue;
+ int target_width;
+ int target_height;
+ const float screen_aspect =
+ static_cast<float>(screen.width()) / screen.height();
+ if (pic_aspect >= screen_aspect) {
+ // Constrain on width.
+ if (screen.width() < output_pic_size.width()) {
+ target_width = screen.width();
+ target_height =
+ base::checked_cast<int>(std::lround(target_width / pic_aspect));
+ } else {
+ target_width = output_pic_size.width();
+ target_height = output_pic_size.height();
+ }
+ } else {
+ // Constrain on height.
+ if (screen.height() < output_pic_size.height()) {
+ target_height = screen.height();
+ target_width =
+ base::checked_cast<int>(std::lround(target_height * pic_aspect));
+ } else {
+ target_height = output_pic_size.height();
+ target_width = output_pic_size.width();
+ }
+ }
+ if (target_width > max_desired_size.width() ||
+ target_height > max_desired_size.height()) {
+ max_desired_size.SetSize(target_width, target_height);
+ }
+ }
+ if (!max_desired_size.IsEmpty() &&
+ max_desired_size.width() < output_pic_size.width()) {
+ // Round both dimensions up to a multiple of two to deal with chroma
+ // subsampling.
+ max_desired_size.set_width(RoundUpToEven(max_desired_size.width()));
+ max_desired_size.set_height(RoundUpToEven(max_desired_size.height()));
+ decode_to_output_scale_factor_ =
+ static_cast<float>(max_desired_size.width()) /
+ output_pic_size.width();
+ output_pic_size = max_desired_size;
+ output_visible_rect =
+ GetOutputVisibleRect(output_visible_rect, output_pic_size);
+
+ // Create the surface pool for decoding, the normal pool will be used for
+ // output.
+ const size_t decode_pool_size = decoder_->GetRequiredNumOfPictures();
+ const base::Optional<gfx::BufferFormat> buffer_format =
+ VideoPixelFormatToGfxBufferFormat(*format);
+ if (!buffer_format) {
+ decode_to_output_scale_factor_.reset();
+ SetState(State::kError);
+ return;
+ }
+ const uint32_t va_fourcc =
+ VaapiWrapper::BufferFormatToVAFourCC(*buffer_format);
+ const uint32_t va_rt_format =
+ VaapiWrapper::BufferFormatToVARTFormat(*buffer_format);
+ if (!va_fourcc || !va_rt_format) {
+ decode_to_output_scale_factor_.reset();
+ SetState(State::kError);
+ return;
+ }
+ const gfx::Size decoder_pic_size = decoder_->GetPicSize();
+ for (size_t i = 0; i < decode_pool_size; ++i) {
+ std::unique_ptr<ScopedVASurface> surface =
+ vaapi_wrapper_->CreateScopedVASurface(
+ base::strict_cast<unsigned int>(va_rt_format), decoder_pic_size,
+ /*visible_size=*/base::nullopt, va_fourcc);
+ if (!surface) {
+ while (!decode_surface_pool_for_scaling_.empty())
+ decode_surface_pool_for_scaling_.pop();
+ decode_to_output_scale_factor_.reset();
+ SetState(State::kError);
+ return;
+ }
+ decode_surface_pool_for_scaling_.push(std::move(surface));
+ }
+ }
+ }
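
The screen-resolution loop above picks, per screen, the largest
aspect-preserving size no bigger than the decoded size, then keeps the maximum
across screens. A self-contained sketch of that selection; the Size type here
is an illustrative stand-in for gfx::Size:

#include <cmath>
#include <vector>

struct Size {
  int w = 0, h = 0;
  bool IsEmpty() const { return w == 0 || h == 0; }
};

Size MaxDesiredSize(Size pic, const std::vector<Size>& screens) {
  const float pic_aspect = static_cast<float>(pic.w) / pic.h;
  Size best;
  for (const Size& screen : screens) {
    if (screen.IsEmpty())
      continue;
    const float screen_aspect = static_cast<float>(screen.w) / screen.h;
    Size target = pic;  // default: no downscale needed for this screen
    if (pic_aspect >= screen_aspect && screen.w < pic.w) {
      // Constrain on width.
      target = {screen.w,
                static_cast<int>(std::lround(screen.w / pic_aspect))};
    } else if (pic_aspect < screen_aspect && screen.h < pic.h) {
      // Constrain on height.
      target = {static_cast<int>(std::lround(screen.h * pic_aspect)),
                screen.h};
    }
    if (target.w > best.w || target.h > best.h)
      best = target;
  }
  return best;
}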
+ const gfx::Size natural_size =
+ GetNaturalSize(output_visible_rect, pixel_aspect_ratio_);
+ if (!frame_pool_->Initialize(
+ *format_fourcc, output_pic_size, output_visible_rect, natural_size,
+ decoder_->GetRequiredNumOfPictures(), !!cdm_context_ref_)) {
+ DLOG(WARNING) << "Failed Initialize()ing the frame pool.";
+ SetState(State::kError);
+ return;
+ }
+
if (profile_ != decoder_->GetProfile()) {
// When a profile is changed, we need to re-initialize VaapiWrapper.
profile_ = decoder_->GetProfile();
auto new_vaapi_wrapper = VaapiWrapper::CreateForVideoCodec(
- VaapiWrapper::kDecode, profile_,
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ !cdm_context_ref_ ? VaapiWrapper::kDecode
+ : VaapiWrapper::kDecodeProtected,
+#else
+ VaapiWrapper::kDecode,
+#endif
+ profile_, encryption_scheme_,
base::BindRepeating(&ReportVaapiErrorToUMA,
"Media.VaapiVideoDecoder.VAAPIError"));
if (!new_vaapi_wrapper.get()) {
@@ -508,7 +805,7 @@ void VaapiVideoDecoder::ApplyResolutionChange() {
vaapi_wrapper_ = std::move(new_vaapi_wrapper);
}
- if (!vaapi_wrapper_->CreateContext(pic_size)) {
+ if (!vaapi_wrapper_->CreateContext(decoder_->GetPicSize())) {
VLOGF(1) << "Failed creating context";
SetState(State::kError);
return;
@@ -552,6 +849,27 @@ void VaapiVideoDecoder::NotifyFrameAvailable() {
}
}
+void VaapiVideoDecoder::ProtectedSessionUpdate(bool success) {
+ DVLOGF(4);
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+
+ if (!success) {
+ LOG(ERROR) << "Terminating decoding after failed protected update";
+ SetState(State::kError);
+ return;
+ }
+
+ // If we were waiting for a protected update, retry the current decode task.
+ if (state_ != State::kWaitingForProtected)
+ return;
+
+ DCHECK(current_decode_task_);
+ SetState(State::kDecoding);
+ decoder_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&VaapiVideoDecoder::HandleDecodeTask, weak_this_));
+}
+
void VaapiVideoDecoder::Flush() {
DVLOGF(2);
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
@@ -623,9 +941,14 @@ Status VaapiVideoDecoder::CreateAcceleratedVideoDecoder() {
DVLOGF(2);
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ VaapiVideoDecoderDelegate::ProtectedSessionUpdateCB protected_update_cb =
+ BindToCurrentLoop(base::BindRepeating(
+ &VaapiVideoDecoder::ProtectedSessionUpdate, weak_this_));
if (profile_ >= H264PROFILE_MIN && profile_ <= H264PROFILE_MAX) {
- auto accelerator =
- std::make_unique<H264VaapiVideoDecoderDelegate>(this, vaapi_wrapper_);
+ auto accelerator = std::make_unique<H264VaapiVideoDecoderDelegate>(
+ this, vaapi_wrapper_, std::move(protected_update_cb),
+ cdm_context_ref_ ? cdm_context_ref_->GetCdmContext() : nullptr,
+ encryption_scheme_);
decoder_delegate_ = accelerator.get();
decoder_.reset(
@@ -637,22 +960,35 @@ Status VaapiVideoDecoder::CreateAcceleratedVideoDecoder() {
decoder_.reset(new VP8Decoder(std::move(accelerator)));
} else if (profile_ >= VP9PROFILE_MIN && profile_ <= VP9PROFILE_MAX) {
- auto accelerator =
- std::make_unique<VP9VaapiVideoDecoderDelegate>(this, vaapi_wrapper_);
+ auto accelerator = std::make_unique<VP9VaapiVideoDecoderDelegate>(
+ this, vaapi_wrapper_, std::move(protected_update_cb),
+ cdm_context_ref_ ? cdm_context_ref_->GetCdmContext() : nullptr,
+ encryption_scheme_);
decoder_delegate_ = accelerator.get();
decoder_.reset(
new VP9Decoder(std::move(accelerator), profile_, color_space_));
+ }
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
- } else if (profile_ >= HEVCPROFILE_MIN && profile_ <= HEVCPROFILE_MAX) {
- auto accelerator =
- std::make_unique<H265VaapiVideoDecoderDelegate>(this, vaapi_wrapper_);
+ else if (profile_ >= HEVCPROFILE_MIN && profile_ <= HEVCPROFILE_MAX) {
+ auto accelerator = std::make_unique<H265VaapiVideoDecoderDelegate>(
+ this, vaapi_wrapper_, std::move(protected_update_cb),
+ cdm_context_ref_ ? cdm_context_ref_->GetCdmContext() : nullptr,
+ encryption_scheme_);
decoder_delegate_ = accelerator.get();
decoder_.reset(
new H265Decoder(std::move(accelerator), profile_, color_space_));
+ }
#endif // BUILDFLAG(ENABLE_PLATFORM_HEVC)
- } else {
+ else if (profile_ >= AV1PROFILE_MIN && profile_ <= AV1PROFILE_MAX) {
+ auto accelerator =
+ std::make_unique<AV1VaapiVideoDecoderDelegate>(this, vaapi_wrapper_);
+ decoder_delegate_ = accelerator.get();
+
+ decoder_.reset(new AV1Decoder(std::move(accelerator), profile_));
+ }
+ else {
return Status(StatusCode::kDecoderUnsupportedProfile)
.WithData("profile", profile_);
}
@@ -678,7 +1014,7 @@ void VaapiVideoDecoder::SetState(State state) {
// Check whether the state change is valid.
switch (state) {
case State::kUninitialized:
- DCHECK_EQ(state_, State::kWaitingForInput);
+ DCHECK(state_ == State::kWaitingForInput || state_ == State::kError);
break;
case State::kWaitingForInput:
DCHECK(decode_task_queue_.empty());
@@ -686,6 +1022,9 @@ void VaapiVideoDecoder::SetState(State state) {
DCHECK(state_ == State::kUninitialized || state_ == State::kDecoding ||
state_ == State::kResetting);
break;
+ case State::kWaitingForProtected:
+ DCHECK(!!cdm_context_ref_);
+ FALLTHROUGH;
case State::kWaitingForOutput:
DCHECK(current_decode_task_);
DCHECK_EQ(state_, State::kDecoding);
@@ -693,11 +1032,14 @@ void VaapiVideoDecoder::SetState(State state) {
case State::kDecoding:
DCHECK(state_ == State::kWaitingForInput ||
state_ == State::kWaitingForOutput ||
- state_ == State::kChangingResolution);
+ state_ == State::kChangingResolution ||
+ state_ == State::kWaitingForProtected);
break;
case State::kResetting:
DCHECK(state_ == State::kWaitingForInput ||
- state_ == State::kWaitingForOutput || state_ == State::kDecoding);
+ state_ == State::kWaitingForOutput || state_ == State::kDecoding ||
+ state_ == State::kWaitingForProtected ||
+ state_ == State::kChangingResolution);
ClearDecodeTaskQueue(DecodeStatus::ABORTED);
break;
case State::kChangingResolution:
@@ -713,4 +1055,12 @@ void VaapiVideoDecoder::SetState(State state) {
state_ = state;
}
+void VaapiVideoDecoder::ReturnDecodeSurfaceToPool(
+ std::unique_ptr<ScopedVASurface> surface,
+ VASurfaceID) {
+ DVLOGF(4);
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ decode_surface_pool_for_scaling_.push(std::move(surface));
+}
+
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decoder.h b/chromium/media/gpu/vaapi/vaapi_video_decoder.h
index 65b70882fab..06cdb0004df 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decoder.h
+++ b/chromium/media/gpu/vaapi/vaapi_video_decoder.h
@@ -22,13 +22,16 @@
#include "base/optional.h"
#include "base/sequence_checker.h"
#include "base/time/time.h"
+#include "build/chromeos_buildflags.h"
+#include "media/base/callback_registry.h"
#include "media/base/cdm_context.h"
#include "media/base/status.h"
+#include "media/base/supported_video_decoder_config.h"
#include "media/base/video_codecs.h"
#include "media/base/video_frame_layout.h"
#include "media/gpu/chromeos/video_decoder_pipeline.h"
#include "media/gpu/decode_surface_handler.h"
-#include "media/video/supported_video_decoder_config.h"
+#include "media/gpu/vaapi/vaapi_utils.h"
#include "ui/gfx/geometry/rect.h"
#include "ui/gfx/geometry/size.h"
#include "ui/gfx/gpu_memory_buffer.h"
@@ -60,13 +63,19 @@ class VaapiVideoDecoder : public DecoderInterface,
void Initialize(const VideoDecoderConfig& config,
CdmContext* cdm_context,
InitCB init_cb,
- const OutputCB& output_cb) override;
+ const OutputCB& output_cb,
+ const WaitingCB& waiting_cb) override;
void Decode(scoped_refptr<DecoderBuffer> buffer, DecodeCB decode_cb) override;
void Reset(base::OnceClosure reset_cb) override;
void ApplyResolutionChange() override;
// DecodeSurfaceHandler<VASurface> implementation.
scoped_refptr<VASurface> CreateSurface() override;
+ scoped_refptr<VASurface> CreateDecodeSurface() override;
+ bool IsScalingDecode() override;
+ const gfx::Rect GetOutputVisibleRect(
+ const gfx::Rect& decode_visible_rect,
+ const gfx::Size& output_picture_size) override;
void SurfaceReady(scoped_refptr<VASurface> va_surface,
int32_t buffer_id,
const gfx::Rect& visible_rect,
@@ -88,14 +97,16 @@ class VaapiVideoDecoder : public DecoderInterface,
};
enum class State {
- kUninitialized, // not initialized yet or initialization failed.
- kWaitingForInput, // waiting for input buffers.
- kWaitingForOutput, // waiting for output buffers.
- kDecoding, // decoding buffers.
- kChangingResolution, // need to change resolution, waiting for pipeline to
- // be flushed.
- kResetting, // resetting decoder.
- kError, // decoder encountered an error.
+ kUninitialized, // not initialized yet or initialization failed.
+ kWaitingForInput, // waiting for input buffers.
+ kWaitingForOutput, // waiting for output buffers.
+ kWaitingForProtected, // waiting on something related to protected content,
+ // either setup, full sample parsing or key loading.
+ kDecoding, // decoding buffers.
+ kChangingResolution, // need to change resolution, waiting for pipeline to
+ // be flushed.
+ kResetting, // resetting decoder.
+ kError, // decoder encountered an error.
};
VaapiVideoDecoder(
@@ -119,6 +130,9 @@ class VaapiVideoDecoder : public DecoderInterface,
void ReleaseVideoFrame(VASurfaceID surface_id);
// Callback for |frame_pool_| to notify of available resources.
void NotifyFrameAvailable();
+ // Callback from accelerator to indicate the protected state has been updated
+ // so we can proceed or fail.
+ void ProtectedSessionUpdate(bool success);
// Flushes |decoder_|, blocking until all pending decode tasks have been
// executed and all frames have been output.
@@ -133,12 +147,31 @@ class VaapiVideoDecoder : public DecoderInterface,
// Change the current |state_| to the specified |state|.
void SetState(State state);
+ // Callback for the CDM to notify |this|.
+ void OnCdmContextEvent(CdmContext::Event event);
+
+ // This is a callback from ApplyResolutionChange() when we need to query the
+ // browser process for the screen sizes.
+ void ApplyResolutionChangeWithScreenSizes(
+ const std::vector<gfx::Size>& screen_resolutions);
+
+ // Callback for when a VASurface in the decode pool is no longer used as a
+ // reference frame and should then be returned to the pool. We ignore the
+ // VASurfaceID in the normal callback because it is retained in the |surface|
+ // object.
+ void ReturnDecodeSurfaceToPool(std::unique_ptr<ScopedVASurface> surface,
+ VASurfaceID);
+
// The video decoder's state.
State state_ = State::kUninitialized;
// Callback used to notify the client when a frame is available for output.
OutputCB output_cb_;
+ // Callback used to notify the client when we have lost the decode context
+ // and need a reset (used in protected decoding).
+ WaitingCB waiting_cb_;
+
// The video stream's profile.
VideoCodecProfile profile_ = VIDEO_CODEC_PROFILE_UNKNOWN;
// Color space of the video frame.
@@ -175,6 +208,24 @@ class VaapiVideoDecoder : public DecoderInterface,
base::small_map<std::map<gfx::GpuMemoryBufferId, scoped_refptr<VASurface>>>
allocated_va_surfaces_;
+ // We need to use a CdmContextRef so that we destruct
+ // |cdm_event_cb_registration_| before the CDM is destructed. The CDM has
+ // mechanisms to ensure destruction on the proper thread.
+ //
+ // For clarity, the MojoVideoDecoderService does hold a reference to both the
+ // decoder and the CDM to ensure the CDM doesn't get destructed before the
+ // decoder; however, the VideoDecoderPipeline, which owns the
+ // VaapiVideoDecoder, uses an asynchronous destructor to destroy the
+ // pipeline (and thus the VaapiVideoDecoder) on the decoder thread.
+ std::unique_ptr<CdmContextRef> cdm_context_ref_;
+
+ EncryptionScheme encryption_scheme_;
+
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ // To keep the CdmContext event callback registered.
+ std::unique_ptr<CallbackRegistration> cdm_event_cb_registration_;
+#endif
+
// Platform and codec specific video decoder.
std::unique_ptr<AcceleratedVideoDecoder> decoder_;
scoped_refptr<VaapiWrapper> vaapi_wrapper_;
@@ -182,6 +233,15 @@ class VaapiVideoDecoder : public DecoderInterface,
// the pointer from AcceleratedVideoDecoder.
VaapiVideoDecoderDelegate* decoder_delegate_ = nullptr;
+ // When we are doing scaled decoding, this is the pool of surfaces used by the
+ // decoder for reference frames.
+ base::queue<std::unique_ptr<ScopedVASurface>>
+ decode_surface_pool_for_scaling_;
+
+ // When we are doing scaled decoding, this is the scale factor in use; it
+ // applies equally in both dimensions.
+ base::Optional<float> decode_to_output_scale_factor_;
+
SEQUENCE_CHECKER(sequence_checker_);
base::WeakPtr<VaapiVideoDecoder> weak_this_;
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decoder_delegate.cc b/chromium/media/gpu/vaapi/vaapi_video_decoder_delegate.cc
index d69f3250e05..b2accbbc497 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decoder_delegate.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_decoder_delegate.cc
@@ -4,32 +4,384 @@
#include "media/gpu/vaapi/vaapi_video_decoder_delegate.h"
+#include "base/bind.h"
+#include "base/containers/contains.h"
+#include "base/logging.h"
+#include "base/numerics/safe_conversions.h"
+#include "base/time/default_tick_clock.h"
+#include "build/chromeos_buildflags.h"
+#include "media/base/bind_to_current_loop.h"
+#include "media/base/cdm_context.h"
#include "media/gpu/decode_surface_handler.h"
#include "media/gpu/vaapi/va_surface.h"
#include "media/gpu/vaapi/vaapi_wrapper.h"
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+#include "chromeos/components/cdm_factory_daemon/chromeos_cdm_factory.h"
+
+namespace {
+// During playback of protected content, we need to request the keys at an
+// interval no greater than this. This allows updating of key usage data.
+constexpr base::TimeDelta kKeyRetrievalMaxPeriod =
+ base::TimeDelta::FromMinutes(1);
+// This increments the lower 64 bit counter of an 128 bit IV.
+void ctr128_inc64(uint8_t* counter) {
+ uint32_t n = 16;
+ do {
+ if (++counter[--n] != 0)
+ return;
+ } while (n > 8);
+}
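
ctr128_inc64() above big-endian-increments bytes 8..15 of the IV, letting the
carry stop at the 64-bit boundary. A standalone check of the same logic,
including wraparound of the lower counter:

#include <cassert>
#include <cstdint>
#include <cstring>

void Inc64(uint8_t* counter) {  // same logic as ctr128_inc64() above
  uint32_t n = 16;
  do {
    if (++counter[--n] != 0)
      return;
  } while (n > 8);
}

int main() {
  uint8_t iv[16] = {};
  std::memset(iv + 8, 0xFF, 8);  // lower 64-bit counter at its maximum
  Inc64(iv);
  const uint8_t zeros[8] = {};
  assert(std::memcmp(iv + 8, zeros, 8) == 0);  // lower counter wrapped
  assert(iv[7] == 0);  // upper 64 bits untouched by the carry
  return 0;
}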
+
+} // namespace
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+
namespace media {
VaapiVideoDecoderDelegate::VaapiVideoDecoderDelegate(
DecodeSurfaceHandler<VASurface>* const vaapi_dec,
- scoped_refptr<VaapiWrapper> vaapi_wrapper)
- : vaapi_dec_(vaapi_dec), vaapi_wrapper_(std::move(vaapi_wrapper)) {
+ scoped_refptr<VaapiWrapper> vaapi_wrapper,
+ ProtectedSessionUpdateCB on_protected_session_update_cb,
+ CdmContext* cdm_context,
+ EncryptionScheme encryption_scheme)
+ : vaapi_dec_(vaapi_dec),
+ vaapi_wrapper_(std::move(vaapi_wrapper)),
+ on_protected_session_update_cb_(
+ std::move(on_protected_session_update_cb)),
+ encryption_scheme_(encryption_scheme),
+ protected_session_state_(ProtectedSessionState::kNotCreated),
+ scaled_surface_id_(VA_INVALID_ID),
+ performing_recovery_(false) {
DCHECK(vaapi_wrapper_);
DCHECK(vaapi_dec_);
DETACH_FROM_SEQUENCE(sequence_checker_);
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ if (cdm_context)
+ chromeos_cdm_context_ = cdm_context->GetChromeOsCdmContext();
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+ memset(&src_region_, 0, sizeof(src_region_));
+ memset(&dst_region_, 0, sizeof(dst_region_));
}
VaapiVideoDecoderDelegate::~VaapiVideoDecoderDelegate() {
// TODO(mcasas): consider enabling the checker, https://crbug.com/789160
// DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ // Also destroy the protected session when the accelerator delegate is
+ // destroyed. That way, if a new delegate is created, the protected session
+ // it creates won't overwrite an existing one.
+ vaapi_wrapper_->DestroyProtectedSession();
}
void VaapiVideoDecoderDelegate::set_vaapi_wrapper(
scoped_refptr<VaapiWrapper> vaapi_wrapper) {
DETACH_FROM_SEQUENCE(sequence_checker_);
vaapi_wrapper_ = std::move(vaapi_wrapper);
+ protected_session_state_ = ProtectedSessionState::kNotCreated;
+ hw_identifier_.clear();
+ hw_key_data_map_.clear();
}
void VaapiVideoDecoderDelegate::OnVAContextDestructionSoon() {}
+bool VaapiVideoDecoderDelegate::HasInitiatedProtectedRecovery() {
+ if (protected_session_state_ != ProtectedSessionState::kNeedsRecovery)
+ return false;
+
+ performing_recovery_ = true;
+ protected_session_state_ = ProtectedSessionState::kNotCreated;
+ return true;
+}
+
+bool VaapiVideoDecoderDelegate::SetDecryptConfig(
+ std::unique_ptr<DecryptConfig> decrypt_config) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ // It is possible to switch between clear and encrypted (and vice versa), but
+ // we should not be changing encryption schemes across encrypted portions.
+ if (!decrypt_config)
+ return true;
+ // TODO(jkardatzke): Handle changing encryption modes midstream; the latest
+ // OEMCrypto spec allows this, although we won't hit it in practice for now.
+ // Check that the schemes are compatible.
+ if (decrypt_config->encryption_scheme() != encryption_scheme_) {
+ LOG(ERROR) << "Cannot change encryption modes midstream";
+ return false;
+ }
+ decrypt_config_ = std::move(decrypt_config);
+ return true;
+}
+
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+VaapiVideoDecoderDelegate::ProtectedSessionState
+VaapiVideoDecoderDelegate::SetupDecryptDecode(
+ bool full_sample,
+ size_t size,
+ VAEncryptionParameters* crypto_params,
+ std::vector<VAEncryptionSegmentInfo>* segments,
+ const std::vector<SubsampleEntry>& subsamples) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(crypto_params);
+ DCHECK(segments);
+ if (protected_session_state_ == ProtectedSessionState::kInProcess ||
+ protected_session_state_ == ProtectedSessionState::kFailed) {
+ return protected_session_state_;
+ }
+ if (protected_session_state_ == ProtectedSessionState::kNotCreated) {
+ if (!chromeos_cdm_context_) {
+ LOG(ERROR) << "Cannot create protected session w/out ChromeOsCdmContext";
+ protected_session_state_ = ProtectedSessionState::kFailed;
+ return protected_session_state_;
+ }
+ // We need to start creating the protected session; the first step is
+ // getting the HW config data from the daemon.
+ chromeos::ChromeOsCdmFactory::GetHwConfigData(BindToCurrentLoop(
+ base::BindOnce(&VaapiVideoDecoderDelegate::OnGetHwConfigData,
+ weak_factory_.GetWeakPtr())));
+ protected_session_state_ = ProtectedSessionState::kInProcess;
+ return protected_session_state_;
+ }
+
+ DCHECK_EQ(protected_session_state_, ProtectedSessionState::kCreated);
+
+ if (encryption_scheme_ == EncryptionScheme::kCenc) {
+ crypto_params->encryption_type = full_sample
+ ? VA_ENCRYPTION_TYPE_FULLSAMPLE_CTR
+ : VA_ENCRYPTION_TYPE_SUBSAMPLE_CTR;
+ } else {
+ crypto_params->encryption_type = full_sample
+ ? VA_ENCRYPTION_TYPE_FULLSAMPLE_CBC
+ : VA_ENCRYPTION_TYPE_SUBSAMPLE_CBC;
+ }
+
+ // For multi-slice we may already have segment information in here, so
+ // calculate the current offset.
+ size_t offset = 0;
+ for (const auto& segment : *segments)
+ offset += segment.segment_length;
+
+ if (subsamples.empty() ||
+ (subsamples.size() == 1 && subsamples[0].cypher_bytes == 0)) {
+ // For some reason the driver still requires the crypto params here, so
+ // specify them and indicate that the entire content is clear.
+ VAEncryptionSegmentInfo segment_info = {};
+ segment_info.segment_start_offset = offset;
+ segment_info.segment_length = segment_info.init_byte_length = size;
+ if (decrypt_config_) {
+ // We need to specify the IV even if the segment is clear.
+ memcpy(segment_info.aes_cbc_iv_or_ctr, decrypt_config_->iv().data(),
+ DecryptConfig::kDecryptionKeySize);
+ }
+ segments->emplace_back(std::move(segment_info));
+ crypto_params->num_segments++;
+ crypto_params->segment_info = &segments->front();
+ return protected_session_state_;
+ }
+
+ DCHECK(decrypt_config_);
+ // We also need to make sure we have the key data for the active
+ // DecryptConfig now that the protected session exists.
+ if (!base::Contains(hw_key_data_map_, decrypt_config_->key_id())) {
+ DVLOG(1) << "Looking up the key data for: " << decrypt_config_->key_id();
+ chromeos_cdm_context_->GetHwKeyData(
+ decrypt_config_.get(), hw_identifier_,
+ BindToCurrentLoop(base::BindOnce(
+ &VaapiVideoDecoderDelegate::OnGetHwKeyData,
+ weak_factory_.GetWeakPtr(), decrypt_config_->key_id())));
+ last_key_retrieval_time_ =
+ base::DefaultTickClock::GetInstance()->NowTicks();
+ // Don't change our state here: the session is created, but we return
+ // kInProcess for now to trigger a wait/retry state.
+ }
+
+ // We may also need to request the key in order to update key usage times in
+ // OEMCrypto. We do care about the return value, because it will indicate key
+ // validity for us.
+ if (base::DefaultTickClock::GetInstance()->NowTicks() -
+ last_key_retrieval_time_ >
+ kKeyRetrievalMaxPeriod) {
+ chromeos_cdm_context_->GetHwKeyData(
+ decrypt_config_.get(), hw_identifier_,
+ BindToCurrentLoop(base::BindOnce(
+ &VaapiVideoDecoderDelegate::OnGetHwKeyData,
+ weak_factory_.GetWeakPtr(), decrypt_config_->key_id())));
+
+ last_key_retrieval_time_ =
+ base::DefaultTickClock::GetInstance()->NowTicks();
+ }
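
The block above re-requests the key data whenever the last retrieval is older
than kKeyRetrievalMaxPeriod, keeping OEMCrypto's key-usage times fresh. A
sketch of the same gate with std::chrono standing in for base::TimeTicks and
the tick clock:

#include <chrono>

using Clock = std::chrono::steady_clock;
constexpr std::chrono::minutes kKeyRetrievalMaxPeriod{1};

// True when the cached key data is stale and should be re-requested.
bool ShouldRefreshKey(Clock::time_point last_retrieval,
                      Clock::time_point now) {
  return now - last_retrieval > kKeyRetrievalMaxPeriod;
}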
+
+ crypto_params->num_segments += subsamples.size();
+ if (decrypt_config_->HasPattern()) {
+ if (subsamples.size() != 1) {
+ LOG(ERROR) << "Need single subsample for encryption pattern";
+ protected_session_state_ = ProtectedSessionState::kFailed;
+ return protected_session_state_;
+ }
+ crypto_params->blocks_stripe_encrypted =
+ decrypt_config_->encryption_pattern()->crypt_byte_block();
+ crypto_params->blocks_stripe_clear =
+ decrypt_config_->encryption_pattern()->skip_byte_block();
+ VAEncryptionSegmentInfo segment_info = {};
+ segment_info.segment_start_offset = offset;
+ segment_info.init_byte_length = subsamples[0].clear_bytes;
+ segment_info.segment_length =
+ subsamples[0].clear_bytes + subsamples[0].cypher_bytes;
+ memcpy(segment_info.aes_cbc_iv_or_ctr, decrypt_config_->iv().data(),
+ DecryptConfig::kDecryptionKeySize);
+ segments->emplace_back(std::move(segment_info));
+ } else {
+ size_t total_cypher_size = 0;
+ std::vector<uint8_t> iv(DecryptConfig::kDecryptionKeySize);
+ iv.assign(decrypt_config_->iv().begin(), decrypt_config_->iv().end());
+ for (const auto& entry : subsamples) {
+ VAEncryptionSegmentInfo segment_info = {};
+ segment_info.segment_start_offset = offset;
+ segment_info.segment_length = entry.clear_bytes + entry.cypher_bytes;
+ size_t partial_block_size =
+ (DecryptConfig::kDecryptionKeySize -
+ (total_cypher_size % DecryptConfig::kDecryptionKeySize)) %
+ DecryptConfig::kDecryptionKeySize;
+ segment_info.partial_aes_block_size = partial_block_size;
+ memcpy(segment_info.aes_cbc_iv_or_ctr, iv.data(),
+ DecryptConfig::kDecryptionKeySize);
+ if (entry.cypher_bytes > partial_block_size) {
+ // If we are finishing a block, increment the counter.
+ if (partial_block_size)
+ ctr128_inc64(iv.data());
+ // Increment the counter for every complete block we are adding.
+ for (size_t block = 0;
+ block < (entry.cypher_bytes - partial_block_size) /
+ DecryptConfig::kDecryptionKeySize;
+ ++block)
+ ctr128_inc64(iv.data());
+ }
+ total_cypher_size += entry.cypher_bytes;
+ segment_info.init_byte_length = entry.clear_bytes;
+ offset += entry.clear_bytes + entry.cypher_bytes;
+ segments->emplace_back(std::move(segment_info));
+ }
+ }
+ memcpy(crypto_params->wrapped_decrypt_blob,
+ hw_key_data_map_[decrypt_config_->key_id()].data(),
+ DecryptConfig::kDecryptionKeySize);
+ crypto_params->key_blob_size = DecryptConfig::kDecryptionKeySize;
+ crypto_params->segment_info = &segments->front();
+ return protected_session_state_;
+}
+#endif // if BUILDFLAG(IS_CHROMEOS_ASH)
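
The CTR subsample loop above tracks, per segment, how many cipher bytes are
needed to finish the previous partial AES block. A standalone check of that
partial_aes_block_size formula, with kBlock standing in for
DecryptConfig::kDecryptionKeySize:

#include <cassert>
#include <cstddef>

constexpr size_t kBlock = 16;  // AES block size

size_t PartialBlockSize(size_t total_cipher_so_far) {
  return (kBlock - (total_cipher_so_far % kBlock)) % kBlock;
}

int main() {
  assert(PartialBlockSize(0) == 0);    // block-aligned: nothing to finish
  assert(PartialBlockSize(32) == 0);   // still aligned
  assert(PartialBlockSize(20) == 12);  // 4 bytes into a block: 12 remain
  return 0;
}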
+
+bool VaapiVideoDecoderDelegate::NeedsProtectedSessionRecovery() {
+ if (!IsEncryptedSession() || !vaapi_wrapper_->IsProtectedSessionDead() ||
+ performing_recovery_) {
+ return false;
+ }
+
+ LOG(WARNING) << "Protected session loss detected, initiating recovery";
+ protected_session_state_ = ProtectedSessionState::kNeedsRecovery;
+ hw_key_data_map_.clear();
+ hw_identifier_.clear();
+ vaapi_wrapper_->DestroyProtectedSession();
+ return true;
+}
+
+void VaapiVideoDecoderDelegate::ProtectedDecodedSucceeded() {
+ performing_recovery_ = false;
+}
+
+bool VaapiVideoDecoderDelegate::FillDecodeScalingIfNeeded(
+ const gfx::Rect& decode_visible_rect,
+ VASurfaceID decode_surface_id,
+ scoped_refptr<VASurface> output_surface,
+ VAProcPipelineParameterBuffer* proc_buffer) {
+ if (!vaapi_dec_->IsScalingDecode())
+ return false;
+
+ // Submit the buffer for the inline decode scaling.
+ memset(proc_buffer, 0, sizeof(*proc_buffer));
+ src_region_.x = base::checked_cast<int16_t>(decode_visible_rect.x());
+ src_region_.y = base::checked_cast<int16_t>(decode_visible_rect.y());
+ src_region_.width = base::checked_cast<uint16_t>(decode_visible_rect.width());
+ src_region_.height =
+ base::checked_cast<uint16_t>(decode_visible_rect.height());
+
+ gfx::Rect scaled_visible_rect = vaapi_dec_->GetOutputVisibleRect(
+ decode_visible_rect, output_surface->size());
+ dst_region_.x = base::checked_cast<int16_t>(scaled_visible_rect.x());
+ dst_region_.y = base::checked_cast<int16_t>(scaled_visible_rect.y());
+ dst_region_.width = base::checked_cast<uint16_t>(scaled_visible_rect.width());
+ dst_region_.height =
+ base::checked_cast<uint16_t>(scaled_visible_rect.height());
+
+ proc_buffer->surface_region = &src_region_;
+ proc_buffer->output_region = &dst_region_;
+
+ scaled_surface_id_ = output_surface->id();
+ proc_buffer->additional_outputs = &scaled_surface_id_;
+ proc_buffer->num_additional_outputs = 1;
+ proc_buffer->surface = decode_surface_id;
+ return true;
+}
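
FillDecodeScalingIfNeeded() narrows rect coordinates into the 16-bit fields of
the VA region structs with base::checked_cast, which crashes rather than
silently truncating on overflow. A standalone stand-in using C++20's
std::in_range, throwing where Chromium would CHECK:

#include <cstdint>
#include <stdexcept>
#include <utility>

template <typename Dst, typename Src>
Dst CheckedCast(Src value) {
  if (!std::in_range<Dst>(value))
    throw std::out_of_range("value does not fit the destination type");
  return static_cast<Dst>(value);
}

// e.g. CheckedCast<uint16_t>(width) succeeds for 0..65535 and throws for
// anything else, instead of wrapping around.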
+
+void VaapiVideoDecoderDelegate::OnGetHwConfigData(
+ bool success,
+ const std::vector<uint8_t>& config_data) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ if (!success) {
+ protected_session_state_ = ProtectedSessionState::kFailed;
+ on_protected_session_update_cb_.Run(false);
+ return;
+ }
+
+ hw_identifier_.clear();
+ if (!vaapi_wrapper_->CreateProtectedSession(encryption_scheme_, config_data,
+ &hw_identifier_)) {
+ LOG(ERROR) << "Failed to setup protected session";
+ protected_session_state_ = ProtectedSessionState::kFailed;
+ on_protected_session_update_cb_.Run(false);
+ return;
+ }
+
+ protected_session_state_ = ProtectedSessionState::kCreated;
+ on_protected_session_update_cb_.Run(true);
+}
+
+void VaapiVideoDecoderDelegate::OnGetHwKeyData(
+ const std::string& key_id,
+ Decryptor::Status status,
+ const std::vector<uint8_t>& key_data) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ // There's a special case here where we are updating usage times/checking on
+ // key validity, and in that case the key is already in the map.
+ if (base::Contains(hw_key_data_map_, key_id)) {
+ if (status == Decryptor::Status::kSuccess)
+ return;
+ // This key is no longer valid, decryption will fail, so stop playback
+ // now. This key should have been renewed by the CDM instead.
+ LOG(ERROR) << "CDM has lost key information, stopping playback";
+ protected_session_state_ = ProtectedSessionState::kFailed;
+ on_protected_session_update_cb_.Run(false);
+ return;
+ }
+ if (status != Decryptor::Status::kSuccess) {
+ // On failure, report it. If we're just waiting for a key, do nothing; we
+ // will be called again when a message arrives about key availability
+ // changing.
+ if (status == Decryptor::Status::kNoKey) {
+ DVLOG(1) << "HW did not have key information, keep waiting for it";
+ return;
+ }
+ LOG(ERROR) << "Failure getting the key data, fail overall";
+ protected_session_state_ = ProtectedSessionState::kFailed;
+ on_protected_session_update_cb_.Run(false);
+ return;
+ }
+ if (key_data.size() != DecryptConfig::kDecryptionKeySize) {
+ LOG(ERROR) << "Invalid key size returned of: " << key_data.size();
+ protected_session_state_ = ProtectedSessionState::kFailed;
+ on_protected_session_update_cb_.Run(false);
+ return;
+ }
+ hw_key_data_map_[key_id] = key_data;
+ on_protected_session_update_cb_.Run(true);
+}
+
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decoder_delegate.h b/chromium/media/gpu/vaapi/vaapi_video_decoder_delegate.h
index 56d9a43fa5d..9d46000ba9d 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decoder_delegate.h
+++ b/chromium/media/gpu/vaapi/vaapi_video_decoder_delegate.h
@@ -5,23 +5,55 @@
#ifndef MEDIA_GPU_VAAPI_VAAPI_VIDEO_DECODER_DELEGATE_H_
#define MEDIA_GPU_VAAPI_VAAPI_VIDEO_DECODER_DELEGATE_H_
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "base/callback.h"
+#include "base/callback_helpers.h"
#include "base/memory/scoped_refptr.h"
+#include "base/memory/weak_ptr.h"
#include "base/sequence_checker.h"
+#include "base/time/time.h"
+#include "build/chromeos_buildflags.h"
+#include "media/base/decryptor.h"
+#include "media/base/encryption_scheme.h"
+#include "media/base/subsample_entry.h"
+#include "third_party/libva_protected_content/va_protected_content.h"
+#include "ui/gfx/geometry/rect.h"
+
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+#include "chromeos/components/cdm_factory_daemon/chromeos_cdm_context.h"
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
namespace media {
+class CdmContext;
template <class T>
class DecodeSurfaceHandler;
+class DecryptConfig;
class VaapiWrapper;
class VASurface;
// The common part of each AcceleratedVideoDecoder's Accelerator for VA-API.
// This class allows clients to reset VaapiWrapper in case of a profile change.
// DecodeSurfaceHandler must stay alive for the lifetime of this class.
+// This also handles all of the shared functionality relating to protected
+// sessions in VA-API.
class VaapiVideoDecoderDelegate {
public:
- VaapiVideoDecoderDelegate(DecodeSurfaceHandler<VASurface>* const vaapi_dec,
- scoped_refptr<VaapiWrapper> vaapi_wrapper);
+ // Callback used in protected mode to indicate that the decoder, if it was
+ // waiting, should resume. If |success| is false, decoding should fail.
+ using ProtectedSessionUpdateCB = base::RepeatingCallback<void(bool success)>;
+
+ VaapiVideoDecoderDelegate(
+ DecodeSurfaceHandler<VASurface>* const vaapi_dec,
+ scoped_refptr<VaapiWrapper> vaapi_wrapper,
+ ProtectedSessionUpdateCB on_protected_session_update_cb,
+ CdmContext* cdm_context,
+ EncryptionScheme encryption_scheme = EncryptionScheme::kUnencrypted);
virtual ~VaapiVideoDecoderDelegate();
void set_vaapi_wrapper(scoped_refptr<VaapiWrapper> vaapi_wrapper);
@@ -31,12 +63,107 @@ class VaapiVideoDecoderDelegate {
VaapiVideoDecoderDelegate& operator=(const VaapiVideoDecoderDelegate&) =
delete;
+ // Should be called when kTryAgain is returned from decoding to determine if
+ // we should try to recover the session by sending a kDecodeStateLost message
+ // up through the WaitingCB in the decoder. Returns true if we should send the
+ // kDecodeStateLost message.
+ bool HasInitiatedProtectedRecovery();
+
protected:
+ // Sets the |decrypt_config| currently active for this stream. Returns true if
+ // that config is compatible with the existing one (for example, you can't
+ // change encryption schemes midstream).
+ bool SetDecryptConfig(std::unique_ptr<DecryptConfig> decrypt_config);
+
+ enum class ProtectedSessionState {
+ kNotCreated,
+ kInProcess,
+ kCreated,
+ kNeedsRecovery,
+ kFailed
+ };
+
+ // Ensures we have a protected session set up and attached to the active
+ // |vaapi_wrapper_| we are using; the returned value is the state we are in
+ // when this call returns. |full_sample| indicates whether we are using full
+ // sample encryption and must remain consistent for a session. If everything
+ // is set up for a protected session, this fills in the |crypto_params|.
+ // |segments| must retain its memory until the frame is submitted.
+ // |subsamples| is for the current slice. |size| is the size of the slice
+ // data. This should be called if IsEncrypted() is true even if the current
+ // data is not encrypted (i.e. |subsamples| is empty).
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ ProtectedSessionState SetupDecryptDecode(
+ bool full_sample,
+ size_t size,
+ VAEncryptionParameters* crypto_params,
+ std::vector<VAEncryptionSegmentInfo>* segments,
+ const std::vector<SubsampleEntry>& subsamples);
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+
+ // Returns true if we are handling encrypted content, in which case
+ // SetupDecryptDecode() should be called for every slice.
+ bool IsEncryptedSession() const {
+ return encryption_scheme_ != EncryptionScheme::kUnencrypted;
+ }
+
+ // Should be called by subclasses if a failure occurs during actual decoding.
+ // This will check if we are using protected mode and it's in a state that
+ // can be recovered which should resolve the error. If this method returns
+ // true, then the caller should return kTryAgain from the accelerator to kick
+ // off the rest of the recovery process.
+ bool NeedsProtectedSessionRecovery();
+
+ // Should be invoked by subclasses if they successfully decoded protected
+ // video. This is so we can reset our tracker to indicate we successfully
+ // recovered from protected session loss. It is fine to call this method on
+ // every successful protected decode.
+ void ProtectedDecodedSucceeded();
+
+ // Fills *|proc_buffer| with the proper parameters for decode scaling and
+ // returns true if that buffer was filled in and should be submitted, false
+ // otherwise.
+ bool FillDecodeScalingIfNeeded(const gfx::Rect& decode_visible_rect,
+ VASurfaceID decode_surface_id,
+ scoped_refptr<VASurface> output_surface,
+ VAProcPipelineParameterBuffer* proc_buffer);
+
// Both owned by caller.
DecodeSurfaceHandler<VASurface>* const vaapi_dec_;
scoped_refptr<VaapiWrapper> vaapi_wrapper_;
SEQUENCE_CHECKER(sequence_checker_);
+
+ private:
+ void OnGetHwConfigData(bool success, const std::vector<uint8_t>& config_data);
+ void OnGetHwKeyData(const std::string& key_id,
+ Decryptor::Status status,
+ const std::vector<uint8_t>& key_data);
+
+ // All members below pertain to protected content playback.
+ ProtectedSessionUpdateCB on_protected_session_update_cb_;
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ chromeos::ChromeOsCdmContext* chromeos_cdm_context_{nullptr}; // Not owned.
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+ EncryptionScheme encryption_scheme_;
+ ProtectedSessionState protected_session_state_;
+ std::unique_ptr<DecryptConfig> decrypt_config_;
+ std::vector<uint8_t> hw_identifier_;
+ std::map<std::string, std::vector<uint8_t>> hw_key_data_map_;
+ base::TimeTicks last_key_retrieval_time_;
+ // We need to hold onto these across a call since the VABuffer will reference
+ // their pointers, so declare them here to allow for that. These are used in
+ // the decode scaling operation.
+ VARectangle src_region_;
+ VARectangle dst_region_;
+ VASurfaceID scaled_surface_id_;
+
+ // Set to true if we indicated we should try to recover from protected
+ // session loss; used so that we don't retry recovery in an endless loop.
+ bool performing_recovery_;
+
+ base::WeakPtrFactory<VaapiVideoDecoderDelegate> weak_factory_{this};
};
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc
index 6dee785a05c..59225af8ae6 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc
@@ -28,7 +28,6 @@
#include "base/threading/thread_task_runner_handle.h"
#include "base/trace_event/trace_event.h"
#include "build/build_config.h"
-#include "gpu/ipc/service/gpu_memory_buffer_factory.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/format_utils.h"
#include "media/base/unaligned_shared_memory.h"
@@ -67,13 +66,11 @@ constexpr unsigned int kTargetBitratePercentage = 90;
// requirements.
gfx::Size GetInputFrameSize(VideoPixelFormat format,
const gfx::Size& visible_size) {
- std::unique_ptr<::gpu::GpuMemoryBufferFactory> gpu_memory_buffer_factory =
- ::gpu::GpuMemoryBufferFactory::CreateNativeType(nullptr);
// Get a VideoFrameLayout of a graphic buffer with the same gfx::BufferUsage
// as camera stack.
base::Optional<VideoFrameLayout> layout = GetPlatformVideoFrameLayout(
- gpu_memory_buffer_factory.get(), format, visible_size,
- gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE);
+ /*gpu_memory_buffer_factory=*/nullptr, format, visible_size,
+ gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE);
if (!layout || layout->planes().empty()) {
VLOGF(1) << "Failed to allocate VideoFrameLayout";
return gfx::Size();
@@ -234,7 +231,7 @@ VaapiVideoEncodeAccelerator::VaapiVideoEncodeAccelerator()
// TODO(akahuang): Change to use SequencedTaskRunner to see if the
// performance is affected.
encoder_task_runner_(base::ThreadPool::CreateSingleThreadTaskRunner(
- {base::TaskShutdownBehavior::SKIP_ON_SHUTDOWN},
+ {base::TaskShutdownBehavior::SKIP_ON_SHUTDOWN, base::MayBlock()},
base::SingleThreadTaskRunnerThreadMode::DEDICATED)) {
VLOGF(2);
DCHECK_CALLED_ON_VALID_SEQUENCE(child_sequence_checker_);
@@ -294,7 +291,7 @@ bool VaapiVideoEncodeAccelerator::Initialize(const Config& config,
}
if (config.storage_type.value_or(Config::StorageType::kShmem) ==
- Config::StorageType::kDmabuf) {
+ Config::StorageType::kGpuMemoryBuffer) {
#if !defined(USE_OZONE)
VLOGF(1) << "Native mode is only available on OZONE platform.";
return false;
@@ -338,7 +335,7 @@ bool VaapiVideoEncodeAccelerator::Initialize(const Config& config,
codec == kCodecVP9 ? VaapiWrapper::kEncodeConstantQuantizationParameter
: VaapiWrapper::kEncode;
vaapi_wrapper_ = VaapiWrapper::CreateForVideoCodec(
- mode, config.output_profile,
+ mode, config.output_profile, EncryptionScheme::kUnencrypted,
base::BindRepeating(&ReportVaapiErrorToUMA,
"Media.VaapiVideoEncodeAccelerator.VAAPIError"));
if (!vaapi_wrapper_) {
@@ -731,6 +728,7 @@ std::unique_ptr<VaapiEncodeJob> VaapiVideoEncodeAccelerator::CreateEncodeJob(
if (!vpp_vaapi_wrapper_) {
vpp_vaapi_wrapper_ = VaapiWrapper::Create(
VaapiWrapper::kVideoProcess, VAProfileNone,
+ EncryptionScheme::kUnencrypted,
base::BindRepeating(
&ReportVaapiErrorToUMA,
"Media.VaapiVideoEncodeAccelerator.Vpp.VAAPIError"));
@@ -831,12 +829,11 @@ void VaapiVideoEncodeAccelerator::EncodePendingInputs() {
input_queue_.pop();
- if (job) {
- if (!encoder_->PrepareEncodeJob(job.get())) {
- NOTIFY_ERROR(kPlatformFailureError, "Failed preparing an encode job.");
- return;
- }
+ if (job && !encoder_->PrepareEncodeJob(job.get())) {
+ NOTIFY_ERROR(kPlatformFailureError, "Failed preparing an encode job.");
+ return;
}
+
TRACE_EVENT0("media,gpu", "VAVEA::FromExecuteToReturn");
if (job) {
TRACE_EVENT0("media,gpu", "VAVEA::Execute");
diff --git a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator_unittest.cc b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator_unittest.cc
index d2d222e0866..440d9383f87 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator_unittest.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator_unittest.cc
@@ -182,7 +182,6 @@ class VaapiVideoEncodeAcceleratorTest
void InitializeSequenceForVP9(const VideoEncodeAccelerator::Config& config) {
base::RunLoop run_loop;
- base::Closure quit_closure = run_loop.QuitClosure();
::testing::InSequence s;
constexpr auto kBitrateControl =
AcceleratedVideoEncoder::BitrateControl::kConstantQuantizationParameter;
@@ -208,14 +207,12 @@ class VaapiVideoEncodeAcceleratorTest
}));
EXPECT_CALL(client_, NotifyEncoderInfoChange(MatchesEncoderInfo(
config.spatial_layers[0].num_of_temporal_layers)))
- .WillOnce([&quit_closure]() { quit_closure.Run(); });
+ .WillOnce(RunClosure(run_loop.QuitClosure()));
ASSERT_TRUE(InitializeVideoEncodeAccelerator(config));
run_loop.Run();
}
void EncodeSequenceForVP9(bool use_temporal_layer_encoding) {
- base::RunLoop run_loop;
- base::Closure quit_closure = run_loop.QuitClosure();
::testing::InSequence s;
constexpr VABufferID kCodedBufferId = 123;
@@ -284,11 +281,13 @@ class VaapiVideoEncodeAcceleratorTest
}));
constexpr int32_t kBitstreamId = 12;
+ base::RunLoop run_loop;
+
EXPECT_CALL(client_, BitstreamBufferReady(kBitstreamId,
MatchesBitstreamBufferMetadata(
kEncodedChunkSize, false,
use_temporal_layer_encoding)))
- .WillOnce(RunClosure(quit_closure));
+ .WillOnce(RunClosure(run_loop.QuitClosure()));
auto region = base::UnsafeSharedMemoryRegion::Create(output_buffer_size_);
ASSERT_TRUE(region.IsValid());
diff --git a/chromium/media/gpu/vaapi/vaapi_wrapper.cc b/chromium/media/gpu/vaapi/vaapi_wrapper.cc
index d31fae09b17..5cac28746eb 100644
--- a/chromium/media/gpu/vaapi/vaapi_wrapper.cc
+++ b/chromium/media/gpu/vaapi/vaapi_wrapper.cc
@@ -6,12 +6,14 @@
#include <dlfcn.h>
#include <string.h>
+#include <sys/types.h>
#include <unistd.h>
#include <va/va.h>
#include <va/va_drm.h>
#include <va/va_drmcommon.h>
#include <va/va_str.h>
#include <va/va_version.h>
+#include <xf86drm.h>
#include <algorithm>
#include <string>
@@ -32,6 +34,7 @@
#include "base/numerics/safe_conversions.h"
#include "base/posix/eintr_wrapper.h"
#include "base/stl_util.h"
+#include "base/strings/pattern.h"
#include "base/strings/string_util.h"
#include "base/system/sys_info.h"
#include "base/trace_event/trace_event.h"
@@ -49,7 +52,9 @@
#include "media/gpu/vaapi/va_stubs.h"
#include "gpu/config/gpu_driver_bug_workarounds.h"
+#include "third_party/libva_protected_content/va_protected_content.h"
#include "third_party/libyuv/include/libyuv.h"
+#include "third_party/minigbm/src/external/i915_drm.h"
#include "ui/gfx/buffer_format_util.h"
#include "ui/gfx/buffer_types.h"
#include "ui/gfx/geometry/rect.h"
@@ -74,6 +79,11 @@ extern "C" {
#include "ui/ozone/public/surface_factory_ozone.h"
#endif
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+#include <va/va_prot.h>
+using media_gpu_vaapi::kModuleVa_prot;
+#endif
+
using media_gpu_vaapi::kModuleVa;
using media_gpu_vaapi::kModuleVa_drm;
#if defined(USE_X11)
@@ -118,8 +128,15 @@ enum class VaapiFunctions {
kVASyncSurface = 22,
kVATerminate = 23,
kVAUnmapBuffer = 24,
+ // Protected mode functions below.
+ kVACreateProtectedSession = 25,
+ kVADestroyProtectedSession = 26,
+ kVAAttachProtectedSession = 27,
+ kVADetachProtectedSession = 28,
+ kVAProtectedSessionHwUpdate_Deprecated = 29,
+ kVAProtectedSessionExecute = 30,
// Anything else is captured in this last entry.
- kOtherVAFunction = 25,
+ kOtherVAFunction = 31,
kMaxValue = kOtherVAFunction,
};
@@ -155,6 +172,12 @@ constexpr std::array<const char*,
"vaSyncSurface",
"vaTerminate",
"vaUnmapBuffer",
+ "vaCreateProtectedSession",
+ "vaDestroyProtectedSession",
+ "vaAttachProtectedSession",
+ "vaDetachProtectedSession",
+ "vaProtectedSessionHwUpdate (Deprecated)",
+ "vaProtectedSessionExecute",
"Other VA function"};
// Translates |function| into a human readable string for logging.
@@ -189,28 +212,6 @@ const char* VaapiFunctionName(VaapiFunctions function) {
namespace {
-uint32_t BufferFormatToVAFourCC(gfx::BufferFormat fmt) {
- switch (fmt) {
- case gfx::BufferFormat::BGRX_8888:
- return VA_FOURCC_BGRX;
- case gfx::BufferFormat::BGRA_8888:
- return VA_FOURCC_BGRA;
- case gfx::BufferFormat::RGBX_8888:
- return VA_FOURCC_RGBX;
- case gfx::BufferFormat::RGBA_8888:
- return VA_FOURCC_RGBA;
- case gfx::BufferFormat::YVU_420:
- return VA_FOURCC_YV12;
- case gfx::BufferFormat::YUV_420_BIPLANAR:
- return VA_FOURCC_NV12;
- case gfx::BufferFormat::P010:
- return VA_FOURCC_P010;
- default:
- NOTREACHED() << gfx::BufferFormatToString(fmt);
- return 0;
- }
-}
-
media::VAImplementation VendorStringToImplementationType(
const std::string& va_vendor_string) {
if (base::StartsWith(va_vendor_string, "Mesa Gallium driver",
@@ -234,6 +235,25 @@ namespace {
// VAEntrypoint is an enumeration starting from 1, but has no "invalid" value.
constexpr VAEntrypoint kVAEntrypointInvalid = static_cast<VAEntrypoint>(0);
+// Returns true if the SoC has a Gen8 GPU. CPU model ID's are referenced from
+// the following file in the kernel source: arch/x86/include/asm/intel-family.h.
+bool IsGen8Gpu() {
+ constexpr int kPentiumAndLaterFamily = 0x06;
+ constexpr int kBroadwellCoreModelId = 0x3D;
+ constexpr int kBroadwellGT3EModelId = 0x47;
+ constexpr int kBroadwellXModelId = 0x4F;
+ constexpr int kBroadwellXeonDModelId = 0x56;
+ constexpr int kBraswellModelId = 0x4C;
+ static const base::NoDestructor<base::CPU> cpuid;
+ static const bool is_gen8_gpu = cpuid->family() == kPentiumAndLaterFamily &&
+ (cpuid->model() == kBroadwellCoreModelId ||
+ cpuid->model() == kBroadwellGT3EModelId ||
+ cpuid->model() == kBroadwellXModelId ||
+ cpuid->model() == kBroadwellXeonDModelId ||
+ cpuid->model() == kBraswellModelId);
+ return is_gen8_gpu;
+}
+
// Returns true if the SoC has a Gen9 GPU. CPU model ID's are referenced from
// the following file in the kernel source: arch/x86/include/asm/intel-family.h.
bool IsGen9Gpu() {
@@ -241,7 +261,7 @@ bool IsGen9Gpu() {
constexpr int kSkyLakeModelId = 0x5E;
constexpr int kSkyLake_LModelId = 0x4E;
constexpr int kApolloLakeModelId = 0x5c;
- static base::NoDestructor<base::CPU> cpuid;
+ static const base::NoDestructor<base::CPU> cpuid;
static const bool is_gen9_gpu = cpuid->family() == kPentiumAndLaterFamily &&
(cpuid->model() == kSkyLakeModelId ||
cpuid->model() == kSkyLake_LModelId ||
@@ -259,7 +279,7 @@ bool IsGen95Gpu() {
constexpr int kGeminiLakeModelId = 0x7A;
constexpr int kCometLakeModelId = 0xA5;
constexpr int kCometLake_LModelId = 0xA6;
- static base::NoDestructor<base::CPU> cpuid;
+ static const base::NoDestructor<base::CPU> cpuid;
static const bool is_gen95_gpu = cpuid->family() == kPentiumAndLaterFamily &&
(cpuid->model() == kKabyLakeModelId ||
cpuid->model() == kKabyLake_LModelId ||
@@ -269,6 +289,33 @@ bool IsGen95Gpu() {
return is_gen95_gpu;
}
+// Returns true if the intel hybrid driver is used for decoding |va_profile|.
+// https://github.com/intel/intel-hybrid-driver
+// Note that since the hybrid driver runs as a part of the i965 driver,
+// vaQueryVendorString() returns "Intel i965 driver".
+bool IsUsingHybridDriverForDecoding(VAProfile va_profile) {
+ // Note that Skylake (not gen8) also needs the hybrid decoder for VP9
+ // decoding. However, it is disabled today on ChromeOS
+ // (see crrev.com/c/390511).
+ return va_profile == VAProfileVP9Profile0 && IsGen8Gpu();
+}
+
+// Returns true if the SoC is considered a low power one, i.e. it's an Intel
+// Pentium, Celeron, or a Core Y-series. See go/intel-socs-101 or
+// https://www.intel.com/content/www/us/en/processors/processor-numbers.html.
+bool IsLowPowerIntelProcessor() {
+ constexpr int kPentiumAndLaterFamily = 0x06;
+ static const base::NoDestructor<base::CPU> cpuid;
+ static const bool is_core_y_processor =
+ base::MatchPattern(cpuid->cpu_brand(), "Intel(R) Core(TM) *Y CPU*");
+
+ static const bool is_low_power_intel =
+ cpuid->family() == kPentiumAndLaterFamily &&
+ (base::Contains(cpuid->cpu_brand(), "Pentium") ||
+ base::Contains(cpuid->cpu_brand(), "Celeron") || is_core_y_processor);
+ return is_low_power_intel;
+}
+
bool IsModeEncoding(VaapiWrapper::CodecMode mode) {
return mode == VaapiWrapper::CodecMode::kEncode ||
mode == VaapiWrapper::CodecMode::kEncodeConstantQuantizationParameter;
@@ -355,16 +402,12 @@ const ProfileCodecMap& GetProfileCodecMap() {
{VP9PROFILE_PROFILE2, VAProfileVP9Profile2},
// VaapiWrapper does not support Profile 3.
//{VP9PROFILE_PROFILE3, VAProfileVP9Profile3},
-#if BUILDFLAG(IS_ASH)
- // TODO(hiroh): Remove if-macro once libva for linux-chrome is upreved
- // to 2.9.0 or newer.
- // https://source.chromium.org/chromium/chromium/src/+/master:build/linux/sysroot_scripts/generated_package_lists/sid.amd64
{AV1PROFILE_PROFILE_MAIN, VAProfileAV1Profile0},
-#endif // BUILDFLAG(IS_ASH)
// VaapiWrapper does not support AV1 Profile 1.
// {AV1PROFILE_PROFILE_HIGH, VAProfileAV1Profile1},
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
{HEVCPROFILE_MAIN, VAProfileHEVCMain},
+ {HEVCPROFILE_MAIN10, VAProfileHEVCMain10},
#endif
});
return *kMediaToVAProfileMap;
@@ -381,9 +424,13 @@ VAProfile ProfileToVAProfile(VideoCodecProfile profile,
}
bool IsVAProfileSupported(VAProfile va_profile) {
- // VAProfileJPEGBaseline is always recognized but is not a video codec per se.
const auto& profiles = GetProfileCodecMap();
+ // VAProfileJPEGBaseline and VAProfileProtected are always recognized but are
+ // not video codecs per se.
return va_profile == VAProfileJPEGBaseline ||
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ va_profile == VAProfileProtected ||
+#endif
std::find_if(profiles.begin(), profiles.end(),
[va_profile](const auto& entry) {
return entry.second == va_profile;
@@ -392,13 +439,8 @@ bool IsVAProfileSupported(VAProfile va_profile) {
bool IsBlockedDriver(VaapiWrapper::CodecMode mode, VAProfile va_profile) {
if (!IsModeEncoding(mode)) {
-#if BUILDFLAG(IS_ASH)
- if (va_profile == VAProfileAV1Profile0 &&
- !base::FeatureList::IsEnabled(kVaapiAV1Decoder)) {
- return true;
- }
-#endif // BUILDFLAG(IS_ASH)
- return false;
+ return va_profile == VAProfileAV1Profile0 &&
+ !base::FeatureList::IsEnabled(kVaapiAV1Decoder);
}
// TODO(posciak): Remove once VP8 encoding is to be enabled by default.
@@ -433,6 +475,9 @@ class VADisplayState {
VAImplementation implementation_type() const { return implementation_type_; }
void SetDrmFd(base::PlatformFile fd) { drm_fd_.reset(HANDLE_EINTR(dup(fd))); }
+ base::ScopedFD GetDrmFd() {
+ return base::ScopedFD(HANDLE_EINTR(dup(drm_fd_.get())));
+ }
private:
friend class base::NoDestructor<VADisplayState>;
@@ -700,12 +745,15 @@ std::vector<VAEntrypoint> GetEntryPointsForProfile(const base::Lock* va_lock,
va_entrypoints.resize(num_va_entrypoints);
const std::vector<VAEntrypoint> kAllowedEntryPoints[] = {
- {VAEntrypointVLD}, // kDecode.
- {VAEntrypointEncSlice, VAEntrypointEncPicture,
- VAEntrypointEncSliceLP}, // kEncode.
- {VAEntrypointEncSlice,
- VAEntrypointEncSliceLP}, // kEncodeConstantQuantizationParameter.
- {VAEntrypointVideoProc} // kVideoProcess.
+ {VAEntrypointVLD}, // kDecode.
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ {VAEntrypointVLD, VAEntrypointProtectedContent}, // kDecodeProtected.
+#endif
+ {VAEntrypointEncSlice, VAEntrypointEncPicture,
+ VAEntrypointEncSliceLP}, // kEncode.
+ {VAEntrypointEncSlice,
+ VAEntrypointEncSliceLP}, // kEncodeConstantQuantizationParameter.
+ {VAEntrypointVideoProc} // kVideoProcess.
};
static_assert(base::size(kAllowedEntryPoints) == VaapiWrapper::kCodecModeMax,
"");
@@ -733,32 +781,54 @@ bool GetRequiredAttribs(const base::Lock* va_lock,
if (profile == VAProfileVP9Profile2 || profile == VAProfileVP9Profile3) {
required_attribs->push_back(
{VAConfigAttribRTFormat, VA_RT_FORMAT_YUV420_10BPP});
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ } else if (profile == VAProfileProtected) {
+ DCHECK_EQ(mode, VaapiWrapper::kDecodeProtected);
+ constexpr int kWidevineUsage = 0x1;
+ required_attribs->push_back(
+ {VAConfigAttribProtectedContentUsage, kWidevineUsage});
+ required_attribs->push_back(
+ {VAConfigAttribProtectedContentCipherAlgorithm, VA_PC_CIPHER_AES});
+ required_attribs->push_back(
+ {VAConfigAttribProtectedContentCipherBlockSize, VA_PC_BLOCK_SIZE_128});
+ required_attribs->push_back(
+ {VAConfigAttribProtectedContentCipherMode, VA_PC_CIPHER_MODE_CTR});
+#endif
} else {
required_attribs->push_back({VAConfigAttribRTFormat, VA_RT_FORMAT_YUV420});
}
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ if (mode == VaapiWrapper::kDecodeProtected && profile != VAProfileProtected) {
+ required_attribs->push_back(
+ {VAConfigAttribEncryption, VA_ENCRYPTION_TYPE_SUBSAMPLE_CTR});
+ required_attribs->push_back(
+ {VAConfigAttribDecProcessing, VA_DEC_PROCESSING});
+ }
+#endif
+
if (!IsModeEncoding(mode))
return true;
- if (profile != VAProfileJPEGBaseline) {
- if (mode == VaapiWrapper::kEncode)
- required_attribs->push_back({VAConfigAttribRateControl, VA_RC_CBR});
- if (mode == VaapiWrapper::kEncodeConstantQuantizationParameter)
- required_attribs->push_back({VAConfigAttribRateControl, VA_RC_CQP});
- }
+ if (profile == VAProfileJPEGBaseline)
+ return true;
+
+ if (mode == VaapiWrapper::kEncode)
+ required_attribs->push_back({VAConfigAttribRateControl, VA_RC_CBR});
+ if (mode == VaapiWrapper::kEncodeConstantQuantizationParameter)
+ required_attribs->push_back({VAConfigAttribRateControl, VA_RC_CQP});
constexpr VAProfile kSupportedH264VaProfilesForEncoding[] = {
VAProfileH264ConstrainedBaseline, VAProfileH264Main, VAProfileH264High};
// VAConfigAttribEncPackedHeaders is H.264 specific.
if (base::Contains(kSupportedH264VaProfilesForEncoding, profile)) {
- // Encode with Packed header if a driver supports.
- VAConfigAttrib attrib;
+ // Encode with packed headers if the driver supports them.
+ VAConfigAttrib attrib{};
attrib.type = VAConfigAttribEncPackedHeaders;
const VAStatus va_res =
vaGetConfigAttributes(va_display, profile, entrypoint, &attrib, 1);
if (va_res != VA_STATUS_SUCCESS) {
- LOG(ERROR) << "vaGetConfigAttributes failed for "
- << vaProfileStr(profile);
+ LOG(ERROR) << "vaGetConfigAttributes failed: " << vaProfileStr(profile);
return false;
}
@@ -903,9 +973,14 @@ void VASupportedProfiles::FillSupportedProfileInfos(base::Lock* va_lock,
GetSupportedVAProfiles(va_lock, va_display);
constexpr VaapiWrapper::CodecMode kWrapperModes[] = {
- VaapiWrapper::kDecode, VaapiWrapper::kEncode,
- VaapiWrapper::kEncodeConstantQuantizationParameter,
- VaapiWrapper::kVideoProcess};
+ VaapiWrapper::kDecode,
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ VaapiWrapper::kDecodeProtected,
+#endif
+ VaapiWrapper::kEncode,
+ VaapiWrapper::kEncodeConstantQuantizationParameter,
+ VaapiWrapper::kVideoProcess
+ };
static_assert(base::size(kWrapperModes) == VaapiWrapper::kCodecModeMax, "");
for (VaapiWrapper::CodecMode mode : kWrapperModes) {
@@ -941,6 +1016,7 @@ void VASupportedProfiles::FillSupportedProfileInfos(base::Lock* va_lock,
<< vaEntrypointStr(entrypoint);
continue;
}
+
supported_profile_infos.push_back(profile_info);
}
}
@@ -972,6 +1048,15 @@ bool VASupportedProfiles::FillProfileInfo_Locked(
},
va_display, va_config_id));
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ // Nothing further to query for protected profile.
+ if (va_profile == VAProfileProtected) {
+ profile_info->va_profile = va_profile;
+ profile_info->va_entrypoint = entrypoint;
+ return true;
+ }
+#endif
+
// Calls vaQuerySurfaceAttributes twice. The first time is to get the number
// of attributes to prepare the space and the second time is to get all
// attributes.
@@ -1021,6 +1106,41 @@ bool VASupportedProfiles::FillProfileInfo_Locked(
return false;
}
+ if (va_profile != VAProfileJPEGBaseline) {
+ // Deny unreasonably small resolutions (e.g. 0x0) for VA-API hardware video
+ // decode and encode acceleration.
+ profile_info->min_resolution.SetToMax(gfx::Size(16, 16));
+ if (entrypoint == VAEntrypointEncSliceLP ||
+ entrypoint == VAEntrypointEncSlice) {
+ // Using VA-API for accelerated encoding frames smaller than a certain
+ // size is less efficient than using a software encoder.
+ constexpr gfx::Size kMinEncodeResolution(320 + 1, 240 + 1);
+ if (!gfx::Rect(profile_info->min_resolution)
+ .Contains(gfx::Rect(kMinEncodeResolution))) {
+ profile_info->min_resolution.SetToMax(kMinEncodeResolution);
+ DVLOG(2) << "Setting the minimum supported encoding resolution to "
+ << profile_info->min_resolution.ToString() << " for "
+ << vaProfileStr(va_profile);
+ }
+ } else if (entrypoint == VAEntrypointVLD &&
+ IsUsingHybridDriverForDecoding(va_profile)) {
+ // Using the hybrid driver for accelerated decoding of frames smaller than
+ // a certain size is less efficient than using a software decoder. This
+ // minimum resolution is chosen because videos in Google Meet's tiled
+ // layout are QVGA.
+ constexpr gfx::Size kMinDecodeResolutionForHybridDecoder(320 + 1,
+ 240 + 1);
+ if (!gfx::Rect(profile_info->min_resolution)
+ .Contains(gfx::Rect(kMinDecodeResolutionForHybridDecoder))) {
+ profile_info->min_resolution.SetToMax(
+ kMinDecodeResolutionForHybridDecoder);
+ DVLOG(2) << "Setting the minimum supported decoding resolution to "
+ << profile_info->min_resolution.ToString() << " for "
+ << vaProfileStr(va_profile);
+ }
+ }
+ }
+
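The off-by-one-looking constants above are deliberate. As a worked example: with kMinEncodeResolution = (321, 241), a driver-reported minimum of 16x16 gives gfx::Rect(16, 16).Contains(gfx::Rect(321, 241)) == false, so min_resolution is raised to 321x241; streams of QVGA (320x240) and below are then rejected by the VA-API path and fall back to a software codec.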
// Create a new configuration to find the supported RT formats. We don't pass
// required attributes here because we want the driver to tell us all the
// supported RT formats.
@@ -1252,15 +1372,27 @@ VAImplementation VaapiWrapper::GetImplementationType() {
scoped_refptr<VaapiWrapper> VaapiWrapper::Create(
CodecMode mode,
VAProfile va_profile,
+ EncryptionScheme encryption_scheme,
const ReportErrorToUMACB& report_error_to_uma_cb) {
if (!VASupportedProfiles::Get().IsProfileSupported(mode, va_profile)) {
DVLOG(1) << "Unsupported va_profile: " << vaProfileStr(va_profile);
return nullptr;
}
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ // In protected decode |mode| we need to ensure that |va_profile| is supported
+ // (which we verified above) and that VAProfileProtected is supported, which
+ // we check here.
+ if (mode == kDecodeProtected &&
+ !VASupportedProfiles::Get().IsProfileSupported(mode,
+ VAProfileProtected)) {
+ LOG(ERROR) << "Protected content profile not supported";
+ return nullptr;
+ }
+#endif
scoped_refptr<VaapiWrapper> vaapi_wrapper(new VaapiWrapper(mode));
if (vaapi_wrapper->VaInitialize(report_error_to_uma_cb)) {
- if (vaapi_wrapper->Initialize(mode, va_profile))
+ if (vaapi_wrapper->Initialize(mode, va_profile, encryption_scheme))
return vaapi_wrapper;
}
LOG(ERROR) << "Failed to create VaapiWrapper for va_profile: "
@@ -1272,9 +1404,10 @@ scoped_refptr<VaapiWrapper> VaapiWrapper::Create(
scoped_refptr<VaapiWrapper> VaapiWrapper::CreateForVideoCodec(
CodecMode mode,
VideoCodecProfile profile,
+ EncryptionScheme encryption_scheme,
const ReportErrorToUMACB& report_error_to_uma_cb) {
const VAProfile va_profile = ProfileToVAProfile(profile, mode);
- return Create(mode, va_profile, report_error_to_uma_cb);
+ return Create(mode, va_profile, encryption_scheme, report_error_to_uma_cb);
}
// static
@@ -1294,10 +1427,7 @@ VaapiWrapper::GetSupportedEncodeProfiles() {
VideoEncodeAccelerator::SupportedProfile profile;
profile.profile = media_profile;
- // Using VA-API for accelerated encoding frames smaller than a certain
- // size is less efficient than using a software encoder.
- const gfx::Size kMinEncodeResolution = gfx::Size(320 + 1, 240 + 1);
- profile.min_resolution = kMinEncodeResolution;
+ profile.min_resolution = profile_info->min_resolution;
profile.max_resolution = profile_info->max_resolution;
// Maximum framerate of encoded profile. This value is an arbitrary
// limit and not taken from HW documentation.
@@ -1337,7 +1467,7 @@ VaapiWrapper::GetSupportedDecodeProfiles(
VideoDecodeAccelerator::SupportedProfile profile;
profile.profile = media_profile;
profile.max_resolution = profile_info->max_resolution;
- profile.min_resolution.SetSize(16, 16);
+ profile.min_resolution = profile_info->min_resolution;
profiles.push_back(profile);
}
return profiles;
@@ -1550,6 +1680,12 @@ VAEntrypoint VaapiWrapper::GetDefaultVaEntryPoint(CodecMode mode,
switch (mode) {
case VaapiWrapper::kDecode:
return VAEntrypointVLD;
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ case VaapiWrapper::kDecodeProtected:
+ if (profile == VAProfileProtected)
+ return VAEntrypointProtectedContent;
+ return VAEntrypointVLD;
+#endif
case VaapiWrapper::kEncode:
case VaapiWrapper::kEncodeConstantQuantizationParameter:
if (profile == VAProfileJPEGBaseline)
@@ -1585,6 +1721,29 @@ uint32_t VaapiWrapper::BufferFormatToVARTFormat(gfx::BufferFormat fmt) {
}
}
+// static
+uint32_t VaapiWrapper::BufferFormatToVAFourCC(gfx::BufferFormat fmt) {
+ switch (fmt) {
+ case gfx::BufferFormat::BGRX_8888:
+ return VA_FOURCC_BGRX;
+ case gfx::BufferFormat::BGRA_8888:
+ return VA_FOURCC_BGRA;
+ case gfx::BufferFormat::RGBX_8888:
+ return VA_FOURCC_RGBX;
+ case gfx::BufferFormat::RGBA_8888:
+ return VA_FOURCC_RGBA;
+ case gfx::BufferFormat::YVU_420:
+ return VA_FOURCC_YV12;
+ case gfx::BufferFormat::YUV_420_BIPLANAR:
+ return VA_FOURCC_NV12;
+ case gfx::BufferFormat::P010:
+ return VA_FOURCC_P010;
+ default:
+ NOTREACHED() << gfx::BufferFormatToString(fmt);
+ return 0;
+ }
+}
+
bool VaapiWrapper::CreateContextAndSurfaces(
unsigned int va_format,
const gfx::Size& size,
@@ -1633,6 +1792,214 @@ std::unique_ptr<ScopedVASurface> VaapiWrapper::CreateContextAndScopedVASurface(
return nullptr;
}
+bool VaapiWrapper::CreateProtectedSession(
+ EncryptionScheme encryption,
+ const std::vector<uint8_t>& hw_config,
+ std::vector<uint8_t>* hw_identifier_out) {
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ DCHECK_EQ(va_protected_config_id_, VA_INVALID_ID);
+ DCHECK_EQ(va_protected_session_id_, VA_INVALID_ID);
+ DCHECK(hw_identifier_out);
+ if (mode_ != kDecodeProtected) {
+ LOG(ERROR) << "Cannot attached protected context if not in protected mode";
+ return false;
+ }
+ if (encryption == EncryptionScheme::kUnencrypted) {
+ LOG(ERROR) << "Must specify encryption scheme for protected mode";
+ return false;
+ }
+ const VAProfile va_profile = VAProfileProtected;
+ const VAEntrypoint entrypoint = GetDefaultVaEntryPoint(mode_, va_profile);
+ {
+ base::AutoLock auto_lock(*va_lock_);
+ std::vector<VAConfigAttrib> required_attribs;
+ if (!GetRequiredAttribs(va_lock_, va_display_, mode_, va_profile,
+ entrypoint, &required_attribs)) {
+ LOG(ERROR) << "Failed getting required attributes for protected mode";
+ return false;
+ }
+ DCHECK(!required_attribs.empty());
+
+ // We need to adjust the attribute for encryption scheme.
+ for (auto& attrib : required_attribs) {
+ if (attrib.type == VAConfigAttribProtectedContentCipherMode) {
+ attrib.value = (encryption == EncryptionScheme::kCbcs)
+ ? VA_PC_CIPHER_MODE_CBC
+ : VA_PC_CIPHER_MODE_CTR;
+ }
+ }
+
+ VAStatus va_res = vaCreateConfig(
+ va_display_, va_profile, entrypoint, &required_attribs[0],
+ required_attribs.size(), &va_protected_config_id_);
+ VA_SUCCESS_OR_RETURN(va_res, VaapiFunctions::kVACreateConfig, false);
+
+ va_res = vaCreateProtectedSession(va_display_, va_protected_config_id_,
+ &va_protected_session_id_);
+ VA_SUCCESS_OR_RETURN(va_res, VaapiFunctions::kVACreateProtectedSession,
+ false);
+ }
+ // We have to hold the VABuffer outside of the lock because its destructor
+ // will acquire the lock when it goes out of scope. We also must do this after
+ // we create the protected session.
+ VAProtectedSessionExecuteBuffer hw_update_buf;
+ std::unique_ptr<ScopedVABuffer> hw_update = CreateVABuffer(
+ VAProtectedSessionExecuteBufferType, sizeof(hw_update_buf));
+ {
+ base::AutoLock auto_lock(*va_lock_);
+ constexpr size_t kHwIdentifierMaxSize = 64;
+ memset(&hw_update_buf, 0, sizeof(hw_update_buf));
+ hw_update_buf.function_id = VA_TEE_EXEC_TEE_FUNCID_HW_UPDATE;
+ hw_update_buf.input.data_size = hw_config.size();
+ hw_update_buf.input.data =
+ static_cast<void*>(const_cast<uint8_t*>(hw_config.data()));
+ hw_update_buf.output.max_data_size = kHwIdentifierMaxSize;
+ hw_identifier_out->resize(kHwIdentifierMaxSize);
+ hw_update_buf.output.data = hw_identifier_out->data();
+ if (!MapAndCopy_Locked(
+ hw_update->id(),
+ {hw_update->type(), hw_update->size(), &hw_update_buf})) {
+ LOG(ERROR) << "Failed mapping Execute buf";
+ return false;
+ }
+
+ VAStatus va_res = vaProtectedSessionExecute(
+ va_display_, va_protected_session_id_, hw_update->id());
+ VA_SUCCESS_OR_RETURN(va_res, VaapiFunctions::kVAProtectedSessionExecute,
+ false);
+
+ ScopedVABufferMapping mapping(va_lock_, va_display_, hw_update->id());
+ if (!mapping.IsValid()) {
+ LOG(ERROR) << "Failed mapping returned Execute buf";
+ return false;
+ }
+ auto* hw_update_buf_out =
+ reinterpret_cast<VAProtectedSessionExecuteBuffer*>(mapping.data());
+ if (!hw_update_buf_out->output.data_size) {
+ LOG(ERROR) << "Received empty HW identifier";
+ return false;
+ }
+ hw_identifier_out->resize(hw_update_buf_out->output.data_size);
+ memcpy(hw_identifier_out->data(), hw_update_buf_out->output.data,
+ hw_update_buf_out->output.data_size);
+
+ // If the decoding context already exists, attach the protected session
+ // now; otherwise this is done in CreateContext when the decoding context
+ // is created.
+ return MaybeAttachProtectedSession_Locked();
+ }
+#else
+ NOTIMPLEMENTED() << "Protected content mode not supported";
+ return false;
+#endif
+}
+
+// static
+uint32_t VaapiWrapper::GetProtectedInstanceID() {
+ // This sends an ioctl to query for the current instance ID of the protected
+ // system. This allows us to track if it was torn down and rebuilt which
+ // invalidates everything from prior instances.
+
+ // This is the struct/union used to set up the bitfields properly.
+ struct pxp_tag {
+ union {
+ uint32_t value;
+ struct {
+ uint32_t session_id : 8;
+ uint32_t instance_id : 8;
+ uint32_t enable : 1;
+ uint32_t hm : 1;
+ uint32_t reserved_1 : 1;
+ uint32_t sm : 1;
+ uint32_t reserved_2 : 12;
+ };
+ };
+ };
+ pxp_tag query_tag;
+ query_tag.value = 0;
+ query_tag.session_id = 0xf;
+ query_tag.instance_id = 1;
+ query_tag.enable = 1;
+ query_tag.hm = 1;
+
+ // Set up the structure for the ioctl.
+ struct pxp_info pxp_info;
+ pxp_info.action = 0; // PXP_ACTION_QUERY_PXP_TAG
+ pxp_info.query_pxp_tag.session_is_alive = 1;
+ pxp_info.query_pxp_tag.pxp_tag = query_tag.value;
+ struct drm_i915_pxp_ops pxp_ops = {.info_ptr = &pxp_info,
+ .info_size = sizeof(pxp_info)};
+
+ base::ScopedFD drm_fd = VADisplayState::Get()->GetDrmFd();
+ if (drmIoctl(drm_fd.get(), DRM_IOCTL_I915_PXP_OPS, &pxp_ops)) {
+ PLOG(ERROR) << "Error issuing ioctl to get protected instance ID";
+ // Zero indicates no protected instance; if we can't query it, behave as
+ // though we don't have one.
+ return 0;
+ }
+
+ if (!pxp_info.query_pxp_tag.session_is_alive) {
+ // The instance is not alive, so return as if there is no instance.
+ return 0;
+ }
+
+ // Put the result back in the bitfield so we can extract the instance ID.
+ query_tag.value = pxp_info.query_pxp_tag.pxp_tag;
+ DCHECK_NE(query_tag.instance_id, 0u);
+ return query_tag.instance_id;
+}
+
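A minimal sketch of how a client of GetProtectedInstanceID() might detect protected-context loss; the member and helper names are hypothetical:

    const uint32_t instance_id = VaapiWrapper::GetProtectedInstanceID();
    if (instance_id != last_instance_id_) {
      // The protected system was torn down and rebuilt (or is absent, if
      // |instance_id| is 0): protected surfaces created under the old ID
      // are no longer valid.
      InvalidateProtectedSurfaces();  // Hypothetical helper.
      last_instance_id_ = instance_id;
    }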
+bool VaapiWrapper::IsProtectedSessionDead() {
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ if (va_protected_session_id_ == VA_INVALID_ID)
+ return false;
+
+ uint8_t alive;
+ VAProtectedSessionExecuteBuffer tee_exec_buf = {};
+ tee_exec_buf.function_id = VA_TEE_EXEC_TEE_FUNCID_IS_SESSION_ALIVE;
+ tee_exec_buf.input.data_size = 0;
+ tee_exec_buf.input.data = nullptr;
+ tee_exec_buf.output.data_size = sizeof(alive);
+ tee_exec_buf.output.data = &alive;
+
+ base::AutoLock auto_lock(*va_lock_);
+ VABufferID buf_id;
+ VAStatus va_res =
+ vaCreateBuffer(va_display_, va_protected_session_id_,
+ VAProtectedSessionExecuteBufferType, sizeof(tee_exec_buf),
+ 1, &tee_exec_buf, &buf_id);
+ // Failure here is valid if the protected session has been closed.
+ if (va_res != VA_STATUS_SUCCESS)
+ return true;
+
+ va_res =
+ vaProtectedSessionExecute(va_display_, va_protected_session_id_, buf_id);
+ vaDestroyBuffer(va_display_, buf_id);
+ if (va_res != VA_STATUS_SUCCESS)
+ return true;
+
+ return !alive;
+#else // BUILDFLAG(IS_CHROMEOS_ASH)
+ return false;
+#endif
+}
+
+void VaapiWrapper::DestroyProtectedSession() {
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ if (va_protected_session_id_ == VA_INVALID_ID)
+ return;
+ base::AutoLock auto_lock(*va_lock_);
+ VAStatus va_res =
+ vaDestroyProtectedSession(va_display_, va_protected_session_id_);
+ VA_LOG_ON_ERROR(va_res, VaapiFunctions::kVADestroyProtectedSession);
+ va_res = vaDestroyConfig(va_display_, va_protected_config_id_);
+ VA_LOG_ON_ERROR(va_res, VaapiFunctions::kVADestroyConfig);
+ va_protected_session_id_ = VA_INVALID_ID;
+ va_protected_config_id_ = VA_INVALID_ID;
+#endif
+}
+
void VaapiWrapper::DestroyContextAndSurfaces(
std::vector<VASurfaceID> va_surfaces) {
DestroyContext();
@@ -1654,12 +2021,39 @@ bool VaapiWrapper::CreateContext(const gfx::Size& size) {
// vpp, just passing 0x0.
const int flag = mode_ != kVideoProcess ? VA_PROGRESSIVE : 0x0;
const gfx::Size picture_size = mode_ != kVideoProcess ? size : gfx::Size();
- const VAStatus va_res = vaCreateContext(
+ if (base::FeatureList::IsEnabled(kVaapiEnforceVideoMinMaxResolution) &&
+ mode_ != kVideoProcess) {
+ const VASupportedProfiles::ProfileInfo* profile_info =
+ VASupportedProfiles::Get().IsProfileSupported(mode_, va_profile_,
+ va_entrypoint_);
+ DCHECK(profile_info);
+ const bool is_picture_within_bounds =
+ gfx::Rect(picture_size)
+ .Contains(gfx::Rect(profile_info->min_resolution)) &&
+ gfx::Rect(profile_info->max_resolution)
+ .Contains(gfx::Rect(picture_size));
+ if (!is_picture_within_bounds) {
+ VLOG(2) << "Requested resolution=" << picture_size.ToString()
+ << " is not within bounds ["
+ << profile_info->min_resolution.ToString() << ", "
+ << profile_info->max_resolution.ToString() << "]";
+ return false;
+ }
+ }
+
+ VAStatus va_res = vaCreateContext(
va_display_, va_config_id_, picture_size.width(), picture_size.height(),
flag, empty_va_surfaces_ids_pointer, empty_va_surfaces_ids_size,
&va_context_id_);
VA_LOG_ON_ERROR(va_res, VaapiFunctions::kVACreateContext);
- return va_res == VA_STATUS_SUCCESS;
+ if (va_res != VA_STATUS_SUCCESS)
+ return false;
+
+ if (IsModeEncoding(mode_) && IsLowPowerIntelProcessor())
+ MaybeSetLowQualityEncoding_Locked();
+
+ // If we have a protected session already, attach it to this new context.
+ return MaybeAttachProtectedSession_Locked();
}
scoped_refptr<VASurface> VaapiWrapper::CreateVASurfaceForPixmap(
@@ -1682,13 +2076,27 @@ scoped_refptr<VASurface> VaapiWrapper::CreateVASurfaceForPixmap(
}
va_attrib_extbuf.num_planes = num_planes;
- if (pixmap->GetDmaBufFd(0) < 0) {
+ const int dma_buf_fd = pixmap->GetDmaBufFd(0);
+ if (dma_buf_fd < 0) {
LOG(ERROR) << "Failed to get dmabuf from an Ozone NativePixmap";
return nullptr;
}
+ const off_t data_size = lseek(dma_buf_fd, /*offset=*/0, SEEK_END);
+ if (data_size == static_cast<off_t>(-1)) {
+ PLOG(ERROR) << "Failed to get the size of the dma-buf";
+ return nullptr;
+ }
+ if (lseek(dma_buf_fd, /*offset=*/0, SEEK_SET) == static_cast<off_t>(-1)) {
+ PLOG(ERROR) << "Failed to reset the file offset of the dma-buf";
+ return nullptr;
+ }
+ // If the data size doesn't fit in a uint32_t, we probably have bigger
+ // problems.
+ va_attrib_extbuf.data_size = base::checked_cast<uint32_t>(data_size);
+
// We only have to pass the first file descriptor to a driver. A VA-API driver
// shall create a VASurface from the single fd correctly.
- uintptr_t fd = base::checked_cast<uintptr_t>(pixmap->GetDmaBufFd(0));
+ uintptr_t fd = base::checked_cast<uintptr_t>(dma_buf_fd);
va_attrib_extbuf.buffers = &fd;
va_attrib_extbuf.num_buffers = 1u;
@@ -1726,6 +2134,52 @@ scoped_refptr<VASurface> VaapiWrapper::CreateVASurfaceForPixmap(
base::BindOnce(&VaapiWrapper::DestroySurface, this));
}
+scoped_refptr<VASurface> VaapiWrapper::CreateVASurfaceForUserPtr(
+ const gfx::Size& size,
+ uintptr_t* buffers,
+ size_t buffer_size) {
+ VASurfaceAttribExternalBuffers va_attrib_extbuf{};
+ va_attrib_extbuf.num_planes = 3;
+ va_attrib_extbuf.buffers = buffers;
+ va_attrib_extbuf.data_size = base::checked_cast<uint32_t>(buffer_size);
+ va_attrib_extbuf.num_buffers = 1u;
+ va_attrib_extbuf.width = base::checked_cast<uint32_t>(size.width());
+ va_attrib_extbuf.height = base::checked_cast<uint32_t>(size.height());
+ va_attrib_extbuf.offsets[0] = 0;
+ va_attrib_extbuf.offsets[1] = size.GetCheckedArea().ValueOrDie<uint32_t>();
+ va_attrib_extbuf.offsets[2] =
+ (size.GetCheckedArea() * 2).ValueOrDie<uint32_t>();
+ std::fill(va_attrib_extbuf.pitches, va_attrib_extbuf.pitches + 3,
+ base::checked_cast<uint32_t>(size.width()));
+ va_attrib_extbuf.pixel_format = VA_FOURCC_RGBP;
+
+ std::vector<VASurfaceAttrib> va_attribs(2);
+ va_attribs[0].flags = VA_SURFACE_ATTRIB_SETTABLE;
+ va_attribs[0].type = VASurfaceAttribMemoryType;
+ va_attribs[0].value.type = VAGenericValueTypeInteger;
+ va_attribs[0].value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_USER_PTR;
+
+ va_attribs[1].flags = VA_SURFACE_ATTRIB_SETTABLE;
+ va_attribs[1].type = VASurfaceAttribExternalBufferDescriptor;
+ va_attribs[1].value.type = VAGenericValueTypePointer;
+ va_attribs[1].value.value.p = &va_attrib_extbuf;
+
+ VASurfaceID va_surface_id = VA_INVALID_ID;
+ const unsigned int va_format = VA_RT_FORMAT_RGBP;
+ {
+ base::AutoLock auto_lock(*va_lock_);
+ VAStatus va_res = vaCreateSurfaces(
+ va_display_, va_format, base::checked_cast<unsigned int>(size.width()),
+ base::checked_cast<unsigned int>(size.height()), &va_surface_id, 1,
+ &va_attribs[0], va_attribs.size());
+ VA_SUCCESS_OR_RETURN(va_res, VaapiFunctions::kVACreateSurfaces_Importing,
+ nullptr);
+ }
+ DVLOG(2) << __func__ << " " << va_surface_id;
+ return new VASurface(va_surface_id, size, va_format,
+ base::BindOnce(&VaapiWrapper::DestroySurface, this));
+}
+
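The offsets above describe three tightly packed 8-bit planes. As a worked example, assuming a 640x480 surface: offsets[] = {0, 307200, 614400} (each plane is 640 * 480 bytes), every pitch is 640, and |buffer_size| must therefore be at least 640 * 480 * 3 = 921600 bytes.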
std::unique_ptr<NativePixmapAndSizeInfo>
VaapiWrapper::ExportVASurfaceAsNativePixmapDmaBuf(
const ScopedVASurface& scoped_va_surface) {
@@ -2058,9 +2512,17 @@ std::unique_ptr<ScopedVABuffer> VaapiWrapper::CreateVABuffer(VABufferType type,
TRACE_EVENT0("media,gpu", "VaapiWrapper::CreateVABuffer");
base::AutoLock auto_lock(*va_lock_);
TRACE_EVENT0("media,gpu", "VaapiWrapper::CreateVABufferLocked");
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ VAContextID context_id = type == VAProtectedSessionExecuteBufferType
+ ? va_protected_session_id_
+ : va_context_id_;
+#else
+ VAContextID context_id = va_context_id_;
+#endif
- return ScopedVABuffer::Create(va_lock_, va_display_, va_context_id_, type,
- size);
+ if (context_id == VA_INVALID_ID)
+ return nullptr;
+ return ScopedVABuffer::Create(va_lock_, va_display_, context_id, type, size);
}
uint64_t VaapiWrapper::GetEncodedChunkSize(VABufferID buffer_id,
@@ -2142,9 +2604,8 @@ bool VaapiWrapper::GetVAEncMaxNumOfRefFrames(VideoCodecProfile profile,
attrib.type = VAConfigAttribEncMaxRefFrames;
base::AutoLock auto_lock(*va_lock_);
- VAStatus va_res =
- vaGetConfigAttributes(va_display_, va_profile,
- va_entrypoint_, &attrib, 1);
+ VAStatus va_res = vaGetConfigAttributes(va_display_, va_profile,
+ va_entrypoint_, &attrib, 1);
VA_SUCCESS_OR_RETURN(va_res, VaapiFunctions::kVAGetConfigAttributes, false);
*max_ref_frames = attrib.value;
@@ -2266,6 +2727,9 @@ void VaapiWrapper::PreSandboxInitialization() {
if (!features::IsUsingOzonePlatform())
paths[kModuleVa_x11].push_back(std::string("libva-x11.so.") + va_suffix);
#endif
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ paths[kModuleVa_prot].push_back(std::string("libva.so.") + va_suffix);
+#endif
// InitializeStubs dlopen() VA-API libraries
// libva.so
@@ -2274,7 +2738,7 @@ void VaapiWrapper::PreSandboxInitialization() {
static bool result = InitializeStubs(paths);
if (!result) {
static const char kErrorMsg[] = "Failed to initialize VAAPI libs";
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
// When Chrome runs on Linux with target_os="chromeos", do not log error
// message without VAAPI libraries.
LOG_IF(ERROR, base::SysInfo::IsRunningOnChromeOS()) << kErrorMsg;
@@ -2295,8 +2759,8 @@ VaapiWrapper::VaapiWrapper(CodecMode mode)
: mode_(mode),
va_lock_(VADisplayState::Get()->va_lock()),
va_display_(NULL),
- va_config_id_(VA_INVALID_ID),
- va_context_id_(VA_INVALID_ID) {}
+ va_profile_(VAProfileNone),
+ va_entrypoint_(kVAEntrypointInvalid) {}
VaapiWrapper::~VaapiWrapper() {
// Destroy ScopedVABuffer before VaapiWrappers are destroyed to ensure
@@ -2307,7 +2771,9 @@ VaapiWrapper::~VaapiWrapper() {
Deinitialize();
}
-bool VaapiWrapper::Initialize(CodecMode mode, VAProfile va_profile) {
+bool VaapiWrapper::Initialize(CodecMode mode,
+ VAProfile va_profile,
+ EncryptionScheme encryption_scheme) {
#if DCHECK_IS_ON()
if (mode == kEncodeConstantQuantizationParameter) {
DCHECK_NE(va_profile, VAProfileJPEGBaseline)
@@ -2315,6 +2781,12 @@ bool VaapiWrapper::Initialize(CodecMode mode, VAProfile va_profile) {
}
#endif // DCHECK_IS_ON()
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ if (encryption_scheme != EncryptionScheme::kUnencrypted &&
+ mode != kDecodeProtected)
+ return false;
+#endif
+
const VAEntrypoint entrypoint = GetDefaultVaEntryPoint(mode, va_profile);
base::AutoLock auto_lock(*va_lock_);
@@ -2324,11 +2796,25 @@ bool VaapiWrapper::Initialize(CodecMode mode, VAProfile va_profile) {
return false;
}
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ if (encryption_scheme != EncryptionScheme::kUnencrypted) {
+ DCHECK(!required_attribs.empty());
+ // We need to adjust the attribute for encryption scheme.
+ for (auto& attrib : required_attribs) {
+ if (attrib.type == VAConfigAttribEncryption) {
+ attrib.value = (encryption_scheme == EncryptionScheme::kCbcs)
+ ? VA_ENCRYPTION_TYPE_SUBSAMPLE_CBC
+ : VA_ENCRYPTION_TYPE_SUBSAMPLE_CTR;
+ }
+ }
+ }
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+
const VAStatus va_res =
vaCreateConfig(va_display_, va_profile, entrypoint,
required_attribs.empty() ? nullptr : &required_attribs[0],
required_attribs.size(), &va_config_id_);
-
+ va_profile_ = va_profile;
va_entrypoint_ = entrypoint;
VA_SUCCESS_OR_RETURN(va_res, VaapiFunctions::kVACreateConfig, false);
@@ -2338,10 +2824,23 @@ bool VaapiWrapper::Initialize(CodecMode mode, VAProfile va_profile) {
void VaapiWrapper::Deinitialize() {
{
base::AutoLock auto_lock(*va_lock_);
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ if (va_protected_session_id_ != VA_INVALID_ID) {
+ VAStatus va_res =
+ vaDestroyProtectedSession(va_display_, va_protected_session_id_);
+ VA_LOG_ON_ERROR(va_res, VaapiFunctions::kVADestroyProtectedSession);
+ va_res = vaDestroyConfig(va_display_, va_protected_config_id_);
+ VA_LOG_ON_ERROR(va_res, VaapiFunctions::kVADestroyConfig);
+ }
+#endif
if (va_config_id_ != VA_INVALID_ID) {
- VAStatus va_res = vaDestroyConfig(va_display_, va_config_id_);
+ const VAStatus va_res = vaDestroyConfig(va_display_, va_config_id_);
VA_LOG_ON_ERROR(va_res, VaapiFunctions::kVADestroyConfig);
}
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ va_protected_session_id_ = VA_INVALID_ID;
+ va_protected_config_id_ = VA_INVALID_ID;
+#endif
va_config_id_ = VA_INVALID_ID;
va_display_ = nullptr;
}
@@ -2369,6 +2868,13 @@ void VaapiWrapper::DestroyContext() {
DVLOG(2) << "Destroying context";
if (va_context_id_ != VA_INVALID_ID) {
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ if (va_protected_session_id_ != VA_INVALID_ID) {
+ const VAStatus va_res =
+ vaDetachProtectedSession(va_display_, va_context_id_);
+ VA_LOG_ON_ERROR(va_res, VaapiFunctions::kVADetachProtectedSession);
+ }
+#endif
const VAStatus va_res = vaDestroyContext(va_display_, va_context_id_);
VA_LOG_ON_ERROR(va_res, VaapiFunctions::kVADestroyContext);
}
@@ -2421,7 +2927,8 @@ bool VaapiWrapper::CreateSurfaces(unsigned int va_format,
std::unique_ptr<ScopedVASurface> VaapiWrapper::CreateScopedVASurface(
unsigned int va_rt_format,
const gfx::Size& size,
- const base::Optional<gfx::Size>& visible_size) {
+ const base::Optional<gfx::Size>& visible_size,
+ uint32_t va_fourcc) {
if (kInvalidVaRtFormat == va_rt_format) {
LOG(ERROR) << "Invalid VA RT format to CreateScopedVASurface";
return nullptr;
@@ -2432,12 +2939,20 @@ std::unique_ptr<ScopedVASurface> VaapiWrapper::CreateScopedVASurface(
return nullptr;
}
+ VASurfaceAttrib attrib;
+ memset(&attrib, 0, sizeof(attrib));
+ if (va_fourcc) {
+ attrib.type = VASurfaceAttribPixelFormat;
+ attrib.flags = VA_SURFACE_ATTRIB_SETTABLE;
+ attrib.value.type = VAGenericValueTypeInteger;
+ attrib.value.value.i = base::checked_cast<int32_t>(va_fourcc);
+ }
base::AutoLock auto_lock(*va_lock_);
VASurfaceID va_surface_id = VA_INVALID_ID;
VAStatus va_res = vaCreateSurfaces(
va_display_, va_rt_format, base::checked_cast<unsigned int>(size.width()),
- base::checked_cast<unsigned int>(size.height()), &va_surface_id, 1u, NULL,
- 0);
+ base::checked_cast<unsigned int>(size.height()), &va_surface_id, 1u,
+ va_fourcc ? &attrib : nullptr, va_fourcc ? 1 : 0);
VA_SUCCESS_OR_RETURN(va_res, VaapiFunctions::kVACreateSurfaces_Allocating,
nullptr);
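A hedged usage sketch of the new |va_fourcc| parameter; the concrete format and size are illustrative:

    // Pin the surface to NV12 rather than letting the driver pick any
    // fourcc compatible with the RT format.
    std::unique_ptr<ScopedVASurface> surface =
        vaapi_wrapper->CreateScopedVASurface(
            VA_RT_FORMAT_YUV420, gfx::Size(1280, 720),
            /*visible_size=*/base::nullopt,
            /*va_fourcc=*/VA_FOURCC_NV12);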
@@ -2515,12 +3030,14 @@ bool VaapiWrapper::SubmitBuffer_Locked(const VABufferDescriptor& va_buffer) {
TRACE_EVENT0("media,gpu", "VaapiWrapper::SubmitBuffer_Locked");
va_lock_->AssertAcquired();
- DCHECK_LT(va_buffer.type, VABufferTypeMax);
- DCHECK(va_buffer.data);
-
+ DCHECK(IsValidVABufferType(va_buffer.type));
+ base::ScopedClosureRunner pending_buffers_destroyer_on_failure(base::BindOnce(
+ &VaapiWrapper::DestroyPendingBuffers_Locked, base::Unretained(this)));
unsigned int va_buffer_size;
- if (!base::CheckedNumeric<size_t>(va_buffer.size)
- .AssignIfValid(&va_buffer_size)) {
+ // We use a null |va_buffer|.data for testing: it signals that we want this
+ // SubmitBuffer_Locked() call to fail.
+ if (!va_buffer.data || !base::CheckedNumeric<size_t>(va_buffer.size)
+ .AssignIfValid(&va_buffer_size)) {
return false;
}
@@ -2538,6 +3055,7 @@ bool VaapiWrapper::SubmitBuffer_Locked(const VABufferDescriptor& va_buffer) {
return false;
pending_va_buffers_.push_back(buffer_id);
+ pending_buffers_destroyer_on_failure.ReplaceClosure(base::DoNothing());
return true;
}
@@ -2546,7 +3064,7 @@ bool VaapiWrapper::MapAndCopy_Locked(VABufferID va_buffer_id,
va_lock_->AssertAcquired();
DCHECK_NE(va_buffer_id, VA_INVALID_ID);
- DCHECK_LT(va_buffer.type, VABufferTypeMax);
+ DCHECK(IsValidVABufferType(va_buffer.type));
DCHECK(va_buffer.data);
ScopedVABufferMapping mapping(
@@ -2558,4 +3076,57 @@ bool VaapiWrapper::MapAndCopy_Locked(VABufferID va_buffer_id,
return memcpy(mapping.data(), va_buffer.data, va_buffer.size);
}
+void VaapiWrapper::MaybeSetLowQualityEncoding_Locked() {
+ DCHECK(IsModeEncoding(mode_));
+ va_lock_->AssertAcquired();
+
+ // Query if encoding quality (VAConfigAttribEncQualityRange) is supported, and
+ // if so, use the associated value for lowest quality and power consumption.
+ VAConfigAttrib attrib{};
+ attrib.type = VAConfigAttribEncQualityRange;
+ const VAStatus va_res = vaGetConfigAttributes(va_display_, va_profile_,
+ va_entrypoint_, &attrib, 1);
+ if (va_res != VA_STATUS_SUCCESS) {
+ LOG(ERROR) << "vaGetConfigAttributes failed: " << vaProfileStr(va_profile_);
+ return;
+ }
+ // From libva's va.h: 'A value less than or equal to 1 means that the
+ // encoder only has a single "quality setting,"'.
+ if (attrib.value == VA_ATTRIB_NOT_SUPPORTED || attrib.value <= 1u)
+ return;
+
+ const size_t temp_size = sizeof(VAEncMiscParameterBuffer) +
+ sizeof(VAEncMiscParameterBufferQualityLevel);
+ std::vector<char> temp(temp_size);
+
+ auto* const va_buffer =
+ reinterpret_cast<VAEncMiscParameterBuffer*>(temp.data());
+ va_buffer->type = VAEncMiscParameterTypeQualityLevel;
+ auto* const enc_quality =
+ reinterpret_cast<VAEncMiscParameterBufferQualityLevel*>(va_buffer->data);
+ enc_quality->quality_level = attrib.value;
+
+ const bool success =
+ SubmitBuffer_Locked({VAEncMiscParameterBufferType, temp_size, va_buffer});
+ LOG_IF(ERROR, !success) << "Error setting encoding quality to "
+ << enc_quality->quality_level;
+}
+
+bool VaapiWrapper::MaybeAttachProtectedSession_Locked() {
+ va_lock_->AssertAcquired();
+ if (va_context_id_ == VA_INVALID_ID)
+ return true;
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ if (va_protected_session_id_ == VA_INVALID_ID)
+ return true;
+
+ VAStatus va_res = vaAttachProtectedSession(va_display_, va_context_id_,
+ va_protected_session_id_);
+ VA_LOG_ON_ERROR(va_res, VaapiFunctions::kVAAttachProtectedSession);
+ return va_res == VA_STATUS_SUCCESS;
+#else
+ return true;
+#endif
+}
+
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_wrapper.h b/chromium/media/gpu/vaapi/vaapi_wrapper.h
index fd1fd823ee1..29b68649982 100644
--- a/chromium/media/gpu/vaapi/vaapi_wrapper.h
+++ b/chromium/media/gpu/vaapi/vaapi_wrapper.h
@@ -27,6 +27,7 @@
#include "base/optional.h"
#include "base/synchronization/lock.h"
#include "base/thread_annotations.h"
+#include "build/chromeos_buildflags.h"
#include "media/gpu/media_gpu_export.h"
#include "media/gpu/vaapi/va_surface.h"
#include "media/gpu/vaapi/vaapi_utils.h"
@@ -109,6 +110,17 @@ class MEDIA_GPU_EXPORT VaapiWrapper
public:
enum CodecMode {
kDecode,
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ // NOTE: A kDecodeProtected VaapiWrapper is created using the actual video
+ // profile and an extra VAProfileProtected, each with some special added
+ // VAConfigAttribs. When CreateProtectedSession() is called, it creates a
+ // protected session using the protected profile & entrypoint, which gets
+ // attached to the decoding context (or is attached when the decoding
+ // context is created or re-created). This enables decrypt + decode support
+ // in the driver, and encrypted frame data can then be submitted.
+ kDecodeProtected, // Decrypt + decode to protected surface.
+#endif
kEncode, // Encode with Constant Bitrate algorithm.
kEncodeConstantQuantizationParameter, // Encode with Constant Quantization
// Parameter algorithm.
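A sketch of constructing a protected-decode wrapper per the NOTE above; the codec profile, encryption scheme, and UMA string are illustrative:

    scoped_refptr<VaapiWrapper> wrapper = VaapiWrapper::CreateForVideoCodec(
        VaapiWrapper::kDecodeProtected, H264PROFILE_MAIN,
        EncryptionScheme::kCenc,
        base::BindRepeating(&ReportVaapiErrorToUMA,
                            "Media.MyDecoder.VAAPIError"));
    // CreateProtectedSession() is then called on |wrapper| with the CDM's
    // hardware config to enable decrypt + decode.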
@@ -140,6 +152,7 @@ class MEDIA_GPU_EXPORT VaapiWrapper
static scoped_refptr<VaapiWrapper> Create(
CodecMode mode,
VAProfile va_profile,
+ EncryptionScheme encryption_scheme,
const ReportErrorToUMACB& report_error_to_uma_cb);
// Create VaapiWrapper for VideoCodecProfile. It maps VideoCodecProfile
@@ -149,6 +162,7 @@ class MEDIA_GPU_EXPORT VaapiWrapper
static scoped_refptr<VaapiWrapper> CreateForVideoCodec(
CodecMode mode,
VideoCodecProfile profile,
+ EncryptionScheme encryption_scheme,
const ReportErrorToUMACB& report_error_to_uma_cb);
// Return the supported video encode profiles.
@@ -226,6 +240,13 @@ class MEDIA_GPU_EXPORT VaapiWrapper
static VAEntrypoint GetDefaultVaEntryPoint(CodecMode mode, VAProfile profile);
static uint32_t BufferFormatToVARTFormat(gfx::BufferFormat fmt);
+ static uint32_t BufferFormatToVAFourCC(gfx::BufferFormat fmt);
+
+ // Returns the current instance identifier for the protected content system.
+ // This can be used to detect when protected context loss has occurred, so any
+ // protected surfaces associated with a specific instance ID can be
+ // invalidated when the ID changes.
+ static uint32_t GetProtectedInstanceID();
// Creates |num_surfaces| VASurfaceIDs of |va_format|, |size| and
// |surface_usage_hint| and, if successful, creates a |va_context_id_| of the
@@ -250,30 +271,52 @@ class MEDIA_GPU_EXPORT VaapiWrapper
const gfx::Size& size,
const base::Optional<gfx::Size>& visible_size = base::nullopt);
+ // Attempts to create a protected session that will be attached to the
+ // decoding context to enable encrypted video decoding. If it cannot be
+ // attached now, it will be attached when the decoding context is created or
+ // re-created. |encryption| should be the encryption scheme from the
+ // DecryptConfig. |hw_config| should have been obtained from the OEMCrypto
+ // implementation via the CdmFactoryDaemonProxy. |hw_identifier_out| is an
+ // output parameter which will return session specific information which can
+ // be passed through the ChromeOsCdmContext to retrieve encrypted key
+ // information. Returns true on success and false otherwise.
+ bool CreateProtectedSession(media::EncryptionScheme encryption,
+ const std::vector<uint8_t>& hw_config,
+ std::vector<uint8_t>* hw_identifier_out);
+ // Returns true if and only if we have created a protected session and
+ // querying libva indicates that it is no longer alive.
+ bool IsProtectedSessionDead();
+ // If we have a protected session, destroys it immediately. This should be
+ // used as part of recovering dead protected sessions.
+ void DestroyProtectedSession();
+
// Releases the |va_surfaces| and destroys |va_context_id_|.
void DestroyContextAndSurfaces(std::vector<VASurfaceID> va_surfaces);
- // Creates a VA Context of |size| and sets |va_context_id_|. In the case of a
- // VPP VaapiWrapper, |size| is ignored and 0x0 is used to create the context.
- // The client is responsible for releasing it via DestroyContext() or
- // DestroyContextAndSurfaces(), or it will be released on dtor.
+ // Creates a VAContextID of |size| (unless it's a Vpp context in which case
+ // |size| is ignored and 0x0 is used instead). The client is responsible for
+ // releasing said context via DestroyContext() or DestroyContextAndSurfaces(),
+ // or it will be released on dtor. If a valid |va_protected_session_id_|
+ // exists, it will be attached to the newly created |va_context_id_| as well.
virtual bool CreateContext(const gfx::Size& size) WARN_UNUSED_RESULT;
// Destroys the context identified by |va_context_id_|.
virtual void DestroyContext();
- // Requests a VA surface of size |size| and |va_rt_format|. Returns a
- // self-cleaning ScopedVASurface or nullptr if creation failed. If
- // |visible_size| is supplied, the returned ScopedVASurface's size is set to
- // it: for example, we may want to request a 16x16 surface to decode a 13x12
- // JPEG: we may want to keep track of the visible size 13x12 inside the
- // ScopedVASurface to inform the surface's users that that's the only region
- // with meaningful content. If |visible_size| is not supplied, we store |size|
- // in the returned ScopedVASurface.
+ // Requests a VA surface of size |size|, |va_rt_format| and optionally
+ // |va_fourcc|. Returns a self-cleaning ScopedVASurface or nullptr if creation
+ // failed. If |visible_size| is supplied, the returned ScopedVASurface's size
+ // is set to it: for example, we may want to request a 16x16 surface to decode
+ // a 13x12 JPEG: we may want to keep track of the visible size 13x12 inside
+ // the ScopedVASurface to inform the surface's users that that's the only
+ // region with meaningful content. If |visible_size| is not supplied, we store
+ // |size| in the returned ScopedVASurface.
std::unique_ptr<ScopedVASurface> CreateScopedVASurface(
unsigned int va_rt_format,
const gfx::Size& size,
- const base::Optional<gfx::Size>& visible_size = base::nullopt);
+ const base::Optional<gfx::Size>& visible_size = base::nullopt,
+ uint32_t va_fourcc = 0);
// Creates a self-releasing VASurface from |pixmap|. The created VASurface
// shares the ownership of the underlying buffer represented by |pixmap|. The
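The protected-session methods declared earlier in this hunk (CreateProtectedSession() / IsProtectedSessionDead() / DestroyProtectedSession()) suggest the recovery pattern for a session invalidated by the secure world. A hedged sketch; the actual recovery logic lives in the decoder and is not part of this diff:

  // Sketch: recover a dead protected session before resubmitting work.
  if (wrapper->IsProtectedSessionDead()) {
    wrapper->DestroyProtectedSession();
    std::vector<uint8_t> hw_identifier;
    if (!wrapper->CreateProtectedSession(encryption, hw_config,
                                         &hw_identifier)) {
      return false;  // Unrecoverable; surface a decode error.
    }
    // Surfaces tied to the previous GetProtectedInstanceID() value must be
    // treated as invalid and recreated.
  }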
@@ -283,6 +326,16 @@ class MEDIA_GPU_EXPORT VaapiWrapper
scoped_refptr<VASurface> CreateVASurfaceForPixmap(
scoped_refptr<gfx::NativePixmap> pixmap);
+ // Creates a self-releasing VASurface from |buffers|. The ownership of the
+ // surface is transferred to the caller. |buffers| should be a pointer array
+ // of size 1, with |buffer_size| being the size of the buffer it points to.
+ // |size| should be the desired surface dimensions (which need not map to
+ // |buffer_size| in any particular way). |buffers| must be kept alive while
+ // the VASurface is in use and while its data is accessed afterwards.
+ scoped_refptr<VASurface> CreateVASurfaceForUserPtr(const gfx::Size& size,
+ uintptr_t* buffers,
+ size_t buffer_size);
+
// Syncs and exports |va_surface| as a gfx::NativePixmapDmaBuf. Currently, the
// only VAAPI surface pixel formats supported are VA_FOURCC_IMC3 and
// VA_FOURCC_NV12.
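Given the unusual |buffers| contract documented earlier in this hunk (a pointer array of length one), a call site would look roughly like this; |user_memory| and its byte size are placeholders:

  // Sketch: wrap a caller-owned allocation in a VASurface.
  uintptr_t buffers[1] = {reinterpret_cast<uintptr_t>(user_memory)};
  scoped_refptr<VASurface> surface = wrapper->CreateVASurfaceForUserPtr(
      gfx::Size(1280, 720), buffers, user_memory_size_in_bytes);
  // |user_memory| must stay valid for as long as |surface|, or data derived
  // from it, is in use.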
@@ -311,15 +364,16 @@ class MEDIA_GPU_EXPORT VaapiWrapper
// allocated VABufferIDs stay alive until DestroyPendingBuffers_Locked(). Note
// that this method does not submit the buffers for execution, they are simply
// stored until ExecuteAndDestroyPendingBuffers()/Execute_Locked(). The
- // ownership of |data| stays with the caller.
+ // ownership of |data| stays with the caller. On failure, all pending buffers
+ // are destroyed.
bool SubmitBuffer(VABufferType va_buffer_type,
size_t size,
const void* data) WARN_UNUSED_RESULT;
// Convenient templatized version of SubmitBuffer() where |size| is deduced to
// be the size of the type of |*data|.
template <typename T>
- bool SubmitBuffer(VABufferType va_buffer_type,
- const T* data) WARN_UNUSED_RESULT {
+ bool WARN_UNUSED_RESULT SubmitBuffer(VABufferType va_buffer_type,
+ const T* data) {
return SubmitBuffer(va_buffer_type, sizeof(T), data);
}
// Batch-version of SubmitBuffer(), where the lock for accessing libva is
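As the SubmitBuffer() comments above spell out, submission only queues data, and a failed submit destroys everything already queued; a caller can therefore chain submissions and bail on the first failure before a single execute call. A condensed sketch (the parameter structs are elided):

  // Sketch: queue parameter and slice buffers, then execute on a surface.
  if (!wrapper->SubmitBuffer(VAPictureParameterBufferType, &pic_param) ||
      !wrapper->SubmitBuffer(VASliceParameterBufferType, &slice_param) ||
      !wrapper->SubmitBuffer(VASliceDataBufferType, data_size, data)) {
    return false;  // Pending buffers were already destroyed by the failure.
  }
  return wrapper->ExecuteAndDestroyPendingBuffers(va_surface_id);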
@@ -433,12 +487,16 @@ class MEDIA_GPU_EXPORT VaapiWrapper
private:
friend class base::RefCountedThreadSafe<VaapiWrapper>;
+ friend class VaapiWrapperTest;
+ FRIEND_TEST_ALL_PREFIXES(VaapiTest, LowQualityEncodingSetting);
FRIEND_TEST_ALL_PREFIXES(VaapiUtilsTest, ScopedVAImage);
FRIEND_TEST_ALL_PREFIXES(VaapiUtilsTest, BadScopedVAImage);
FRIEND_TEST_ALL_PREFIXES(VaapiUtilsTest, BadScopedVABufferMapping);
- bool Initialize(CodecMode mode, VAProfile va_profile) WARN_UNUSED_RESULT;
+ bool Initialize(CodecMode mode,
+ VAProfile va_profile,
+ EncryptionScheme encryption_scheme) WARN_UNUSED_RESULT;
void Deinitialize();
bool VaInitialize(const ReportErrorToUMACB& report_error_to_uma_cb)
WARN_UNUSED_RESULT;
@@ -457,11 +515,13 @@ class MEDIA_GPU_EXPORT VaapiWrapper
const std::vector<VABufferID>& va_buffers)
EXCLUSIVE_LOCKS_REQUIRED(va_lock_) WARN_UNUSED_RESULT;
- void DestroyPendingBuffers_Locked() EXCLUSIVE_LOCKS_REQUIRED(va_lock_);
+ virtual void DestroyPendingBuffers_Locked()
+ EXCLUSIVE_LOCKS_REQUIRED(va_lock_);
// Requests libva to allocate a new VABufferID of type |va_buffer.type|, then
- // maps-and-copies |va_buffer.size| contents of |va_buffer.data| to it.
- bool SubmitBuffer_Locked(const VABufferDescriptor& va_buffer)
+ // maps-and-copies |va_buffer.size| contents of |va_buffer.data| to it. If a
+ // failure occurs, calls DestroyPendingBuffers_Locked() and returns false.
+ virtual bool SubmitBuffer_Locked(const VABufferDescriptor& va_buffer)
EXCLUSIVE_LOCKS_REQUIRED(va_lock_) WARN_UNUSED_RESULT;
// Maps |va_buffer_id| and, if successful, copies the contents of |va_buffer|
@@ -470,6 +530,15 @@ class MEDIA_GPU_EXPORT VaapiWrapper
const VABufferDescriptor& va_buffer)
EXCLUSIVE_LOCKS_REQUIRED(va_lock_) WARN_UNUSED_RESULT;
+ // Queries whether |va_profile_| and |va_entrypoint_| support encoding quality
+ // setting and, if available, configures it to its maximum value, for lower
+ // power consumption and maximum speed.
+ void MaybeSetLowQualityEncoding_Locked() EXCLUSIVE_LOCKS_REQUIRED(va_lock_);
+
+ // If a protected session is active, attaches it to the decoding context.
+ bool MaybeAttachProtectedSession_Locked()
+ EXCLUSIVE_LOCKS_REQUIRED(va_lock_) WARN_UNUSED_RESULT;
+
const CodecMode mode_;
// Pointer to VADisplayState's member |va_lock_|. Guaranteed to be valid for
@@ -479,12 +548,13 @@ class MEDIA_GPU_EXPORT VaapiWrapper
// VA handles.
// All valid after successful Initialize() and until Deinitialize().
VADisplay va_display_ GUARDED_BY(va_lock_);
- VAConfigID va_config_id_;
+ VAConfigID va_config_id_{VA_INVALID_ID};
// Created in CreateContext() or CreateContextAndSurfaces() and valid until
// DestroyContext() or DestroyContextAndSurfaces().
- VAContextID va_context_id_;
+ VAContextID va_context_id_{VA_INVALID_ID};
- //Entrypoint configured for the corresponding context
+ // Profile and entrypoint configured for the corresponding |va_context_id_|.
+ VAProfile va_profile_;
VAEntrypoint va_entrypoint_;
// Data queued up for HW codec, to be committed on next execution.
@@ -495,6 +565,12 @@ class MEDIA_GPU_EXPORT VaapiWrapper
// and reused afterwards.
std::unique_ptr<ScopedVABuffer> va_buffer_for_vpp_;
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ // For protected decode mode.
+ VAConfigID va_protected_config_id_{VA_INVALID_ID};
+ VAProtectedSessionID va_protected_session_id_{VA_INVALID_ID};
+#endif
+
// Called to report codec errors to UMA. Errors to clients are reported via
// return values from public methods.
ReportErrorToUMACB report_error_to_uma_cb_;
diff --git a/chromium/media/gpu/vaapi/vaapi_wrapper_unittest.cc b/chromium/media/gpu/vaapi/vaapi_wrapper_unittest.cc
new file mode 100644
index 00000000000..cded354e7e8
--- /dev/null
+++ b/chromium/media/gpu/vaapi/vaapi_wrapper_unittest.cc
@@ -0,0 +1,149 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <va/va.h>
+
+#include "media/gpu/vaapi/vaapi_wrapper.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ::testing::_;
+using ::testing::Invoke;
+using ::testing::Return;
+
+namespace media {
+namespace {
+VaapiWrapper::VABufferDescriptor CreateVABufferDescriptor() {
+ constexpr static char kData[] = "vaBufferData";
+ return VaapiWrapper::VABufferDescriptor{VAProcPipelineParameterBufferType,
+ sizeof(kData), kData};
+}
+
+class MockVaapiWrapper : public VaapiWrapper {
+ public:
+ MockVaapiWrapper() : VaapiWrapper(kVideoProcess) {}
+ MOCK_METHOD1(SubmitBuffer_Locked, bool(const VABufferDescriptor&));
+ MOCK_METHOD0(DestroyPendingBuffers_Locked, void());
+
+ protected:
+ ~MockVaapiWrapper() override = default;
+};
+} // namespace
+
+class VaapiWrapperTest : public testing::Test {
+ public:
+ VaapiWrapperTest() = default;
+
+ void SetUp() override {
+ // Create a VaapiWrapper for testing.
+ mock_vaapi_wrapper_ = base::MakeRefCounted<MockVaapiWrapper>();
+ ASSERT_TRUE(mock_vaapi_wrapper_);
+
+ ON_CALL(*mock_vaapi_wrapper_, SubmitBuffer_Locked)
+ .WillByDefault(
+ Invoke(this, &VaapiWrapperTest::DefaultSubmitBuffer_Locked));
+ ON_CALL(*mock_vaapi_wrapper_, DestroyPendingBuffers_Locked)
+ .WillByDefault(Invoke(
+ this, &VaapiWrapperTest::DefaultDestroyPendingBuffers_Locked));
+ }
+ void TearDown() override {
+ // The VaapiWrapper destructor calls DestroyPendingBuffers_Locked(). Since
+ // MockVaapiWrapper is a derived class,
+ // MockVaapiWrapper::DestroyPendingBuffers_Locked() won't get called during
+ // destruction even though it's a virtual function. Instead,
+ // VaapiWrapper::DestroyPendingBuffers_Locked() will get called. Therefore,
+ // we need to clear |pending_va_buffers_| before this happens so that
+ // VaapiWrapper::DestroyPendingBuffers_Locked() doesn't call
+ // vaDestroyBuffer().
+ mock_vaapi_wrapper_->pending_va_buffers_.clear();
+ mock_vaapi_wrapper_.reset();
+ }
+
+ bool DefaultSubmitBuffer_Locked(
+ const VaapiWrapper::VABufferDescriptor& va_buffer)
+ EXCLUSIVE_LOCKS_REQUIRED(mock_vaapi_wrapper_->va_lock_) {
+ if (va_buffer.data) {
+ constexpr VABufferID kFakeBufferId = 1234;
+ mock_vaapi_wrapper_->pending_va_buffers_.push_back(kFakeBufferId);
+ return true;
+ }
+ // When |va_buffer|.data is null, the base method should return false and
+ // no libva calls should be made.
+ const bool submit_buffer_res =
+ (*mock_vaapi_wrapper_).VaapiWrapper::SubmitBuffer_Locked(va_buffer);
+ if (submit_buffer_res)
+ ADD_FAILURE();
+ return false;
+ }
+
+ void DefaultDestroyPendingBuffers_Locked()
+ EXCLUSIVE_LOCKS_REQUIRED(mock_vaapi_wrapper_->va_lock_) {
+ mock_vaapi_wrapper_->pending_va_buffers_.clear();
+ }
+
+ size_t GetPendingBuffersSize() const {
+ return mock_vaapi_wrapper_->pending_va_buffers_.size();
+ }
+
+ protected:
+ scoped_refptr<MockVaapiWrapper> mock_vaapi_wrapper_;
+};
+
+// This test ensures SubmitBuffer() calls SubmitBuffer_Locked().
+TEST_F(VaapiWrapperTest, SubmitBuffer) {
+ constexpr size_t kNumBuffers = 3;
+ auto va_buffer = CreateVABufferDescriptor();
+
+ EXPECT_CALL(*mock_vaapi_wrapper_, SubmitBuffer_Locked(_)).Times(kNumBuffers);
+ for (size_t i = 0; i < kNumBuffers; ++i) {
+ EXPECT_TRUE(mock_vaapi_wrapper_->SubmitBuffer(
+ va_buffer.type, va_buffer.size, va_buffer.data));
+ }
+ EXPECT_EQ(GetPendingBuffersSize(), kNumBuffers);
+}
+
+// This test ensures SubmitBuffers() calls SubmitBuffer_Locked() as many times
+// as the number of passed buffers.
+TEST_F(VaapiWrapperTest, SubmitBuffers) {
+ constexpr size_t kNumBuffers = 3;
+ auto va_buffer = CreateVABufferDescriptor();
+ std::vector<VaapiWrapper::VABufferDescriptor> buffers(kNumBuffers, va_buffer);
+
+ EXPECT_CALL(*mock_vaapi_wrapper_, SubmitBuffer_Locked(_)).Times(kNumBuffers);
+ EXPECT_TRUE(mock_vaapi_wrapper_->SubmitBuffers(buffers));
+ EXPECT_EQ(GetPendingBuffersSize(), kNumBuffers);
+}
+
+// This test ensures DestroyPendingBuffers_Locked() is executed on a failure of
+// SubmitBuffer().
+TEST_F(VaapiWrapperTest, FailOnSubmitBuffer) {
+ auto va_buffer = CreateVABufferDescriptor();
+
+ ::testing::InSequence s;
+ EXPECT_CALL(*mock_vaapi_wrapper_, SubmitBuffer_Locked(_)).Times(2);
+ EXPECT_CALL(*mock_vaapi_wrapper_, DestroyPendingBuffers_Locked);
+ EXPECT_TRUE(mock_vaapi_wrapper_->SubmitBuffer(va_buffer.type, va_buffer.size,
+ va_buffer.data));
+ EXPECT_FALSE(mock_vaapi_wrapper_->SubmitBuffer(va_buffer.type, va_buffer.size,
+ /*data=*/nullptr));
+ EXPECT_EQ(GetPendingBuffersSize(), 0u);
+}
+
+// This test ensures DestroyPendingBuffers_Locked() is executed on a failure of
+// SubmitBuffers().
+TEST_F(VaapiWrapperTest, FailOnSubmitBuffers) {
+ constexpr size_t kNumBuffers = 3;
+ auto va_buffer = CreateVABufferDescriptor();
+ std::vector<VaapiWrapper::VABufferDescriptor> buffers(kNumBuffers, va_buffer);
+ // Set data to nullptr so that VaapiWrapper::SubmitBuffer_Locked() fails.
+ buffers[1].data = nullptr;
+
+ ::testing::InSequence s;
+ EXPECT_CALL(*mock_vaapi_wrapper_, SubmitBuffer_Locked(_))
+ .Times(kNumBuffers - 1);
+ EXPECT_CALL(*mock_vaapi_wrapper_, DestroyPendingBuffers_Locked);
+ EXPECT_FALSE(mock_vaapi_wrapper_->SubmitBuffers(buffers));
+ EXPECT_EQ(GetPendingBuffersSize(), 0u);
+}
+} // namespace media
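The TearDown() comment in the test above hinges on a general C++ rule: while ~VaapiWrapper() runs, the object's dynamic type has reverted to the base class, so virtual calls no longer reach the mock's overrides. A self-contained illustration, independent of libva:

  #include <iostream>

  struct Base {
    virtual ~Base() { Cleanup(); }  // Resolves to Base::Cleanup() here.
    virtual void Cleanup() { std::cout << "Base::Cleanup\n"; }
  };

  struct Derived : Base {
    void Cleanup() override { std::cout << "Derived::Cleanup\n"; }
  };

  int main() {
    Derived d;
    // Destroying |d| prints "Base::Cleanup": by the time ~Base() runs, the
    // Derived part of the object has already been destroyed.
  }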
diff --git a/chromium/media/gpu/vaapi/vp8_vaapi_video_decoder_delegate.cc b/chromium/media/gpu/vaapi/vp8_vaapi_video_decoder_delegate.cc
index e9e2150a1b2..a202817e35b 100644
--- a/chromium/media/gpu/vaapi/vp8_vaapi_video_decoder_delegate.cc
+++ b/chromium/media/gpu/vaapi/vp8_vaapi_video_decoder_delegate.cc
@@ -16,7 +16,10 @@ namespace media {
VP8VaapiVideoDecoderDelegate::VP8VaapiVideoDecoderDelegate(
DecodeSurfaceHandler<VASurface>* const vaapi_dec,
scoped_refptr<VaapiWrapper> vaapi_wrapper)
- : VaapiVideoDecoderDelegate(vaapi_dec, std::move(vaapi_wrapper)) {}
+ : VaapiVideoDecoderDelegate(vaapi_dec,
+ std::move(vaapi_wrapper),
+ base::DoNothing(),
+ nullptr) {}
VP8VaapiVideoDecoderDelegate::~VP8VaapiVideoDecoderDelegate() {
DCHECK(!iq_matrix_);
@@ -112,7 +115,8 @@ bool VP8VaapiVideoDecoderDelegate::OutputPicture(
void VP8VaapiVideoDecoderDelegate::OnVAContextDestructionSoon() {
// Destroy the member ScopedVABuffers below since they refer to a VAContextID
- // that will be destroyed soon. iq_matrix_.reset();
+ // that will be destroyed soon.
+ iq_matrix_.reset();
prob_buffer_.reset();
picture_params_.reset();
slice_params_.reset();
diff --git a/chromium/media/gpu/vaapi/vp9_vaapi_video_decoder_delegate.cc b/chromium/media/gpu/vaapi/vp9_vaapi_video_decoder_delegate.cc
index 19087ffb5c0..34ac822cdeb 100644
--- a/chromium/media/gpu/vaapi/vp9_vaapi_video_decoder_delegate.cc
+++ b/chromium/media/gpu/vaapi/vp9_vaapi_video_decoder_delegate.cc
@@ -8,6 +8,7 @@
#include "base/stl_util.h"
#include "base/trace_event/trace_event.h"
+#include "build/chromeos_buildflags.h"
#include "media/gpu/decode_surface_handler.h"
#include "media/gpu/macros.h"
#include "media/gpu/vaapi/va_surface.h"
@@ -16,14 +17,25 @@
namespace media {
+using DecodeStatus = VP9Decoder::VP9Accelerator::Status;
+
VP9VaapiVideoDecoderDelegate::VP9VaapiVideoDecoderDelegate(
DecodeSurfaceHandler<VASurface>* const vaapi_dec,
- scoped_refptr<VaapiWrapper> vaapi_wrapper)
- : VaapiVideoDecoderDelegate(vaapi_dec, std::move(vaapi_wrapper)) {}
+ scoped_refptr<VaapiWrapper> vaapi_wrapper,
+ ProtectedSessionUpdateCB on_protected_session_update_cb,
+ CdmContext* cdm_context,
+ EncryptionScheme encryption_scheme)
+ : VaapiVideoDecoderDelegate(vaapi_dec,
+ std::move(vaapi_wrapper),
+ std::move(on_protected_session_update_cb),
+ cdm_context,
+ encryption_scheme) {}
VP9VaapiVideoDecoderDelegate::~VP9VaapiVideoDecoderDelegate() {
DCHECK(!picture_params_);
DCHECK(!slice_params_);
+ DCHECK(!crypto_params_);
+ DCHECK(!proc_params_);
}
scoped_refptr<VP9Picture> VP9VaapiVideoDecoderDelegate::CreateVP9Picture() {
@@ -32,10 +44,18 @@ scoped_refptr<VP9Picture> VP9VaapiVideoDecoderDelegate::CreateVP9Picture() {
if (!va_surface)
return nullptr;
- return new VaapiVP9Picture(std::move(va_surface));
+ scoped_refptr<VP9Picture> pic = new VaapiVP9Picture(std::move(va_surface));
+ if (!vaapi_dec_->IsScalingDecode())
+ return pic;
+
+ // Setup the scaling buffer.
+ scoped_refptr<VASurface> scaled_surface = vaapi_dec_->CreateDecodeSurface();
+ CHECK(scaled_surface);
+ pic->AsVaapiVP9Picture()->SetDecodeSurface(std::move(scaled_surface));
+ return pic;
}
-bool VP9VaapiVideoDecoderDelegate::SubmitDecode(
+DecodeStatus VP9VaapiVideoDecoderDelegate::SubmitDecode(
scoped_refptr<VP9Picture> pic,
const Vp9SegmentationParams& seg,
const Vp9LoopFilterParams& lf,
@@ -56,13 +76,13 @@ bool VP9VaapiVideoDecoderDelegate::SubmitDecode(
picture_params_ = vaapi_wrapper_->CreateVABuffer(
VAPictureParameterBufferType, sizeof(pic_param));
if (!picture_params_)
- return false;
+ return DecodeStatus::kFail;
}
if (!slice_params_) {
slice_params_ = vaapi_wrapper_->CreateVABuffer(VASliceParameterBufferType,
sizeof(slice_param));
if (!slice_params_)
- return false;
+ return DecodeStatus::kFail;
}
// Always re-create |encoded_data| because reusing the buffer causes horrific
// artifacts in decoded buffers. TODO(b/169725321): This seems to be a driver
@@ -70,7 +90,39 @@ bool VP9VaapiVideoDecoderDelegate::SubmitDecode(
auto encoded_data = vaapi_wrapper_->CreateVABuffer(VASliceDataBufferType,
frame_hdr->frame_size);
if (!encoded_data)
- return false;
+ return DecodeStatus::kFail;
+
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ const DecryptConfig* decrypt_config = pic->decrypt_config();
+ if (decrypt_config && !SetDecryptConfig(decrypt_config->Clone()))
+ return DecodeStatus::kFail;
+
+ bool uses_crypto = false;
+ std::vector<VAEncryptionSegmentInfo> encryption_segment_info;
+ VAEncryptionParameters crypto_param{};
+ if (IsEncryptedSession()) {
+ const ProtectedSessionState state = SetupDecryptDecode(
+ /*full_sample=*/false, frame_hdr->frame_size, &crypto_param,
+ &encryption_segment_info,
+ decrypt_config ? decrypt_config->subsamples()
+ : std::vector<SubsampleEntry>());
+ if (state == ProtectedSessionState::kFailed) {
+ LOG(ERROR)
+ << "SubmitDecode fails because we couldn't setup the protected "
+ "session";
+ return DecodeStatus::kFail;
+ } else if (state != ProtectedSessionState::kCreated) {
+ return DecodeStatus::kTryAgain;
+ }
+ uses_crypto = true;
+ if (!crypto_params_) {
+ crypto_params_ = vaapi_wrapper_->CreateVABuffer(
+ VAEncryptionParameterBufferType, sizeof(crypto_param));
+ if (!crypto_params_)
+ return DecodeStatus::kFail;
+ }
+ }
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
pic_param.frame_width = base::checked_cast<uint16_t>(frame_hdr->frame_width);
pic_param.frame_height =
@@ -80,7 +132,7 @@ bool VP9VaapiVideoDecoderDelegate::SubmitDecode(
auto ref_pic = ref_frames.GetFrame(i);
if (ref_pic) {
pic_param.reference_frames[i] =
- ref_pic->AsVaapiVP9Picture()->GetVASurfaceID();
+ ref_pic->AsVaapiVP9Picture()->GetVADecodeSurfaceID();
} else {
pic_param.reference_frames[i] = VA_INVALID_SURFACE;
}
@@ -159,14 +211,48 @@ bool VP9VaapiVideoDecoderDelegate::SubmitDecode(
seg_param.chroma_ac_quant_scale = seg.uv_dequant[i][1];
}
- return vaapi_wrapper_->MapAndCopyAndExecute(
- pic->AsVaapiVP9Picture()->va_surface()->id(),
+ std::vector<std::pair<VABufferID, VaapiWrapper::VABufferDescriptor>> buffers =
{{picture_params_->id(),
{picture_params_->type(), picture_params_->size(), &pic_param}},
{slice_params_->id(),
{slice_params_->type(), slice_params_->size(), &slice_param}},
{encoded_data->id(),
- {encoded_data->type(), frame_hdr->frame_size, frame_hdr->data}}});
+ {encoded_data->type(), frame_hdr->frame_size, frame_hdr->data}}};
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ if (uses_crypto) {
+ buffers.push_back(
+ {crypto_params_->id(),
+ {crypto_params_->type(), crypto_params_->size(), &crypto_param}});
+ }
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
+
+ const VaapiVP9Picture* vaapi_pic = pic->AsVaapiVP9Picture();
+ VAProcPipelineParameterBuffer proc_buffer;
+ if (vaapi_dec_->IsScalingDecode()) {
+ if (!proc_params_) {
+ proc_params_ = vaapi_wrapper_->CreateVABuffer(
+ VAProcPipelineParameterBufferType, sizeof(proc_buffer));
+ if (!proc_params_)
+ return DecodeStatus::kFail;
+ }
+ CHECK(gfx::Rect(vaapi_pic->GetDecodeSize()).Contains(pic->visible_rect()));
+ CHECK(FillDecodeScalingIfNeeded(
+ pic->visible_rect(), vaapi_pic->GetVADecodeSurfaceID(),
+ pic->AsVaapiVP9Picture()->va_surface(), &proc_buffer));
+ buffers.push_back(
+ {proc_params_->id(),
+ {proc_params_->type(), proc_params_->size(), &proc_buffer}});
+ }
+
+ bool success = vaapi_wrapper_->MapAndCopyAndExecute(
+ vaapi_pic->GetVADecodeSurfaceID(), buffers);
+ if (!success && NeedsProtectedSessionRecovery())
+ return DecodeStatus::kTryAgain;
+
+ if (success && IsEncryptedSession())
+ ProtectedDecodedSucceeded();
+
+ return success ? DecodeStatus::kOk : DecodeStatus::kFail;
}
bool VP9VaapiVideoDecoderDelegate::OutputPicture(
@@ -174,9 +260,11 @@ bool VP9VaapiVideoDecoderDelegate::OutputPicture(
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
const VaapiVP9Picture* vaapi_pic = pic->AsVaapiVP9Picture();
- vaapi_dec_->SurfaceReady(vaapi_pic->va_surface(), vaapi_pic->bitstream_id(),
- vaapi_pic->visible_rect(),
- vaapi_pic->get_colorspace());
+ vaapi_dec_->SurfaceReady(
+ vaapi_pic->va_surface(), vaapi_pic->bitstream_id(),
+ vaapi_dec_->GetOutputVisibleRect(vaapi_pic->visible_rect(),
+ vaapi_pic->va_surface()->size()),
+ vaapi_pic->get_colorspace());
return true;
}
@@ -197,6 +285,8 @@ void VP9VaapiVideoDecoderDelegate::OnVAContextDestructionSoon() {
// that will be destroyed soon.
picture_params_.reset();
slice_params_.reset();
+ crypto_params_.reset();
+ proc_params_.reset();
}
} // namespace media
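For orientation: Status::kTryAgain from SubmitDecode() marks a recoverable protected-session stall rather than a hard error, and the caller is expected to resubmit the same picture once the session is re-established. A hedged sketch of that control flow (WaitForProtectedSessionRecovery() is a hypothetical helper, not part of this diff):

  // Sketch: retry the same picture while the protected session recovers.
  DecodeStatus status;
  do {
    status = accelerator->SubmitDecode(pic, seg, lf, ref_frames,
                                       base::DoNothing());
    if (status == DecodeStatus::kTryAgain)
      WaitForProtectedSessionRecovery();  // Hypothetical helper.
  } while (status == DecodeStatus::kTryAgain);
  return status == DecodeStatus::kOk;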
diff --git a/chromium/media/gpu/vaapi/vp9_vaapi_video_decoder_delegate.h b/chromium/media/gpu/vaapi/vp9_vaapi_video_decoder_delegate.h
index 2e55b23cc44..6a9b8137d63 100644
--- a/chromium/media/gpu/vaapi/vp9_vaapi_video_decoder_delegate.h
+++ b/chromium/media/gpu/vaapi/vp9_vaapi_video_decoder_delegate.h
@@ -19,17 +19,22 @@ class VP9Picture;
class VP9VaapiVideoDecoderDelegate : public VP9Decoder::VP9Accelerator,
public VaapiVideoDecoderDelegate {
public:
- VP9VaapiVideoDecoderDelegate(DecodeSurfaceHandler<VASurface>* const vaapi_dec,
- scoped_refptr<VaapiWrapper> vaapi_wrapper);
+ VP9VaapiVideoDecoderDelegate(
+ DecodeSurfaceHandler<VASurface>* const vaapi_dec,
+ scoped_refptr<VaapiWrapper> vaapi_wrapper,
+ ProtectedSessionUpdateCB on_protected_session_update_cb =
+ base::DoNothing(),
+ CdmContext* cdm_context = nullptr,
+ EncryptionScheme encryption_scheme = EncryptionScheme::kUnencrypted);
~VP9VaapiVideoDecoderDelegate() override;
// VP9Decoder::VP9Accelerator implementation.
scoped_refptr<VP9Picture> CreateVP9Picture() override;
- bool SubmitDecode(scoped_refptr<VP9Picture> pic,
- const Vp9SegmentationParams& seg,
- const Vp9LoopFilterParams& lf,
- const Vp9ReferenceFrameVector& reference_frames,
- base::OnceClosure done_cb) override;
+ Status SubmitDecode(scoped_refptr<VP9Picture> pic,
+ const Vp9SegmentationParams& seg,
+ const Vp9LoopFilterParams& lf,
+ const Vp9ReferenceFrameVector& reference_frames,
+ base::OnceClosure done_cb) override;
bool OutputPicture(scoped_refptr<VP9Picture> pic) override;
bool IsFrameContextRequired() const override;
@@ -42,6 +47,8 @@ class VP9VaapiVideoDecoderDelegate : public VP9Decoder::VP9Accelerator,
private:
std::unique_ptr<ScopedVABuffer> picture_params_;
std::unique_ptr<ScopedVABuffer> slice_params_;
+ std::unique_ptr<ScopedVABuffer> crypto_params_;
+ std::unique_ptr<ScopedVABuffer> proc_params_;
DISALLOW_COPY_AND_ASSIGN(VP9VaapiVideoDecoderDelegate);
};
diff --git a/chromium/media/gpu/video_decode_accelerator_perf_tests.cc b/chromium/media/gpu/video_decode_accelerator_perf_tests.cc
index 1282d2cfe8e..7bede6c6bb6 100644
--- a/chromium/media/gpu/video_decode_accelerator_perf_tests.cc
+++ b/chromium/media/gpu/video_decode_accelerator_perf_tests.cc
@@ -440,7 +440,9 @@ int main(int argc, char** argv) {
// Set up our test environment.
media::test::VideoPlayerTestEnvironment* test_environment =
media::test::VideoPlayerTestEnvironment::Create(
- video_path, video_metadata_path, false, implementation,
+ video_path, video_metadata_path, /*validator_type=*/
+ media::test::VideoPlayerTestEnvironment::ValidatorType::kNone,
+ implementation,
base::FilePath(output_folder));
if (!test_environment)
return EXIT_FAILURE;
diff --git a/chromium/media/gpu/video_decode_accelerator_tests.cc b/chromium/media/gpu/video_decode_accelerator_tests.cc
index b9e75cd2506..5f63994dbd2 100644
--- a/chromium/media/gpu/video_decode_accelerator_tests.cc
+++ b/chromium/media/gpu/video_decode_accelerator_tests.cc
@@ -6,8 +6,14 @@
#include "base/command_line.h"
#include "base/files/file_util.h"
+#include "base/numerics/safe_conversions.h"
#include "base/strings/string_number_conversions.h"
+#include "media/base/decoder_buffer.h"
+#include "media/base/encryption_scheme.h"
+#include "media/base/media_util.h"
#include "media/base/test_data_util.h"
+#include "media/base/video_decoder_config.h"
+#include "media/base/video_transformation.h"
#include "media/gpu/test/video.h"
#include "media/gpu/test/video_frame_file_writer.h"
#include "media/gpu/test/video_frame_validator.h"
@@ -16,8 +22,16 @@
#include "media/gpu/test/video_player/video_decoder_client.h"
#include "media/gpu/test/video_player/video_player.h"
#include "media/gpu/test/video_player/video_player_test_environment.h"
+#include "media/gpu/test/video_test_helpers.h"
+#include "media/media_buildflags.h"
#include "testing/gtest/include/gtest/gtest.h"
+#if BUILDFLAG(ENABLE_DAV1D_DECODER)
+#include "media/filters/dav1d_video_decoder.h"
+#elif BUILDFLAG(ENABLE_LIBGAV1_DECODER)
+#include "media/filters/gav1_video_decoder.h"
+#endif
+
namespace media {
namespace test {
@@ -27,7 +41,8 @@ namespace {
// under docs/media/gpu/video_decoder_test_usage.md when making changes here.
constexpr const char* usage_msg =
"usage: video_decode_accelerator_tests\n"
- " [-v=<level>] [--vmodule=<config>] [--disable_validator]\n"
+ " [-v=<level>] [--vmodule=<config>]\n"
+ " [--validator_type=(none|md5|ssim)]\n"
" [--output_frames=(all|corrupt)] [--output_format=(png|yuv)]\n"
" [--output_limit=<number>] [--output_folder=<folder>]\n"
" ([--use_vd]|[--use_vd_vda]) [--gtest_help] [--help]\n"
@@ -45,7 +60,11 @@ constexpr const char* help_msg =
" -v enable verbose mode, e.g. -v=2.\n"
" --vmodule enable verbose mode for the specified module,\n"
" e.g. --vmodule=*media/gpu*=2.\n\n"
- " --disable_validator disable frame validation.\n"
+ " --validator_type validate decoded frames, possible values are \n"
+ " md5 (default, compare against md5hash of expected\n"
+ " frames), ssim (compute SSIM against expected\n"
+ " frames, currently allowed for AV1 streams only)\n"
+ " and none (disable frame validation).\n"
" --use_vd use the new VD-based video decoders, instead of\n"
" the default VDA-based video decoders.\n"
" --use_vd_vda use the new VD-based video decoders with a\n"
@@ -103,16 +122,34 @@ class VideoDecoderTest : public ::testing::Test {
output_folder, g_env->GetFrameOutputFormat(),
g_env->GetFrameOutputLimit());
}
-
- // VP9 profile 2 supports 10 and 12 bit color depths, but we currently
- // assume a profile 2 stream contains 10 bit color depth only.
- // TODO(hiroh): Add bit depth info to Video class and follow it here.
+ if (g_env->Video()->BitDepth() != 8u &&
+ g_env->Video()->BitDepth() != 10u) {
+ LOG(ERROR) << "Unsupported bit depth: "
+ << base::strict_cast<int>(g_env->Video()->BitDepth());
+ ADD_FAILURE();
+ }
const VideoPixelFormat validation_format =
- g_env->Video()->Profile() == VP9PROFILE_PROFILE2
- ? PIXEL_FORMAT_YUV420P10
- : PIXEL_FORMAT_I420;
- frame_processors.push_back(media::test::MD5VideoFrameValidator::Create(
- video->FrameChecksums(), validation_format, std::move(frame_writer)));
+ g_env->Video()->BitDepth() == 10 ? PIXEL_FORMAT_YUV420P10
+ : PIXEL_FORMAT_I420;
+ if (g_env->GetValidatorType() ==
+ VideoPlayerTestEnvironment::ValidatorType::kMD5) {
+ frame_processors.push_back(media::test::MD5VideoFrameValidator::Create(
+ video->FrameChecksums(), validation_format,
+ std::move(frame_writer)));
+ } else {
+ DCHECK_EQ(g_env->GetValidatorType(),
+ VideoPlayerTestEnvironment::ValidatorType::kSSIM);
+ if (!CreateModelFrames(g_env->Video())) {
+ LOG(ERROR) << "Failed creating model frames";
+ ADD_FAILURE();
+ }
+ constexpr double kSSIMTolerance = 0.915;
+ frame_processors.push_back(media::test::SSIMVideoFrameValidator::Create(
+ base::BindRepeating(&VideoDecoderTest::GetModelFrame,
+ base::Unretained(this)),
+ std::move(frame_writer),
+ VideoFrameValidator::ValidationMode::kThreshold, kSSIMTolerance));
+ }
}
config.implementation = g_env->GetDecoderImplementation();
@@ -130,6 +167,82 @@ class VideoDecoderTest : public ::testing::Test {
}
return video_player;
}
+
+ private:
+ // TODO(hiroh): Move this to Video class or video_frame_helpers.h.
+ // TODO(hiroh): Create model frames once during the test.
+ bool CreateModelFrames(const Video* video) {
+ if (video->Codec() != VideoCodec::kCodecAV1) {
+ LOG(ERROR) << "Frame validation by SSIM is allowed for AV1 streams only";
+ return false;
+ }
+#if BUILDFLAG(ENABLE_DAV1D_DECODER)
+ Dav1dVideoDecoder decoder(
+#elif BUILDFLAG(ENABLE_LIBGAV1_DECODER)
+ Gav1VideoDecoder decoder(
+#endif
+ /*media_log=*/nullptr,
+ OffloadableVideoDecoder::OffloadState::kOffloaded);
+ VideoDecoderConfig decoder_config(
+ video->Codec(), video->Profile(),
+ VideoDecoderConfig::AlphaMode::kIsOpaque, VideoColorSpace(),
+ kNoTransformation, video->Resolution(), video->VisibleRect(),
+ video->VisibleRect().size(), EmptyExtraData(),
+ EncryptionScheme::kUnencrypted);
+
+ bool init_success = false;
+ VideoDecoder::InitCB init_cb = base::BindOnce(
+ [](bool* init_success, media::Status result) {
+ *init_success = result.is_ok();
+ },
+ &init_success);
+ decoder.Initialize(decoder_config, /*low_delay=*/false,
+ /*cdm_context=*/nullptr, std::move(init_cb),
+ base::BindRepeating(&VideoDecoderTest::AddModelFrame,
+ base::Unretained(this)),
+ /*waiting_cb=*/base::NullCallback());
+ if (!init_success)
+ return false;
+ auto encoded_data_helper =
+ std::make_unique<EncodedDataHelper>(video->Data(), video->Profile());
+ DCHECK(encoded_data_helper);
+ while (!encoded_data_helper->ReachEndOfStream()) {
+ bool decode_success = false;
+ media::VideoDecoder::DecodeCB decode_cb = base::BindOnce(
+ [](bool* decode_success, media::Status status) {
+ *decode_success = status.is_ok();
+ },
+ &decode_success);
+ scoped_refptr<DecoderBuffer> bitstream_buffer =
+ encoded_data_helper->GetNextBuffer();
+ if (!bitstream_buffer) {
+ LOG(ERROR) << "Failed to get next video stream data";
+ return false;
+ }
+ decoder.Decode(std::move(bitstream_buffer), std::move(decode_cb));
+ if (!decode_success)
+ return false;
+ }
+ bool flush_success = false;
+ media::VideoDecoder::DecodeCB flush_cb = base::BindOnce(
+ [](bool* flush_success, media::Status status) {
+ *flush_success = status.is_ok();
+ },
+ &flush_success);
+ decoder.Decode(DecoderBuffer::CreateEOSBuffer(), std::move(flush_cb));
+
+ return flush_success && model_frames_.size() == video->NumFrames();
+ }
+
+ void AddModelFrame(scoped_refptr<VideoFrame> frame) {
+ model_frames_.push_back(std::move(frame));
+ }
+
+ scoped_refptr<const VideoFrame> GetModelFrame(size_t frame_index) {
+ CHECK_LT(frame_index, model_frames_.size());
+ return model_frames_[frame_index];
+ }
+ std::vector<scoped_refptr<VideoFrame>> model_frames_;
};
} // namespace
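One subtlety in CreateModelFrames() above: the stack booleans captured by pointer appear to rely on the software decoder, constructed with OffloadState::kOffloaded, invoking its Initialize()/Decode() callbacks synchronously on the calling thread. Reduced to its core, the pattern is:

  // Sketch: capture-by-pointer is only valid when the callee runs |cb|
  // before returning.
  bool ok = false;
  auto cb = base::BindOnce(
      [](bool* ok, media::Status s) { *ok = s.is_ok(); }, &ok);
  // ...pass std::move(cb) to a synchronous API, then read |ok|...
  // With an asynchronous API this would be a dangling-pointer bug.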
@@ -189,7 +302,11 @@ TEST_F(VideoDecoderTest, ResetMidStream) {
EXPECT_TRUE(tvp->WaitForFlushDone());
EXPECT_EQ(tvp->GetResetDoneCount(), 1u);
- EXPECT_EQ(tvp->GetFlushDoneCount(), 1u);
+ // In the case of a very short clip the decoder may be able
+ // to decode all the frames before a reset is sent.
+ // A flush occurs after the last frame, so in this situation
+ // there will be 2 flushes that occur.
+ EXPECT_TRUE(tvp->GetFlushDoneCount() == 1u || tvp->GetFlushDoneCount() == 2u);
EXPECT_EQ(tvp->GetFrameDecodedCount(),
numFramesDecoded + g_env->Video()->NumFrames());
EXPECT_TRUE(tvp->WaitForFrameProcessors());
@@ -424,7 +541,8 @@ int main(int argc, char** argv) {
(args.size() >= 2) ? base::FilePath(args[1]) : base::FilePath();
// Parse command line arguments.
- bool enable_validator = true;
+ auto validator_type =
+ media::test::VideoPlayerTestEnvironment::ValidatorType::kMD5;
media::test::FrameOutputConfig frame_output_config;
base::FilePath::StringType output_folder = base::FilePath::kCurrentDirectory;
bool use_vd = false;
@@ -439,8 +557,21 @@ int main(int argc, char** argv) {
continue;
}
- if (it->first == "disable_validator") {
- enable_validator = false;
+ if (it->first == "validator_type") {
+ if (it->second == "none") {
+ validator_type =
+ media::test::VideoPlayerTestEnvironment::ValidatorType::kNone;
+ } else if (it->second == "md5") {
+ validator_type =
+ media::test::VideoPlayerTestEnvironment::ValidatorType::kMD5;
+ } else if (it->second == "ssim") {
+ validator_type =
+ media::test::VideoPlayerTestEnvironment::ValidatorType::kSSIM;
+ } else {
+ std::cout << "unknown validator type \"" << it->second
+ << "\", possible values are \"none|md5|ssim\"\n";
+ return EXIT_FAILURE;
+ }
} else if (it->first == "output_frames") {
if (it->second == "all") {
frame_output_config.output_mode = media::test::FrameOutputMode::kAll;
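With the parsing above, selecting a validator from the command line looks like this; the file names are placeholders (the second positional argument is the optional metadata path):

  ./video_decode_accelerator_tests --validator_type=ssim \
      test_video.av1.ivf test_video.av1.ivf.json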
@@ -496,7 +627,7 @@ int main(int argc, char** argv) {
// Set up our test environment.
media::test::VideoPlayerTestEnvironment* test_environment =
media::test::VideoPlayerTestEnvironment::Create(
- video_path, video_metadata_path, enable_validator, implementation,
+ video_path, video_metadata_path, validator_type, implementation,
base::FilePath(output_folder), frame_output_config);
if (!test_environment)
return EXIT_FAILURE;
diff --git a/chromium/media/gpu/video_encode_accelerator_perf_tests.cc b/chromium/media/gpu/video_encode_accelerator_perf_tests.cc
index f8e9e3a2034..13eac6a68b0 100644
--- a/chromium/media/gpu/video_encode_accelerator_perf_tests.cc
+++ b/chromium/media/gpu/video_encode_accelerator_perf_tests.cc
@@ -3,6 +3,7 @@
// found in the LICENSE file.
#include <algorithm>
+#include <map>
#include <numeric>
#include <vector>
@@ -11,11 +12,16 @@
#include "base/json/json_writer.h"
#include "base/strings/stringprintf.h"
#include "media/base/bitstream_buffer.h"
+#include "media/base/media_util.h"
#include "media/base/test_data_util.h"
+#include "media/base/video_decoder_config.h"
#include "media/gpu/test/video.h"
+#include "media/gpu/test/video_encoder/bitstream_validator.h"
#include "media/gpu/test/video_encoder/video_encoder.h"
#include "media/gpu/test/video_encoder/video_encoder_client.h"
#include "media/gpu/test/video_encoder/video_encoder_test_environment.h"
+#include "media/gpu/test/video_frame_validator.h"
+#include "media/gpu/test/video_test_helpers.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace media {
@@ -59,6 +65,12 @@ constexpr base::FilePath::CharType kDefaultTestVideoPath[] =
media::test::VideoEncoderTestEnvironment* g_env;
+constexpr size_t kNumFramesToEncodeForPerformance = 300;
+
+// The event timeout used in perf tests, because encoding 2160p content for
+// |kNumFramesToEncodeForPerformance| frames takes a long time.
+constexpr base::TimeDelta kPerfEventTimeout = base::TimeDelta::FromSeconds(180);
+
// Default output folder used to store performance metrics.
constexpr const base::FilePath::CharType* kDefaultOutputFolder =
FILE_PATH_LITERAL("perf_metrics");
@@ -266,23 +278,202 @@ void PerformanceMetrics::WriteToFile() const {
VLOG(0) << "Wrote performance metrics to: " << metrics_file_path;
}
+struct BitstreamQualityMetrics {
+ BitstreamQualityMetrics(const PSNRVideoFrameValidator& psnr_validator,
+ const SSIMVideoFrameValidator& ssim_validator);
+ void WriteToConsole() const;
+ void WriteToFile() const;
+
+ private:
+ struct QualityStats {
+ double avg = 0;
+ double percentile_25 = 0;
+ double percentile_50 = 0;
+ double percentile_75 = 0;
+ std::vector<double> values_in_order;
+ };
+
+ static QualityStats ComputeQualityStats(
+ const std::map<size_t, double>& values);
+
+ const QualityStats psnr_stats;
+ const QualityStats ssim_stats;
+};
+
+BitstreamQualityMetrics::BitstreamQualityMetrics(
+ const PSNRVideoFrameValidator& psnr_validator,
+ const SSIMVideoFrameValidator& ssim_validator)
+ : psnr_stats(ComputeQualityStats(psnr_validator.GetPSNRValues())),
+ ssim_stats(ComputeQualityStats(ssim_validator.GetSSIMValues())) {}
+
+// static
+BitstreamQualityMetrics::QualityStats
+BitstreamQualityMetrics::ComputeQualityStats(
+ const std::map<size_t, double>& values) {
+ if (values.empty())
+ return QualityStats();
+ std::vector<double> sorted_values;
+ std::vector<std::pair<size_t, double>> index_and_value;
+ sorted_values.reserve(values.size());
+ index_and_value.reserve(values.size());
+ for (const auto& v : values) {
+ sorted_values.push_back(v.second);
+ index_and_value.emplace_back(v.first, v.second);
+ }
+ std::sort(sorted_values.begin(), sorted_values.end());
+ std::sort(index_and_value.begin(), index_and_value.end());
+ QualityStats stats;
+ stats.avg = std::accumulate(sorted_values.begin(), sorted_values.end(), 0.0) /
+ sorted_values.size();
+ stats.percentile_25 = sorted_values[sorted_values.size() / 4];
+ stats.percentile_50 = sorted_values[sorted_values.size() / 2];
+ stats.percentile_75 = sorted_values[(sorted_values.size() * 3) / 4];
+ stats.values_in_order.resize(index_and_value.size());
+ for (size_t i = 0; i < index_and_value.size(); ++i)
+ stats.values_in_order[i] = index_and_value[i].second;
+ return stats;
+}
+
+void BitstreamQualityMetrics::WriteToConsole() const {
+ std::cout << "SSIM - average: " << ssim_stats.avg << std::endl;
+ std::cout << "SSIM - percentile 25: " << ssim_stats.percentile_25
+ << std::endl;
+ std::cout << "SSIM - percentile 50: " << ssim_stats.percentile_50
+ << std::endl;
+ std::cout << "SSIM - percentile 75: " << ssim_stats.percentile_75
+ << std::endl;
+ std::cout << "PSNR - average: " << psnr_stats.avg << std::endl;
+ std::cout << "PSNR - percentile 25: " << psnr_stats.percentile_25
+ << std::endl;
+ std::cout << "PSNR - percentile 50: " << psnr_stats.percentile_50
+ << std::endl;
+ std::cout << "PSNR - percentile 75: " << psnr_stats.percentile_75
+ << std::endl;
+}
+
+void BitstreamQualityMetrics::WriteToFile() const {
+ base::FilePath output_folder_path = base::FilePath(g_env->OutputFolder());
+ if (!DirectoryExists(output_folder_path))
+ base::CreateDirectory(output_folder_path);
+ output_folder_path = base::MakeAbsoluteFilePath(output_folder_path);
+ // Write quality metrics to json.
+ base::Value metrics(base::Value::Type::DICTIONARY);
+ metrics.SetKey("SSIMAverage", base::Value(ssim_stats.avg));
+ metrics.SetKey("SSIMPercentile25", base::Value(ssim_stats.percentile_25));
+ metrics.SetKey("SSIMPercentile50", base::Value(ssim_stats.percentile_50));
+ metrics.SetKey("SSIMPercentile75", base::Value(psnr_stats.percentile_75));
+ metrics.SetKey("PSNRAverage", base::Value(psnr_stats.avg));
+ metrics.SetKey("PSNRPercentile25", base::Value(psnr_stats.percentile_25));
+ metrics.SetKey("PSNRPercentile50", base::Value(psnr_stats.percentile_50));
+ metrics.SetKey("PSNRPercentile75", base::Value(psnr_stats.percentile_75));
+ // Write SSIM values to json.
+ base::Value ssim_values(base::Value::Type::LIST);
+ for (double value : ssim_stats.values_in_order)
+ ssim_values.Append(value);
+ metrics.SetKey("SSIMValues", std::move(ssim_values));
+
+ // Write psnr values to json.
+ base::Value psnr_values(base::Value::Type::LIST);
+ for (double value : psnr_stats.values_in_order)
+ psnr_values.Append(value);
+ metrics.SetKey("PSNRValues", std::move(psnr_values));
+
+ // Write json to file.
+ std::string metrics_str;
+ ASSERT_TRUE(base::JSONWriter::WriteWithOptions(
+ metrics, base::JSONWriter::OPTIONS_PRETTY_PRINT, &metrics_str));
+ base::FilePath metrics_file_path = output_folder_path.Append(
+ g_env->GetTestOutputFilePath().AddExtension(FILE_PATH_LITERAL(".json")));
+ // Make sure that the directory into which json is saved is created.
+ LOG_ASSERT(base::CreateDirectory(metrics_file_path.DirName()));
+ base::File metrics_output_file(
+ base::FilePath(metrics_file_path),
+ base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE);
+ int bytes_written = metrics_output_file.WriteAtCurrentPos(
+ metrics_str.data(), metrics_str.length());
+ ASSERT_EQ(bytes_written, static_cast<int>(metrics_str.length()));
+ VLOG(0) << "Wrote performance metrics to: " << metrics_file_path;
+}
+
// Video encode test class. Performs setup and teardown for each single test.
class VideoEncoderTest : public ::testing::Test {
public:
// Create a new video encoder instance.
- std::unique_ptr<VideoEncoder> CreateVideoEncoder(const Video* video,
+ std::unique_ptr<VideoEncoder> CreateVideoEncoder(Video* video,
VideoCodecProfile profile,
- uint32_t bitrate) {
- LOG_ASSERT(video);
-
- std::vector<std::unique_ptr<BitstreamProcessor>> bitstream_processors;
+ uint32_t bitrate,
+ uint32_t encoder_rate = 0) {
auto performance_evaluator = std::make_unique<PerformanceEvaluator>();
performance_evaluator_ = performance_evaluator.get();
+ std::vector<std::unique_ptr<BitstreamProcessor>> bitstream_processors;
bitstream_processors.push_back(std::move(performance_evaluator));
+ return CreateVideoEncoderWithProcessors(
+ video, profile, bitrate, encoder_rate, std::move(bitstream_processors));
+ }
+
+ // Create a new video encoder instance for quality performance tests.
+ std::unique_ptr<VideoEncoder> CreateVideoEncoderForQualityPerformance(
+ Video* video,
+ VideoCodecProfile profile,
+ uint32_t bitrate) {
+ raw_data_helper_ = RawDataHelper::Create(video);
+ if (!raw_data_helper_) {
+ LOG(ERROR) << "Failed to create raw data helper";
+ return nullptr;
+ }
+ std::vector<std::unique_ptr<VideoFrameProcessor>> video_frame_processors;
+ VideoFrameValidator::GetModelFrameCB get_model_frame_cb =
+ base::BindRepeating(&VideoEncoderTest::GetModelFrame,
+ base::Unretained(this));
+ auto ssim_validator = SSIMVideoFrameValidator::Create(
+ get_model_frame_cb, /*corrupt_frame_processor=*/nullptr,
+ VideoFrameValidator::ValidationMode::kAverage,
+ /*tolerance=*/0.0);
+ LOG_ASSERT(ssim_validator);
+ ssim_validator_ = ssim_validator.get();
+ video_frame_processors.push_back(std::move(ssim_validator));
+ auto psnr_validator = PSNRVideoFrameValidator::Create(
+ get_model_frame_cb, /*corrupt_frame_processor=*/nullptr,
+ VideoFrameValidator::ValidationMode::kAverage,
+ /*tolerance=*/0.0);
+ LOG_ASSERT(psnr_validator);
+ psnr_validator_ = psnr_validator.get();
+ video_frame_processors.push_back(std::move(psnr_validator));
+
+ const gfx::Rect visible_rect(video->Resolution());
+ VideoDecoderConfig decoder_config(
+ VideoCodecProfileToVideoCodec(profile), profile,
+ VideoDecoderConfig::AlphaMode::kIsOpaque, VideoColorSpace(),
+ kNoTransformation, visible_rect.size(), visible_rect,
+ visible_rect.size(), EmptyExtraData(), EncryptionScheme::kUnencrypted);
+
+ auto bitstream_validator = BitstreamValidator::Create(
+ decoder_config, kNumFramesToEncodeForPerformance - 1,
+ std::move(video_frame_processors));
+ LOG_ASSERT(bitstream_validator);
+ std::vector<std::unique_ptr<BitstreamProcessor>> bitstream_processors;
+ bitstream_processors.push_back(std::move(bitstream_validator));
+ return CreateVideoEncoderWithProcessors(video, profile, bitrate,
+ /*encoder_rate=*/0,
+ std::move(bitstream_processors));
+ }
+
+ std::unique_ptr<VideoEncoder> CreateVideoEncoderWithProcessors(
+ Video* video,
+ VideoCodecProfile profile,
+ uint32_t bitrate,
+ uint32_t encoder_rate,
+ std::vector<std::unique_ptr<BitstreamProcessor>> bitstream_processors) {
+ LOG_ASSERT(video);
constexpr size_t kNumTemporalLayers = 1u;
VideoEncoderClientConfig config(video, profile, kNumTemporalLayers,
bitrate);
+ config.num_frames_to_encode = kNumFramesToEncodeForPerformance;
+ if (encoder_rate != 0)
+ config.encode_interval =
+ base::TimeDelta::FromSeconds(/*secs=*/1u) / encoder_rate;
+
auto video_encoder =
VideoEncoder::Create(config, g_env->GetGpuMemoryBufferFactory(),
std::move(bitstream_processors));
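ComputeQualityStats() earlier in this hunk uses simple rank-based percentiles: indices size/4, size/2 and 3*size/4 into the ascending-sorted values. For example, with eight per-frame scores:

  // Illustrative values only.
  std::vector<double> sorted = {0.80, 0.85, 0.88, 0.90,
                                0.91, 0.93, 0.95, 0.99};
  double p25 = sorted[sorted.size() / 4];        // sorted[2] == 0.88
  double p50 = sorted[sorted.size() / 2];        // sorted[4] == 0.91
  double p75 = sorted[(sorted.size() * 3) / 4];  // sorted[6] == 0.95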
@@ -292,17 +483,28 @@ class VideoEncoderTest : public ::testing::Test {
return video_encoder;
}
+ scoped_refptr<const VideoFrame> GetModelFrame(size_t frame_index) {
+ LOG_ASSERT(raw_data_helper_);
+ return raw_data_helper_->GetFrame(frame_index %
+ g_env->Video()->NumFrames());
+ }
+
+ std::unique_ptr<RawDataHelper> raw_data_helper_;
+
PerformanceEvaluator* performance_evaluator_;
+ SSIMVideoFrameValidator* ssim_validator_;
+ PSNRVideoFrameValidator* psnr_validator_;
};
} // namespace
-// Encode video from start to end while measuring uncapped performance. This
-// test will encode a video as fast as possible, and gives an idea about the
-// maximum output of the encoder.
+// Encode |kNumFramesToEncodeForPerformance| frames while measuring uncapped
+// performance. This test will encode a video as fast as possible, and gives an
+// idea about the maximum output of the encoder.
TEST_F(VideoEncoderTest, MeasureUncappedPerformance) {
auto encoder =
CreateVideoEncoder(g_env->Video(), g_env->Profile(), g_env->Bitrate());
+ encoder->SetEventWaitTimeout(kPerfEventTimeout);
performance_evaluator_->StartMeasuring();
encoder->Encode();
@@ -314,9 +516,46 @@ TEST_F(VideoEncoderTest, MeasureUncappedPerformance) {
metrics.WriteToFile();
EXPECT_EQ(encoder->GetFlushDoneCount(), 1u);
- EXPECT_EQ(encoder->GetFrameReleasedCount(), g_env->Video()->NumFrames());
+ EXPECT_EQ(encoder->GetFrameReleasedCount(), kNumFramesToEncodeForPerformance);
}
+// Encode |kNumFramesToEncodeForPerformance| frames while measuring capped
+// performance. This test encodes a video at a fixed rate, 30fps, and can be
+// used to measure CPU metrics during encoding.
+TEST_F(VideoEncoderTest, MeasureCappedPerformance) {
+ const uint32_t kEncodeRate = 30;
+ auto encoder = CreateVideoEncoder(g_env->Video(), g_env->Profile(),
+ g_env->Bitrate(), kEncodeRate);
+ encoder->SetEventWaitTimeout(kPerfEventTimeout);
+
+ performance_evaluator_->StartMeasuring();
+ encoder->Encode();
+ EXPECT_TRUE(encoder->WaitForFlushDone());
+ performance_evaluator_->StopMeasuring();
+
+ auto metrics = performance_evaluator_->Metrics();
+ metrics.WriteToConsole();
+ metrics.WriteToFile();
+
+ EXPECT_EQ(encoder->GetFlushDoneCount(), 1u);
+ EXPECT_EQ(encoder->GetFrameReleasedCount(), kNumFramesToEncodeForPerformance);
+}
+
+TEST_F(VideoEncoderTest, MeasureProducedBitstreamQuality) {
+ auto encoder = CreateVideoEncoderForQualityPerformance(
+ g_env->Video(), g_env->Profile(), g_env->Bitrate());
+ encoder->SetEventWaitTimeout(kPerfEventTimeout);
+
+ encoder->Encode();
+ EXPECT_TRUE(encoder->WaitForFlushDone());
+ EXPECT_EQ(encoder->GetFlushDoneCount(), 1u);
+ EXPECT_EQ(encoder->GetFrameReleasedCount(), kNumFramesToEncodeForPerformance);
+ EXPECT_TRUE(encoder->WaitForBitstreamProcessors());
+
+ BitstreamQualityMetrics metrics(*psnr_validator_, *ssim_validator_);
+ metrics.WriteToConsole();
+ metrics.WriteToFile();
+}
} // namespace test
} // namespace media
diff --git a/chromium/media/gpu/video_encode_accelerator_tests.cc b/chromium/media/gpu/video_encode_accelerator_tests.cc
index 4219a9b3685..3e396fb32d3 100644
--- a/chromium/media/gpu/video_encode_accelerator_tests.cc
+++ b/chromium/media/gpu/video_encode_accelerator_tests.cc
@@ -122,11 +122,57 @@ class VideoEncoderTest : public ::testing::Test {
}
private:
+ std::unique_ptr<BitstreamProcessor> CreateBitstreamValidator(
+ const Video* video,
+ const VideoDecoderConfig& decoder_config,
+ const size_t last_frame_index,
+ VideoFrameValidator::GetModelFrameCB get_model_frame_cb,
+ base::Optional<size_t> num_vp9_temporal_layers_to_decode) {
+ std::vector<std::unique_ptr<VideoFrameProcessor>> video_frame_processors;
+
+ // Attach a video frame writer to store individual frames to disk if
+ // requested.
+ std::unique_ptr<VideoFrameProcessor> image_writer;
+ auto frame_output_config = g_env->ImageOutputConfig();
+ base::FilePath output_folder = base::FilePath(g_env->OutputFolder())
+ .Append(g_env->GetTestOutputFilePath());
+ if (frame_output_config.output_mode != FrameOutputMode::kNone) {
+ image_writer = VideoFrameFileWriter::Create(
+ output_folder, frame_output_config.output_format,
+ frame_output_config.output_limit,
+ num_vp9_temporal_layers_to_decode
+ ? base::NumberToString(*num_vp9_temporal_layers_to_decode)
+ : "");
+ LOG_ASSERT(image_writer);
+ if (frame_output_config.output_mode == FrameOutputMode::kAll)
+ video_frame_processors.push_back(std::move(image_writer));
+ }
+
+ // For a resolution less than 360p, we lower the tolerance. Some platforms
+ // cannot compress a low-resolution video efficiently at a low bitrate.
+ constexpr gfx::Size k360p(640, 360);
+ constexpr double kSSIMToleranceForLowerResolution = 0.65;
+ const gfx::Size encode_resolution = video->VisibleRect().size();
+ const double ssim_tolerance =
+ encode_resolution.GetArea() < k360p.GetArea()
+ ? kSSIMToleranceForLowerResolution
+ : SSIMVideoFrameValidator::kDefaultTolerance;
+
+ auto ssim_validator = SSIMVideoFrameValidator::Create(
+ get_model_frame_cb, std::move(image_writer),
+ VideoFrameValidator::ValidationMode::kAverage, ssim_tolerance);
+ LOG_ASSERT(ssim_validator);
+ video_frame_processors.push_back(std::move(ssim_validator));
+ return BitstreamValidator::Create(decoder_config, last_frame_index,
+ std::move(video_frame_processors),
+ num_vp9_temporal_layers_to_decode);
+ }
+
std::vector<std::unique_ptr<BitstreamProcessor>> CreateBitstreamProcessors(
Video* video,
const VideoEncoderClientConfig& config) {
std::vector<std::unique_ptr<BitstreamProcessor>> bitstream_processors;
- const gfx::Rect visible_rect(video->Resolution());
+ const gfx::Rect visible_rect(config.output_resolution);
const VideoCodec codec =
VideoCodecProfileToVideoCodec(config.output_profile);
if (g_env->SaveOutputBitstream()) {
@@ -137,11 +183,24 @@ class VideoEncoderTest : public ::testing::Test {
g_env->OutputFolder()
.Append(g_env->GetTestOutputFilePath())
.Append(video->FilePath().BaseName().ReplaceExtension(extension));
- auto bitstream_writer = BitstreamFileWriter::Create(
- output_bitstream_filepath, codec, visible_rect.size(),
- config.framerate, config.num_frames_to_encode);
- LOG_ASSERT(bitstream_writer);
- bitstream_processors.emplace_back(std::move(bitstream_writer));
+ if (config.num_temporal_layers > 1) {
+ for (size_t num_vp9_temporal_layers_to_write = 1;
+ num_vp9_temporal_layers_to_write <= config.num_temporal_layers;
+ ++num_vp9_temporal_layers_to_write) {
+ bitstream_processors.emplace_back(BitstreamFileWriter::Create(
+ output_bitstream_filepath.InsertBeforeExtensionASCII(
+ FILE_PATH_LITERAL(".TL") +
+ base::NumberToString(num_vp9_temporal_layers_to_write)),
+ codec, visible_rect.size(), config.framerate,
+ config.num_frames_to_encode, num_vp9_temporal_layers_to_write));
+ LOG_ASSERT(bitstream_processors.back());
+ }
+ } else {
+ bitstream_processors.emplace_back(BitstreamFileWriter::Create(
+ output_bitstream_filepath, codec, visible_rect.size(),
+ config.framerate, config.num_frames_to_encode));
+ LOG_ASSERT(bitstream_processors.back());
+ }
}
if (!g_env->IsBitstreamValidatorEnabled()) {
@@ -174,7 +233,6 @@ class VideoEncoderTest : public ::testing::Test {
codec, config.output_profile, VideoDecoderConfig::AlphaMode::kIsOpaque,
VideoColorSpace(), kNoTransformation, visible_rect.size(), visible_rect,
visible_rect.size(), EmptyExtraData(), EncryptionScheme::kUnencrypted);
- std::vector<std::unique_ptr<VideoFrameProcessor>> video_frame_processors;
raw_data_helper_ = RawDataHelper::Create(video);
if (!raw_data_helper_) {
LOG(ERROR) << "Failed to create raw data helper";
@@ -183,48 +241,55 @@ class VideoEncoderTest : public ::testing::Test {
VideoFrameValidator::GetModelFrameCB get_model_frame_cb =
base::BindRepeating(&VideoEncoderTest::GetModelFrame,
- base::Unretained(this));
-
- // Attach a video frame writer to store individual frames to disk if
- // requested.
- std::unique_ptr<VideoFrameProcessor> image_writer;
- auto frame_output_config = g_env->ImageOutputConfig();
- base::FilePath output_folder = base::FilePath(g_env->OutputFolder())
- .Append(g_env->GetTestOutputFilePath());
- if (frame_output_config.output_mode != FrameOutputMode::kNone) {
- image_writer = VideoFrameFileWriter::Create(
- output_folder, frame_output_config.output_format,
- frame_output_config.output_limit);
- LOG_ASSERT(image_writer);
- if (frame_output_config.output_mode == FrameOutputMode::kAll)
- video_frame_processors.push_back(std::move(image_writer));
+ base::Unretained(this), visible_rect);
+ if (config.num_temporal_layers > 1) {
+ for (size_t num_temporal_layers_to_decode = 1;
+ num_temporal_layers_to_decode <= config.num_temporal_layers;
+ ++num_temporal_layers_to_decode) {
+ bitstream_processors.emplace_back(CreateBitstreamValidator(
+ video, decoder_config, config.num_frames_to_encode - 1,
+ get_model_frame_cb, num_temporal_layers_to_decode));
+ LOG_ASSERT(bitstream_processors.back());
+ }
+ } else {
+ bitstream_processors.emplace_back(CreateBitstreamValidator(
+ video, decoder_config, config.num_frames_to_encode - 1,
+ get_model_frame_cb, base::nullopt));
+ LOG_ASSERT(bitstream_processors.back());
}
- auto ssim_validator = SSIMVideoFrameValidator::Create(
- get_model_frame_cb, std::move(image_writer),
- VideoFrameValidator::ValidationMode::kAverage);
- LOG_ASSERT(ssim_validator);
- video_frame_processors.push_back(std::move(ssim_validator));
- auto bitstream_validator = BitstreamValidator::Create(
- decoder_config, config.num_frames_to_encode - 1,
- std::move(video_frame_processors));
- LOG_ASSERT(bitstream_validator);
- bitstream_processors.emplace_back(std::move(bitstream_validator));
return bitstream_processors;
}
- scoped_refptr<const VideoFrame> GetModelFrame(size_t frame_index) {
+ scoped_refptr<const VideoFrame> GetModelFrame(const gfx::Rect& visible_rect,
+ size_t frame_index) {
LOG_ASSERT(raw_data_helper_);
- return raw_data_helper_->GetFrame(frame_index %
- g_env->Video()->NumFrames());
+ auto frame =
+ raw_data_helper_->GetFrame(frame_index % g_env->Video()->NumFrames());
+ if (!frame)
+ return nullptr;
+ if (visible_rect.size() == frame->visible_rect().size())
+ return frame;
+ return ScaleVideoFrame(frame.get(), visible_rect.size());
}
std::unique_ptr<RawDataHelper> raw_data_helper_;
};
-} // namespace
+base::Optional<std::string> SupportsDynamicFramerate() {
+ return g_env->IsKeplerUsed()
+ ? base::make_optional<std::string>(
+ "The rate controller in the kepler firmware doesn't handle "
+ "frame rate changes correctly.")
+ : base::nullopt;
+}
-// TODO(dstaessens): Add more test scenarios:
-// - Forcing key frames
+base::Optional<std::string> SupportsNV12DmaBufInput() {
+ return g_env->IsKeplerUsed() ? base::make_optional<std::string>(
+ "Encoding with dmabuf input frames is not "
+ "supported in kepler.")
+ : base::nullopt;
+}
+} // namespace
// Encode video from start to end. Wait for the kFlushDone event at the end of
// the stream, which notifies us that all frames have been encoded.
@@ -260,6 +325,46 @@ TEST_F(VideoEncoderTest, DestroyBeforeInitialize) {
EXPECT_NE(video_encoder, nullptr);
}
+// Test forcing key frames while encoding a video.
+TEST_F(VideoEncoderTest, ForceKeyFrame) {
+ auto config = GetDefaultConfig();
+ const size_t middle_frame = config.num_frames_to_encode;
+ config.num_frames_to_encode *= 2;
+ auto encoder = CreateVideoEncoder(g_env->Video(), config);
+
+ // Our hardware encoders are not expected to produce a key frame on their
+ // own within a short time span such as a few hundred frames.
+ // TODO(hiroh): This might be wrong on some platforms. Needs to be updated.
+ // Encode the first frame, this should always be a keyframe.
+ encoder->EncodeUntil(VideoEncoder::kBitstreamReady, 1u);
+ EXPECT_TRUE(encoder->WaitUntilIdle());
+ EXPECT_EQ(encoder->GetEventCount(VideoEncoder::kKeyFrame), 1u);
+ // Encode until the middle of the stream, then request force_keyframe.
+ encoder->EncodeUntil(VideoEncoder::kFrameReleased, middle_frame);
+ EXPECT_TRUE(encoder->WaitUntilIdle());
+ // Check that there is no keyframe other than the first frame.
+ EXPECT_EQ(encoder->GetEventCount(VideoEncoder::kKeyFrame), 1u);
+ encoder->ForceKeyFrame();
+ // Since the kFrameReleased and kBitstreamReady events are asynchronous, the
+ // number of bitstreams processed so far is unknown. We check that the
+ // keyframe request was honored by looking for a keyframe within a few
+ // frames after the request. 10 is arbitrarily chosen.
+ constexpr size_t kKeyFrameRequestWindow = 10u;
+ encoder->EncodeUntil(VideoEncoder::kBitstreamReady,
+ std::min(middle_frame + kKeyFrameRequestWindow,
+ config.num_frames_to_encode));
+ EXPECT_TRUE(encoder->WaitUntilIdle());
+ EXPECT_EQ(encoder->GetEventCount(VideoEncoder::kKeyFrame), 2u);
+
+ // Encode until the end of stream.
+ encoder->Encode();
+ EXPECT_TRUE(encoder->WaitForFlushDone());
+ EXPECT_EQ(encoder->GetEventCount(VideoEncoder::kKeyFrame), 2u);
+ EXPECT_EQ(encoder->GetFlushDoneCount(), 1u);
+ EXPECT_EQ(encoder->GetFrameReleasedCount(), config.num_frames_to_encode);
+ EXPECT_TRUE(encoder->WaitForBitstreamProcessors());
+}
+
// Encode video from start to end. Multiple encode requests will be queued in
// the encoder, without waiting for the results of the previous requests.
TEST_F(VideoEncoderTest, FlushAtEndOfStream_MultipleOutstandingEncodes) {
@@ -277,8 +382,11 @@ TEST_F(VideoEncoderTest, FlushAtEndOfStream_MultipleOutstandingEncodes) {
// Encode multiple videos simultaneously from start to finish.
TEST_F(VideoEncoderTest, FlushAtEndOfStream_MultipleConcurrentEncodes) {
- // The minimal number of concurrent encoders we expect to be supported.
- constexpr size_t kMinSupportedConcurrentEncoders = 3;
+ // Run only two encoders for larger resolutions so the test does not fail to
+ // create shared memory buffers on lower-end devices.
+ constexpr gfx::Size k1080p(1920, 1080);
+ const size_t kMinSupportedConcurrentEncoders =
+ g_env->Video()->Resolution().GetArea() >= k1080p.GetArea() ? 2 : 3;
auto config = GetDefaultConfig();
std::vector<std::unique_ptr<VideoEncoder>> encoders(
@@ -328,8 +436,7 @@ TEST_F(VideoEncoderTest, BitrateCheck_DynamicBitrate) {
const uint32_t first_bitrate = config.bitrate;
encoder->EncodeUntil(VideoEncoder::kFrameReleased,
kNumFramesToEncodeForBitrateCheck);
- encoder->WaitForEvent(VideoEncoder::kFrameReleased,
- kNumFramesToEncodeForBitrateCheck);
+ EXPECT_TRUE(encoder->WaitUntilIdle());
EXPECT_NEAR(encoder->GetStats().Bitrate(), first_bitrate,
kBitrateTolerance * first_bitrate);
@@ -348,6 +455,8 @@ TEST_F(VideoEncoderTest, BitrateCheck_DynamicBitrate) {
}
TEST_F(VideoEncoderTest, BitrateCheck_DynamicFramerate) {
+ if (auto skip_reason = SupportsDynamicFramerate())
+ GTEST_SKIP() << *skip_reason;
auto config = GetDefaultConfig();
config.num_frames_to_encode = kNumFramesToEncodeForBitrateCheck * 2;
auto encoder = CreateVideoEncoder(g_env->Video(), config);
@@ -360,8 +469,7 @@ TEST_F(VideoEncoderTest, BitrateCheck_DynamicFramerate) {
encoder->EncodeUntil(VideoEncoder::kFrameReleased,
kNumFramesToEncodeForBitrateCheck);
- encoder->WaitForEvent(VideoEncoder::kFrameReleased,
- kNumFramesToEncodeForBitrateCheck);
+ EXPECT_TRUE(encoder->WaitUntilIdle());
EXPECT_NEAR(encoder->GetStats().Bitrate(), config.bitrate,
kBitrateTolerance * config.bitrate);
@@ -380,22 +488,141 @@ TEST_F(VideoEncoderTest, BitrateCheck_DynamicFramerate) {
}
TEST_F(VideoEncoderTest, FlushAtEndOfStream_NV12Dmabuf) {
- auto nv12_video = g_env->Video()->ConvertToNV12();
- ASSERT_TRUE(nv12_video);
+ if (auto skip_reason = SupportsNV12DmaBufInput())
+ GTEST_SKIP() << *skip_reason;
- VideoEncoderClientConfig config(nv12_video.get(), g_env->Profile(),
+ Video* nv12_video = g_env->GenerateNV12Video();
+ VideoEncoderClientConfig config(nv12_video, g_env->Profile(),
g_env->NumTemporalLayers(), g_env->Bitrate());
config.input_storage_type =
- VideoEncodeAccelerator::Config::StorageType::kDmabuf;
+ VideoEncodeAccelerator::Config::StorageType::kGpuMemoryBuffer;
+
+ auto encoder = CreateVideoEncoder(nv12_video, config);
+
+ encoder->Encode();
+ EXPECT_TRUE(encoder->WaitForFlushDone());
+ EXPECT_EQ(encoder->GetFlushDoneCount(), 1u);
+ EXPECT_EQ(encoder->GetFrameReleasedCount(), nv12_video->NumFrames());
+ EXPECT_TRUE(encoder->WaitForBitstreamProcessors());
+}
+
+// Downscaling is required in VideoEncodeAccelerator when zero-copy video
+// capture is enabled. One example is simulcast: the camera produces 360p
+// VideoFrames and there are two VideoEncodeAccelerators, one for 360p and one
+// for 180p. The 180p VideoEncoder is fed the 360p frames and thus has to
+// scale them down from 360p to 180p.
+TEST_F(VideoEncoderTest, FlushAtEndOfStream_NV12DmabufScaling) {
+ if (auto skip_reason = SupportsNV12DmaBufInput())
+ GTEST_SKIP() << *skip_reason;
+ constexpr gfx::Size kMinOutputResolution(240, 180);
+ const gfx::Size output_resolution =
+ gfx::Size(g_env->Video()->Resolution().width() / 2,
+ g_env->Video()->Resolution().height() / 2);
+ if (!gfx::Rect(output_resolution).Contains(gfx::Rect(kMinOutputResolution))) {
+ GTEST_SKIP() << "Skip test if video resolution is too small, "
+ << "output_resolution=" << output_resolution.ToString()
+ << ", minimum output resolution="
+ << kMinOutputResolution.ToString();
+ }
- auto encoder = CreateVideoEncoder(nv12_video.get(), config);
+ auto* nv12_video = g_env->GenerateNV12Video();
+ // Set 1/4 of the original bitrate because the area of |output_resolution| is
+ // 1/4 of the original resolution.
+ VideoEncoderClientConfig config(nv12_video, g_env->Profile(),
+ g_env->NumTemporalLayers(),
+ g_env->Bitrate() / 4);
+ config.output_resolution = output_resolution;
+ config.input_storage_type =
+ VideoEncodeAccelerator::Config::StorageType::kGpuMemoryBuffer;
+ auto encoder = CreateVideoEncoder(nv12_video, config);
encoder->Encode();
EXPECT_TRUE(encoder->WaitForFlushDone());
EXPECT_EQ(encoder->GetFlushDoneCount(), 1u);
EXPECT_EQ(encoder->GetFrameReleasedCount(), nv12_video->NumFrames());
EXPECT_TRUE(encoder->WaitForBitstreamProcessors());
}
+
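+// A minimal sketch of the bitrate scaling rule relied on above: the target
+// bitrate is kept roughly proportional to the frame area, so halving both
+// dimensions divides the bitrate by four. |ScaleBitrateByArea| is a
+// hypothetical helper for illustration, not part of the test harness.
+uint32_t ScaleBitrateByArea(uint32_t bitrate,
+                            const gfx::Size& from,
+                            const gfx::Size& to) {
+  // E.g. 640x360 -> 320x180 yields bitrate / 4.
+  return static_cast<uint32_t>(static_cast<uint64_t>(bitrate) * to.GetArea() /
+                               from.GetArea());
+}
+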
+// Encode VideoFrames while cropping to the rectangle (0, 60, size).
+// Cropping is required in VideoEncodeAccelerator when zero-copy video
+// capture is enabled. One example is when 640x360 capture recording is
+// requested but a camera cannot produce that resolution and instead produces
+// 640x480 frames with visible_rect=(0, 60, 640x360).
+TEST_F(VideoEncoderTest, FlushAtEndOfStream_NV12DmabufCroppingTopAndBottom) {
+ if (auto skip_reason = SupportsNV12DmaBufInput())
+ GTEST_SKIP() << *skip_reason;
+ constexpr int kGrowHeight = 120;
+ const gfx::Size original_resolution = g_env->Video()->Resolution();
+ const gfx::Rect expanded_visible_rect(0, kGrowHeight / 2,
+ original_resolution.width(),
+ original_resolution.height());
+ const gfx::Size expanded_resolution(
+ original_resolution.width(), original_resolution.height() + kGrowHeight);
+ constexpr gfx::Size kMaxExpandedResolution(1920, 1080);
+ if (!gfx::Rect(kMaxExpandedResolution)
+ .Contains(gfx::Rect(expanded_resolution))) {
+ GTEST_SKIP() << "Expanded video resolution is too large, "
+ << "expanded_resolution=" << expanded_resolution.ToString()
+ << ", maximum expanded resolution="
+ << kMaxExpandedResolution.ToString();
+ }
+
+ auto nv12_expanded_video = g_env->GenerateNV12Video()->Expand(
+ expanded_resolution, expanded_visible_rect);
+ ASSERT_TRUE(nv12_expanded_video);
+ VideoEncoderClientConfig config(nv12_expanded_video.get(), g_env->Profile(),
+ g_env->NumTemporalLayers(), g_env->Bitrate());
+ config.output_resolution = original_resolution;
+ config.input_storage_type =
+ VideoEncodeAccelerator::Config::StorageType::kGpuMemoryBuffer;
+
+ auto encoder = CreateVideoEncoder(nv12_expanded_video.get(), config);
+ encoder->Encode();
+ EXPECT_TRUE(encoder->WaitForFlushDone());
+ EXPECT_EQ(encoder->GetFlushDoneCount(), 1u);
+ EXPECT_EQ(encoder->GetFrameReleasedCount(), nv12_expanded_video->NumFrames());
+ EXPECT_TRUE(encoder->WaitForBitstreamProcessors());
+}
+
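+// As a concrete instance of the test above: with a 640x360 input, the encoder
+// is fed 640x480 frames whose visible_rect is (0, 60, 640x360), i.e.
+// kGrowHeight / 2 = 60 rows of padding above and below the visible area.
+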
+// Encode VideoFrames while cropping to the rectangle (60, 0, size).
+// Cropping is required in VideoEncodeAccelerator when zero-copy video
+// capture is enabled. One example is when 640x360 capture recording is
+// requested but a camera cannot produce that resolution and instead produces
+// 760x360 frames with visible_rect=(60, 0, 640x360).
+TEST_F(VideoEncoderTest, FlushAtEndOfStream_NV12DmabufCroppingRightAndLeft) {
+ if (auto skip_reason = SupportsNV12DmaBufInput())
+ GTEST_SKIP() << *skip_reason;
+ constexpr int kGrowWidth = 120;
+ const gfx::Size original_resolution = g_env->Video()->Resolution();
+ const gfx::Rect expanded_visible_rect(kGrowWidth / 2, 0,
+ original_resolution.width(),
+ original_resolution.height());
+ const gfx::Size expanded_resolution(original_resolution.width() + kGrowWidth,
+ original_resolution.height());
+ constexpr gfx::Size kMaxExpandedResolution(1920, 1080);
+ if (!gfx::Rect(kMaxExpandedResolution)
+ .Contains(gfx::Rect(expanded_resolution))) {
+ GTEST_SKIP() << "Expanded video resolution is too large, "
+ << "expanded_resolution=" << expanded_resolution.ToString()
+ << ", maximum expanded resolution="
+ << kMaxExpandedResolution.ToString();
+ }
+
+ auto nv12_expanded_video = g_env->GenerateNV12Video()->Expand(
+ expanded_resolution, expanded_visible_rect);
+ ASSERT_TRUE(nv12_expanded_video);
+ VideoEncoderClientConfig config(nv12_expanded_video.get(), g_env->Profile(),
+ g_env->NumTemporalLayers(), g_env->Bitrate());
+ config.output_resolution = original_resolution;
+ config.input_storage_type =
+ VideoEncodeAccelerator::Config::StorageType::kGpuMemoryBuffer;
+
+ auto encoder = CreateVideoEncoder(nv12_expanded_video.get(), config);
+ encoder->Encode();
+ EXPECT_TRUE(encoder->WaitForFlushDone());
+ EXPECT_EQ(encoder->GetFlushDoneCount(), 1u);
+ EXPECT_EQ(encoder->GetFrameReleasedCount(), nv12_expanded_video->NumFrames());
+ EXPECT_TRUE(encoder->WaitForBitstreamProcessors());
+}
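+
+// As a concrete instance of the test above: with a 640x360 input, the encoder
+// is fed 760x360 frames whose visible_rect is (60, 0, 640x360), i.e.
+// kGrowWidth / 2 = 60 columns of padding to the left and right of the visible
+// area.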
} // namespace test
} // namespace media
diff --git a/chromium/media/gpu/video_encode_accelerator_unittest.cc b/chromium/media/gpu/video_encode_accelerator_unittest.cc
deleted file mode 100644
index 177eb122c83..00000000000
--- a/chromium/media/gpu/video_encode_accelerator_unittest.cc
+++ /dev/null
@@ -1,3311 +0,0 @@
-// Copyright 2013 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <inttypes.h>
-#include <stddef.h>
-#include <stdint.h>
-
-#include <algorithm>
-#include <memory>
-#include <string>
-#include <utility>
-
-#include "base/at_exit.h"
-#include "base/bind.h"
-#include "base/bits.h"
-#include "base/callback_helpers.h"
-#include "base/cancelable_callback.h"
-#include "base/command_line.h"
-#include "base/containers/queue.h"
-#include "base/files/file_util.h"
-#include "base/macros.h"
-#include "base/memory/aligned_memory.h"
-#include "base/memory/ref_counted.h"
-#include "base/memory/unsafe_shared_memory_region.h"
-#include "base/memory/weak_ptr.h"
-#include "base/message_loop/message_pump_type.h"
-#include "base/numerics/safe_conversions.h"
-#include "base/process/process_handle.h"
-#include "base/single_thread_task_runner.h"
-#include "base/strings/pattern.h"
-#include "base/strings/string_number_conversions.h"
-#include "base/strings/string_split.h"
-#include "base/strings/stringprintf.h"
-#include "base/strings/utf_string_conversions.h"
-#include "base/system/sys_info.h"
-#include "base/test/launcher/unit_test_launcher.h"
-#include "base/test/scoped_feature_list.h"
-#include "base/test/task_environment.h"
-#include "base/test/test_suite.h"
-#include "base/threading/thread.h"
-#include "base/threading/thread_checker.h"
-#include "base/threading/thread_task_runner_handle.h"
-#include "base/time/time.h"
-#include "base/timer/timer.h"
-#include "build/build_config.h"
-#include "build/chromeos_buildflags.h"
-#include "gpu/config/gpu_driver_bug_workarounds.h"
-#include "gpu/config/gpu_preferences.h"
-#include "gpu/ipc/service/gpu_memory_buffer_factory.h"
-#include "media/base/bind_to_current_loop.h"
-#include "media/base/bitstream_buffer.h"
-#include "media/base/cdm_context.h"
-#include "media/base/color_plane_layout.h"
-#include "media/base/decoder_buffer.h"
-#include "media/base/media.h"
-#include "media/base/media_switches.h"
-#include "media/base/media_util.h"
-#include "media/base/test_data_util.h"
-#include "media/base/video_decoder.h"
-#include "media/base/video_frame.h"
-#include "media/ffmpeg/ffmpeg_common.h"
-#include "media/filters/ffmpeg_video_decoder.h"
-#include "media/filters/in_memory_url_protocol.h"
-#include "media/filters/ivf_parser.h"
-#include "media/filters/vp9_parser.h"
-#include "media/filters/vpx_video_decoder.h"
-#include "media/gpu/buildflags.h"
-#include "media/gpu/gpu_video_encode_accelerator_factory.h"
-#include "media/gpu/h264_decoder.h"
-#include "media/gpu/h264_dpb.h"
-#include "media/gpu/macros.h"
-#include "media/gpu/test/video_frame_helpers.h"
-#include "media/gpu/test/video_test_helpers.h"
-#include "media/mojo/common/mojo_shared_buffer_video_frame.h"
-#include "media/parsers/vp8_parser.h"
-#include "media/video/fake_video_encode_accelerator.h"
-#include "media/video/h264_level_limits.h"
-#include "media/video/h264_parser.h"
-#include "media/video/video_encode_accelerator.h"
-#include "mojo/core/embedder/embedder.h"
-#include "testing/gtest/include/gtest/gtest.h"
-#include "third_party/libyuv/include/libyuv/planar_functions.h"
-#include "ui/gfx/geometry/size.h"
-
-#if BUILDFLAG(USE_VAAPI)
-#include "media/gpu/vaapi/vaapi_wrapper.h"
-#elif defined(OS_WIN)
-#include "media/gpu/windows/media_foundation_video_encode_accelerator_win.h"
-#endif
-
-#if defined(USE_OZONE)
-#include "ui/ozone/public/ozone_gpu_test_helper.h"
-#include "ui/ozone/public/ozone_platform.h"
-#include "ui/ozone/public/surface_factory_ozone.h"
-#endif
-
-namespace media {
-namespace {
-
-// The absolute differences between the original frame and the decoded frame
-// usually range around 1 ~ 7. So we pick 10 as an extreme value to detect
-// abnormal decoded frames.
-const double kDecodeSimilarityThreshold = 10.0;
-
-// Arbitrarily chosen to add some depth to the pipeline.
-const unsigned int kNumOutputBuffers = 4;
-const unsigned int kNumExtraInputFrames = 4;
-// Maximum delay between requesting a keyframe and receiving one, in frames.
-// Arbitrarily chosen as a reasonable requirement.
-const unsigned int kMaxKeyframeDelay = 4;
-// Default initial bitrate.
-const uint32_t kDefaultBitrate = 2000000;
-// Default ratio of requested_subsequent_bitrate to initial_bitrate
-// (see test parameters below) if one is not provided.
-const double kDefaultSubsequentBitrateRatio = 2.0;
-// Default initial framerate.
-const uint32_t kDefaultFramerate = 30;
-// Default ratio of requested_subsequent_framerate to initial_framerate
-// (see test parameters below) if one is not provided.
-const double kDefaultSubsequentFramerateRatio = 0.1;
-// Tolerance factor for how encoded bitrate can differ from requested bitrate.
-const double kBitrateTolerance = 0.1;
-// Minimum (arbitrary) number of frames over which to enforce bitrate
-// requirements. Streams shorter than this may be too short to realistically
-// require an encoder to converge to the requested bitrate.
-// The input stream will be looped as many times as needed in bitrate tests
-// to reach at least this number of frames before calculating final bitrate.
-const unsigned int kMinFramesForBitrateTests = 300;
-// The percentiles to measure for encode latency.
-const unsigned int kLoggedLatencyPercentiles[] = {50, 75, 95};
-// Timeout between each BitstreamBufferReady() call and flush callback.
-// In the multiple encoder test case, the FPS might be lower than expected.
-// To rule out flakiness on low-end devices, the timeout is set to 10 sec,
-// https://crbug.com/1019307.
-const unsigned int kBitstreamBufferReadyTimeoutMs =
- 10 * base::Time::kMillisecondsPerSecond;
-// How much to scale down the input stream by for the scaling test.
-constexpr unsigned int kScalingDenominator = 2u;
-// The smallest test stream visible size for which the scaling test will run. If
-// any of the test streams has a size below this, the scaling test will be
-// skipped. This is used to ensure that all boards are able to pass the scaling
-// test successfully. For example, if |kScalingDenominator| is 2 and
-// |kMinVisibleSizeForScalingTest| is 640x360, it's expected that all boards can
-// encode a stream that is 320x180.
-constexpr gfx::Size kMinVisibleSizeForScalingTest(640, 360);
-
-// The syntax of multiple test streams is:
-// test-stream1;test-stream2;test-stream3
-// The syntax of each test stream is:
-// "in_filename:width:height:profile:out_filename:requested_bitrate
-// :requested_framerate:requested_subsequent_bitrate
-// :requested_subsequent_framerate:pixel_format:requested_level"
-// Instead of ":", "," can be used as a separator as well. Note that ":" does
-// not work on Windows as it interferes with file paths.
-// - |in_filename| is a raw YUV stream. Its format must be |pixel_format|
-// (see http://www.fourcc.org/yuv.php#IYUV).
-// - |width| and |height| are in pixels.
-// - |profile| to encode into (values of VideoCodecProfile).
-// - |out_filename| filename to save the encoded stream to (optional). The
-// format for H264 is Annex-B byte stream. The format for VP8 and VP9 is IVF.
-// Output stream is saved for the simple encode test only. H264 raw streams and
-// IVF files can be used as input to the VDA unittest. An H264 raw stream can be
-// played with "mplayer -fps 25 out.h264"; IVF files can be played by mplayer
-// directly.
-// Helpful description: http://wiki.multimedia.cx/index.php?title=IVF
-// Further parameters are optional (need to provide preceding positional
-// parameters if a specific subsequent parameter is required):
-// - |requested_bitrate| requested bitrate in bits per second, use
-// kDefaultBitrate if not provided.
-// Bitrate is only forced for tests that test bitrate.
-// - |requested_framerate| requested initial framerate, use kDefaultFramerate
-// if not provided.
-// - |requested_subsequent_bitrate| bitrate to switch to in the middle of the
-// stream.
-// - |requested_subsequent_framerate| framerate to switch to in the middle
-// of the stream.
-// - |pixel_format| is the VideoPixelFormat of |in_filename|. Users need to
-// set the value corresponding to the desired format. If it is not specified,
-// it defaults to PIXEL_FORMAT_I420.
-// - |requested_level| requested output level. Currently only for H264 codec and
-// the value should be assigned as H264LevelIDC enum in
-// h264_parser.h. Use kDefaultH264Level if not provided.
-
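-// An illustrative example (file name and values are hypothetical):
-//   bear_320x192_40frames.yuv.webm:320:192:1:out.h264:200000:30
-// encodes the 320x192 input with profile value 1 (a VideoCodecProfile enum
-// value), saving the encoded stream to out.h264, with a requested bitrate of
-// 200000 bps and a requested framerate of 30 fps.
-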
-#if defined(OS_CHROMEOS) || defined(OS_LINUX)
-const char kDefaultInputFileName[] = "bear_320x192_40frames.yuv.webm";
-const base::FilePath::CharType kDefaultInputParameters[] =
- FILE_PATH_LITERAL(":320:192:1:out.h264:200000");
-#elif defined(OS_MAC)
-// VideoToolbox falls back to SW encoder with resolutions lower than this.
-const char kDefaultInputFileName[] = "bear_640x384_40frames.yuv.webm";
-const base::FilePath::CharType kDefaultInputParameters[] =
- FILE_PATH_LITERAL(":640:384:1:out.h264:200000");
-#elif defined(OS_WIN)
-const char kDefaultInputFileName[] = "bear_320x192_40frames.yuv.webm";
-const base::FilePath::CharType kDefaultInputParameters[] =
- FILE_PATH_LITERAL(",320,192,0,out.h264,200000");
-#endif // defined(OS_CHROMEOS) || defined(OS_LINUX)
-
-// Default params that can be overridden via the command line.
-std::unique_ptr<base::FilePath::StringType> g_test_stream_data(
- new base::FilePath::StringType(
- media::GetTestDataFilePath(media::kDefaultInputFileName).value() +
- media::kDefaultInputParameters));
-
-base::FilePath g_log_path;
-
-base::FilePath g_frame_stats_path;
-
-bool g_run_at_fps = false;
-
-bool g_needs_encode_latency = false;
-
-bool g_verify_all_output = false;
-
-bool g_fake_encoder = false;
-
-// Enables/Disables the ForceLevel test. Since not all devices currently
-// support level configuration, the test can be disabled from the command line
-// with "--force_level=false" (or enabled with "true").
-// TODO(johnylin): enable ForceLevel after supporting dynamic querying of
-// drivers' level capability. https://crbug.com/878674.
-bool g_force_level = false;
-
-// This identifies the storage type of the VideoFrames passed to Encode().
-// If |g_native_input| is true, the VideoFrames passed to Encode() are
-// DmaBuf-backed. Otherwise, they are MEM-backed.
-bool g_native_input = false;
-
-// Environment to store test stream data for all test cases.
-class VideoEncodeAcceleratorTestEnvironment;
-VideoEncodeAcceleratorTestEnvironment* g_env;
-
-std::unique_ptr<base::test::ScopedFeatureList> CreateScopedFeatureList() {
-#if BUILDFLAG(USE_VAAPI)
- auto scoped_feature_list = std::make_unique<base::test::ScopedFeatureList>();
- std::vector<base::Feature> enabled_features = {
- // TODO(crbug.com/828482): remove once enabled by default.
- media::kVaapiLowPowerEncoderGen9x,
- // TODO(crbug.com/811912): remove once enabled by default.
- media::kVaapiVP9Encoder};
- scoped_feature_list->InitWithFeatures(enabled_features, {});
- return scoped_feature_list;
-#else
- return nullptr;
-#endif // BUILDFLAG(USE_VAAPI)
-}
-
-// The number of frames to be encoded. This variable is set by the switch
-// "--num_frames_to_encode". Ignored if 0.
-int g_num_frames_to_encode = 0;
-
-struct TestStream {
- TestStream()
- : num_frames(0),
- aligned_buffer_size(0),
- requested_bitrate(kDefaultBitrate),
- requested_framerate(kDefaultFramerate),
- requested_subsequent_bitrate(0),
- requested_subsequent_framerate(0) {}
- ~TestStream() {}
-
- VideoPixelFormat pixel_format;
- gfx::Size visible_size;
- gfx::Size coded_size;
- unsigned int num_frames;
-
- // Original unaligned YUV input file name provided as an argument to the test.
- std::string in_filename;
-
- // A vector used to prepare aligned input buffers of |in_filename|. This
- // makes sure starting addresses of YUV planes are aligned to
- // kPlatformBufferAlignment bytes.
- std::vector<char,
- test::AlignedAllocator<char, test::kPlatformBufferAlignment>>
- aligned_in_file_data;
-
- // Byte size of a frame of |aligned_in_file_data|.
- size_t aligned_buffer_size;
-
- // Byte size for each aligned plane of a frame.
- std::vector<size_t> aligned_plane_size;
-
- std::string out_filename;
- VideoCodecProfile requested_profile;
- unsigned int requested_bitrate;
- unsigned int requested_framerate;
- unsigned int requested_subsequent_bitrate;
- unsigned int requested_subsequent_framerate;
- base::Optional<uint8_t> requested_level;
-};
-
-// Return the |percentile| from a sorted vector.
-static base::TimeDelta Percentile(
- const std::vector<base::TimeDelta>& sorted_values,
- unsigned int percentile) {
- size_t size = sorted_values.size();
- LOG_ASSERT(size > 0UL);
- LOG_ASSERT(percentile <= 100UL);
- // Use Nearest Rank method in http://en.wikipedia.org/wiki/Percentile.
- int index =
- std::max(static_cast<int>(ceil(0.01f * percentile * size)) - 1, 0);
- return sorted_values[index];
-}
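-
-// For example, with 300 sorted latency samples and |percentile| = 95, the
-// nearest-rank index is ceil(0.01 * 95 * 300) - 1 = 284, i.e. the 285th
-// smallest sample is reported.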
-
-static bool IsH264(VideoCodecProfile profile) {
- return profile >= H264PROFILE_MIN && profile <= H264PROFILE_MAX;
-}
-
-static bool IsVP8(VideoCodecProfile profile) {
- return profile >= VP8PROFILE_MIN && profile <= VP8PROFILE_MAX;
-}
-
-static bool IsVP9(VideoCodecProfile profile) {
- return profile >= VP9PROFILE_MIN && profile <= VP9PROFILE_MAX;
-}
-
-#if BUILDFLAG(IS_ASH)
-// Determines whether the test is known to fail and should be skipped.
-bool ShouldSkipTest(VideoPixelFormat format) {
- struct Pattern {
- const char* board_pattern;
- const char* suite_name_prefix;
- VideoPixelFormat format; // Set PIXEL_FORMAT_UNKNOWN for any format.
- };
-
- // Warning: The list should only be used as a last resort for known vendor
- // issues that will never be fixed.
- constexpr Pattern kSkipTestPatterns[] = {
- // crbug.com/769722: MTK driver doesn't compute bitrate correctly.
- // Disable mid_stream_bitrate_switch test cases for elm/hana.
- {"elm", "MidStreamParamSwitchBitrate", PIXEL_FORMAT_UNKNOWN},
- {"elm", "MultipleEncoders", PIXEL_FORMAT_UNKNOWN},
- {"elm-kernelnext", "MidStreamParamSwitchBitrate", PIXEL_FORMAT_UNKNOWN},
- {"elm-kernelnext", "MultipleEncoders", PIXEL_FORMAT_UNKNOWN},
- {"hana", "MidStreamParamSwitchBitrate", PIXEL_FORMAT_UNKNOWN},
- {"hana", "MultipleEncoders", PIXEL_FORMAT_UNKNOWN},
- {"hana-kernelnext", "MidStreamParamSwitchBitrate", PIXEL_FORMAT_UNKNOWN},
- {"hana-kernelnext", "MultipleEncoders", PIXEL_FORMAT_UNKNOWN},
-
- // crbug.com/965348#c6: Tegra driver calculates the wrong plane size of
- // NV12. Disable all tests on nyan family for NV12 test.
- // TODO(akahuang): Remove this after nyan family are EOL.
- {"nyan_*", "", PIXEL_FORMAT_NV12},
- };
-
- const std::string board = base::SysInfo::GetLsbReleaseBoard();
- if (board == "unknown") {
- LOG(WARNING) << "Cannot get CrOS board name. Are you running on a CrOS device?";
- return false;
- }
-
- const std::string suite_name = ::testing::UnitTest::GetInstance()
- ->current_test_info()
- ->test_suite_name();
- for (const auto& pattern : kSkipTestPatterns) {
- if (suite_name.find(pattern.suite_name_prefix) == 0 &&
- base::MatchPattern(board, pattern.board_pattern) &&
- (pattern.format == PIXEL_FORMAT_UNKNOWN || pattern.format == format)) {
- return true;
- }
- }
-
- return false;
-}
-#endif // BUILDFLAG(IS_ASH)
-
-// Helper functions to do string conversions.
-static base::FilePath::StringType StringToFilePathStringType(
- const std::string& str) {
-#if defined(OS_WIN)
- return base::UTF8ToWide(str);
-#else
- return str;
-#endif // defined(OS_WIN)
-}
-
-static std::string FilePathStringTypeToString(
- const base::FilePath::StringType& str) {
-#if defined(OS_WIN)
- return base::WideToUTF8(str);
-#else
- return str;
-#endif // defined(OS_WIN)
-}
-
-// Decodes webm vp9 |src_file| into |test_stream_->aligned_in_file_data|. Used
-// to save storage size in media/test/data since raw YUV files are huge.
-static bool DecodeFile(const base::FilePath& src_file,
- TestStream* test_stream) {
- InitializeMediaLibrary();
-
- const int file_size = base::checked_cast<int>([src_file]() {
- int64_t tmp = 0;
- CHECK(base::GetFileSize(src_file, &tmp))
- << "Failed to get file size for '" << src_file << "'";
- return tmp;
- }());
-
- // Read file data into memory.
- auto buffer = base::MakeRefCounted<DecoderBuffer>(file_size);
- auto* data = reinterpret_cast<char*>(buffer->writable_data());
- CHECK_EQ(file_size, base::ReadFile(src_file, data, file_size))
- << "Failed to read '" << src_file << "'";
-
- // Initialize ffmpeg with the file data.
- InMemoryUrlProtocol protocol(buffer->data(), buffer->data_size(), false);
- FFmpegGlue glue(&protocol);
- CHECK(glue.OpenContext());
-
- // Find first vp9 stream in the file.
- int stream_index = -1;
- VideoDecoderConfig config;
- for (size_t i = 0; i < glue.format_context()->nb_streams; ++i) {
- AVStream* stream = glue.format_context()->streams[i];
- const AVCodecParameters* codec_parameters = stream->codecpar;
- const AVMediaType codec_type = codec_parameters->codec_type;
- const AVCodecID codec_id = codec_parameters->codec_id;
- if (codec_type == AVMEDIA_TYPE_VIDEO && codec_id == AV_CODEC_ID_VP9) {
- CHECK(AVStreamToVideoDecoderConfig(stream, &config));
- stream_index = i;
- break;
- }
- }
-
- CHECK(config.IsValidConfig());
-
- test_stream->num_frames = 0;
- test_stream->aligned_in_file_data.clear();
-
- // Writes VideoFrames into the |test_stream_->aligned_in_file_data| structure.
- class FrameWriter {
- public:
- explicit FrameWriter(TestStream* test_stream) : test_stream_(test_stream) {}
- ~FrameWriter() = default;
-
- void FrameReady(scoped_refptr<VideoFrame> frame) {
- const size_t previous_end = test_stream_->aligned_in_file_data.size();
-
- ++test_stream_->num_frames;
- test_stream_->aligned_in_file_data.resize(
- test_stream_->num_frames * test_stream_->aligned_buffer_size);
- uint8_t* dest = reinterpret_cast<uint8_t*>(
- &test_stream_->aligned_in_file_data[previous_end]);
-
- for (size_t plane = 0;
- plane < VideoFrame::NumPlanes(test_stream_->pixel_format); plane++) {
- libyuv::CopyPlane(
- frame->data(plane), frame->stride(plane), dest,
- VideoFrame::RowBytes(plane, test_stream_->pixel_format,
- test_stream_->coded_size.width()),
- VideoFrame::RowBytes(plane, test_stream_->pixel_format,
- test_stream_->visible_size.width()),
- VideoFrame::Rows(plane, test_stream_->pixel_format,
- test_stream_->visible_size.height()));
- dest += test_stream_->aligned_plane_size[plane];
- }
- }
-
- private:
- TestStream* const test_stream_;
- DISALLOW_COPY_AND_ASSIGN(FrameWriter);
- } frame_writer(test_stream);
-
- // Setup decoder.
- VpxVideoDecoder decoder;
- decoder.Initialize(config, false, nullptr, base::DoNothing(),
- base::BindRepeating(&FrameWriter::FrameReady,
- base::Unretained(&frame_writer)),
- base::NullCallback());
-
- // Decode frames. No need to flush since VpxVideoDecoder is 1 in 1 out.
- AVPacket packet = {};
- while (av_read_frame(glue.format_context(), &packet) >= 0) {
- if (packet.stream_index == stream_index) {
- decoder.Decode(DecoderBuffer::CopyFrom(packet.data, packet.size),
- base::DoNothing());
- base::RunLoop().RunUntilIdle();
- }
- av_packet_unref(&packet);
- }
-
- return true;
-}
-
-// Some platforms may have requirements on physical memory buffer alignment.
-// Since we are just mapping and passing chunks of the input file directly to
-// the VEA as input frames, to avoid copying large chunks of raw data on each
-// frame, and thus affecting performance measurements, we have to prepare a
-// temporary file with all planes aligned to the required alignment beforehand.
-static void CreateAlignedInputStreamFile(const gfx::Size& coded_size,
- TestStream* test_stream) {
- // A test case may have many encoders, and memory should be prepared once.
- if (test_stream->coded_size == coded_size &&
- !test_stream->aligned_in_file_data.empty())
- return;
-
- // All encoders in the multiple-encoder test reuse the same test_stream;
- // make sure they all requested the same coded_size.
- ASSERT_TRUE(test_stream->aligned_in_file_data.empty() ||
- coded_size == test_stream->coded_size);
- test_stream->coded_size = coded_size;
-
- ASSERT_NE(test_stream->pixel_format, PIXEL_FORMAT_UNKNOWN);
- const VideoPixelFormat pixel_format = test_stream->pixel_format;
- size_t num_planes = VideoFrame::NumPlanes(pixel_format);
- std::vector<size_t> coded_bpl(num_planes);
- std::vector<size_t> visible_bpl(num_planes);
- std::vector<size_t> visible_plane_rows(num_planes);
-
- // Calculate padding in bytes to be added after each plane required to keep
- // starting addresses of all planes at a byte boundary required by the
- // platform. This padding will be added after each plane when copying to the
- // temporary file.
- // At the same time we also need to take into account coded_size requested by
- // the VEA; each row of visible_bpl bytes in the original file needs to be
- // copied into a row of coded_bpl bytes in the aligned file.
- for (size_t i = 0; i < num_planes; i++) {
- coded_bpl[i] = VideoFrame::RowBytes(i, pixel_format, coded_size.width());
- visible_bpl[i] = VideoFrame::RowBytes(i, pixel_format,
- test_stream->visible_size.width());
- visible_plane_rows[i] =
- VideoFrame::Rows(i, pixel_format, test_stream->visible_size.height());
- size_t coded_area_size =
- coded_bpl[i] * VideoFrame::Rows(i, pixel_format, coded_size.height());
- const size_t aligned_size =
- test::AlignToPlatformRequirements(coded_area_size);
- test_stream->aligned_plane_size.push_back(aligned_size);
- test_stream->aligned_buffer_size += aligned_size;
- }
-
- base::FilePath src_file(StringToFilePathStringType(test_stream->in_filename));
-
- // File is encoded and must be decoded first.
- if (src_file.MatchesExtension(FILE_PATH_LITERAL(".webm"))) {
- ASSERT_TRUE(DecodeFile(src_file, test_stream));
- return;
- }
-
- int64_t src_file_size = 0;
- LOG_ASSERT(base::GetFileSize(src_file, &src_file_size));
-
- // NOTE: VideoFrame::AllocationSize() cannot be used here because the width
- // and height of each plane are aligned by 2 for YUV formats.
- size_t frame_buffer_size = 0;
- for (size_t i = 0; i < num_planes; ++i) {
- size_t row_bytes = VideoFrame::RowBytes(i, pixel_format,
- test_stream->visible_size.width());
- size_t rows =
- VideoFrame::Rows(i, pixel_format, test_stream->visible_size.height());
- frame_buffer_size += rows * row_bytes;
- }
-
- LOG_ASSERT(src_file_size % frame_buffer_size == 0U)
- << "Stream byte size is not a product of calculated frame byte size";
-
- test_stream->num_frames =
- static_cast<unsigned int>(src_file_size / frame_buffer_size);
-
- LOG_ASSERT(test_stream->aligned_buffer_size > 0UL);
- test_stream->aligned_in_file_data.resize(test_stream->aligned_buffer_size *
- test_stream->num_frames);
-
- base::File src(src_file, base::File::FLAG_OPEN | base::File::FLAG_READ);
- std::vector<char> src_data(frame_buffer_size);
- off_t src_offset = 0, dest_offset = 0;
- for (size_t frame = 0; frame < test_stream->num_frames; frame++) {
- LOG_ASSERT(src.Read(src_offset, &src_data[0],
- static_cast<int>(frame_buffer_size)) ==
- static_cast<int>(frame_buffer_size));
- const char* src_ptr = &src_data[0];
- for (size_t i = 0; i < num_planes; i++) {
- // Assert that each plane of the frame starts at the required byte boundary.
- ASSERT_TRUE(base::IsAligned(dest_offset, test::kPlatformBufferAlignment))
- << "Planes of frame should be mapped per platform requirements";
- char* dst_ptr = &test_stream->aligned_in_file_data[dest_offset];
- for (size_t j = 0; j < visible_plane_rows[i]; j++) {
- memcpy(dst_ptr, src_ptr, visible_bpl[i]);
- src_ptr += visible_bpl[i];
- dst_ptr += static_cast<off_t>(coded_bpl[i]);
- }
- dest_offset += test_stream->aligned_plane_size[i];
- }
- src_offset += static_cast<off_t>(frame_buffer_size);
- }
- src.Close();
-
- LOG_ASSERT(test_stream->num_frames > 0UL);
-}
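-
-// As a worked example (I420, 320x192 visible and coded size): the Y plane is
-// 320 x 192 = 61440 bytes and each of the U and V planes is 160 x 96 = 15360
-// bytes, i.e. 92160 bytes per frame before each plane is padded out to the
-// aligned size returned by test::AlignToPlatformRequirements().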
-
-// Parses |data| into its constituent parts, sets the various output fields
-// accordingly, reads in the video streams, and stores them in |test_streams|.
-static void ParseAndReadTestStreamData(
- const base::FilePath::StringType& data,
- std::vector<std::unique_ptr<TestStream>>* test_streams) {
- // Split the string to individual test stream data.
- std::vector<base::FilePath::StringType> test_streams_data =
- base::SplitString(data, base::FilePath::StringType(1, ';'),
- base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
- LOG_ASSERT(test_streams_data.size() >= 1U) << data;
-
- // Parse each test stream data and read the input file.
- for (size_t index = 0; index < test_streams_data.size(); ++index) {
- std::vector<base::FilePath::StringType> fields = base::SplitString(
- test_streams_data[index], base::FilePath::StringType(1, ','),
- base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
- // Try using ":" as the separator if "," isn't used.
- if (fields.size() == 1U) {
- fields = base::SplitString(test_streams_data[index],
- base::FilePath::StringType(1, ':'),
- base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
- }
- LOG_ASSERT(fields.size() >= 4U) << data;
- LOG_ASSERT(fields.size() <= 11U) << data;
- auto test_stream = std::make_unique<TestStream>();
-
- test_stream->in_filename = FilePathStringTypeToString(fields[0]);
- int width, height;
- bool result = base::StringToInt(fields[1], &width);
- LOG_ASSERT(result);
- result = base::StringToInt(fields[2], &height);
- LOG_ASSERT(result);
- test_stream->visible_size = gfx::Size(width, height);
- LOG_ASSERT(!test_stream->visible_size.IsEmpty());
- int profile;
- result = base::StringToInt(fields[3], &profile);
- LOG_ASSERT(result);
- LOG_ASSERT(profile > VIDEO_CODEC_PROFILE_UNKNOWN);
- LOG_ASSERT(profile <= VIDEO_CODEC_PROFILE_MAX);
- test_stream->requested_profile = static_cast<VideoCodecProfile>(profile);
- test_stream->pixel_format = PIXEL_FORMAT_I420;
-
- if (fields.size() >= 5 && !fields[4].empty())
- test_stream->out_filename = FilePathStringTypeToString(fields[4]);
-
- if (fields.size() >= 6 && !fields[5].empty())
- LOG_ASSERT(
- base::StringToUint(fields[5], &test_stream->requested_bitrate));
-
- if (fields.size() >= 7 && !fields[6].empty())
- LOG_ASSERT(
- base::StringToUint(fields[6], &test_stream->requested_framerate));
-
- if (fields.size() >= 8 && !fields[7].empty()) {
- LOG_ASSERT(base::StringToUint(
- fields[7], &test_stream->requested_subsequent_bitrate));
- }
-
- if (fields.size() >= 9 && !fields[8].empty()) {
- LOG_ASSERT(base::StringToUint(
- fields[8], &test_stream->requested_subsequent_framerate));
- }
-
- if (fields.size() >= 10 && !fields[9].empty()) {
- unsigned int format = 0;
- LOG_ASSERT(base::StringToUint(fields[9], &format));
- test_stream->pixel_format = static_cast<VideoPixelFormat>(format);
- }
-
- if (fields.size() >= 11 && !fields[10].empty()) {
- unsigned int requested_level = 0;
- LOG_ASSERT(base::StringToUint(fields[10], &requested_level));
- test_stream->requested_level =
- base::checked_cast<uint8_t>(requested_level);
- }
- test_streams->push_back(std::move(test_stream));
- }
-}
-
-// Checks if the parameters set in |test_stream| are valid for initializing the
-// encoder.
-static bool CheckH264InitConfigValidity(const TestStream* test_stream) {
- if (!test_stream->requested_level)
- return true; // Regarded as valid if no level is assigned.
-
- constexpr size_t kH264MacroblockSizeInPixels = 16;
- gfx::Size coded_size =
- gfx::Size(base::bits::Align(test_stream->visible_size.width(),
- kH264MacroblockSizeInPixels),
- base::bits::Align(test_stream->visible_size.height(),
- kH264MacroblockSizeInPixels));
- uint32_t framesize_in_mbs =
- coded_size.GetArea() /
- (kH264MacroblockSizeInPixels * kH264MacroblockSizeInPixels);
-
- // Check if the frame size, initial bitrate and macroblock processing rate
- // are within the limits of the requested profile and level.
- return CheckH264LevelLimits(
- test_stream->requested_profile, test_stream->requested_level.value(),
- test_stream->requested_bitrate, test_stream->requested_framerate,
- framesize_in_mbs);
-}
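-
-// For example, a 320x192 stream is already aligned to 16x16 macroblocks, so a
-// frame is 20 x 12 = 240 macroblocks; CheckH264LevelLimits() then verifies
-// that the frame size, bitrate and macroblock processing rate fit within the
-// requested profile and level.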
-
-// Basic test environment shared across multiple test cases. We only need to
-// set it up once for all test cases.
-// It helps
-// - maintain test stream data and other test settings.
-// - clean up temporary aligned files.
-// - output log to file.
-class VideoEncodeAcceleratorTestEnvironment : public ::testing::Environment {
- public:
- VideoEncodeAcceleratorTestEnvironment(
- std::unique_ptr<base::FilePath::StringType> data,
- const base::FilePath& log_path,
- const base::FilePath& frame_stats_path,
- bool run_at_fps,
- bool needs_encode_latency,
- bool verify_all_output)
- : rendering_thread_("GLRenderingVEAClientThread"),
- test_stream_data_(std::move(data)),
- log_path_(log_path),
- frame_stats_path_(frame_stats_path),
- run_at_fps_(run_at_fps),
- needs_encode_latency_(needs_encode_latency),
- verify_all_output_(verify_all_output) {}
-
- virtual void SetUp() {
- if (!log_path_.empty()) {
- log_file_.reset(new base::File(
- log_path_, base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE));
- LOG_ASSERT(log_file_->IsValid());
- }
- ParseAndReadTestStreamData(*test_stream_data_, &test_streams_);
-
-#if defined(USE_OZONE)
- // Initialize Ozone so that DMABuf can be created through Ozone DRM.
- ui::OzonePlatform::InitParams params;
- params.single_process = true;
- ui::OzonePlatform::InitializeForUI(params);
-
- base::Thread::Options options;
- options.message_pump_type = base::MessagePumpType::UI;
- ASSERT_TRUE(rendering_thread_.StartWithOptions(options));
- base::WaitableEvent done(base::WaitableEvent::ResetPolicy::AUTOMATIC,
- base::WaitableEvent::InitialState::NOT_SIGNALED);
- rendering_thread_.task_runner()->PostTask(
- FROM_HERE,
- base::BindOnce(&VideoEncodeAcceleratorTestEnvironment::SetupOzone,
- &done));
- done.Wait();
-
- // To create dmabuf through gbm, Ozone needs to be set up.
- gpu_helper.reset(new ui::OzoneGpuTestHelper());
- gpu_helper->Initialize(base::ThreadTaskRunnerHandle::Get());
-
-#else
- ASSERT_TRUE(rendering_thread_.Start());
-#endif
- }
-
- virtual void TearDown() {
- log_file_.reset();
-
- rendering_thread_.Stop();
- }
-
- scoped_refptr<base::SingleThreadTaskRunner> GetRenderingTaskRunner() const {
- return rendering_thread_.task_runner();
- }
-
- void FlushRenderingThread() { rendering_thread_.FlushForTesting(); }
-
- // Log one entry of machine-readable data to file and LOG(INFO).
- // The log has one data entry per line in the format of "<key>: <value>".
- // Note that Chrome OS video_VEAPerf autotest parses the output key and value
- // pairs. Be sure to keep the autotest in sync.
- void LogToFile(const std::string& key, const std::string& value) {
- std::string s = base::StringPrintf("%s: %s\n", key.c_str(), value.c_str());
- LOG(INFO) << s;
- if (log_file_) {
- log_file_->WriteAtCurrentPos(s.data(), static_cast<int>(s.length()));
- }
- }
-
- // Whether to feed the encoder with input buffers at the requested framerate;
- // if false, feed as fast as possible. This is set by the command line switch
- // "--run_at_fps".
- bool run_at_fps() const { return run_at_fps_; }
-
- // Whether to measure encode latency. This is set by the command line switch
- // "--measure_latency".
- bool needs_encode_latency() const { return needs_encode_latency_; }
-
- // Verify the encoder output of all test cases. This is set by the command line
- // switch "--verify_all_output".
- bool verify_all_output() const { return verify_all_output_; }
-
- const base::FilePath& frame_stats_path() const { return frame_stats_path_; }
-
- std::vector<std::unique_ptr<TestStream>> test_streams_;
-
- private:
- base::Thread rendering_thread_;
- std::unique_ptr<base::FilePath::StringType> test_stream_data_;
- base::FilePath log_path_;
- base::FilePath frame_stats_path_;
- std::unique_ptr<base::File> log_file_;
- bool run_at_fps_;
- bool needs_encode_latency_;
- bool verify_all_output_;
-
-#if defined(USE_OZONE)
- std::unique_ptr<ui::OzoneGpuTestHelper> gpu_helper;
-
- static void SetupOzone(base::WaitableEvent* done) {
- base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess();
- cmd_line->AppendSwitchASCII(switches::kUseGL, gl::kGLImplementationEGLName);
- ui::OzonePlatform::InitParams params;
- params.single_process = true;
- ui::OzonePlatform::InitializeForGPU(params);
- done->Signal();
- }
-#endif
-};
-
-enum ClientState {
- CS_CREATED, // Encoder is created.
- CS_INITIALIZED, // Encoder initialization is finished.
- CS_ENCODING, // Encoder is encoding.
- CS_FLUSHING, // Ask encoder to flush.
- CS_FINISHED, // Encoding has finished, all frames are encoded.
- CS_FLUSHED, // Encoder notifies the flush is finished.
- CS_VALIDATED, // Encoded frame quality has been validated.
- CS_ERROR, // Any error occurs.
-};
-
-// Performs basic, codec-specific sanity checks on the stream buffers passed
-// to ProcessStreamBuffer(): whether we've seen keyframes before non-keyframes,
-// correct sequences of H.264 NALUs (SPS before PPS and before slices), etc.
-// Calls the given FrameFoundCallback when a complete frame is found during
-// processing.
-class StreamValidator {
- public:
- // To be called when a complete frame is found while processing a stream
- // buffer, passing whether the frame is a keyframe, together with the visible
- // size. Returns false if we are not interested in more frames and further
- // processing should be aborted.
- typedef base::RepeatingCallback<bool(bool, const gfx::Size&)>
- FrameFoundCallback;
-
- virtual ~StreamValidator() {}
-
- // Provides a StreamValidator instance for the given |profile| and |level|.
- // |level| is optional and should be specified only if the codec is H264.
- static std::unique_ptr<StreamValidator> Create(VideoCodecProfile profile,
- base::Optional<uint8_t> level,
- FrameFoundCallback frame_cb);
-
- // Process and verify contents of a bitstream buffer.
- virtual void ProcessStreamBuffer(const uint8_t* stream, size_t size) = 0;
-
- protected:
- explicit StreamValidator(FrameFoundCallback frame_cb)
- : frame_cb_(std::move(frame_cb)) {}
-
- FrameFoundCallback frame_cb_;
- gfx::Size visible_size_;
-};
-
-class H264Validator : public StreamValidator {
- public:
- H264Validator(base::Optional<uint8_t> level, FrameFoundCallback frame_cb)
- : StreamValidator(std::move(frame_cb)),
- target_level_(level),
- seen_sps_(false),
- seen_pps_(false),
- seen_idr_(false),
- curr_pic_(new H264Picture) {}
-
- void ProcessStreamBuffer(const uint8_t* stream, size_t size) override;
-
- private:
- bool IsNewPicture(const H264SliceHeader& slice_hdr);
- bool UpdateCurrentPicture(const H264SliceHeader& slice_hdr);
-
- // H264Validator will check output level with the value of |target_level_|
- // unless it is base::nullopt.
- base::Optional<uint8_t> target_level_;
-
- // Set to true when the encoder provides us with the corresponding NALU type.
- bool seen_sps_;
- bool seen_pps_;
- bool seen_idr_;
-
- scoped_refptr<H264Picture> curr_pic_;
- int curr_sps_id_ = -1;
- int curr_pps_id_ = -1;
-
- H264Parser h264_parser_;
-};
-
-void H264Validator::ProcessStreamBuffer(const uint8_t* stream, size_t size) {
- h264_parser_.SetStream(stream, static_cast<off_t>(size));
-
- while (1) {
- H264NALU nalu;
- H264Parser::Result result;
-
- result = h264_parser_.AdvanceToNextNALU(&nalu);
- if (result == H264Parser::kEOStream)
- break;
-
- ASSERT_EQ(H264Parser::kOk, result);
-
- bool keyframe = false;
-
- switch (nalu.nal_unit_type) {
- case H264NALU::kIDRSlice:
- ASSERT_TRUE(seen_sps_);
- ASSERT_TRUE(seen_pps_);
- seen_idr_ = true;
- keyframe = true;
- FALLTHROUGH;
- case H264NALU::kNonIDRSlice: {
- ASSERT_TRUE(seen_idr_);
- H264SliceHeader slice_hdr;
- ASSERT_EQ(H264Parser::kOk,
- h264_parser_.ParseSliceHeader(nalu, &slice_hdr));
- if (IsNewPicture(slice_hdr)) {
- if (!frame_cb_.Run(keyframe, visible_size_))
- return;
- ASSERT_TRUE(UpdateCurrentPicture(slice_hdr));
- }
- break;
- }
-
- case H264NALU::kSPS: {
- int sps_id;
- ASSERT_EQ(H264Parser::kOk, h264_parser_.ParseSPS(&sps_id));
- // Check the visible size.
- gfx::Rect visible_size =
- h264_parser_.GetSPS(sps_id)->GetVisibleRect().value_or(gfx::Rect());
- ASSERT_FALSE(visible_size.IsEmpty());
- visible_size_ = visible_size.size();
- // Check the output level is equal to target level.
- if (target_level_) {
- LOG(INFO) << "Target level: "
- << static_cast<int>(target_level_.value())
- << ", output level: "
- << static_cast<int>(
- h264_parser_.GetSPS(sps_id)->GetIndicatedLevel());
- ASSERT_EQ(h264_parser_.GetSPS(sps_id)->GetIndicatedLevel(),
- target_level_.value());
- }
- seen_sps_ = true;
- break;
- }
-
- case H264NALU::kPPS: {
- ASSERT_TRUE(seen_sps_);
- int pps_id;
- ASSERT_EQ(H264Parser::kOk, h264_parser_.ParsePPS(&pps_id));
- seen_pps_ = true;
- break;
- }
-
- default:
- break;
- }
- }
-}
-
-bool H264Validator::IsNewPicture(const H264SliceHeader& slice_hdr) {
- if (!curr_pic_)
- return true;
- return H264Decoder::IsNewPrimaryCodedPicture(
- curr_pic_.get(), curr_pps_id_, h264_parser_.GetSPS(curr_sps_id_),
- slice_hdr);
-}
-
-bool H264Validator::UpdateCurrentPicture(const H264SliceHeader& slice_hdr) {
- curr_pps_id_ = slice_hdr.pic_parameter_set_id;
- const H264PPS* pps = h264_parser_.GetPPS(curr_pps_id_);
- if (!pps) {
- LOG(ERROR) << "Cannot parse pps.";
- return false;
- }
-
- curr_sps_id_ = pps->seq_parameter_set_id;
- const H264SPS* sps = h264_parser_.GetSPS(curr_sps_id_);
- if (!sps) {
- LOG(ERROR) << "Cannot parse sps.";
- return false;
- }
-
- if (!H264Decoder::FillH264PictureFromSliceHeader(sps, slice_hdr,
- curr_pic_.get())) {
- LOG(FATAL) << "Cannot initialize current frame.";
- return false;
- }
- return true;
-}
-
-class VP8Validator : public StreamValidator {
- public:
- explicit VP8Validator(FrameFoundCallback frame_cb)
- : StreamValidator(std::move(frame_cb)), seen_keyframe_(false) {}
-
- void ProcessStreamBuffer(const uint8_t* stream, size_t size) override;
-
- private:
- // Have we already got a keyframe in the stream?
- bool seen_keyframe_;
-};
-
-void VP8Validator::ProcessStreamBuffer(const uint8_t* stream, size_t size) {
- // TODO(posciak): We could be getting more frames in the buffer, but there is
- // no simple way to detect this. We'd need to parse the frames and go through
- // partition numbers/sizes. For now assume one frame per buffer.
- Vp8Parser parser;
- Vp8FrameHeader header;
- EXPECT_TRUE(parser.ParseFrame(stream, size, &header));
- if (header.IsKeyframe()) {
- seen_keyframe_ = true;
- visible_size_.SetSize(header.width, header.height);
- }
-
- EXPECT_TRUE(seen_keyframe_);
- ASSERT_FALSE(visible_size_.IsEmpty());
- frame_cb_.Run(header.IsKeyframe(), visible_size_);
-}
-
-class VP9Validator : public StreamValidator {
- public:
- explicit VP9Validator(FrameFoundCallback frame_cb)
- : StreamValidator(std::move(frame_cb)),
- parser_(false),
- seen_keyframe_(false) {}
-
- void ProcessStreamBuffer(const uint8_t* stream, size_t size) override;
-
- private:
- Vp9Parser parser_;
- // Have we already got a keyframe in the stream?
- bool seen_keyframe_;
-};
-
-void VP9Validator::ProcessStreamBuffer(const uint8_t* stream, size_t size) {
- // TODO(posciak): We could be getting more frames in the buffer, but there is
- // no simple way to detect this. We'd need to parse the frames and go through
- // partition numbers/sizes. For now assume one frame per buffer.
- Vp9FrameHeader header;
- gfx::Size allocate_size;
- parser_.SetStream(stream, size, nullptr);
- EXPECT_TRUE(Vp9Parser::kInvalidStream !=
- parser_.ParseNextFrame(&header, &allocate_size, nullptr));
- if (header.IsKeyframe()) {
- seen_keyframe_ = true;
- visible_size_.SetSize(header.render_width, header.render_height);
- }
-
- EXPECT_TRUE(seen_keyframe_);
- ASSERT_FALSE(visible_size_.IsEmpty());
- frame_cb_.Run(header.IsKeyframe(), visible_size_);
-}
-// static
-std::unique_ptr<StreamValidator> StreamValidator::Create(
- VideoCodecProfile profile,
- base::Optional<uint8_t> level,
- FrameFoundCallback frame_cb) {
- std::unique_ptr<StreamValidator> validator;
-
- if (IsH264(profile)) {
- validator.reset(new H264Validator(level, std::move(frame_cb)));
- } else if (IsVP8(profile)) {
- validator.reset(new VP8Validator(std::move(frame_cb)));
- } else if (IsVP9(profile)) {
- validator.reset(new VP9Validator(std::move(frame_cb)));
- } else {
- LOG(FATAL) << "Unsupported profile: " << GetProfileName(profile);
- }
-
- return validator;
-}
-
-class VideoFrameQualityValidator
- : public base::SupportsWeakPtr<VideoFrameQualityValidator> {
- public:
- VideoFrameQualityValidator(const VideoCodecProfile profile,
- const VideoPixelFormat pixel_format,
- bool verify_quality,
- const base::Closure& flush_complete_cb,
- const base::Closure& decode_error_cb);
- void Initialize(const gfx::Size& coded_size, const gfx::Rect& visible_size);
- // Save original YUV frame to compare it with the decoded frame later.
- void AddOriginalFrame(scoped_refptr<VideoFrame> frame);
- void AddDecodeBuffer(scoped_refptr<DecoderBuffer> buffer);
- // Flush the decoder.
- void Flush();
-
- private:
- void InitializeCB(Status status);
- void DecodeDone(Status status);
- void FlushDone(Status status);
- void VerifyOutputFrame(scoped_refptr<VideoFrame> output_frame);
- void Decode();
- void WriteFrameStats();
-
- enum State { UNINITIALIZED, INITIALIZED, DECODING, DECODER_ERROR };
- struct FrameStats {
- int width;
- int height;
- double ssim[VideoFrame::kMaxPlanes];
- uint64_t mse[VideoFrame::kMaxPlanes];
- };
-
- FrameStats CompareFrames(const VideoFrame& original_frame,
- const VideoFrame& output_frame);
- NullMediaLog media_log_;
- const VideoCodecProfile profile_;
- const VideoPixelFormat pixel_format_;
- const bool verify_quality_;
- std::unique_ptr<FFmpegVideoDecoder> decoder_;
- // Callback of Flush(). Called after all frames are decoded.
- const base::Closure flush_complete_cb_;
- const base::Closure decode_error_cb_;
- State decoder_state_;
- base::queue<scoped_refptr<VideoFrame>> original_frames_;
- base::queue<scoped_refptr<DecoderBuffer>> decode_buffers_;
- std::vector<FrameStats> frame_stats_;
- base::ThreadChecker thread_checker_;
-};
-
-VideoFrameQualityValidator::VideoFrameQualityValidator(
- const VideoCodecProfile profile,
- const VideoPixelFormat pixel_format,
- const bool verify_quality,
- const base::Closure& flush_complete_cb,
- const base::Closure& decode_error_cb)
- : profile_(profile),
- pixel_format_(pixel_format),
- verify_quality_(verify_quality),
- decoder_(new FFmpegVideoDecoder(&media_log_)),
- flush_complete_cb_(flush_complete_cb),
- decode_error_cb_(decode_error_cb),
- decoder_state_(UNINITIALIZED) {
- // Allow decoding of individual NALUs. Entire frames are required by default.
- decoder_->set_decode_nalus(true);
-
- DETACH_FROM_THREAD(thread_checker_);
-}
-
-void VideoFrameQualityValidator::Initialize(const gfx::Size& coded_size,
- const gfx::Rect& visible_size) {
- DCHECK(thread_checker_.CalledOnValidThread());
-
- gfx::Size natural_size(visible_size.size());
- // The default output format of ffmpeg video decoder is YV12.
- VideoDecoderConfig config;
- const auto alpha_mode = IsOpaque(pixel_format_)
- ? VideoDecoderConfig::AlphaMode::kIsOpaque
- : VideoDecoderConfig::AlphaMode::kHasAlpha;
- if (IsVP8(profile_)) {
- config.Initialize(kCodecVP8, VP8PROFILE_ANY, alpha_mode, VideoColorSpace(),
- kNoTransformation, coded_size, visible_size, natural_size,
- EmptyExtraData(), EncryptionScheme::kUnencrypted);
- } else if (IsVP9(profile_)) {
- config.Initialize(kCodecVP9, VP9PROFILE_PROFILE0, alpha_mode,
- VideoColorSpace(), kNoTransformation, coded_size,
- visible_size, natural_size, EmptyExtraData(),
- EncryptionScheme::kUnencrypted);
- } else if (IsH264(profile_)) {
- config.Initialize(kCodecH264, H264PROFILE_MAIN, alpha_mode,
- VideoColorSpace(), kNoTransformation, coded_size,
- visible_size, natural_size, EmptyExtraData(),
- EncryptionScheme::kUnencrypted);
- } else {
- LOG_ASSERT(0) << "Invalid profile " << GetProfileName(profile_);
- }
-
- decoder_->Initialize(
- config, false, nullptr,
- base::BindOnce(&VideoFrameQualityValidator::InitializeCB,
- base::Unretained(this)),
- base::BindRepeating(&VideoFrameQualityValidator::VerifyOutputFrame,
- base::Unretained(this)),
- base::NullCallback());
-}
-
-void VideoFrameQualityValidator::InitializeCB(Status status) {
- DCHECK(thread_checker_.CalledOnValidThread());
-
- if (status.is_ok()) {
- decoder_state_ = INITIALIZED;
- Decode();
- } else {
- decoder_state_ = DECODER_ERROR;
- if (IsH264(profile_))
- LOG(ERROR) << "Chromium does not support H264 decode. Try Chrome.";
- decode_error_cb_.Run();
- FAIL() << "Decoder initialization error";
- }
-}
-
-void VideoFrameQualityValidator::AddOriginalFrame(
- scoped_refptr<VideoFrame> frame) {
- DCHECK(thread_checker_.CalledOnValidThread());
-
- original_frames_.push(frame);
-}
-
-void VideoFrameQualityValidator::DecodeDone(Status status) {
- DCHECK(thread_checker_.CalledOnValidThread());
-
- if (status.is_ok()) {
- decoder_state_ = INITIALIZED;
- Decode();
- } else {
- decoder_state_ = DECODER_ERROR;
- decode_error_cb_.Run();
- FAIL() << "Unexpected decode status = " << status.code()
- << ". Stop decoding.";
- }
-}
-
-void VideoFrameQualityValidator::FlushDone(Status status) {
- DCHECK(thread_checker_.CalledOnValidThread());
-
- WriteFrameStats();
- flush_complete_cb_.Run();
-}
-
-void VideoFrameQualityValidator::Flush() {
- DCHECK(thread_checker_.CalledOnValidThread());
-
- if (decoder_state_ != DECODER_ERROR) {
- decode_buffers_.push(DecoderBuffer::CreateEOSBuffer());
- Decode();
- }
-}
-
-void VideoFrameQualityValidator::WriteFrameStats() {
- if (g_env->frame_stats_path().empty())
- return;
-
- base::File frame_stats_file(
- g_env->frame_stats_path(),
- base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE);
- LOG_ASSERT(frame_stats_file.IsValid());
-
- // TODO(pbos): Consider adding encoded bytes per frame + key/delta frame.
- std::string csv =
- "frame,width,height,ssim-y,ssim-u,ssim-v,mse-y,mse-u,mse-v\n";
- frame_stats_file.WriteAtCurrentPos(csv.data(), static_cast<int>(csv.size()));
- for (size_t frame_idx = 0; frame_idx < frame_stats_.size(); ++frame_idx) {
- const FrameStats& frame_stats = frame_stats_[frame_idx];
- csv = base::StringPrintf(
- "%d,%d,%d,%f,%f,%f,%" PRIu64 ",%" PRIu64 ",%" PRIu64 "\n",
- static_cast<int>(frame_idx), frame_stats.width, frame_stats.height,
- frame_stats.ssim[VideoFrame::kYPlane],
- frame_stats.ssim[VideoFrame::kUPlane],
- frame_stats.ssim[VideoFrame::kVPlane],
- frame_stats.mse[VideoFrame::kYPlane],
- frame_stats.mse[VideoFrame::kUPlane],
- frame_stats.mse[VideoFrame::kVPlane]);
- frame_stats_file.WriteAtCurrentPos(csv.data(),
- static_cast<int>(csv.size()));
- }
-}
-
-void VideoFrameQualityValidator::AddDecodeBuffer(
- scoped_refptr<DecoderBuffer> buffer) {
- DCHECK(thread_checker_.CalledOnValidThread());
-
- if (decoder_state_ != DECODER_ERROR) {
- decode_buffers_.push(buffer);
- Decode();
- }
-}
-
-void VideoFrameQualityValidator::Decode() {
- DCHECK(thread_checker_.CalledOnValidThread());
-
- if (decoder_state_ == INITIALIZED && !decode_buffers_.empty()) {
- scoped_refptr<DecoderBuffer> next_buffer = decode_buffers_.front();
- decode_buffers_.pop();
- decoder_state_ = DECODING;
- if (next_buffer->end_of_stream()) {
- decoder_->Decode(
- next_buffer,
- base::BindOnce(&VideoFrameQualityValidator::FlushDone, AsWeakPtr()));
- } else {
- decoder_->Decode(
- next_buffer,
- base::BindOnce(&VideoFrameQualityValidator::DecodeDone, AsWeakPtr()));
- }
- }
-}
-
-namespace {
-// Gets SSIM (see below) parameters for an 8x8 block.
-void GetSsimParams8x8(const uint8_t* orig,
- size_t orig_stride,
- const uint8_t* recon,
- size_t recon_stride,
- int* sum_orig,
- int* sum_recon,
- int* sum_sq_orig,
- int* sum_sq_recon,
- int* sum_orig_x_recon) {
- for (size_t i = 0; i < 8; ++i, orig += orig_stride, recon += recon_stride) {
- for (size_t j = 0; j < 8; ++j) {
- *sum_orig += orig[j];
- *sum_recon += recon[j];
- *sum_sq_orig += orig[j] * orig[j];
- *sum_sq_recon += recon[j] * recon[j];
- *sum_orig_x_recon += orig[j] * recon[j];
- }
- }
-}
-
-// Generates SSIM (see below) for an 8x8 block from input parameters.
-// https://en.wikipedia.org/wiki/Structural_similarity
-double GenerateSimilarity(int sum_orig,
- int sum_recon,
- int sum_sq_orig,
- int sum_sq_recon,
- int sum_orig_x_recon,
- int count) {
- // Same constants as used inside libvpx.
-  static const int64_t kC1 = 26634;   // 64^2 * (0.01*255)^2
-  static const int64_t kC2 = 239708;  // 64^2 * (0.03*255)^2
-
-  // Scale the constants by the number of pixels.
- int64_t c1 = (kC1 * count * count) >> 12;
- int64_t c2 = (kC2 * count * count) >> 12;
-
- int64_t ssim_n = (2 * sum_orig * sum_recon + c1) *
- (static_cast<int64_t>(2 * count) * sum_orig_x_recon -
- static_cast<int64_t>(2 * sum_orig) * sum_recon + c2);
-
- int64_t ssim_d = (sum_orig * sum_orig + sum_recon * sum_recon + c1) *
- (static_cast<int64_t>(count) * sum_sq_orig -
- static_cast<int64_t>(sum_orig) * sum_orig +
- static_cast<int64_t>(count) * sum_sq_recon -
- static_cast<int64_t>(sum_recon) * sum_recon + c2);
-
- return static_cast<double>(ssim_n) / ssim_d;
-}
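For reference, the fixed-point arithmetic above corresponds to the standard SSIM definition (x is the original 8x8 block, y the reconstruction, N = count):

  \mathrm{SSIM}(x,y) = \frac{(2\mu_x\mu_y + C_1)(2\sigma_{xy} + C_2)}
                            {(\mu_x^2 + \mu_y^2 + C_1)(\sigma_x^2 + \sigma_y^2 + C_2)},
  \quad C_1 = (0.01 \cdot 255)^2, \quad C_2 = (0.03 \cdot 255)^2.

Multiplying numerator and denominator by N^2 turns the means and (co)variances into the raw integer sums used above, while kC1/kC2 carry an extra 64^2 = 2^12 factor that the >> 12 shift cancels once the constants are scaled by count^2.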
-
-// Generates SSIM for an 8x8 block.
-double GenerateSsim8x8(const uint8_t* orig,
- int orig_stride,
- const uint8_t* recon,
- int recon_stride) {
- int sum_orig = 0;
- int sum_recon = 0;
- int sum_sq_orig = 0;
- int sum_sq_recon = 0;
- int sum_orig_x_recon = 0;
- GetSsimParams8x8(orig, orig_stride, recon, recon_stride, &sum_orig,
- &sum_recon, &sum_sq_orig, &sum_sq_recon, &sum_orig_x_recon);
- return GenerateSimilarity(sum_orig, sum_recon, sum_sq_orig, sum_sq_recon,
- sum_orig_x_recon, 64);
-}
-
-// Generates SSIM: https://en.wikipedia.org/wiki/Structural_similarity
-// We use an 8x8 moving window whose starting locations lie on a 4x4 pixel
-// grid. This arrangement allows the windows to overlap block boundaries and
-// thus penalize blocking artifacts.
-double GenerateSsim(const uint8_t* img1,
- size_t stride_img1,
- const uint8_t* img2,
- size_t stride_img2,
- int width,
- int height) {
- int num_samples = 0;
- double ssim_total = 0;
-
-  // Sample points start at each 4x4 grid location.
- for (int i = 0; i <= height - 8;
- i += 4, img1 += stride_img1 * 4, img2 += stride_img2 * 4) {
- for (int j = 0; j <= width - 8; j += 4) {
- double v = GenerateSsim8x8(img1 + j, stride_img1, img2 + j, stride_img2);
- ssim_total += v;
- ++num_samples;
- }
- }
- return ssim_total / num_samples;
-}
-
-// Generates MSE that can later be used (outside this file) to compute PSNR,
-// either as a per-frame average or a global average.
-// https://en.wikipedia.org/wiki/Mean_squared_error
-// https://en.wikipedia.org/wiki/Peak_signal-to-noise_ratio
-static uint64_t GenerateMse(const uint8_t* orig,
- int orig_stride,
- const uint8_t* recon,
- int recon_stride,
- int cols,
- int rows) {
- uint64_t total_sse = 0;
- for (int row = 0; row < rows; ++row) {
- for (int col = 0; col < cols; ++col) {
- int diff = orig[col] - recon[col];
- total_sse += diff * diff;
- }
-
- orig += orig_stride;
- recon += recon_stride;
- }
-
- return total_sse;
-}
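The comment above notes that PSNR is derived from this MSE outside this file. A minimal sketch of that derivation for 8-bit samples (ComputePsnr is a hypothetical helper, not part of the original code):

#include <cmath>
#include <cstdint>
#include <limits>

// Converts the total squared error returned by GenerateMse() into PSNR in dB
// for an 8-bit plane with |num_pixels| samples.
double ComputePsnr(uint64_t total_sse, uint64_t num_pixels) {
  if (total_sse == 0)
    return std::numeric_limits<double>::infinity();  // Identical planes.
  const double mse = static_cast<double>(total_sse) / num_pixels;
  return 10.0 * std::log10(255.0 * 255.0 / mse);
}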
-
-void GenerateMseAndSsim(double* ssim,
- uint64_t* mse,
- const uint8_t* buf0,
- size_t stride0,
- const uint8_t* buf1,
- size_t stride1,
- int w,
- int h) {
- *ssim = GenerateSsim(buf0, stride0, buf1, stride1, w, h);
- *mse = GenerateMse(buf0, stride0, buf1, stride1, w, h);
-}
-} // namespace
-
-VideoFrameQualityValidator::FrameStats
-VideoFrameQualityValidator::CompareFrames(const VideoFrame& original_frame,
- const VideoFrame& output_frame) {
- CHECK(original_frame.visible_rect().size() ==
- output_frame.visible_rect().size());
-
- FrameStats frame_stats;
- gfx::Size visible_size = original_frame.visible_rect().size();
- frame_stats.width = visible_size.width();
- frame_stats.height = visible_size.height();
- for (size_t plane :
- {VideoFrame::kYPlane, VideoFrame::kUPlane, VideoFrame::kVPlane}) {
- GenerateMseAndSsim(
- &frame_stats.ssim[plane], &frame_stats.mse[plane],
- original_frame.data(plane), original_frame.stride(plane),
- output_frame.data(plane), output_frame.stride(plane),
- VideoFrame::Columns(plane, pixel_format_, frame_stats.width),
- VideoFrame::Rows(plane, pixel_format_, frame_stats.height));
- }
- return frame_stats;
-}
-
-void VideoFrameQualityValidator::VerifyOutputFrame(
- scoped_refptr<VideoFrame> output_frame) {
- DCHECK(thread_checker_.CalledOnValidThread());
-
- scoped_refptr<VideoFrame> original_frame = original_frames_.front();
- original_frames_.pop();
- gfx::Size visible_size = original_frame->visible_rect().size();
-
- if (!g_env->frame_stats_path().empty())
- frame_stats_.push_back(CompareFrames(*original_frame, *output_frame));
-
-  // TODO(pbos): Consider rewriting similarity thresholds to use standard
-  // SSIM/PSNR metrics instead of abs(difference) / size, which corresponds
-  // less well to perceptual distortion.
- if (verify_quality_) {
- int planes[] = {VideoFrame::kYPlane, VideoFrame::kUPlane,
- VideoFrame::kVPlane};
- double difference = 0;
- for (int plane : planes) {
- uint8_t* original_plane = original_frame->data(plane);
- uint8_t* output_plane = output_frame->data(plane);
-
- size_t rows =
- VideoFrame::Rows(plane, pixel_format_, visible_size.height());
- size_t columns =
- VideoFrame::Columns(plane, pixel_format_, visible_size.width());
- size_t stride = original_frame->stride(plane);
-
- for (size_t i = 0; i < rows; i++) {
- for (size_t j = 0; j < columns; j++) {
- difference += std::abs(original_plane[stride * i + j] -
- output_plane[stride * i + j]);
- }
- }
- }
-
- // Divide the difference by the size of frame.
- difference /= VideoFrame::AllocationSize(pixel_format_, visible_size);
- EXPECT_TRUE(difference <= kDecodeSimilarityThreshold)
- << "difference = " << difference << " > decode similarity threshold";
- }
-}
-
-// Base class for all VEA Clients in this file
-class VEAClientBase : public VideoEncodeAccelerator::Client {
- public:
- ~VEAClientBase() override { LOG_ASSERT(!has_encoder()); }
- void NotifyError(VideoEncodeAccelerator::Error error) override {
- DCHECK(thread_checker_.CalledOnValidThread());
- SetState(CS_ERROR);
- }
-
- protected:
- explicit VEAClientBase(
- media::test::ClientStateNotification<ClientState>* note)
- : note_(note), next_output_buffer_id_(0) {}
-
- bool has_encoder() { return encoder_.get(); }
-
- virtual void SetState(ClientState new_state) = 0;
-
- std::unique_ptr<VideoEncodeAccelerator> encoder_;
-
- // Used to notify another thread about the state. VEAClientBase does not own
- // this.
- media::test::ClientStateNotification<ClientState>* note_;
-
- // All methods of this class should be run on the same thread.
- base::ThreadChecker thread_checker_;
-
- std::vector<std::unique_ptr<base::UnsafeSharedMemoryRegion>> output_shms_;
- int32_t next_output_buffer_id_;
-};
-
-class VEAClient : public VEAClientBase {
- public:
- VEAClient(TestStream* test_stream,
- media::test::ClientStateNotification<ClientState>* note,
- bool save_to_file,
- unsigned int keyframe_period,
- bool force_bitrate,
- bool mid_stream_bitrate_switch,
- bool mid_stream_framerate_switch,
- bool verify_output,
- bool verify_output_timestamp,
- bool force_level,
- bool scale);
- void CreateEncoder();
- void DestroyEncoder();
-
- bool requested_scaling() const {
- return encoded_visible_size_ != test_stream_->visible_size;
- }
-
-  // VideoEncodeAccelerator::Client implementation.
- void RequireBitstreamBuffers(unsigned int input_count,
- const gfx::Size& input_coded_size,
- size_t output_buffer_size) override;
- void BitstreamBufferReady(
- int32_t bitstream_buffer_id,
- const media::BitstreamBufferMetadata& metadata) override;
-
- private:
- // Return the number of encoded frames per second.
- double frames_per_second();
-
- void SetState(ClientState new_state) override;
-
- // Set current stream parameters to given |bitrate| at |framerate|.
- void SetStreamParameters(unsigned int bitrate, unsigned int framerate);
-
- // Called when encoder is done with a VideoFrame.
- void InputNoLongerNeededCallback(int32_t input_id);
-
- // Feed the encoder with one input frame.
- void FeedEncoderWithOneInput();
-
- // Provide the encoder with a new output buffer.
- void FeedEncoderWithOutput(base::UnsafeSharedMemoryRegion* shm);
-
-  // Called on finding a complete frame in the stream (|keyframe| is true for
-  // keyframes, |visible_size| is the visible size of the encoded frame), to
-  // perform codec-independent, per-frame checks and accounting.
-  // Returns false once we have collected all the frames we need.
- bool HandleEncodedFrame(bool keyframe, const gfx::Size& visible_size);
-
- // Ask the encoder to flush the frame.
- void FlushEncoder();
-
-  // Callback for encoder_->Flush(). We count the frames received in
-  // BitstreamBufferReady() and verify the count after the flush has
-  // completed.
- void FlushEncoderDone(bool success);
- void FlushEncoderSuccessfully();
-
-  // Timeout function used to verify that BitstreamBufferReady() and the flush
-  // callback are invoked within a short period.
- void BitstreamBufferReadyTimeout(int32_t bitstream_buffer_id);
-
-  // Verify that the stream bitrate has been close to
-  // |current_requested_bitrate_|, assuming |current_framerate_|, since the
-  // last time VerifyStreamProperties() was called. Fail the test if
-  // |force_bitrate_| is true and the bitrate is not within kBitrateTolerance.
- void VerifyStreamProperties();
-
- // Log the performance data.
- void LogPerf();
-
- // Write IVF file header to test_stream_->out_filename.
- void WriteIvfFileHeader(uint32_t fourcc);
-
- // Write an IVF frame header to test_stream_->out_filename.
- void WriteIvfFrameHeader(int frame_index, size_t frame_size);
-
- // Create and return a VideoFrame wrapping the data at |position| bytes in the
- // input stream.
- scoped_refptr<VideoFrame> CreateFrame(off_t position);
-
- // Prepare and return a frame wrapping the data at |position| bytes in the
- // input stream, ready to be sent to encoder.
- // The input frame id is returned in |input_id|.
- scoped_refptr<VideoFrame> PrepareInputFrame(off_t position,
- int32_t* input_id);
-
- // Update the parameters according to |mid_stream_bitrate_switch| and
- // |mid_stream_framerate_switch|.
- void UpdateTestStreamData(bool mid_stream_bitrate_switch,
- bool mid_stream_framerate_switch);
-
- // Callback function of the |input_timer_|.
- void OnInputTimer();
-
- // Called when the quality validator has decoded all the frames.
- void DecodeCompleted();
-
- // Called when the quality validator fails to decode a frame.
- void DecodeFailed();
-
- // Verify that the output timestamp matches input timestamp.
- void VerifyOutputTimestamp(base::TimeDelta timestamp);
-
- // Cancel and reset |buffer_ready_timeout_|.
- void UpdateBitstreamBufferReadyTimeout(int32_t bitstream_buffer_id);
-
- ClientState state_;
-
- TestStream* test_stream_;
-
- // Ids assigned to VideoFrames.
- std::set<int32_t> inputs_at_client_;
- int32_t next_input_id_;
-
- // Encode start time of all encoded frames. The position in the vector is the
- // frame input id.
- std::vector<base::TimeTicks> encode_start_time_;
- // The encode latencies of all encoded frames. We define encode latency as the
- // time delay from input of each VideoFrame (VEA::Encode()) to output of the
- // corresponding BitstreamBuffer (VEA::Client::BitstreamBufferReady()).
- std::vector<base::TimeDelta> encode_latencies_;
- // The 0-based indices of frames that we force as key frames.
- std::queue<size_t> keyframe_indices_;
-
- // Ids for output BitstreamBuffers.
- typedef std::map<int32_t, base::UnsafeSharedMemoryRegion*> IdToSHM;
- IdToSHM output_buffers_at_client_;
-
- // Current offset into input stream.
- off_t pos_in_input_stream_;
- gfx::Size input_coded_size_;
- // Requested by encoder.
- unsigned int num_required_input_buffers_;
- size_t output_buffer_size_;
-
-  // Number of frames to encode. This may differ from the number of frames in
-  // the stream if we need more frames for bitrate tests.
- unsigned int num_frames_to_encode_;
-
- // Number of frames we've sent to the encoder thus far.
- size_t num_frames_submitted_to_encoder_;
-
- // Number of encoded frames we've got from the encoder thus far.
- unsigned int num_encoded_frames_;
-
- // Frames since last bitrate verification.
- unsigned int num_frames_since_last_check_;
-
- // True if we are to save the encoded stream to a file.
- bool save_to_file_;
-
- // Request a keyframe every keyframe_period_ frames.
- const unsigned int keyframe_period_;
-
- // True if we are asking encoder for a particular bitrate.
- bool force_bitrate_;
-
- // Current requested bitrate.
- unsigned int current_requested_bitrate_;
-
- // Current expected framerate.
- unsigned int current_framerate_;
-
- // Byte size of the encoded stream (for bitrate calculation) since last
- // time we checked bitrate.
- size_t encoded_stream_size_since_last_check_;
-
- // Check the output frame quality of the encoder.
- bool verify_output_;
-
- // Check whether the output timestamps match input timestamps.
- bool verify_output_timestamp_;
-
-  // The visible size we want the encoded stream to have. This can be
-  // different from the visible size of |test_stream_| when scaling in native
-  // input mode.
- gfx::Size encoded_visible_size_;
-
- // Used to perform codec-specific sanity checks on the stream.
- std::unique_ptr<StreamValidator> stream_validator_;
-
- // Used to validate the encoded frame quality.
- std::unique_ptr<VideoFrameQualityValidator> quality_validator_;
-
- // The time when the first frame is submitted for encode.
- base::TimeTicks first_frame_start_time_;
-
- // The time when the last encoded frame is ready.
- base::TimeTicks last_frame_ready_time_;
-
- // Requested bitrate in bits per second.
- unsigned int requested_bitrate_;
-
- // Requested initial framerate.
- unsigned int requested_framerate_;
-
- // Bitrate to switch to in the middle of the stream.
- unsigned int requested_subsequent_bitrate_;
-
- // Framerate to switch to in the middle of the stream.
- unsigned int requested_subsequent_framerate_;
-
- // The timer used to feed the encoder with the input frames.
- std::unique_ptr<base::RepeatingTimer> input_timer_;
-
-  // The BitstreamBufferReadyTimeout closure. It is set on each
-  // BitstreamBufferReady() call, and cancelled when the next
-  // BitstreamBufferReady() or the flush callback is invoked.
-
- // The timestamps for each frame in the order of CreateFrame() invocation.
- base::queue<base::TimeDelta> frame_timestamps_;
-
- // The last timestamp popped from |frame_timestamps_|.
- base::TimeDelta previous_timestamp_;
-
- // Buffer factory for use with CloneVideoFrame.
- std::unique_ptr<gpu::GpuMemoryBufferFactory> gpu_memory_buffer_factory_;
-};
-
-VEAClient::VEAClient(TestStream* test_stream,
- media::test::ClientStateNotification<ClientState>* note,
- bool save_to_file,
- unsigned int keyframe_period,
- bool force_bitrate,
- bool mid_stream_bitrate_switch,
- bool mid_stream_framerate_switch,
- bool verify_output,
- bool verify_output_timestamp,
- bool force_level,
- bool scale)
- : VEAClientBase(note),
- state_(CS_CREATED),
- test_stream_(test_stream),
- next_input_id_(0),
- pos_in_input_stream_(0),
- num_required_input_buffers_(0),
- output_buffer_size_(0),
- num_frames_to_encode_(0),
- num_frames_submitted_to_encoder_(0),
- num_encoded_frames_(0),
- num_frames_since_last_check_(0),
- save_to_file_(save_to_file),
- keyframe_period_(keyframe_period),
- force_bitrate_(force_bitrate),
- current_requested_bitrate_(0),
- current_framerate_(0),
- encoded_stream_size_since_last_check_(0),
- verify_output_(verify_output),
- verify_output_timestamp_(verify_output_timestamp),
- requested_bitrate_(0),
- requested_framerate_(0),
- requested_subsequent_bitrate_(0),
- requested_subsequent_framerate_(0) {
- if (keyframe_period_)
- LOG_ASSERT(kMaxKeyframeDelay < keyframe_period_);
-
- // Only check target level against requested level if |force_level| is true.
- base::Optional<uint8_t> target_level;
- if (force_level)
- target_level = test_stream_->requested_level;
-
- // Fake encoder produces an invalid stream, so skip validating it.
- if (!g_fake_encoder) {
- stream_validator_ = StreamValidator::Create(
- test_stream_->requested_profile, target_level,
- base::BindRepeating(&VEAClient::HandleEncodedFrame,
- base::Unretained(this)));
- CHECK(stream_validator_);
-    // VideoFrameQualityValidator is required to generate frame stats as well
-    // as to validate encoder quality.
- if (verify_output_ || !g_env->frame_stats_path().empty()) {
- quality_validator_.reset(new VideoFrameQualityValidator(
- test_stream_->requested_profile, test_stream_->pixel_format,
- verify_output_,
- base::BindRepeating(&VEAClient::DecodeCompleted,
- base::Unretained(this)),
- base::BindRepeating(&VEAClient::DecodeFailed,
- base::Unretained(this))));
- }
- }
-
- encoded_visible_size_ = test_stream_->visible_size;
- if (scale) {
-    LOG_ASSERT(g_native_input)
-        << "Scaling is only supported in native input mode";
- encoded_visible_size_ = gfx::ScaleToFlooredSize(encoded_visible_size_,
- 1.0 / kScalingDenominator);
- }
-
- if (save_to_file_) {
- LOG_ASSERT(!test_stream_->out_filename.empty());
-#if defined(OS_POSIX)
- base::FilePath out_filename(test_stream_->out_filename);
-#elif defined(OS_WIN)
- base::FilePath out_filename(base::UTF8ToWide(test_stream_->out_filename));
-#endif
- // This creates or truncates out_filename.
- // Without it, AppendToFile() will not work.
- EXPECT_EQ(0, base::WriteFile(out_filename, NULL, 0));
- }
-
- gpu_memory_buffer_factory_ =
- gpu::GpuMemoryBufferFactory::CreateNativeType(nullptr);
-
- // Initialize the parameters of the test streams.
- UpdateTestStreamData(mid_stream_bitrate_switch, mid_stream_framerate_switch);
-
- thread_checker_.DetachFromThread();
-}
-
-// Helper function to create VEA.
-static std::unique_ptr<VideoEncodeAccelerator> CreateVideoEncodeAccelerator(
- const VideoEncodeAccelerator::Config& config,
- VideoEncodeAccelerator::Client* client,
- const gpu::GpuPreferences& gpu_preferences,
- const gpu::GpuDriverBugWorkarounds& gpu_workarounds) {
- if (g_fake_encoder) {
- std::unique_ptr<VideoEncodeAccelerator> encoder(
- new FakeVideoEncodeAccelerator(
- scoped_refptr<base::SingleThreadTaskRunner>(
- base::ThreadTaskRunnerHandle::Get())));
- if (encoder->Initialize(config, client))
- return encoder;
- return nullptr;
- } else {
- return GpuVideoEncodeAcceleratorFactory::CreateVEA(
- config, client, gpu_preferences, gpu_workarounds);
- }
-}
-
-void VEAClient::CreateEncoder() {
- DCHECK(thread_checker_.CalledOnValidThread());
- LOG_ASSERT(!has_encoder());
- DVLOG(1) << "Profile: " << test_stream_->requested_profile
- << ", initial bitrate: " << requested_bitrate_;
- auto storage_type = g_native_input
- ? VideoEncodeAccelerator::Config::StorageType::kDmabuf
- : VideoEncodeAccelerator::Config::StorageType::kShmem;
- const VideoEncodeAccelerator::Config config(
- test_stream_->pixel_format, encoded_visible_size_,
- test_stream_->requested_profile, requested_bitrate_, requested_framerate_,
- keyframe_period_, test_stream_->requested_level, false, storage_type);
- encoder_ = CreateVideoEncodeAccelerator(config, this, gpu::GpuPreferences(),
- gpu::GpuDriverBugWorkarounds());
- if (!encoder_) {
- LOG(ERROR) << "Failed creating a VideoEncodeAccelerator.";
- SetState(CS_ERROR);
- return;
- }
- SetStreamParameters(requested_bitrate_, requested_framerate_);
- SetState(CS_INITIALIZED);
-}
-
-void VEAClient::DecodeCompleted() {
- DCHECK(thread_checker_.CalledOnValidThread());
- SetState(CS_VALIDATED);
-}
-
-void VEAClient::DecodeFailed() {
- DCHECK(thread_checker_.CalledOnValidThread());
- SetState(CS_ERROR);
-}
-
-void VEAClient::DestroyEncoder() {
- DCHECK(thread_checker_.CalledOnValidThread());
- if (!has_encoder())
- return;
-
- encoder_.reset();
- input_timer_.reset();
- quality_validator_.reset();
-}
-
-void VEAClient::UpdateTestStreamData(bool mid_stream_bitrate_switch,
- bool mid_stream_framerate_switch) {
- requested_bitrate_ = test_stream_->requested_bitrate;
- requested_framerate_ = test_stream_->requested_framerate;
-
- // If bitrate/framerate switch is requested, use the subsequent values if
- // provided, or, if not, calculate them from their initial values using
- // the default ratios.
- // Otherwise, if a switch is not requested, keep the initial values.
- if (mid_stream_bitrate_switch) {
- if (test_stream_->requested_subsequent_bitrate == 0)
- requested_subsequent_bitrate_ =
- requested_bitrate_ * kDefaultSubsequentBitrateRatio;
- else
- requested_subsequent_bitrate_ =
- test_stream_->requested_subsequent_bitrate;
- } else {
- requested_subsequent_bitrate_ = requested_bitrate_;
- }
- if (requested_subsequent_bitrate_ == 0)
- requested_subsequent_bitrate_ = 1;
-
- if (mid_stream_framerate_switch) {
- if (test_stream_->requested_subsequent_framerate == 0)
- requested_subsequent_framerate_ =
- requested_framerate_ * kDefaultSubsequentFramerateRatio;
- else
- requested_subsequent_framerate_ =
- test_stream_->requested_subsequent_framerate;
- } else {
- requested_subsequent_framerate_ = requested_framerate_;
- }
- if (requested_subsequent_framerate_ == 0)
- requested_subsequent_framerate_ = 1;
-}
-
-double VEAClient::frames_per_second() {
- DCHECK(thread_checker_.CalledOnValidThread());
- LOG_ASSERT(num_encoded_frames_ != 0UL);
- base::TimeDelta duration = last_frame_ready_time_ - first_frame_start_time_;
- return num_encoded_frames_ / duration.InSecondsF();
-}
-
-void VEAClient::RequireBitstreamBuffers(unsigned int input_count,
- const gfx::Size& input_coded_size,
- size_t output_size) {
- DCHECK(thread_checker_.CalledOnValidThread());
- ASSERT_EQ(CS_INITIALIZED, state_);
- SetState(CS_ENCODING);
- constexpr uint32_t kVp8Fourcc = 0x30385056;
- constexpr uint32_t kVp9Fourcc = 0x30395056;
-
- if (quality_validator_)
- quality_validator_->Initialize(input_coded_size,
- gfx::Rect(encoded_visible_size_));
-
- // When scaling is requested in native input mode, |input_coded_size| is not
- // useful for building the input video frames because the encoder's image
- // processor will be the one responsible for building the video frames that
- // are fed to the hardware encoder. Instead, we can just use the unscaled
- // visible size as the coded size.
- const gfx::Size coded_size_to_use =
- requested_scaling() ? test_stream_->visible_size : input_coded_size;
- CreateAlignedInputStreamFile(coded_size_to_use, test_stream_);
-
- num_frames_to_encode_ = test_stream_->num_frames;
- if (g_num_frames_to_encode > 0)
- num_frames_to_encode_ = g_num_frames_to_encode;
-
-  // We may need to loop over the stream more than once if more frames than
-  // provided are required for bitrate tests.
- if (force_bitrate_ && num_frames_to_encode_ < kMinFramesForBitrateTests) {
- DVLOG(1) << "Stream too short for bitrate test ("
- << test_stream_->num_frames << " frames), will loop it to reach "
- << kMinFramesForBitrateTests << " frames";
- num_frames_to_encode_ = kMinFramesForBitrateTests;
- }
- if (save_to_file_) {
- if (IsVP8(test_stream_->requested_profile)) {
- WriteIvfFileHeader(kVp8Fourcc);
- } else if (IsVP9(test_stream_->requested_profile)) {
- WriteIvfFileHeader(kVp9Fourcc);
- }
- }
-
- input_coded_size_ = coded_size_to_use;
- num_required_input_buffers_ = input_count;
- ASSERT_GT(num_required_input_buffers_, 0UL);
-
- output_buffer_size_ = output_size;
- ASSERT_GT(output_buffer_size_, 0UL);
-
- for (unsigned int i = 0; i < kNumOutputBuffers; ++i) {
- auto shm = std::make_unique<base::UnsafeSharedMemoryRegion>();
- *shm = base::UnsafeSharedMemoryRegion::Create(output_buffer_size_);
- LOG_ASSERT(shm->IsValid());
- FeedEncoderWithOutput(shm.get());
- output_shms_.push_back(std::move(shm));
- }
-
- if (g_env->run_at_fps()) {
- input_timer_.reset(new base::RepeatingTimer());
- input_timer_->Start(
- FROM_HERE, base::TimeDelta::FromSeconds(1) / current_framerate_,
- base::BindRepeating(&VEAClient::OnInputTimer, base::Unretained(this)));
- } else {
- while (inputs_at_client_.size() <
- num_required_input_buffers_ + kNumExtraInputFrames)
- FeedEncoderWithOneInput();
- }
-}
-
-void VEAClient::VerifyOutputTimestamp(base::TimeDelta timestamp) {
- DCHECK(thread_checker_.CalledOnValidThread());
-  // One input frame may be mapped to multiple output frames, so the current
-  // timestamp should be equal to the previous timestamp or the front of
-  // |frame_timestamps_|.
- if (timestamp != previous_timestamp_) {
- ASSERT_TRUE(!frame_timestamps_.empty());
- EXPECT_EQ(frame_timestamps_.front(), timestamp);
- previous_timestamp_ = frame_timestamps_.front();
- frame_timestamps_.pop();
- }
-}
-
-void VEAClient::BitstreamBufferReady(
- int32_t bitstream_buffer_id,
- const media::BitstreamBufferMetadata& metadata) {
- DCHECK(thread_checker_.CalledOnValidThread());
- ASSERT_LE(metadata.payload_size_bytes, output_buffer_size_);
-
- UpdateBitstreamBufferReadyTimeout(bitstream_buffer_id);
-
- IdToSHM::iterator it = output_buffers_at_client_.find(bitstream_buffer_id);
- ASSERT_NE(it, output_buffers_at_client_.end());
- base::UnsafeSharedMemoryRegion* shm = it->second;
- LOG_ASSERT(shm->IsValid());
- output_buffers_at_client_.erase(it);
-
- if (state_ == CS_FLUSHED || state_ == CS_VALIDATED)
- return;
-
-  // When the flush has completed, the VEA may return an extra empty buffer.
-  // Skip checking such a buffer.
- if (verify_output_timestamp_ && metadata.payload_size_bytes > 0) {
- VerifyOutputTimestamp(metadata.timestamp);
- }
-
- encoded_stream_size_since_last_check_ += metadata.payload_size_bytes;
-
- base::WritableSharedMemoryMapping mapping = shm->Map();
- LOG_ASSERT(mapping.IsValid());
- const uint8_t* stream_ptr = static_cast<const uint8_t*>(mapping.memory());
- if (metadata.payload_size_bytes > 0) {
- if (stream_validator_) {
- stream_validator_->ProcessStreamBuffer(stream_ptr,
- metadata.payload_size_bytes);
- } else {
- // We don't know the visible size of the encoded stream without the stream
- // validator, so just send the expected value to pass the check.
- HandleEncodedFrame(metadata.key_frame, encoded_visible_size_);
- }
-
- if (quality_validator_) {
- scoped_refptr<DecoderBuffer> buffer(DecoderBuffer::CopyFrom(
- static_cast<const uint8_t*>(mapping.memory()),
- static_cast<int>(metadata.payload_size_bytes)));
- quality_validator_->AddDecodeBuffer(buffer);
- }
-    // If the encoder does not support flush, pretend flush is done when all
-    // frames are received. We also do this when scaling is requested (because
-    // a well-behaved client should not request a flush in this situation).
- if ((!encoder_->IsFlushSupported() || requested_scaling()) &&
- num_encoded_frames_ == num_frames_to_encode_) {
- FlushEncoderDone(true);
- }
-
- if (save_to_file_) {
- if (IsVP8(test_stream_->requested_profile) ||
- IsVP9(test_stream_->requested_profile))
- WriteIvfFrameHeader(num_encoded_frames_ - 1,
- metadata.payload_size_bytes);
-
- EXPECT_TRUE(base::AppendToFile(
- base::FilePath::FromUTF8Unsafe(test_stream_->out_filename),
- static_cast<char*>(mapping.memory()),
- base::checked_cast<int>(metadata.payload_size_bytes)));
- }
- }
-
- FeedEncoderWithOutput(shm);
-}
-
-void VEAClient::UpdateBitstreamBufferReadyTimeout(int32_t bitstream_buffer_id) {
- DCHECK(thread_checker_.CalledOnValidThread());
- DVLOGF(4);
-
- buffer_ready_timeout_.Reset(
- base::BindRepeating(&VEAClient::BitstreamBufferReadyTimeout,
- base::Unretained(this), bitstream_buffer_id));
- base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
- FROM_HERE, buffer_ready_timeout_.callback(),
- base::TimeDelta::FromMilliseconds(kBitstreamBufferReadyTimeoutMs));
-}
-
-void VEAClient::SetState(ClientState new_state) {
- DCHECK(thread_checker_.CalledOnValidThread());
-
- DVLOG(4) << "Changing state " << state_ << "->" << new_state;
- note_->Notify(new_state);
- state_ = new_state;
-}
-
-void VEAClient::SetStreamParameters(unsigned int bitrate,
- unsigned int framerate) {
- DCHECK(thread_checker_.CalledOnValidThread());
-
- current_requested_bitrate_ = bitrate;
- current_framerate_ = framerate;
- LOG_ASSERT(current_requested_bitrate_ > 0UL);
- LOG_ASSERT(current_framerate_ > 0UL);
- encoder_->RequestEncodingParametersChange(bitrate, framerate);
- DVLOG(1) << "Switched parameters to " << current_requested_bitrate_
- << " bps @ " << current_framerate_ << " FPS";
-}
-
-void VEAClient::InputNoLongerNeededCallback(int32_t input_id) {
- DCHECK(thread_checker_.CalledOnValidThread());
-
- std::set<int32_t>::iterator it = inputs_at_client_.find(input_id);
- ASSERT_NE(it, inputs_at_client_.end());
- inputs_at_client_.erase(it);
- if (!g_env->run_at_fps())
- FeedEncoderWithOneInput();
-}
-
-scoped_refptr<VideoFrame> VEAClient::CreateFrame(off_t position) {
- DCHECK(thread_checker_.CalledOnValidThread());
- CHECK_GT(current_framerate_, 0U);
-
- size_t num_planes = VideoFrame::NumPlanes(test_stream_->pixel_format);
- CHECK_LE(num_planes, 3u);
-
- uint8_t* frame_data[3] = {};
- std::vector<ColorPlaneLayout> planes(num_planes);
- size_t offset = position;
- // All the planes are stored in the same buffer, aligned_in_file_data[0].
- for (size_t i = 0; i < num_planes; i++) {
- frame_data[i] =
- reinterpret_cast<uint8_t*>(&test_stream_->aligned_in_file_data[0]) +
- offset;
- planes[i].stride = VideoFrame::RowBytes(i, test_stream_->pixel_format,
- input_coded_size_.width());
- planes[i].offset = offset;
- planes[i].size = test_stream_->aligned_plane_size[i];
- offset += test_stream_->aligned_plane_size[i];
- }
-
- auto layout = VideoFrameLayout::CreateWithPlanes(
- test_stream_->pixel_format, input_coded_size_, std::move(planes));
- if (!layout) {
- LOG(ERROR) << "Failed to create VideoFrameLayout";
- return nullptr;
- }
-
- scoped_refptr<VideoFrame> video_frame =
- VideoFrame::WrapExternalYuvDataWithLayout(
- *layout, gfx::Rect(test_stream_->visible_size),
- /*natural_size=*/encoded_visible_size_, frame_data[0], frame_data[1],
- frame_data[2],
-          // The timestamp must not start at 0.
- base::TimeDelta().FromMilliseconds(
- (next_input_id_ + 1) * base::Time::kMillisecondsPerSecond /
- current_framerate_));
-#if defined(OS_LINUX) || defined(OS_CHROMEOS)
- if (video_frame) {
- if (g_native_input) {
- video_frame = test::CloneVideoFrame(
- gpu_memory_buffer_factory_.get(), video_frame.get(),
- video_frame->layout(), VideoFrame::STORAGE_GPU_MEMORY_BUFFER,
- gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE);
- } else {
- // We want MOJO_SHARED_BUFFER memory for the Chrome OS VEA if it needs to
- // use the image processor.
- video_frame =
- MojoSharedBufferVideoFrame::CreateFromYUVFrame(*video_frame);
- }
- }
-#else
- if (g_native_input)
- video_frame = nullptr;
-#endif
-
- EXPECT_NE(nullptr, video_frame.get());
- return video_frame;
-}
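To make the timestamp arithmetic above concrete (illustrative numbers only): with current_framerate_ = 30, the first three frames receive timestamps of (0+1)*1000/30 = 33 ms, (1+1)*1000/30 = 66 ms, and (2+1)*1000/30 = 100 ms, so the sequence never starts at 0, as the comment in CreateFrame() requires.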
-
-scoped_refptr<VideoFrame> VEAClient::PrepareInputFrame(off_t position,
- int32_t* input_id) {
- DCHECK(thread_checker_.CalledOnValidThread());
- CHECK_LE(position + test_stream_->aligned_buffer_size,
- test_stream_->aligned_in_file_data.size());
-
- scoped_refptr<VideoFrame> frame = CreateFrame(position);
- EXPECT_TRUE(frame);
- frame->AddDestructionObserver(BindToCurrentLoop(
- base::BindOnce(&VEAClient::InputNoLongerNeededCallback,
- base::Unretained(this), next_input_id_)));
-
- LOG_ASSERT(inputs_at_client_.insert(next_input_id_).second);
-
- *input_id = next_input_id_++;
- return frame;
-}
-
-void VEAClient::OnInputTimer() {
- DCHECK(thread_checker_.CalledOnValidThread());
-
- if (!has_encoder() || state_ != CS_ENCODING)
- input_timer_.reset();
- else if (inputs_at_client_.size() <
- num_required_input_buffers_ + kNumExtraInputFrames)
- FeedEncoderWithOneInput();
- else
- DVLOG(1) << "Dropping input frame";
-}
-
-void VEAClient::FeedEncoderWithOneInput() {
- DCHECK(thread_checker_.CalledOnValidThread());
- if (!has_encoder() || state_ != CS_ENCODING ||
- num_frames_submitted_to_encoder_ == num_frames_to_encode_) {
- return;
- }
-
- size_t bytes_left =
- test_stream_->aligned_in_file_data.size() - pos_in_input_stream_;
- if (bytes_left < test_stream_->aligned_buffer_size) {
- DCHECK_EQ(bytes_left, 0UL);
-    // Rewind if we are at the end of the stream and still encoding. This
-    // feeds the encoder additional frames from the beginning of the stream,
-    // e.g. to flush it, or when the stream is shorter than the number of
-    // frames we require for bitrate tests.
- pos_in_input_stream_ = 0;
- }
-
- if (quality_validator_)
- quality_validator_->AddOriginalFrame(CreateFrame(pos_in_input_stream_));
-
- int32_t input_id;
- scoped_refptr<VideoFrame> video_frame =
- PrepareInputFrame(pos_in_input_stream_, &input_id);
- frame_timestamps_.push(video_frame->timestamp());
- pos_in_input_stream_ += static_cast<off_t>(test_stream_->aligned_buffer_size);
-
- if (input_id == 0) {
- first_frame_start_time_ = base::TimeTicks::Now();
- }
-
- if (g_env->needs_encode_latency()) {
- LOG_ASSERT(input_id == static_cast<int32_t>(encode_start_time_.size()));
- encode_start_time_.push_back(base::TimeTicks::Now());
- }
-
- bool force_keyframe = (keyframe_period_ && input_id % keyframe_period_ == 0);
- if (force_keyframe) {
- // Because we increase |num_frames_submitted_to_encoder_| after calling
- // Encode(), the value here is actually 0-based frame index.
- keyframe_indices_.push(num_frames_submitted_to_encoder_);
- }
- encoder_->Encode(video_frame, force_keyframe);
- ++num_frames_submitted_to_encoder_;
-
-  // If scaling was requested, we don't need to flush: the only use case for
-  // Flush() is ARC++, where pixel format conversion and/or scaling are not
-  // used.
- if (!requested_scaling() &&
- num_frames_submitted_to_encoder_ == num_frames_to_encode_) {
- FlushEncoder();
- }
-}
-
-void VEAClient::FeedEncoderWithOutput(base::UnsafeSharedMemoryRegion* shm) {
- DCHECK(thread_checker_.CalledOnValidThread());
- if (!has_encoder())
- return;
-
- if (state_ != CS_ENCODING && state_ != CS_FLUSHING)
- return;
-
- BitstreamBuffer bitstream_buffer(
- next_output_buffer_id_++,
- base::UnsafeSharedMemoryRegion::TakeHandleForSerialization(
- shm->Duplicate()),
- output_buffer_size_);
- LOG_ASSERT(output_buffers_at_client_
- .insert(std::make_pair(bitstream_buffer.id(), shm))
- .second);
-
- encoder_->UseOutputBitstreamBuffer(std::move(bitstream_buffer));
-}
-
-bool VEAClient::HandleEncodedFrame(bool keyframe,
- const gfx::Size& visible_size) {
- DCHECK(thread_checker_.CalledOnValidThread());
-  // This would be a bug in the test, which should not ignore a false return
-  // value from this method.
- LOG_ASSERT(num_encoded_frames_ <= num_frames_to_encode_);
-
- last_frame_ready_time_ = base::TimeTicks::Now();
-
- if (g_env->needs_encode_latency()) {
- LOG_ASSERT(num_encoded_frames_ < encode_start_time_.size());
- base::TimeTicks start_time = encode_start_time_[num_encoded_frames_];
- LOG_ASSERT(!start_time.is_null());
- encode_latencies_.push_back(last_frame_ready_time_ - start_time);
- }
-
- ++num_encoded_frames_;
- ++num_frames_since_last_check_;
-
-  // Because the keyframe behavior requirements are loose, we give the encoder
-  // more freedom here. It may deliver a keyframe immediately after we request
-  // one (possibly for a frame earlier than the one we requested it for, since
-  // the keyframe request is asynchronous, i.e. not bound to any concrete
-  // frame, and the pipeline can be deeper than one frame), at that frame, or
-  // after it. So the only constraints we impose are that we get a keyframe no
-  // earlier than we requested one (in time), and no later than
-  // kMaxKeyframeDelay frames after the frame for which we requested it comes
-  // back encoded.
- if (!keyframe_indices_.empty()) {
- // Convert to 0-based index for encoded frame.
- const unsigned int frame_index = num_encoded_frames_ - 1;
- if (keyframe) {
- EXPECT_LE(frame_index, keyframe_indices_.front() + kMaxKeyframeDelay);
- keyframe_indices_.pop();
- } else {
- EXPECT_LT(frame_index, keyframe_indices_.front() + kMaxKeyframeDelay);
- }
- }
-
- EXPECT_EQ(encoded_visible_size_, visible_size);
-
- if (num_encoded_frames_ == num_frames_to_encode_ / 2) {
- VerifyStreamProperties();
- if (requested_subsequent_bitrate_ != current_requested_bitrate_ ||
- requested_subsequent_framerate_ != current_framerate_) {
- SetStreamParameters(requested_subsequent_bitrate_,
- requested_subsequent_framerate_);
- if (g_env->run_at_fps() && input_timer_)
- input_timer_->Start(
- FROM_HERE, base::TimeDelta::FromSeconds(1) / current_framerate_,
- base::BindRepeating(&VEAClient::OnInputTimer,
- base::Unretained(this)));
- }
- } else if (num_encoded_frames_ == num_frames_to_encode_) {
- LogPerf();
- VerifyStreamProperties();
- // We might receive the last frame before calling Flush(). In this case we
- // set the state to CS_FLUSHING first to bypass the state transition check.
- if (state_ == CS_ENCODING)
- SetState(CS_FLUSHING);
- SetState(CS_FINISHED);
- if (verify_output_timestamp_) {
-      // There may be some timestamps left because we push extra frames to
-      // flush the encoder.
- EXPECT_LE(frame_timestamps_.size(),
- static_cast<size_t>(next_input_id_ - num_frames_to_encode_));
- }
- return false;
- }
-
- return true;
-}
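A concrete reading of the keyframe constraint above, using an illustrative delay of 4 (the real value of kMaxKeyframeDelay is defined elsewhere in this file): if a keyframe was requested at 0-based frame 10, frames 10 through 13 may still come back as delta frames, but the EXPECT_LE/EXPECT_LT pair fails unless a keyframe appears no later than frame 14.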
-
-void VEAClient::LogPerf() {
- DCHECK(thread_checker_.CalledOnValidThread());
- g_env->LogToFile("Measured encoder FPS",
- base::StringPrintf("%.3f", frames_per_second()));
-
- // Log encode latencies.
- if (g_env->needs_encode_latency()) {
- std::sort(encode_latencies_.begin(), encode_latencies_.end());
- for (const auto& percentile : kLoggedLatencyPercentiles) {
- base::TimeDelta latency = Percentile(encode_latencies_, percentile);
- g_env->LogToFile(
- base::StringPrintf("Encode latency for the %dth percentile",
- percentile),
- base::StringPrintf("%" PRId64 " us", latency.InMicroseconds()));
- }
- }
-}
-
-void VEAClient::FlushEncoder() {
- // In order to guarantee the order between encoder.Encode() and
- // encoder.Flush(), this method should be called from the same thread as
- // encoder.Encode().
- DCHECK(thread_checker_.CalledOnValidThread());
- LOG_ASSERT(num_frames_submitted_to_encoder_ == num_frames_to_encode_);
-
- if (!encoder_->IsFlushSupported())
- return;
-
- encoder_->Flush(
- base::BindOnce(&VEAClient::FlushEncoderDone, base::Unretained(this)));
- // We might receive the last frame before calling Flush(). In this case we set
- // the state to CS_FLUSHING when receiving the last frame.
- if (state_ != CS_FINISHED)
- SetState(CS_FLUSHING);
-}
-
-void VEAClient::FlushEncoderDone(bool success) {
- DCHECK(thread_checker_.CalledOnValidThread());
- DVLOGF(3);
- LOG_ASSERT(num_frames_submitted_to_encoder_ == num_frames_to_encode_);
-
- // Stop the timeout callback.
- buffer_ready_timeout_.Cancel();
-
- if (!success || num_encoded_frames_ != num_frames_to_encode_) {
- SetState(CS_ERROR);
- return;
- }
- FlushEncoderSuccessfully();
-}
-
-void VEAClient::FlushEncoderSuccessfully() {
- DCHECK(thread_checker_.CalledOnValidThread());
-
- SetState(CS_FLUSHED);
- if (!quality_validator_) {
- SetState(CS_VALIDATED);
- } else {
- // Insert EOS buffer to flush the decoder.
- quality_validator_->Flush();
- }
-}
-
-void VEAClient::BitstreamBufferReadyTimeout(int32_t bitstream_buffer_id) {
- DCHECK(thread_checker_.CalledOnValidThread());
- LOG(ERROR) << "Timeout getting next bitstream after BitstreamBufferReady("
- << bitstream_buffer_id << ").";
- SetState(CS_ERROR);
-}
-
-void VEAClient::VerifyStreamProperties() {
- DCHECK(thread_checker_.CalledOnValidThread());
- LOG_ASSERT(num_frames_since_last_check_ > 0UL);
- LOG_ASSERT(encoded_stream_size_since_last_check_ > 0UL);
- unsigned int bitrate = static_cast<unsigned int>(
- encoded_stream_size_since_last_check_ * 8 * current_framerate_ /
- num_frames_since_last_check_);
- DVLOG(1) << "Current chunk's bitrate: " << bitrate
- << " (expected: " << current_requested_bitrate_ << " @ "
- << current_framerate_ << " FPS,"
- << " num frames in chunk: " << num_frames_since_last_check_;
-
- num_frames_since_last_check_ = 0;
- encoded_stream_size_since_last_check_ = 0;
-
- if (force_bitrate_) {
- EXPECT_NEAR(bitrate, current_requested_bitrate_,
- kBitrateTolerance * current_requested_bitrate_);
- }
-}
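Illustrative arithmetic for the bitrate estimate above (numbers made up): if 150000 bytes were produced over 60 frames at current_framerate_ = 30, the estimated bitrate is 150000 * 8 * 30 / 60 = 600000 bps, and with |force_bitrate_| set the EXPECT_NEAR check requires |600000 - current_requested_bitrate_| <= kBitrateTolerance * current_requested_bitrate_.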
-
-void VEAClient::WriteIvfFileHeader(uint32_t fourcc) {
- DCHECK(thread_checker_.CalledOnValidThread());
- IvfFileHeader header = {};
- memcpy(header.signature, kIvfHeaderSignature, sizeof(header.signature));
- header.version = 0;
- header.header_size = sizeof(header);
- header.fourcc = fourcc; // VP80 or VP90
- header.width = base::checked_cast<uint16_t>(encoded_visible_size_.width());
- header.height = base::checked_cast<uint16_t>(encoded_visible_size_.height());
- header.timebase_denum = requested_framerate_;
- header.timebase_num = 1;
- header.num_frames = num_frames_to_encode_;
- header.ByteSwap();
-
- EXPECT_TRUE(base::AppendToFile(
- base::FilePath::FromUTF8Unsafe(test_stream_->out_filename),
- reinterpret_cast<char*>(&header), sizeof(header)));
-}
-
-void VEAClient::WriteIvfFrameHeader(int frame_index, size_t frame_size) {
- DCHECK(thread_checker_.CalledOnValidThread());
- IvfFrameHeader header = {};
-
- header.frame_size = static_cast<uint32_t>(frame_size);
- header.timestamp = frame_index;
- header.ByteSwap();
- EXPECT_TRUE(base::AppendToFile(
- base::FilePath::FromUTF8Unsafe(test_stream_->out_filename),
- reinterpret_cast<char*>(&header), sizeof(header)));
-}
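For context, the two helpers above emit the standard IVF container layout (all fields little-endian on disk; header.ByteSwap() performs the conversion). A reference summary, not part of the original file:

// IVF file header, 32 bytes:
//   bytes  0-3   signature "DKIF"      bytes  4-5   version (0)
//   bytes  6-7   header size (32)      bytes  8-11  fourcc (VP80/VP90)
//   bytes 12-13  width                 bytes 14-15  height
//   bytes 16-19  timebase denominator  bytes 20-23  timebase numerator
//   bytes 24-27  number of frames      bytes 28-31  unused
// IVF frame header, 12 bytes: frame size (uint32) and timestamp (uint64),
// followed immediately by the frame payload.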
-
-// Base class for simple VEA Clients
-class SimpleVEAClientBase : public VEAClientBase {
- public:
- void CreateEncoder();
- void DestroyEncoder();
-
-  // VideoEncodeAccelerator::Client implementation.
- void RequireBitstreamBuffers(unsigned int input_count,
- const gfx::Size& input_coded_size,
- size_t output_buffer_size) override;
-
- protected:
- SimpleVEAClientBase(media::test::ClientStateNotification<ClientState>* note,
- const int width,
- const int height);
-
- void SetState(ClientState new_state) override;
-
- // Provide the encoder with a new output buffer.
- void FeedEncoderWithOutput(base::UnsafeSharedMemoryRegion* shm,
- size_t output_size);
-
- const int width_;
- const int height_;
- const int bitrate_;
- const int fps_;
-};
-
-SimpleVEAClientBase::SimpleVEAClientBase(
- media::test::ClientStateNotification<ClientState>* note,
- const int width,
- const int height)
- : VEAClientBase(note),
- width_(width),
- height_(height),
- bitrate_(200000),
- fps_(30) {
- thread_checker_.DetachFromThread();
-}
-
-void SimpleVEAClientBase::CreateEncoder() {
- DCHECK(thread_checker_.CalledOnValidThread());
- LOG_ASSERT(!has_encoder());
- LOG_ASSERT(g_env->test_streams_.size());
-
- gfx::Size visible_size(width_, height_);
- const VideoEncodeAccelerator::Config config(
- g_env->test_streams_[0]->pixel_format, visible_size,
- g_env->test_streams_[0]->requested_profile, bitrate_, fps_);
- encoder_ = CreateVideoEncodeAccelerator(config, this, gpu::GpuPreferences(),
- gpu::GpuDriverBugWorkarounds());
- if (!encoder_) {
- LOG(ERROR) << "Failed creating a VideoEncodeAccelerator.";
- SetState(CS_ERROR);
- return;
- }
- encoder_->RequestEncodingParametersChange(bitrate_, fps_);
- SetState(CS_INITIALIZED);
-}
-
-void SimpleVEAClientBase::DestroyEncoder() {
- DCHECK(thread_checker_.CalledOnValidThread());
- if (!has_encoder())
- return;
- // Clear the objects that should be destroyed on the same thread as creation.
- encoder_.reset();
-}
-
-void SimpleVEAClientBase::SetState(ClientState new_state) {
- DVLOG(4) << "Changing state to " << new_state;
- note_->Notify(new_state);
-}
-
-void SimpleVEAClientBase::RequireBitstreamBuffers(
- unsigned int input_count,
- const gfx::Size& input_coded_size,
- size_t output_size) {
- DCHECK(thread_checker_.CalledOnValidThread());
- SetState(CS_ENCODING);
- ASSERT_GT(output_size, 0UL);
-
- for (unsigned int i = 0; i < kNumOutputBuffers; ++i) {
- auto shm = std::make_unique<base::UnsafeSharedMemoryRegion>();
- *shm = base::UnsafeSharedMemoryRegion::Create(output_size);
- LOG_ASSERT(shm->IsValid());
- FeedEncoderWithOutput(shm.get(), output_size);
- output_shms_.push_back(std::move(shm));
- }
-}
-
-void SimpleVEAClientBase::FeedEncoderWithOutput(
- base::UnsafeSharedMemoryRegion* shm,
- size_t output_size) {
- DCHECK(thread_checker_.CalledOnValidThread());
- if (!has_encoder())
- return;
-
- BitstreamBuffer bitstream_buffer(
- next_output_buffer_id_++,
- base::UnsafeSharedMemoryRegion::TakeHandleForSerialization(
- shm->Duplicate()),
- output_size);
- encoder_->UseOutputBitstreamBuffer(std::move(bitstream_buffer));
-}
-
-// This client is only used to make sure the encoder does not return an encoded
-// frame before getting any input.
-class VEANoInputClient : public SimpleVEAClientBase {
- public:
- explicit VEANoInputClient(
- media::test::ClientStateNotification<ClientState>* note);
- void DestroyEncoder();
-
-  // VideoEncodeAccelerator::Client implementation.
- void RequireBitstreamBuffers(unsigned int input_count,
- const gfx::Size& input_coded_size,
- size_t output_buffer_size) override;
- void BitstreamBufferReady(
- int32_t bitstream_buffer_id,
- const media::BitstreamBufferMetadata& metadata) override;
-
- private:
-  // The timer used to verify that the encoder does not return an output
-  // buffer within a given period of time.
- std::unique_ptr<base::OneShotTimer> timer_;
-};
-
-VEANoInputClient::VEANoInputClient(
- media::test::ClientStateNotification<ClientState>* note)
- : SimpleVEAClientBase(note, 320, 240) {}
-
-void VEANoInputClient::DestroyEncoder() {
- SimpleVEAClientBase::DestroyEncoder();
- // Clear the objects that should be destroyed on the same thread as creation.
- timer_.reset();
-}
-
-void VEANoInputClient::RequireBitstreamBuffers(
- unsigned int input_count,
- const gfx::Size& input_coded_size,
- size_t output_size) {
- DCHECK(thread_checker_.CalledOnValidThread());
- SimpleVEAClientBase::RequireBitstreamBuffers(input_count, input_coded_size,
- output_size);
-
-  // The timer is used to make sure no output frame arrives within 100 ms.
- timer_.reset(new base::OneShotTimer());
- timer_->Start(FROM_HERE, base::TimeDelta::FromMilliseconds(100),
- base::BindOnce(&VEANoInputClient::SetState,
- base::Unretained(this), CS_FINISHED));
-}
-
-void VEANoInputClient::BitstreamBufferReady(
- int32_t bitstream_buffer_id,
- const media::BitstreamBufferMetadata& metadata) {
- DCHECK(thread_checker_.CalledOnValidThread());
- SetState(CS_ERROR);
-}
-
-// This client is only used to test input frames whose U and V plane sizes are
-// not aligned to a cache line.
-// Making both width and height divisible by 16 but not by 32 leaves the size
-// of the U/V planes (width * height / 4) unaligned to a 128-byte cache line.
-class VEACacheLineUnalignedInputClient : public SimpleVEAClientBase {
- public:
- explicit VEACacheLineUnalignedInputClient(
- media::test::ClientStateNotification<ClientState>* note);
-
-  // VideoEncodeAccelerator::Client implementation.
- void RequireBitstreamBuffers(unsigned int input_count,
- const gfx::Size& input_coded_size,
- size_t output_buffer_size) override;
- void BitstreamBufferReady(
- int32_t bitstream_buffer_id,
- const media::BitstreamBufferMetadata& metadata) override;
-
- private:
- // Feed the encoder with one input frame.
- void FeedEncoderWithOneInput(const gfx::Size& input_coded_size);
-};
-
-VEACacheLineUnalignedInputClient::VEACacheLineUnalignedInputClient(
- media::test::ClientStateNotification<ClientState>* note)
-    : SimpleVEAClientBase(note, 368, 368) {
-}  // 368 is divisible by 16 but not by 32.
-
-void VEACacheLineUnalignedInputClient::RequireBitstreamBuffers(
- unsigned int input_count,
- const gfx::Size& input_coded_size,
- size_t output_size) {
- DCHECK(thread_checker_.CalledOnValidThread());
- SimpleVEAClientBase::RequireBitstreamBuffers(input_count, input_coded_size,
- output_size);
-
- FeedEncoderWithOneInput(input_coded_size);
-}
-
-void VEACacheLineUnalignedInputClient::BitstreamBufferReady(
- int32_t bitstream_buffer_id,
- const media::BitstreamBufferMetadata& metadata) {
- DCHECK(thread_checker_.CalledOnValidThread());
-  // It's enough to encode just one frame. If the plane size is not aligned,
-  // VideoEncodeAccelerator::Encode will fail.
- SetState(CS_FINISHED);
-}
-
-void VEACacheLineUnalignedInputClient::FeedEncoderWithOneInput(
- const gfx::Size& input_coded_size) {
- DCHECK(thread_checker_.CalledOnValidThread());
- if (!has_encoder())
- return;
-
- const VideoPixelFormat pixel_format = g_env->test_streams_[0]->pixel_format;
- size_t num_planes = VideoFrame::NumPlanes(pixel_format);
- CHECK_LE(num_planes, 3u);
- std::vector<ColorPlaneLayout> planes(num_planes);
- size_t offset = 0;
- for (size_t i = 0; i < num_planes; i++) {
- size_t plane_size = base::bits::Align(
- VideoFrame::PlaneSize(pixel_format, i, input_coded_size).GetArea(),
- test::kPlatformBufferAlignment);
-
- planes[i].stride =
- VideoFrame::RowBytes(i, pixel_format, input_coded_size.width());
- planes[i].offset = offset;
- planes[i].size = plane_size;
- offset += plane_size;
- }
- auto layout = VideoFrameLayout::CreateWithPlanes(
- pixel_format, input_coded_size, std::move(planes),
- test::kPlatformBufferAlignment);
- ASSERT_TRUE(layout);
- scoped_refptr<VideoFrame> video_frame = VideoFrame::CreateFrameWithLayout(
- *layout, gfx::Rect(input_coded_size), input_coded_size,
- base::TimeDelta().FromMilliseconds(base::Time::kMillisecondsPerSecond /
- fps_),
- true);
- // We want MOJO_SHARED_BUFFER memory for the Chrome OS VEA if it needs to
- // use the image processor.
- video_frame = MojoSharedBufferVideoFrame::CreateFromYUVFrame(*video_frame);
-
- encoder_->Encode(video_frame, false);
-}
-
-// Test parameters:
-// - Number of concurrent encoders. The value takes effect when there is only
-// one input stream; otherwise, one encoder per input stream will be
-// instantiated.
-// - If true, save output to file (provided an output filename was supplied).
-// - Force a keyframe every n frames.
-// - Force bitrate; the actual required value is provided as a property
-// of the input stream, because it depends on stream type/resolution/etc.
-// - If true, switch bitrate mid-stream.
-// - If true, switch framerate mid-stream.
-// - If true, verify the output frames of encoder.
-// - If true, verify the timestamps of output frames.
-// - If true, verify the output level is as provided in the input stream. Only
-//   available for the H264 encoder for now.
-// - If true, request that the encoder scales the input stream to 50% of the
-// original size prior to encoding. This is only applicable when
-// |g_native_input| is true. Otherwise, the test is skipped. This is because
-//   the intention is to exercise the image processor path inside the encoder,
-// and in non-native input mode, the scaling is done by the client instead of
-// the encoder (and we're not interested in testing that). This is also
-// skipped if any of the test streams have a visible size smaller than
-// |kMinVisibleSizeForScalingTest|.
-class VideoEncodeAcceleratorTest
- : public ::testing::TestWithParam<
- std::
- tuple<int, bool, int, bool, bool, bool, bool, bool, bool, bool>> {
- public:
- void SetUp() override {
- const bool scale = std::get<9>(GetParam());
- if (scale) {
- if (!g_native_input) {
- GTEST_SKIP() << "Test skipped because scaling should only occur when "
- "using native input";
- }
- for (const auto& test_stream : g_env->test_streams_) {
- if (!gfx::Rect(test_stream->visible_size)
- .Contains(gfx::Rect(kMinVisibleSizeForScalingTest))) {
- GTEST_SKIP() << "Test skipped because the resolution of one of the "
- "input streams is below the minimum "
- << kMinVisibleSizeForScalingTest.ToString();
- }
- }
- }
- }
-};
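To make the tuple positions in the parameter list above concrete, a hypothetical instantiation (values purely illustrative, not one of the suite's real instantiations) might look like:

INSTANTIATE_TEST_SUITE_P(
    SimpleEncodeExample,  // Hypothetical name, for illustration only.
    VideoEncodeAcceleratorTest,
    ::testing::Values(std::make_tuple(1,      // concurrent encoders
                                      true,   // save to file
                                      0,      // keyframe period (0 = off)
                                      false,  // force bitrate
                                      false,  // mid-stream bitrate switch
                                      false,  // mid-stream framerate switch
                                      false,  // verify output frames
                                      false,  // verify output timestamps
                                      false,  // force level
                                      false)));  // scale before encoding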
-
-TEST_P(VideoEncodeAcceleratorTest, TestSimpleEncode) {
- // Workaround: TestSuite::Initialize() overwrites specified features.
- // Re-enable our required features here so that they are enabled in encoding.
- auto scoped_feature_list = CreateScopedFeatureList();
-
- size_t num_concurrent_encoders = std::get<0>(GetParam());
- const bool save_to_file = std::get<1>(GetParam());
- const unsigned int keyframe_period = std::get<2>(GetParam());
- const bool force_bitrate = std::get<3>(GetParam());
- const bool mid_stream_bitrate_switch = std::get<4>(GetParam());
- const bool mid_stream_framerate_switch = std::get<5>(GetParam());
- const bool verify_output =
- std::get<6>(GetParam()) || g_env->verify_all_output();
- const bool verify_output_timestamp = std::get<7>(GetParam());
- const bool force_level = std::get<8>(GetParam());
- const bool scale = std::get<9>(GetParam());
-
-#if BUILDFLAG(IS_ASH)
- if (ShouldSkipTest(g_env->test_streams_[0]->pixel_format))
- GTEST_SKIP();
-#endif // BUILDFLAG(IS_ASH)
-
- if (force_level) {
- // Skip ForceLevel test if "--force_level=false".
- if (!g_force_level) {
- LOG(WARNING) << "ForceLevel test is disabled.";
- return;
- }
-
- // Skip ForceLevel test for non-H264 test stream.
- for (auto it = g_env->test_streams_.begin();
- it != g_env->test_streams_.end();) {
- if (!IsH264((*it)->requested_profile) || !(*it)->requested_level) {
- LOG(WARNING) << "Skip ForceLevel for stream: " << (*it)->in_filename
- << " (Non-H264 codec or level is not assigned).";
- it = g_env->test_streams_.erase(it);
- } else {
- ASSERT_TRUE(CheckH264InitConfigValidity(it->get()));
- ++it;
- }
- }
- if (g_env->test_streams_.empty()) {
- LOG(WARNING) << "ForceLevel test is totally skipped.";
- return;
- }
- }
-
- std::vector<
- std::unique_ptr<media::test::ClientStateNotification<ClientState>>>
- notes;
- std::vector<std::unique_ptr<VEAClient>> clients;
-
- if (g_env->test_streams_.size() > 1)
- num_concurrent_encoders = g_env->test_streams_.size();
-
- // Create all encoders.
- for (size_t i = 0; i < num_concurrent_encoders; i++) {
- size_t test_stream_index = i % g_env->test_streams_.size();
- // Disregard save_to_file if we didn't get an output filename.
- bool encoder_save_to_file =
- (save_to_file &&
- !g_env->test_streams_[test_stream_index]->out_filename.empty());
-
- notes.push_back(
- std::make_unique<media::test::ClientStateNotification<ClientState>>());
- clients.push_back(std::make_unique<VEAClient>(
- g_env->test_streams_[test_stream_index].get(), notes.back().get(),
- encoder_save_to_file, keyframe_period, force_bitrate,
- mid_stream_bitrate_switch, mid_stream_framerate_switch, verify_output,
- verify_output_timestamp, force_level, scale));
-
- g_env->GetRenderingTaskRunner()->PostTask(
- FROM_HERE, base::BindOnce(&VEAClient::CreateEncoder,
- base::Unretained(clients.back().get())));
- }
-
- // All encoders must pass through states in this order.
- enum ClientState state_transitions[] = {CS_INITIALIZED, CS_ENCODING,
- CS_FLUSHING, CS_FINISHED,
- CS_FLUSHED, CS_VALIDATED};
-
- // Wait for all encoders to go through all states and finish.
- // Do this by waiting for all encoders to advance to state n before checking
- // state n+1, to verify that they are able to operate concurrently.
-  // This also better simulates real-world usage, as the main thread, on which
-  // encoders are created/destroyed, is a single GPU process ChildThread.
-  // Moreover, proper multithreading is unavailable on X11, so multiple
-  // "ChildThreads" there could cause hard-to-debug issues.
- for (const auto& state : state_transitions) {
- for (size_t i = 0; i < num_concurrent_encoders && !HasFailure(); i++) {
- EXPECT_EQ(state, notes[i]->Wait());
- }
- if (HasFailure()) {
- break;
- }
- }
-
- for (size_t i = 0; i < num_concurrent_encoders; ++i) {
- g_env->GetRenderingTaskRunner()->PostTask(
- FROM_HERE, base::BindOnce(&VEAClient::DestroyEncoder,
- base::Unretained(clients[i].get())));
- }
-
- g_env->FlushRenderingThread();
-}
-
-// Test parameters:
-// - Test type
-// 0: No input test
-// 1: Cache line-unaligned test
-class VideoEncodeAcceleratorSimpleTest : public ::testing::TestWithParam<int> {
-};
-
-template <class TestClient>
-void SimpleTestFunc() {
- std::unique_ptr<media::test::ClientStateNotification<ClientState>> note(
- new media::test::ClientStateNotification<ClientState>());
- std::unique_ptr<TestClient> client(new TestClient(note.get()));
- base::Thread vea_client_thread("EncoderClientThread");
- ASSERT_TRUE(vea_client_thread.Start());
-
- vea_client_thread.task_runner()->PostTask(
- FROM_HERE, base::BindOnce(&TestClient::CreateEncoder,
- base::Unretained(client.get())));
-
- // Encoder must pass through states in this order.
- enum ClientState state_transitions[] = {CS_INITIALIZED, CS_ENCODING,
- CS_FINISHED};
-
- for (const auto& state : state_transitions) {
- EXPECT_EQ(state, note->Wait());
- if (testing::Test::HasFailure()) {
- break;
- }
- }
-
- vea_client_thread.task_runner()->PostTask(
- FROM_HERE, base::BindOnce(&TestClient::DestroyEncoder,
- base::Unretained(client.get())));
-
- // This ensures all tasks have finished.
- vea_client_thread.Stop();
-}
-
-TEST_P(VideoEncodeAcceleratorSimpleTest, TestSimpleEncode) {
- // Workaround: TestSuite::Initialize() overwrites specified features.
- // Re-enable our required features here so that they are enabled in encoding.
- auto scoped_feature_list = CreateScopedFeatureList();
-
- const int test_type = GetParam();
- ASSERT_LT(test_type, 2) << "Invalid test type=" << test_type;
-
-#if BUILDFLAG(IS_ASH)
- if (ShouldSkipTest(g_env->test_streams_[0]->pixel_format))
- GTEST_SKIP();
-#endif // BUILDFLAG(IS_ASH)
-
- if (test_type == 0)
- SimpleTestFunc<VEANoInputClient>();
- else if (test_type == 1)
- SimpleTestFunc<VEACacheLineUnalignedInputClient>();
-}
-
-#if defined(OS_CHROMEOS) || defined(OS_LINUX)
-// TODO(kcwu): add back test of verify_output=true after
-// https://crbug.com/694131 fixed.
-INSTANTIATE_TEST_SUITE_P(SimpleEncode,
- VideoEncodeAcceleratorTest,
- ::testing::Values(std::make_tuple(1,
- true,
- 0,
- false,
- false,
- false,
- false,
- false,
- false,
- false)));
-
-INSTANTIATE_TEST_SUITE_P(SimpleEncodeWithScaling,
- VideoEncodeAcceleratorTest,
- ::testing::Values(std::make_tuple(1,
- true,
- 0,
- false,
- false,
- false,
- false,
- false,
- false,
- true)));
-
-INSTANTIATE_TEST_SUITE_P(EncoderPerf,
- VideoEncodeAcceleratorTest,
- ::testing::Values(std::make_tuple(1,
- false,
- 0,
- false,
- false,
- false,
- false,
- false,
- false,
- false)));
-
-INSTANTIATE_TEST_SUITE_P(ForceKeyframes,
- VideoEncodeAcceleratorTest,
- ::testing::Values(std::make_tuple(1,
- false,
- 10,
- false,
- false,
- false,
- false,
- false,
- false,
- false)));
-
-INSTANTIATE_TEST_SUITE_P(ForceBitrate,
- VideoEncodeAcceleratorTest,
- ::testing::Values(std::make_tuple(1,
- false,
- 0,
- true,
- false,
- false,
- false,
- false,
- false,
- false)));
-
-INSTANTIATE_TEST_SUITE_P(MidStreamParamSwitchBitrate,
- VideoEncodeAcceleratorTest,
- ::testing::Values(std::make_tuple(1,
- false,
- 0,
- true,
- true,
- false,
- false,
- false,
- false,
- false)));
-
-// TODO(kcwu): add back bitrate test after https://crbug.com/693336 fixed.
-INSTANTIATE_TEST_SUITE_P(DISABLED_MidStreamParamSwitchFPS,
- VideoEncodeAcceleratorTest,
- ::testing::Values(std::make_tuple(1,
- false,
- 0,
- true,
- false,
- true,
- false,
- false,
- false,
- false)));
-
-INSTANTIATE_TEST_SUITE_P(MultipleEncoders,
- VideoEncodeAcceleratorTest,
- ::testing::Values(std::make_tuple(3,
- false,
- 0,
- false,
- false,
- false,
- false,
- false,
- false,
- false),
- std::make_tuple(3,
- false,
- 0,
- true,
- true,
- false,
- false,
- false,
- false,
- false)));
-
-INSTANTIATE_TEST_SUITE_P(VerifyTimestamp,
- VideoEncodeAcceleratorTest,
- ::testing::Values(std::make_tuple(1,
- false,
- 0,
- false,
- false,
- false,
- false,
- true,
- false,
- false)));
-
-INSTANTIATE_TEST_SUITE_P(ForceLevel,
- VideoEncodeAcceleratorTest,
- ::testing::Values(std::make_tuple(1,
- false,
- 0,
- false,
- false,
- false,
- false,
- false,
- true,
- false)));
-
-INSTANTIATE_TEST_SUITE_P(NoInputTest,
- VideoEncodeAcceleratorSimpleTest,
- ::testing::Values(0));
-
-INSTANTIATE_TEST_SUITE_P(CacheLineUnalignedInputTest,
- VideoEncodeAcceleratorSimpleTest,
- ::testing::Values(1));
-
-#elif defined(OS_MAC) || defined(OS_WIN)
-INSTANTIATE_TEST_SUITE_P(SimpleEncode,
- VideoEncodeAcceleratorTest,
- ::testing::Values(std::make_tuple(1,
- true,
- 0,
- false,
- false,
- false,
- false,
- false,
- false,
- false),
- std::make_tuple(1,
- true,
- 0,
- false,
- false,
- false,
- true,
- false,
- false,
- false)));
-
-INSTANTIATE_TEST_SUITE_P(EncoderPerf,
- VideoEncodeAcceleratorTest,
- ::testing::Values(std::make_tuple(1,
- false,
- 0,
- false,
- false,
- false,
- false,
- false,
- false,
- false)));
-
-INSTANTIATE_TEST_SUITE_P(MultipleEncoders,
- VideoEncodeAcceleratorTest,
- ::testing::Values(std::make_tuple(3,
- false,
- 0,
- false,
- false,
- false,
- false,
- false,
- false,
- false)));
-
-INSTANTIATE_TEST_SUITE_P(VerifyTimestamp,
- VideoEncodeAcceleratorTest,
- ::testing::Values(std::make_tuple(1,
- false,
- 0,
- false,
- false,
- false,
- false,
- true,
- false,
- false)));
-
-#if defined(OS_WIN)
-INSTANTIATE_TEST_SUITE_P(ForceBitrate,
- VideoEncodeAcceleratorTest,
- ::testing::Values(std::make_tuple(1,
- false,
- 0,
- true,
- false,
- false,
- false,
- false,
- false,
- false)));
-#endif // defined(OS_WIN)
-
-#endif // defined(OS_CHROMEOS) || defined(OS_LINUX)
-
-// TODO(posciak): more tests:
-// - async FeedEncoderWithOutput
-// - out-of-order return of outputs to encoder
-// - multiple encoders + decoders
-// - mid-stream encoder_->Destroy()
-
-class VEATestSuite : public base::TestSuite {
- public:
- VEATestSuite(int argc, char** argv) : base::TestSuite(argc, argv) {}
-
- private:
- void Initialize() override {
- base::TestSuite::Initialize();
-
-#if BUILDFLAG(IS_ASH)
- task_environment_ = std::make_unique<base::test::TaskEnvironment>(
- base::test::TaskEnvironment::MainThreadType::UI);
-#else
- task_environment_ = std::make_unique<base::test::TaskEnvironment>();
-#endif
- media::g_env =
- reinterpret_cast<media::VideoEncodeAcceleratorTestEnvironment*>(
- testing::AddGlobalTestEnvironment(
- new media::VideoEncodeAcceleratorTestEnvironment(
- std::move(media::g_test_stream_data), media::g_log_path,
- media::g_frame_stats_path, media::g_run_at_fps,
- media::g_needs_encode_latency,
- media::g_verify_all_output)));
-
-#if BUILDFLAG(USE_VAAPI)
- auto scoped_feature_list = CreateScopedFeatureList();
- media::VaapiWrapper::PreSandboxInitialization();
-#elif defined(OS_WIN)
- media::MediaFoundationVideoEncodeAccelerator::PreSandboxInitialization();
-#endif
- }
-
- void Shutdown() override {
- task_environment_.reset();
- base::TestSuite::Shutdown();
- }
-
- std::unique_ptr<base::test::TaskEnvironment> task_environment_;
-};
-
-} // namespace
-} // namespace media
-
-int main(int argc, char** argv) {
- mojo::core::Init();
- media::VEATestSuite test_suite(argc, argv);
-
- base::ShadowingAtExitManager at_exit_manager;
-
- // Needed to enable DVLOG through --vmodule.
- logging::LoggingSettings settings;
- settings.logging_dest =
- logging::LOG_TO_SYSTEM_DEBUG_LOG | logging::LOG_TO_STDERR;
- LOG_ASSERT(logging::InitLogging(settings));
-
- const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess();
- DCHECK(cmd_line);
-
- base::CommandLine::SwitchMap switches = cmd_line->GetSwitches();
- for (base::CommandLine::SwitchMap::const_iterator it = switches.begin();
- it != switches.end(); ++it) {
- if (it->first == "test_stream_data") {
- media::g_test_stream_data->assign(it->second.c_str());
- continue;
- }
-    // Output machine-readable logs in a fixed format to a file.
- if (it->first == "output_log") {
- media::g_log_path = base::FilePath(
- base::FilePath::StringType(it->second.begin(), it->second.end()));
- continue;
- }
- if (it->first == "num_frames_to_encode") {
- std::string input(it->second.begin(), it->second.end());
- LOG_ASSERT(base::StringToInt(input, &media::g_num_frames_to_encode));
- continue;
- }
- if (it->first == "measure_latency") {
- media::g_needs_encode_latency = true;
- continue;
- }
- if (it->first == "fake_encoder") {
- media::g_fake_encoder = true;
- continue;
- }
- if (it->first == "run_at_fps") {
- media::g_run_at_fps = true;
- continue;
- }
- if (it->first == "verify_all_output") {
- media::g_verify_all_output = true;
- continue;
- }
- if (it->first == "force_level") {
- std::string input(it->second.begin(), it->second.end());
- // Only set |g_force_level| to true if input is "true"; false otherwise.
- media::g_force_level = input == "true";
- continue;
- }
-
- if (it->first == "native_input") {
-#if BUILDFLAG(IS_ASH)
- media::g_native_input = true;
-#else
- LOG(FATAL) << "Unsupported option";
-#endif
- continue;
- }
-
- if (it->first == "v" || it->first == "vmodule")
- continue;
- if (it->first == "ozone-platform" || it->first == "ozone-use-surfaceless")
- continue;
-
-    // Output per-frame metrics to a CSV file.
- if (it->first == "frame_stats") {
- media::g_frame_stats_path = base::FilePath(
- base::FilePath::StringType(it->second.begin(), it->second.end()));
- continue;
- }
- }
-
- if (media::g_needs_encode_latency && !media::g_run_at_fps) {
- // Encode latency can only be measured with --run_at_fps. Otherwise, we get
- // skewed results since it may queue too many frames at once with the same
- // encode start time.
- LOG(FATAL) << "--measure_latency requires --run_at_fps enabled to work.";
- }
-
- return base::LaunchUnitTestsSerially(
- argc, argv,
- base::BindOnce(&media::VEATestSuite::Run, base::Unretained(&test_suite)));
-}
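The state-machine checks in the removed tests above all funnel through
media::test::ClientStateNotification, a blocking Wait()/Notify() queue. A
minimal sketch of such a helper, built only on standard-library primitives
rather than the actual media::test implementation:

    #include <condition_variable>
    #include <mutex>
    #include <queue>

    // Hypothetical stand-in for media::test::ClientStateNotification: the
    // encoder thread publishes states with Notify(); the test thread blocks
    // in Wait() until the next state arrives, preserving arrival order.
    template <typename StateT>
    class StateNotificationSketch {
     public:
      void Notify(StateT state) {
        std::lock_guard<std::mutex> lock(mutex_);
        states_.push(state);
        cv_.notify_one();
      }

      StateT Wait() {
        std::unique_lock<std::mutex> lock(mutex_);
        cv_.wait(lock, [this] { return !states_.empty(); });
        StateT state = states_.front();
        states_.pop();
        return state;
      }

     private:
      std::mutex mutex_;
      std::condition_variable cv_;
      std::queue<StateT> states_;
    };

This is why the test can assert EXPECT_EQ(state, notes[i]->Wait()) for each
encoder in turn: every client reports its transitions in order, and the main
thread consumes them one state at a time.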
diff --git a/chromium/media/gpu/vp8_decoder.cc b/chromium/media/gpu/vp8_decoder.cc
index 23a88b6696b..f57d5adefe7 100644
--- a/chromium/media/gpu/vp8_decoder.cc
+++ b/chromium/media/gpu/vp8_decoder.cc
@@ -180,6 +180,10 @@ VideoCodecProfile VP8Decoder::GetProfile() const {
return VP8PROFILE_ANY;
}
+uint8_t VP8Decoder::GetBitDepth() const {
+ return 8u;
+}
+
size_t VP8Decoder::GetRequiredNumOfPictures() const {
constexpr size_t kPicsInPipeline = limits::kMaxVideoFrames + 1;
return kVP8NumFramesActive + kPicsInPipeline;
diff --git a/chromium/media/gpu/vp8_decoder.h b/chromium/media/gpu/vp8_decoder.h
index 70bf5b5e80c..193972faba4 100644
--- a/chromium/media/gpu/vp8_decoder.h
+++ b/chromium/media/gpu/vp8_decoder.h
@@ -70,6 +70,7 @@ class MEDIA_GPU_EXPORT VP8Decoder : public AcceleratedVideoDecoder {
gfx::Size GetPicSize() const override;
gfx::Rect GetVisibleRect() const override;
VideoCodecProfile GetProfile() const override;
+ uint8_t GetBitDepth() const override;
size_t GetRequiredNumOfPictures() const override;
size_t GetNumReferenceFrames() const override;
diff --git a/chromium/media/gpu/vp8_decoder_unittest.cc b/chromium/media/gpu/vp8_decoder_unittest.cc
index c7e9b289522..f04e676a9f2 100644
--- a/chromium/media/gpu/vp8_decoder_unittest.cc
+++ b/chromium/media/gpu/vp8_decoder_unittest.cc
@@ -80,6 +80,7 @@ void VP8DecoderTest::SetUp() {
void VP8DecoderTest::DecodeFirstIFrame() {
ASSERT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, Decode(kNullFrame));
ASSERT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode(kIFrame));
+ EXPECT_EQ(8u, decoder_->GetBitDepth());
EXPECT_EQ(kVideoSize, decoder_->GetPicSize());
EXPECT_LE(kRequiredNumOfPictures, decoder_->GetRequiredNumOfPictures());
}
@@ -119,6 +120,8 @@ AcceleratedVideoDecoder::DecodeResult VP8DecoderTest::Decode(
AcceleratedVideoDecoder::DecodeResult::kRanOutOfStreamData ||
result == AcceleratedVideoDecoder::DecodeResult::kConfigChange ||
result == AcceleratedVideoDecoder::DecodeResult::kDecodeError);
+ if (result != AcceleratedVideoDecoder::DecodeResult::kDecodeError)
+ EXPECT_EQ(8u, decoder_->GetBitDepth());
return result;
}
diff --git a/chromium/media/gpu/vp9_decoder.cc b/chromium/media/gpu/vp9_decoder.cc
index 29cd176fa7d..bbe17de16d1 100644
--- a/chromium/media/gpu/vp9_decoder.cc
+++ b/chromium/media/gpu/vp9_decoder.cc
@@ -20,7 +20,7 @@ namespace media {
namespace {
std::vector<uint32_t> GetSpatialLayerFrameSize(
const DecoderBuffer& decoder_buffer) {
-#if defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_ASH)
+#if defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_CHROMEOS_ASH)
const uint32_t* cue_data =
reinterpret_cast<const uint32_t*>(decoder_buffer.side_data());
if (!cue_data) {
@@ -37,7 +37,7 @@ std::vector<uint32_t> GetSpatialLayerFrameSize(
return {};
}
return std::vector<uint32_t>(cue_data, cue_data + num_of_layers);
-#endif // defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_ASH)
+#endif // defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_CHROMEOS_ASH)
return {};
}
@@ -56,6 +56,25 @@ VideoCodecProfile VP9ProfileToVideoCodecProfile(uint8_t profile) {
}
}
+bool IsValidBitDepth(uint8_t bit_depth, VideoCodecProfile profile) {
+ // Spec 7.2.
+ switch (profile) {
+ case VP9PROFILE_PROFILE0:
+ case VP9PROFILE_PROFILE1:
+ return bit_depth == 8u;
+ case VP9PROFILE_PROFILE2:
+ case VP9PROFILE_PROFILE3:
+ return bit_depth == 10u || bit_depth == 12u;
+ default:
+ NOTREACHED();
+ return false;
+ }
+}
+
+bool IsYUV420Sequence(const Vp9FrameHeader& frame_header) {
+ // Spec 7.2.2
+ return frame_header.subsampling_x == 1u && frame_header.subsampling_y == 1u;
+}
} // namespace
VP9Decoder::VP9Accelerator::VP9Accelerator() {}
@@ -101,6 +120,8 @@ bool VP9Decoder::Flush() {
void VP9Decoder::Reset() {
curr_frame_hdr_ = nullptr;
+ decrypt_config_.reset();
+ pending_pic_.reset();
ref_frames_.Clear();
@@ -112,14 +133,25 @@ void VP9Decoder::Reset() {
}
VP9Decoder::DecodeResult VP9Decoder::Decode() {
- while (1) {
+ while (true) {
+ // If we have a pending picture to decode, try that first.
+ if (pending_pic_) {
+ VP9Accelerator::Status status =
+ DecodeAndOutputPicture(std::move(pending_pic_));
+ if (status == VP9Accelerator::Status::kFail) {
+ SetError();
+ return kDecodeError;
+ }
+ if (status == VP9Accelerator::Status::kTryAgain)
+ return kTryAgain;
+ }
+
// Read a new frame header if one is not awaiting decoding already.
- std::unique_ptr<DecryptConfig> decrypt_config;
if (!curr_frame_hdr_) {
gfx::Size allocate_size;
std::unique_ptr<Vp9FrameHeader> hdr(new Vp9FrameHeader());
Vp9Parser::Result res =
- parser_.ParseNextFrame(hdr.get(), &allocate_size, &decrypt_config);
+ parser_.ParseNextFrame(hdr.get(), &allocate_size, &decrypt_config_);
switch (res) {
case Vp9Parser::kOk:
curr_frame_hdr_ = std::move(hdr);
@@ -151,6 +183,7 @@ VP9Decoder::DecodeResult VP9Decoder::Decode() {
state_ = kDecoding;
} else {
curr_frame_hdr_.reset();
+ decrypt_config_.reset();
continue;
}
}
@@ -183,6 +216,7 @@ VP9Decoder::DecodeResult VP9Decoder::Decode() {
}
curr_frame_hdr_.reset();
+ decrypt_config_.reset();
continue;
}
@@ -202,11 +236,24 @@ VP9Decoder::DecodeResult VP9Decoder::Decode() {
VLOG(1) << "Invalid profile: " << curr_frame_hdr_->profile;
return kDecodeError;
}
+ if (!IsValidBitDepth(curr_frame_hdr_->bit_depth, new_profile)) {
+ DVLOG(1) << "Invalid bit depth="
+ << base::strict_cast<int>(curr_frame_hdr_->bit_depth)
+ << ", profile=" << GetProfileName(new_profile);
+ return kDecodeError;
+ }
+ if (!IsYUV420Sequence(*curr_frame_hdr_)) {
+ DVLOG(1) << "Only YUV 4:2:0 is supported";
+ return kDecodeError;
+ }
DCHECK(!new_pic_size.IsEmpty());
- if (new_pic_size != pic_size_ || new_profile != profile_) {
+ if (new_pic_size != pic_size_ || new_profile != profile_ ||
+ curr_frame_hdr_->bit_depth != bit_depth_) {
DVLOG(1) << "New profile: " << GetProfileName(new_profile)
- << ", New resolution: " << new_pic_size.ToString();
+ << ", New resolution: " << new_pic_size.ToString()
+ << ", New bit depth: "
+ << base::strict_cast<int>(curr_frame_hdr_->bit_depth);
if (!curr_frame_hdr_->IsKeyframe() &&
!(curr_frame_hdr_->IsIntra() && pic_size_.IsEmpty())) {
@@ -222,6 +269,7 @@ VP9Decoder::DecodeResult VP9Decoder::Decode() {
}
curr_frame_hdr_.reset();
+ decrypt_config_.reset();
return kRanOutOfStreamData;
}
@@ -234,6 +282,7 @@ VP9Decoder::DecodeResult VP9Decoder::Decode() {
pic_size_ = new_pic_size;
visible_rect_ = new_render_rect;
profile_ = new_profile;
+ bit_depth_ = curr_frame_hdr_->bit_depth;
size_change_failure_counter_ = 0;
return kConfigChange;
}
@@ -247,20 +296,23 @@ VP9Decoder::DecodeResult VP9Decoder::Decode() {
pic->set_visible_rect(new_render_rect);
pic->set_bitstream_id(stream_id_);
- pic->set_decrypt_config(std::move(decrypt_config));
+ pic->set_decrypt_config(std::move(decrypt_config_));
// For VP9, container color spaces override video stream color spaces.
- if (container_color_space_.IsSpecified()) {
+ if (container_color_space_.IsSpecified())
pic->set_colorspace(container_color_space_);
- } else if (curr_frame_hdr_) {
+ else if (curr_frame_hdr_)
pic->set_colorspace(curr_frame_hdr_->GetColorSpace());
- }
+
pic->frame_hdr = std::move(curr_frame_hdr_);
- if (!DecodeAndOutputPicture(std::move(pic))) {
+ VP9Accelerator::Status status = DecodeAndOutputPicture(std::move(pic));
+ if (status == VP9Accelerator::Status::kFail) {
SetError();
return kDecodeError;
}
+ if (status == VP9Accelerator::Status::kTryAgain)
+ return kTryAgain;
}
}
@@ -279,7 +331,8 @@ void VP9Decoder::UpdateFrameContext(
std::move(context_refresh_cb).Run(frame_ctx);
}
-bool VP9Decoder::DecodeAndOutputPicture(scoped_refptr<VP9Picture> pic) {
+VP9Decoder::VP9Accelerator::Status VP9Decoder::DecodeAndOutputPicture(
+ scoped_refptr<VP9Picture> pic) {
DCHECK(!pic_size_.IsEmpty());
DCHECK(pic->frame_hdr);
@@ -293,20 +346,22 @@ bool VP9Decoder::DecodeAndOutputPicture(scoped_refptr<VP9Picture> pic) {
}
const Vp9Parser::Context& context = parser_.context();
- if (!accelerator_->SubmitDecode(pic, context.segmentation(),
- context.loop_filter(), ref_frames_,
- std::move(done_cb))) {
- return false;
+ VP9Accelerator::Status status = accelerator_->SubmitDecode(
+ pic, context.segmentation(), context.loop_filter(), ref_frames_,
+ std::move(done_cb));
+ if (status != VP9Accelerator::Status::kOk) {
+ if (status == VP9Accelerator::Status::kTryAgain)
+ pending_pic_ = std::move(pic);
+ return status;
}
if (pic->frame_hdr->show_frame) {
- if (!accelerator_->OutputPicture(pic)) {
- return false;
- }
+ if (!accelerator_->OutputPicture(pic))
+ return VP9Accelerator::Status::kFail;
}
ref_frames_.Refresh(std::move(pic));
- return true;
+ return status;
}
void VP9Decoder::SetError() {
@@ -326,6 +381,10 @@ VideoCodecProfile VP9Decoder::GetProfile() const {
return profile_;
}
+uint8_t VP9Decoder::GetBitDepth() const {
+ return bit_depth_;
+}
+
size_t VP9Decoder::GetRequiredNumOfPictures() const {
constexpr size_t kPicsInPipeline = limits::kMaxVideoFrames + 1;
return kPicsInPipeline + GetNumReferenceFrames();
diff --git a/chromium/media/gpu/vp9_decoder.h b/chromium/media/gpu/vp9_decoder.h
index 63f3ef28b30..2698e9893c7 100644
--- a/chromium/media/gpu/vp9_decoder.h
+++ b/chromium/media/gpu/vp9_decoder.h
@@ -33,6 +33,24 @@ class MEDIA_GPU_EXPORT VP9Decoder : public AcceleratedVideoDecoder {
public:
class MEDIA_GPU_EXPORT VP9Accelerator {
public:
+  // Methods may return kTryAgain if they need additional data (provided
+  // independently) in order to proceed, e.g. the key required to decode
+  // encrypted content is not yet available. This is not an unrecoverable
+  // error, but a pause that lets the application provide the required data
+  // independently. When VP9Decoder::Decode() is called again, it resumes
+  // processing of the stream by calling the same method again.
+ enum class Status {
+ // Operation completed successfully.
+ kOk,
+
+ // Operation failed.
+ kFail,
+
+ // Operation failed because some external data is missing. Retry the same
+ // operation later, once the data has been provided.
+ kTryAgain,
+ };
VP9Accelerator();
virtual ~VP9Accelerator();
@@ -61,11 +79,11 @@ class MEDIA_GPU_EXPORT VP9Decoder : public AcceleratedVideoDecoder {
// |lf_params| does not need to remain valid after this method returns.
//
// Returns kOk when successful, kTryAgain if required external data (e.g. a
// decryption key) is not yet available, and kFail otherwise.
- virtual bool SubmitDecode(scoped_refptr<VP9Picture> pic,
- const Vp9SegmentationParams& segm_params,
- const Vp9LoopFilterParams& lf_params,
- const Vp9ReferenceFrameVector& reference_frames,
- const base::OnceClosure done_cb) = 0;
+ virtual Status SubmitDecode(scoped_refptr<VP9Picture> pic,
+ const Vp9SegmentationParams& segm_params,
+ const Vp9LoopFilterParams& lf_params,
+ const Vp9ReferenceFrameVector& reference_frames,
+ const base::OnceClosure done_cb) = 0;
// Schedule output (display) of |pic|.
//
@@ -107,13 +125,15 @@ class MEDIA_GPU_EXPORT VP9Decoder : public AcceleratedVideoDecoder {
gfx::Size GetPicSize() const override;
gfx::Rect GetVisibleRect() const override;
VideoCodecProfile GetProfile() const override;
+ uint8_t GetBitDepth() const override;
size_t GetRequiredNumOfPictures() const override;
size_t GetNumReferenceFrames() const override;
private:
// Decode and possibly output |pic| (if the picture is to be shown).
- // Return true on success, false otherwise.
- bool DecodeAndOutputPicture(scoped_refptr<VP9Picture> pic);
+ // Return kOk on success, kTryAgain if this should be attempted again on the
+ // next Decode call, and kFail otherwise.
+ VP9Accelerator::Status DecodeAndOutputPicture(scoped_refptr<VP9Picture> pic);
// Get frame context state after decoding |pic| from the accelerator, and call
// |context_refresh_cb| with the acquired state.
@@ -137,8 +157,10 @@ class MEDIA_GPU_EXPORT VP9Decoder : public AcceleratedVideoDecoder {
// Current stream buffer id; to be assigned to pictures decoded from it.
int32_t stream_id_ = -1;
- // Current frame header to be used in decoding the next picture.
+ // Current frame header and decrypt config to be used in decoding the next
+ // picture.
std::unique_ptr<Vp9FrameHeader> curr_frame_hdr_;
+ std::unique_ptr<DecryptConfig> decrypt_config_;
// Current frame size that is necessary to decode |curr_frame_hdr_|.
gfx::Size curr_frame_size_;
@@ -154,6 +176,11 @@ class MEDIA_GPU_EXPORT VP9Decoder : public AcceleratedVideoDecoder {
gfx::Rect visible_rect_;
// Profile of input bitstream.
VideoCodecProfile profile_;
+ // Bit depth of input bitstream.
+ uint8_t bit_depth_ = 0;
+
+ // Pending picture for decode when accelerator returns kTryAgain.
+ scoped_refptr<VP9Picture> pending_pic_;
size_t size_change_failure_counter_ = 0;
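The kTryAgain contract above implies a retry loop in the caller: Decode()
parks the picture in |pending_pic_| and keeps returning kTryAgain until the
missing data arrives. A hypothetical driver loop sketching that interaction
(not the actual D3D11VideoDecoder; WaitForExternalData() and ReportError()
are placeholders for the embedder's key-delivery and error paths):

    // Assumes the Chromium headers used elsewhere in this file, e.g.
    // "media/gpu/vp9_decoder.h".
    void DriveDecode(media::VP9Decoder* decoder) {
      while (true) {
        switch (decoder->Decode()) {
          case media::AcceleratedVideoDecoder::kRanOutOfStreamData:
            return;  // Feed more input with SetStream() and call again.
          case media::AcceleratedVideoDecoder::kConfigChange:
            // Re-allocate pictures for the new size/profile/bit depth,
            // then loop to resume decoding.
            break;
          case media::AcceleratedVideoDecoder::kTryAgain:
            // The accelerator is missing external data (e.g. a decryption
            // key); the pending picture is retried on the next Decode().
            WaitForExternalData();
            break;
          default:
            ReportError();  // kDecodeError and other failures.
            return;
        }
      }
    }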
diff --git a/chromium/media/gpu/windows/d3d11_av1_accelerator.cc b/chromium/media/gpu/windows/d3d11_av1_accelerator.cc
new file mode 100644
index 00000000000..8d2d808ed2a
--- /dev/null
+++ b/chromium/media/gpu/windows/d3d11_av1_accelerator.cc
@@ -0,0 +1,820 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/windows/d3d11_av1_accelerator.h"
+
+#include <windows.h>
+#include <numeric>
+#include <string>
+#include <utility>
+
+#include "base/memory/ptr_util.h"
+#include "base/metrics/histogram_functions.h"
+#include "media/gpu/av1_picture.h"
+#include "media/gpu/codec_picture.h"
+#include "media/gpu/windows/d3d11_picture_buffer.h"
+
+// These are from <dxva.h> in a newer SDK than the one Chrome ships with. They
+// should be deleted once Chrome switches to the updated SDK; they have been
+// copied from: https://www.microsoft.com/en-us/download/details.aspx?id=101577
+#pragma pack(push, 1)
+typedef struct _DXVA_PicEntry_AV1 {
+ UINT width;
+ UINT height;
+
+ // Global motion parameters
+ INT wmmat[6];
+ union {
+ struct {
+ UCHAR wminvalid : 1;
+ UCHAR wmtype : 2;
+ UCHAR Reserved : 5;
+ };
+ UCHAR wGlobalMotionFlags;
+ };
+
+ UCHAR Index;
+ USHORT Reserved16Bits;
+
+} DXVA_PicEntry_AV1, *LPDXVA_PicEntry_AV1;
+
+/* AV1 picture parameters structure */
+typedef struct _DXVA_PicParams_AV1 {
+ UINT width;
+ UINT height;
+
+ UINT max_width;
+ UINT max_height;
+
+ UCHAR CurrPicTextureIndex;
+ UCHAR superres_denom;
+ UCHAR bitdepth;
+ UCHAR seq_profile;
+
+ // Tiles:
+ struct {
+ UCHAR cols;
+ UCHAR rows;
+ USHORT context_update_id;
+ USHORT widths[64];
+ USHORT heights[64];
+ } tiles;
+
+ // Coding Tools
+ union {
+ struct {
+ UINT use_128x128_superblock : 1;
+ UINT intra_edge_filter : 1;
+ UINT interintra_compound : 1;
+ UINT masked_compound : 1;
+ UINT warped_motion : 1;
+ UINT dual_filter : 1;
+ UINT jnt_comp : 1;
+ UINT screen_content_tools : 1;
+ UINT integer_mv : 1;
+ UINT cdef : 1;
+ UINT restoration : 1;
+ UINT film_grain : 1;
+ UINT intrabc : 1;
+ UINT high_precision_mv : 1;
+ UINT switchable_motion_mode : 1;
+ UINT filter_intra : 1;
+ UINT disable_frame_end_update_cdf : 1;
+ UINT disable_cdf_update : 1;
+ UINT reference_mode : 1;
+ UINT skip_mode : 1;
+ UINT reduced_tx_set : 1;
+ UINT superres : 1;
+ UINT tx_mode : 2;
+ UINT use_ref_frame_mvs : 1;
+ UINT enable_ref_frame_mvs : 1;
+ UINT reference_frame_update : 1;
+ UINT Reserved : 5;
+ };
+ UINT32 CodingParamToolFlags;
+ } coding;
+
+ // Format & Picture Info flags
+ union {
+ struct {
+ UCHAR frame_type : 2;
+ UCHAR show_frame : 1;
+ UCHAR showable_frame : 1;
+ UCHAR subsampling_x : 1;
+ UCHAR subsampling_y : 1;
+ UCHAR mono_chrome : 1;
+ UCHAR Reserved : 1;
+ };
+ UCHAR FormatAndPictureInfoFlags;
+ } format;
+
+ // References
+ UCHAR primary_ref_frame;
+ UCHAR order_hint;
+ UCHAR order_hint_bits;
+
+ DXVA_PicEntry_AV1 frame_refs[7];
+ UCHAR RefFrameMapTextureIndex[8];
+
+ // Loop filter parameters
+ struct {
+ UCHAR filter_level[2];
+ UCHAR filter_level_u;
+ UCHAR filter_level_v;
+
+ UCHAR sharpness_level;
+ union {
+ struct {
+ UCHAR mode_ref_delta_enabled : 1;
+ UCHAR mode_ref_delta_update : 1;
+ UCHAR delta_lf_multi : 1;
+ UCHAR delta_lf_present : 1;
+ UCHAR Reserved : 4;
+ };
+ UCHAR ControlFlags;
+ } DUMMYUNIONNAME;
+ CHAR ref_deltas[8];
+ CHAR mode_deltas[2];
+ UCHAR delta_lf_res;
+ UCHAR frame_restoration_type[3];
+ USHORT log2_restoration_unit_size[3];
+ UINT16 Reserved16Bits;
+ } loop_filter;
+
+ // Quantization
+ struct {
+ union {
+ struct {
+ UCHAR delta_q_present : 1;
+ UCHAR delta_q_res : 2;
+ UCHAR Reserved : 5;
+ };
+ UCHAR ControlFlags;
+ } DUMMYUNIONNAME;
+
+ UCHAR base_qindex;
+ CHAR y_dc_delta_q;
+ CHAR u_dc_delta_q;
+ CHAR v_dc_delta_q;
+ CHAR u_ac_delta_q;
+ CHAR v_ac_delta_q;
+ // using_qmatrix:
+ UCHAR qm_y;
+ UCHAR qm_u;
+ UCHAR qm_v;
+ UINT16 Reserved16Bits;
+ } quantization;
+
+ // Cdef parameters
+ struct {
+ union {
+ struct {
+ UCHAR damping : 2;
+ UCHAR bits : 2;
+ UCHAR Reserved : 4;
+ };
+ UCHAR ControlFlags;
+ } DUMMYUNIONNAME;
+
+ union {
+ struct {
+ UCHAR primary : 6;
+ UCHAR secondary : 2;
+ };
+ UCHAR combined;
+ } y_strengths[8];
+
+ union {
+ struct {
+ UCHAR primary : 6;
+ UCHAR secondary : 2;
+ };
+ UCHAR combined;
+ } uv_strengths[8];
+
+ } cdef;
+
+ UCHAR interp_filter;
+
+ // Segmentation
+ struct {
+ union {
+ struct {
+ UCHAR enabled : 1;
+ UCHAR update_map : 1;
+ UCHAR update_data : 1;
+ UCHAR temporal_update : 1;
+ UCHAR Reserved : 4;
+ };
+ UCHAR ControlFlags;
+ } DUMMYUNIONNAME;
+ UCHAR Reserved24Bits[3];
+
+ union {
+ struct {
+ UCHAR alt_q : 1;
+ UCHAR alt_lf_y_v : 1;
+ UCHAR alt_lf_y_h : 1;
+ UCHAR alt_lf_u : 1;
+ UCHAR alt_lf_v : 1;
+ UCHAR ref_frame : 1;
+ UCHAR skip : 1;
+ UCHAR globalmv : 1;
+ };
+ UCHAR mask;
+ } feature_mask[8];
+
+ SHORT feature_data[8][8];
+
+ } segmentation;
+
+ struct {
+ union {
+ struct {
+ USHORT apply_grain : 1;
+ USHORT scaling_shift_minus8 : 2;
+ USHORT chroma_scaling_from_luma : 1;
+ USHORT ar_coeff_lag : 2;
+ USHORT ar_coeff_shift_minus6 : 2;
+ USHORT grain_scale_shift : 2;
+ USHORT overlap_flag : 1;
+ USHORT clip_to_restricted_range : 1;
+ USHORT matrix_coeff_is_identity : 1;
+ USHORT Reserved : 3;
+ };
+ USHORT ControlFlags;
+ } DUMMYUNIONNAME;
+
+ USHORT grain_seed;
+ UCHAR scaling_points_y[14][2];
+ UCHAR num_y_points;
+ UCHAR scaling_points_cb[10][2];
+ UCHAR num_cb_points;
+ UCHAR scaling_points_cr[10][2];
+ UCHAR num_cr_points;
+ UCHAR ar_coeffs_y[24];
+ UCHAR ar_coeffs_cb[25];
+ UCHAR ar_coeffs_cr[25];
+ UCHAR cb_mult;
+ UCHAR cb_luma_mult;
+ UCHAR cr_mult;
+ UCHAR cr_luma_mult;
+ UCHAR Reserved8Bits;
+ SHORT cb_offset;
+ SHORT cr_offset;
+ } film_grain;
+
+ UINT Reserved32Bits;
+ UINT StatusReportFeedbackNumber;
+} DXVA_PicParams_AV1, *LPDXVA_PicParams_AV1;
+
+typedef struct _DXVA_Tile_AV1 {
+ UINT DataOffset;
+ UINT DataSize;
+ USHORT row;
+ USHORT column;
+ USHORT Reserved16Bits;
+ UCHAR anchor_frame;
+ UCHAR Reserved8Bits;
+} DXVA_Tile_AV1, *LPDXVA_Tile_AV1;
+#pragma pack(pop)
+
+namespace media {
+
+class D3D11AV1Picture : public AV1Picture {
+ public:
+ explicit D3D11AV1Picture(D3D11PictureBuffer* d3d11_picture,
+ D3D11VideoDecoderClient* client,
+ bool apply_grain)
+ : picture_buffer_(d3d11_picture),
+ client_(client),
+ apply_grain_(apply_grain),
+ picture_index_(d3d11_picture->picture_index()) {
+ picture_buffer_->set_in_picture_use(true);
+ }
+
+ bool apply_grain() const { return apply_grain_; }
+ D3D11PictureBuffer* picture_buffer() const { return picture_buffer_; }
+
+ protected:
+ ~D3D11AV1Picture() override { picture_buffer_->set_in_picture_use(false); }
+
+ private:
+ scoped_refptr<AV1Picture> CreateDuplicate() override {
+ // We've already sent off the base frame for rendering, so we can just stamp
+ // |picture_buffer_| with the updated timestamp.
+ client_->UpdateTimestamp(picture_buffer_);
+ return this;
+ }
+
+ D3D11PictureBuffer* const picture_buffer_;
+ D3D11VideoDecoderClient* const client_;
+ const bool apply_grain_;
+ const size_t picture_index_;
+};
+
+class D3D11AV1Accelerator::ScopedDecoderBuffer {
+ public:
+ ScopedDecoderBuffer(MediaLog* media_log,
+ VideoContextWrapper* context,
+ ID3D11VideoDecoder* decoder,
+ D3D11_VIDEO_DECODER_BUFFER_TYPE type)
+ : media_log_(media_log),
+ context_(context),
+ decoder_(decoder),
+ type_(type) {
+ UINT size;
+ uint8_t* buffer;
+ driver_call_result_ = context_->GetDecoderBuffer(
+ decoder_, type_, &size, reinterpret_cast<void**>(&buffer));
+ if (FAILED(driver_call_result_)) {
+ MEDIA_LOG(ERROR, media_log_)
+ << "ScopedDecoderBuffer(" << type_
+ << ")=" << logging::SystemErrorCodeToString(driver_call_result_);
+ return;
+ }
+
+ buffer_ = base::span<uint8_t>(buffer, size);
+ }
+ ScopedDecoderBuffer(ScopedDecoderBuffer&& o)
+ : media_log_(o.media_log_),
+ context_(o.context_),
+ decoder_(o.decoder_),
+ type_(o.type_),
+ buffer_(std::move(o.buffer_)) {
+ DCHECK(o.buffer_.empty());
+ }
+
+ ~ScopedDecoderBuffer() { Commit(); }
+
+ ScopedDecoderBuffer(const ScopedDecoderBuffer&) = delete;
+ ScopedDecoderBuffer& operator=(const ScopedDecoderBuffer&) = delete;
+
+ void Commit() {
+ if (buffer_.empty())
+ return;
+ driver_call_result_ = context_->ReleaseDecoderBuffer(decoder_, type_);
+ if (FAILED(driver_call_result_)) {
+ MEDIA_LOG(ERROR, media_log_)
+ << "~ScopedDecoderBuffer(" << type_
+ << ")=" << logging::SystemErrorCodeToString(driver_call_result_);
+ }
+ buffer_ = base::span<uint8_t>();
+ }
+
+ bool empty() const { return buffer_.empty(); }
+ uint8_t* data() const { return buffer_.data(); }
+ size_t size() const { return buffer_.size(); }
+ HRESULT error() const { return driver_call_result_; }
+
+ private:
+ MediaLog* const media_log_;
+ VideoContextWrapper* const context_;
+ ID3D11VideoDecoder* const decoder_;
+ const D3D11_VIDEO_DECODER_BUFFER_TYPE type_;
+ base::span<uint8_t> buffer_;
+ HRESULT driver_call_result_ = S_OK;
+};
+
+D3D11AV1Accelerator::D3D11AV1Accelerator(
+ D3D11VideoDecoderClient* client,
+ MediaLog* media_log,
+ ComD3D11VideoDevice video_device,
+ std::unique_ptr<VideoContextWrapper> video_context)
+ : client_(client),
+ media_log_(media_log->Clone()),
+ video_device_(std::move(video_device)),
+ video_context_(std::move(video_context)) {
+ DCHECK(client);
+ DCHECK(media_log_);
+ client->SetDecoderCB(base::BindRepeating(
+ &D3D11AV1Accelerator::SetVideoDecoder, base::Unretained(this)));
+}
+
+D3D11AV1Accelerator::~D3D11AV1Accelerator() {}
+
+void D3D11AV1Accelerator::RecordFailure(const std::string& fail_type,
+ const std::string& message,
+ StatusCode reason) {
+ MEDIA_LOG(ERROR, media_log_)
+ << "DX11AV1Failure(" << fail_type << ")=" << message;
+ base::UmaHistogramSparse("Media.D3D11.AV1Status", static_cast<int>(reason));
+}
+
+scoped_refptr<AV1Picture> D3D11AV1Accelerator::CreateAV1Picture(
+ bool apply_grain) {
+ D3D11PictureBuffer* picture_buffer = client_->GetPicture();
+ return picture_buffer ? base::MakeRefCounted<D3D11AV1Picture>(
+ picture_buffer, client_, apply_grain)
+ : nullptr;
+}
+
+D3D11AV1Accelerator::ScopedDecoderBuffer D3D11AV1Accelerator::GetBuffer(
+ D3D11_VIDEO_DECODER_BUFFER_TYPE type) {
+ return ScopedDecoderBuffer(media_log_.get(), video_context_.get(),
+ video_decoder_.Get(), type);
+}
+
+bool D3D11AV1Accelerator::SubmitDecoderBuffer(
+ const DXVA_PicParams_AV1& pic_params,
+ const libgav1::Vector<libgav1::TileBuffer>& tile_buffers) {
+ // Buffer #1 - AV1 specific picture parameters.
+ auto params_buffer = GetBuffer(D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS);
+ if (params_buffer.empty() || params_buffer.size() < sizeof(pic_params)) {
+ RecordFailure("SubmitDecoderBuffers",
+ logging::SystemErrorCodeToString(params_buffer.error()),
+ StatusCode::kGetPicParamBufferFailed);
+ return false;
+ }
+
+ memcpy(params_buffer.data(), &pic_params, sizeof(pic_params));
+
+ // Buffer #2 - Slice control data.
+ const auto tile_size = sizeof(DXVA_Tile_AV1) * tile_buffers.size();
+ auto tile_buffer = GetBuffer(D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL);
+ if (tile_buffer.empty() || tile_buffer.size() < tile_size) {
+ RecordFailure("SubmitDecoderBuffers",
+ logging::SystemErrorCodeToString(tile_buffer.error()),
+ StatusCode::kGetSliceControlBufferFailed);
+ return false;
+ }
+
+ auto* tiles = reinterpret_cast<DXVA_Tile_AV1*>(tile_buffer.data());
+
+ // Buffer #3 - Tile buffer bitstream data.
+  const size_t bitstream_size = std::accumulate(
+      tile_buffers.begin(), tile_buffers.end(), size_t{0},
+      [](size_t acc, const auto& buffer) { return acc + buffer.size; });
+ auto bitstream_buffer = GetBuffer(D3D11_VIDEO_DECODER_BUFFER_BITSTREAM);
+ if (bitstream_buffer.empty() || bitstream_buffer.size() < bitstream_size) {
+ RecordFailure("SubmitDecoderBuffers",
+ logging::SystemErrorCodeToString(bitstream_buffer.error()),
+ StatusCode::kGetBitstreamBufferFailed);
+ return false;
+ }
+
+ size_t tile_offset = 0;
+ for (size_t i = 0; i < tile_buffers.size(); ++i) {
+ const auto& tile = tile_buffers[i];
+ tiles[i].DataOffset = tile_offset;
+ tiles[i].DataSize = tile.size;
+ tiles[i].row = i / pic_params.tiles.cols;
+ tiles[i].column = i % pic_params.tiles.cols;
+ tiles[i].anchor_frame = 0xFF;
+
+ memcpy(bitstream_buffer.data() + tile_offset, tile.data, tile.size);
+ tile_offset += tile.size;
+ }
+
+ // Commit the buffers we prepared above.
+ params_buffer.Commit();
+ tile_buffer.Commit();
+ bitstream_buffer.Commit();
+
+ constexpr int kBuffersCount = 3;
+ VideoContextWrapper::VideoBufferWrapper buffers[kBuffersCount] = {};
+ buffers[0].BufferType = D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS;
+ buffers[0].DataSize = sizeof(pic_params);
+ buffers[1].BufferType = D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL;
+ buffers[1].DataSize = tile_size;
+ buffers[2].BufferType = D3D11_VIDEO_DECODER_BUFFER_BITSTREAM;
+ buffers[2].DataSize = bitstream_size;
+
+ const auto hr = video_context_->SubmitDecoderBuffers(video_decoder_.Get(),
+ kBuffersCount, buffers);
+ if (FAILED(hr)) {
+ RecordFailure("SubmitDecoderBuffers", logging::SystemErrorCodeToString(hr),
+ StatusCode::kSubmitDecoderBuffersFailed);
+ return false;
+ }
+
+ return true;
+}
+
+bool D3D11AV1Accelerator::SubmitDecode(
+ const AV1Picture& pic,
+ const libgav1::ObuSequenceHeader& seq_header,
+ const AV1ReferenceFrameVector& ref_frames,
+ const libgav1::Vector<libgav1::TileBuffer>& tile_buffers,
+ base::span<const uint8_t> data) {
+ const D3D11AV1Picture* pic_ptr = static_cast<const D3D11AV1Picture*>(&pic);
+ do {
+ const auto hr = video_context_->DecoderBeginFrame(
+ video_decoder_.Get(), pic_ptr->picture_buffer()->output_view().Get(), 0,
+ nullptr);
+ if (SUCCEEDED(hr)) {
+ break;
+ } else if (hr == E_PENDING || hr == D3DERR_WASSTILLDRAWING) {
+ base::PlatformThread::YieldCurrentThread();
+ } else if (FAILED(hr)) {
+ RecordFailure("DecoderBeginFrame", logging::SystemErrorCodeToString(hr),
+ StatusCode::kDecoderBeginFrameFailed);
+ return false;
+ }
+ } while (true);
+
+ DXVA_PicParams_AV1 pic_params = {0};
+ FillPicParams(pic_ptr->picture_buffer()->picture_index(),
+ pic_ptr->apply_grain(), pic.frame_header, seq_header,
+ ref_frames, &pic_params);
+
+ if (!SubmitDecoderBuffer(pic_params, tile_buffers))
+ return false;
+
+ const auto hr = video_context_->DecoderEndFrame(video_decoder_.Get());
+ if (FAILED(hr)) {
+ RecordFailure("DecoderEndFrame", logging::SystemErrorCodeToString(hr),
+ StatusCode::kDecoderEndFrameFailed);
+ return false;
+ }
+
+ return true;
+}
+
+bool D3D11AV1Accelerator::OutputPicture(const AV1Picture& pic) {
+ const auto* pic_ptr = static_cast<const D3D11AV1Picture*>(&pic);
+ return client_->OutputResult(pic_ptr, pic_ptr->picture_buffer());
+}
+
+void D3D11AV1Accelerator::SetVideoDecoder(ComD3D11VideoDecoder video_decoder) {
+ video_decoder_ = std::move(video_decoder);
+}
+
+void D3D11AV1Accelerator::FillPicParams(
+ size_t picture_index,
+ bool apply_grain,
+ const libgav1::ObuFrameHeader& frame_header,
+ const libgav1::ObuSequenceHeader& seq_header,
+ const AV1ReferenceFrameVector& ref_frames,
+ DXVA_PicParams_AV1* pp) {
+  // Note: It is unclear from the documentation whether DXVA wants these
+  // values minus 1; the docs say they correspond to the "minus_1" variants,
+  // yet Microsoft's dav1d implementation uses the full values.
+ pp->width = frame_header.width;
+ pp->height = frame_header.height;
+ pp->max_width = seq_header.max_frame_width;
+ pp->max_height = seq_header.max_frame_height;
+
+ pp->CurrPicTextureIndex = picture_index;
+ pp->superres_denom = frame_header.use_superres
+ ? frame_header.superres_scale_denominator
+ : libgav1::kSuperResScaleNumerator;
+ pp->bitdepth = seq_header.color_config.bitdepth;
+ pp->seq_profile = seq_header.profile;
+
+ const auto& tile_info = frame_header.tile_info;
+ pp->tiles.cols = tile_info.tile_columns;
+ pp->tiles.rows = tile_info.tile_rows;
+ pp->tiles.context_update_id = tile_info.context_update_id;
+
+ if (tile_info.uniform_spacing) {
+ // TODO(b/174802667): Just use tile_column_width_in_superblocks and
+ // tile_row_height_in_superblocks once they're always populated by libgav1.
+ const auto tile_width_sb =
+ (tile_info.sb_columns + (1 << tile_info.tile_columns_log2) - 1) >>
+ tile_info.tile_columns_log2;
+ const int last_width_idx = tile_info.tile_columns - 1;
+ for (int i = 0; i < last_width_idx; ++i)
+ pp->tiles.widths[i] = tile_width_sb;
+ pp->tiles.widths[last_width_idx] =
+ tile_info.sb_columns - last_width_idx * tile_width_sb;
+
+ const auto tile_height_sb =
+ (tile_info.sb_rows + (1 << tile_info.tile_rows_log2) - 1) >>
+ tile_info.tile_rows_log2;
+ const int last_height_idx = tile_info.tile_rows - 1;
+ for (int i = 0; i < last_height_idx; ++i)
+ pp->tiles.heights[i] = tile_height_sb;
+ pp->tiles.heights[last_height_idx] =
+ tile_info.sb_rows - last_height_idx * tile_height_sb;
+ } else {
+ for (int i = 0; i < pp->tiles.cols; ++i) {
+ pp->tiles.widths[i] =
+ frame_header.tile_info.tile_column_width_in_superblocks[i];
+ }
+ for (int i = 0; i < pp->tiles.rows; ++i) {
+ pp->tiles.heights[i] =
+ frame_header.tile_info.tile_row_height_in_superblocks[i];
+ }
+ }
+
+ pp->coding.use_128x128_superblock = seq_header.use_128x128_superblock;
+ pp->coding.intra_edge_filter = seq_header.enable_intra_edge_filter;
+ pp->coding.interintra_compound = seq_header.enable_interintra_compound;
+ pp->coding.masked_compound = seq_header.enable_masked_compound;
+
+  // Note: The ObuSequenceHeader has an |enable_warped_motion| field and the
+  // ObuFrameHeader has an |allow_warped_motion|. Per the DXVA spec,
+ // "[warped_motion] corresponds to the syntax element named
+ // allow_warped_motion from the specification."
+ pp->coding.warped_motion = frame_header.allow_warped_motion;
+
+ pp->coding.dual_filter = seq_header.enable_dual_filter;
+ pp->coding.jnt_comp = seq_header.enable_jnt_comp;
+
+ // Another field in both the sequence and frame header, per the DXVA spec:
+ // "[screen_content_tools] corresponds to the syntax element named
+ // allow_screen_content_tools from the specification."
+ pp->coding.screen_content_tools = frame_header.allow_screen_content_tools;
+
+ // Another field in both the sequence and frame header, per the DXVA spec:
+ // "[integer_mv] corresponds to the syntax element named force_integer_mv
+ // from the specification."
+ pp->coding.integer_mv = frame_header.force_integer_mv;
+
+ pp->coding.cdef = seq_header.enable_cdef;
+ pp->coding.restoration = seq_header.enable_restoration;
+ pp->coding.film_grain = seq_header.film_grain_params_present;
+ pp->coding.intrabc = frame_header.allow_intrabc;
+ pp->coding.high_precision_mv = frame_header.allow_high_precision_mv;
+ pp->coding.switchable_motion_mode = frame_header.is_motion_mode_switchable;
+ pp->coding.filter_intra = seq_header.enable_filter_intra;
+ pp->coding.disable_frame_end_update_cdf =
+ !frame_header.enable_frame_end_update_cdf;
+ pp->coding.disable_cdf_update = !frame_header.enable_cdf_update;
+ pp->coding.reference_mode = frame_header.reference_mode_select;
+ pp->coding.skip_mode = frame_header.skip_mode_present;
+ pp->coding.reduced_tx_set = frame_header.reduced_tx_set;
+ pp->coding.superres = frame_header.use_superres;
+ pp->coding.tx_mode = frame_header.tx_mode;
+ pp->coding.use_ref_frame_mvs = frame_header.use_ref_frame_mvs;
+ pp->coding.enable_ref_frame_mvs = seq_header.enable_ref_frame_mvs;
+ pp->coding.reference_frame_update =
+ !(frame_header.show_existing_frame &&
+ frame_header.frame_type == libgav1::kFrameKey);
+
+ pp->format.frame_type = frame_header.frame_type;
+ pp->format.show_frame = frame_header.show_frame;
+ pp->format.showable_frame = frame_header.showable_frame;
+ pp->format.subsampling_x = seq_header.color_config.subsampling_x;
+ pp->format.subsampling_y = seq_header.color_config.subsampling_y;
+ pp->format.mono_chrome = seq_header.color_config.is_monochrome;
+
+ pp->primary_ref_frame = frame_header.primary_reference_frame;
+ pp->order_hint = frame_header.order_hint;
+ pp->order_hint_bits = seq_header.order_hint_bits;
+
+ for (size_t i = 0; i < libgav1::kNumReferenceFrameTypes - 1; ++i) {
+ if (libgav1::IsIntraFrame(frame_header.frame_type)) {
+ pp->frame_refs[i].Index = 0xFF;
+ continue;
+ }
+
+ const auto ref_idx = frame_header.reference_frame_index[i];
+ const auto* rp =
+ static_cast<const D3D11AV1Picture*>(ref_frames[ref_idx].get());
+ if (!rp) {
+ pp->frame_refs[i].Index = 0xFF;
+ continue;
+ }
+
+ pp->frame_refs[i].width = rp->frame_header.width;
+ pp->frame_refs[i].height = rp->frame_header.height;
+
+ const auto& gm =
+ frame_header.global_motion[libgav1::kReferenceFrameLast + i];
+ for (size_t j = 0; j < 6; ++j)
+ pp->frame_refs[i].wmmat[j] = gm.params[j];
+ pp->frame_refs[i].wminvalid =
+ gm.type == libgav1::kGlobalMotionTransformationTypeIdentity;
+
+ pp->frame_refs[i].wmtype = gm.type;
+ pp->frame_refs[i].Index = ref_idx;
+ }
+
+ for (size_t i = 0; i < libgav1::kNumReferenceFrameTypes; ++i) {
+ const auto* rp = static_cast<const D3D11AV1Picture*>(ref_frames[i].get());
+ pp->RefFrameMapTextureIndex[i] =
+ rp ? rp->picture_buffer()->picture_index() : 0xFF;
+ }
+
+ pp->loop_filter.filter_level[0] = frame_header.loop_filter.level[0];
+ pp->loop_filter.filter_level[1] = frame_header.loop_filter.level[1];
+ pp->loop_filter.filter_level_u = frame_header.loop_filter.level[2];
+ pp->loop_filter.filter_level_v = frame_header.loop_filter.level[3];
+ pp->loop_filter.sharpness_level = frame_header.loop_filter.sharpness;
+ pp->loop_filter.mode_ref_delta_enabled =
+ frame_header.loop_filter.delta_enabled;
+ pp->loop_filter.mode_ref_delta_update = frame_header.loop_filter.delta_update;
+ pp->loop_filter.delta_lf_multi = frame_header.delta_lf.multi;
+ pp->loop_filter.delta_lf_present = frame_header.delta_lf.present;
+
+ for (size_t i = 0; i < libgav1::kNumReferenceFrameTypes; ++i)
+ pp->loop_filter.ref_deltas[i] = frame_header.loop_filter.ref_deltas[i];
+ pp->loop_filter.mode_deltas[0] = frame_header.loop_filter.mode_deltas[0];
+ pp->loop_filter.mode_deltas[1] = frame_header.loop_filter.mode_deltas[1];
+ pp->loop_filter.delta_lf_res = frame_header.delta_lf.scale;
+
+ for (size_t i = 0; i < libgav1::kMaxPlanes; ++i) {
+ constexpr uint8_t kD3D11LoopRestorationMapping[4] = {
+ 0, // libgav1::kLoopRestorationTypeNone,
+ 3, // libgav1::kLoopRestorationTypeSwitchable,
+ 1, // libgav1::kLoopRestorationTypeWiener,
+ 2, // libgav1::kLoopRestorationTypeSgrProj
+ };
+
+ pp->loop_filter.frame_restoration_type[i] =
+ kD3D11LoopRestorationMapping[frame_header.loop_restoration.type[i]];
+ pp->loop_filter.log2_restoration_unit_size[i] =
+ frame_header.loop_restoration.unit_size_log2[i];
+ }
+
+ pp->quantization.delta_q_present = frame_header.delta_q.present;
+ pp->quantization.delta_q_res = frame_header.delta_q.scale;
+ pp->quantization.base_qindex = frame_header.quantizer.base_index;
+ pp->quantization.y_dc_delta_q = frame_header.quantizer.delta_dc[0];
+ pp->quantization.u_dc_delta_q = frame_header.quantizer.delta_dc[1];
+ pp->quantization.v_dc_delta_q = frame_header.quantizer.delta_dc[2];
+ pp->quantization.u_ac_delta_q = frame_header.quantizer.delta_ac[1];
+ pp->quantization.v_ac_delta_q = frame_header.quantizer.delta_ac[2];
+ pp->quantization.qm_y = frame_header.quantizer.use_matrix
+ ? frame_header.quantizer.matrix_level[0]
+ : 0xFF;
+ pp->quantization.qm_u = frame_header.quantizer.use_matrix
+ ? frame_header.quantizer.matrix_level[1]
+ : 0xFF;
+ pp->quantization.qm_v = frame_header.quantizer.use_matrix
+ ? frame_header.quantizer.matrix_level[2]
+ : 0xFF;
+
+ // libgav1 stores the computed versions of the cdef values, so we must undo
+ // the computation for DXVA. See ObuParser::ParseCdefParameters().
+ const uint8_t coeff_shift = pp->bitdepth - 8;
+ pp->cdef.damping = frame_header.cdef.damping - coeff_shift - 3u;
+ pp->cdef.bits = frame_header.cdef.bits;
+ for (size_t i = 0; i < libgav1::kMaxCdefStrengths; ++i) {
+ pp->cdef.y_strengths[i].primary =
+ frame_header.cdef.y_primary_strength[i] >> coeff_shift;
+ pp->cdef.y_strengths[i].secondary =
+ frame_header.cdef.y_secondary_strength[i] >> coeff_shift;
+ pp->cdef.uv_strengths[i].primary =
+ frame_header.cdef.uv_primary_strength[i] >> coeff_shift;
+ pp->cdef.uv_strengths[i].secondary =
+ frame_header.cdef.uv_secondary_strength[i] >> coeff_shift;
+ }
+
+ pp->interp_filter = frame_header.interpolation_filter;
+
+ pp->segmentation.enabled = frame_header.segmentation.enabled;
+ pp->segmentation.update_map = frame_header.segmentation.update_map;
+ pp->segmentation.update_data = frame_header.segmentation.update_data;
+ pp->segmentation.temporal_update = frame_header.segmentation.temporal_update;
+ for (size_t i = 0; i < libgav1::kMaxSegments; ++i) {
+ for (size_t j = 0; j < libgav1::kSegmentFeatureMax; ++j) {
+ pp->segmentation.feature_mask[i].mask |=
+ frame_header.segmentation.feature_enabled[i][j] << j;
+ pp->segmentation.feature_data[i][j] =
+ frame_header.segmentation.feature_data[i][j];
+ }
+ }
+
+ if (apply_grain) {
+ const auto& fg = frame_header.film_grain_params;
+ pp->film_grain.apply_grain = fg.apply_grain;
+ pp->film_grain.scaling_shift_minus8 = fg.chroma_scaling - 8;
+ pp->film_grain.chroma_scaling_from_luma = fg.chroma_scaling_from_luma;
+ pp->film_grain.ar_coeff_lag = fg.auto_regression_coeff_lag;
+ pp->film_grain.ar_coeff_shift_minus6 = fg.auto_regression_shift - 6;
+ pp->film_grain.grain_scale_shift = fg.grain_scale_shift;
+ pp->film_grain.overlap_flag = fg.overlap_flag;
+ pp->film_grain.clip_to_restricted_range = fg.clip_to_restricted_range;
+ pp->film_grain.matrix_coeff_is_identity =
+ seq_header.color_config.matrix_coefficients ==
+ libgav1::kMatrixCoefficientsIdentity;
+ pp->film_grain.grain_seed = fg.grain_seed;
+ pp->film_grain.num_y_points = fg.num_y_points;
+ for (uint8_t i = 0; i < fg.num_y_points; ++i) {
+ pp->film_grain.scaling_points_y[i][0] = fg.point_y_value[i];
+ pp->film_grain.scaling_points_y[i][1] = fg.point_y_scaling[i];
+ }
+ pp->film_grain.num_cb_points = fg.num_u_points;
+ for (uint8_t i = 0; i < fg.num_u_points; ++i) {
+ pp->film_grain.scaling_points_cb[i][0] = fg.point_u_value[i];
+ pp->film_grain.scaling_points_cb[i][1] = fg.point_u_scaling[i];
+ }
+ pp->film_grain.num_cr_points = fg.num_v_points;
+ for (uint8_t i = 0; i < fg.num_v_points; ++i) {
+ pp->film_grain.scaling_points_cr[i][0] = fg.point_v_value[i];
+ pp->film_grain.scaling_points_cr[i][1] = fg.point_v_scaling[i];
+ }
+ for (size_t i = 0; i < base::size(fg.auto_regression_coeff_y); ++i) {
+ pp->film_grain.ar_coeffs_y[i] = fg.auto_regression_coeff_y[i] + 128;
+ }
+ for (size_t i = 0; i < base::size(fg.auto_regression_coeff_u); ++i) {
+ pp->film_grain.ar_coeffs_cb[i] = fg.auto_regression_coeff_u[i] + 128;
+ pp->film_grain.ar_coeffs_cr[i] = fg.auto_regression_coeff_v[i] + 128;
+ }
+ pp->film_grain.cb_mult = fg.u_multiplier;
+ pp->film_grain.cb_luma_mult = fg.u_luma_multiplier;
+ pp->film_grain.cb_offset = fg.u_offset;
+ pp->film_grain.cr_mult = fg.v_multiplier;
+ pp->film_grain.cr_luma_mult = fg.v_luma_multiplier;
+ pp->film_grain.cr_offset = fg.v_offset;
+ }
+
+  // Per DXVA, StatusReportFeedbackNumber "should not be equal to 0", but
+  // setting it crashes, so it is left at zero for now:
+ // pp->StatusReportFeedbackNumber = ++status_feedback_;
+}
+
+} // namespace media
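For uniform tile spacing, FillPicParams() above gives every tile but the last
a size of ceil(sb_count / 2^log2_tiles) superblocks and the last tile the
remainder. The same arithmetic as a standalone, hypothetical helper with a
worked example:

    #include <cstdint>
    #include <vector>

    // Mirrors the ceiling-division in FillPicParams(): each tile but the
    // last gets ceil(sb_count / 2^log2_tiles) superblocks; the last gets
    // whatever remains.
    std::vector<uint16_t> UniformTileSizesInSuperblocks(int sb_count,
                                                        int log2_tiles) {
      const int tiles = 1 << log2_tiles;
      const int tile_sb = (sb_count + tiles - 1) >> log2_tiles;  // ceil.
      std::vector<uint16_t> sizes(tiles, tile_sb);
      sizes.back() = sb_count - (tiles - 1) * tile_sb;
      return sizes;
    }

    // Example: 19 superblock columns split into 4 tiles -> {5, 5, 5, 4}.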
diff --git a/chromium/media/gpu/windows/d3d11_av1_accelerator.h b/chromium/media/gpu/windows/d3d11_av1_accelerator.h
new file mode 100644
index 00000000000..c1ee5c450a1
--- /dev/null
+++ b/chromium/media/gpu/windows/d3d11_av1_accelerator.h
@@ -0,0 +1,74 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_WINDOWS_D3D11_AV1_ACCELERATOR_H_
+#define MEDIA_GPU_WINDOWS_D3D11_AV1_ACCELERATOR_H_
+
+#include <d3d11_1.h>
+#include <d3d9.h>
+#include <dxva.h>
+#include <windows.h>
+#include <wrl/client.h>
+
+#include "base/callback_helpers.h"
+#include "media/base/media_log.h"
+#include "media/base/status_codes.h"
+#include "media/gpu/av1_decoder.h"
+#include "media/gpu/windows/d3d11_com_defs.h"
+#include "media/gpu/windows/d3d11_video_context_wrapper.h"
+#include "media/gpu/windows/d3d11_video_decoder_client.h"
+
+typedef struct _DXVA_PicParams_AV1 DXVA_PicParams_AV1;
+typedef struct _DXVA_Tile_AV1 DXVA_Tile_AV1;
+
+namespace media {
+
+class D3D11AV1Accelerator : public AV1Decoder::AV1Accelerator {
+ public:
+ D3D11AV1Accelerator(D3D11VideoDecoderClient* client,
+ MediaLog* media_log,
+ ComD3D11VideoDevice video_device,
+ std::unique_ptr<VideoContextWrapper> video_context);
+ ~D3D11AV1Accelerator() override;
+
+ scoped_refptr<AV1Picture> CreateAV1Picture(bool apply_grain) override;
+
+ bool SubmitDecode(const AV1Picture& pic,
+ const libgav1::ObuSequenceHeader& seq_header,
+ const AV1ReferenceFrameVector& ref_frames,
+ const libgav1::Vector<libgav1::TileBuffer>& tile_buffers,
+ base::span<const uint8_t> data) override;
+
+ bool OutputPicture(const AV1Picture& pic) override;
+
+ private:
+ class ScopedDecoderBuffer;
+ ScopedDecoderBuffer GetBuffer(D3D11_VIDEO_DECODER_BUFFER_TYPE type);
+
+ bool SubmitDecoderBuffer(
+ const DXVA_PicParams_AV1& pic_params,
+ const libgav1::Vector<libgav1::TileBuffer>& tile_buffers);
+ void RecordFailure(const std::string& fail_type,
+ const std::string& message,
+ StatusCode reason);
+ void SetVideoDecoder(ComD3D11VideoDecoder video_decoder);
+ void FillPicParams(size_t picture_index,
+ bool apply_grain,
+ const libgav1::ObuFrameHeader& frame_header,
+ const libgav1::ObuSequenceHeader& seq_header,
+ const AV1ReferenceFrameVector& ref_frames,
+ DXVA_PicParams_AV1* pp);
+
+ D3D11VideoDecoderClient* client_;
+ std::unique_ptr<MediaLog> media_log_;
+ ComD3D11VideoDecoder video_decoder_;
+ ComD3D11VideoDevice video_device_;
+ std::unique_ptr<VideoContextWrapper> video_context_;
+
+ DISALLOW_COPY_AND_ASSIGN(D3D11AV1Accelerator);
+};
+
+} // namespace media
+
+#endif // MEDIA_GPU_WINDOWS_D3D11_AV1_ACCELERATOR_H_
diff --git a/chromium/media/gpu/windows/d3d11_decoder_configurator.cc b/chromium/media/gpu/windows/d3d11_decoder_configurator.cc
index ae4d5b09709..59f0823ca5a 100644
--- a/chromium/media/gpu/windows/d3d11_decoder_configurator.cc
+++ b/chromium/media/gpu/windows/d3d11_decoder_configurator.cc
@@ -5,12 +5,16 @@
#include "media/gpu/windows/d3d11_decoder_configurator.h"
#include <d3d11.h>
+#include <d3d9.h>
+#include <dxva2api.h>
#include "base/feature_list.h"
#include "media/base/media_log.h"
#include "media/base/media_switches.h"
#include "media/base/status_codes.h"
#include "media/base/win/hresult_status_helper.h"
+#include "media/base/win/mf_helpers.h"
+#include "media/gpu/windows/av1_guids.h"
#include "media/gpu/windows/d3d11_copying_texture_wrapper.h"
#include "ui/gfx/geometry/size.h"
#include "ui/gl/direct_composition_surface_win.h"
@@ -33,29 +37,35 @@ std::unique_ptr<D3D11DecoderConfigurator> D3D11DecoderConfigurator::Create(
const gpu::GpuPreferences& gpu_preferences,
const gpu::GpuDriverBugWorkarounds& workarounds,
const VideoDecoderConfig& config,
+ uint8_t bit_depth,
MediaLog* media_log) {
- bool supports_nv12_decode_swap_chain =
+ const bool supports_nv12_decode_swap_chain =
gl::DirectCompositionSurfaceWin::IsDecodeSwapChainSupported();
-
- DXGI_FORMAT decoder_dxgi_format = DXGI_FORMAT_NV12;
+ const auto decoder_dxgi_format =
+ bit_depth == 8 ? DXGI_FORMAT_NV12 : DXGI_FORMAT_P010;
GUID decoder_guid = {};
if (config.codec() == kCodecH264) {
- MEDIA_LOG(INFO, media_log) << "D3D11VideoDecoder is using h264 / NV12";
decoder_guid = D3D11_DECODER_PROFILE_H264_VLD_NOFGT;
} else if (config.profile() == VP9PROFILE_PROFILE0) {
- MEDIA_LOG(INFO, media_log) << "D3D11VideoDecoder is using vp9p0 / NV12";
decoder_guid = D3D11_DECODER_PROFILE_VP9_VLD_PROFILE0;
} else if (config.profile() == VP9PROFILE_PROFILE2) {
- MEDIA_LOG(INFO, media_log) << "D3D11VideoDecoder is using vp9p2 / P010";
decoder_guid = D3D11_DECODER_PROFILE_VP9_VLD_10BIT_PROFILE2;
- decoder_dxgi_format = DXGI_FORMAT_P010;
+ } else if (config.profile() == AV1PROFILE_PROFILE_MAIN) {
+ decoder_guid = DXVA_ModeAV1_VLD_Profile0;
+ } else if (config.profile() == AV1PROFILE_PROFILE_HIGH) {
+ decoder_guid = DXVA_ModeAV1_VLD_Profile1;
+ } else if (config.profile() == AV1PROFILE_PROFILE_PRO) {
+ decoder_guid = DXVA_ModeAV1_VLD_Profile2;
} else {
- // TODO(tmathmeyer) support other profiles in the future.
MEDIA_LOG(INFO, media_log)
<< "D3D11VideoDecoder does not support codec " << config.codec();
return nullptr;
}
+ MEDIA_LOG(INFO, media_log)
+ << "D3D11VideoDecoder is using " << GetProfileName(config.profile())
+ << " / " << (decoder_dxgi_format == DXGI_FORMAT_NV12 ? "NV12" : "P010");
+
return std::make_unique<D3D11DecoderConfigurator>(
decoder_dxgi_format, decoder_guid, config.coded_size(),
config.is_encrypted(), supports_nv12_decode_swap_chain);
@@ -84,11 +94,15 @@ StatusOr<ComD3D11Texture2D> D3D11DecoderConfigurator::CreateOutputTexture(
ComD3D11Texture2D texture;
HRESULT hr =
device->CreateTexture2D(&output_texture_desc_, nullptr, &texture);
- if (!SUCCEEDED(hr)) {
+ if (FAILED(hr)) {
+ return Status(StatusCode::kCreateDecoderOutputTextureFailed)
+ .AddCause(HresultToStatus(hr));
+ }
+ hr = SetDebugName(texture.Get(), "D3D11Decoder_ConfiguratorOutput");
+ if (FAILED(hr)) {
return Status(StatusCode::kCreateDecoderOutputTextureFailed)
.AddCause(HresultToStatus(hr));
}
-
return texture;
}
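
The hunk above replaces the per-profile format logging with a single selection rule. A condensed sketch of the body of Create() (VP9 branches elided), using only identifiers that appear in this patch; the AV1 GUIDs come from av1_guids.h:

    // Output format follows bit depth; the decoder GUID follows the profile.
    const DXGI_FORMAT format =
        (bit_depth == 8) ? DXGI_FORMAT_NV12 : DXGI_FORMAT_P010;
    GUID guid = {};
    if (config.codec() == kCodecH264)
      guid = D3D11_DECODER_PROFILE_H264_VLD_NOFGT;
    else if (config.profile() == AV1PROFILE_PROFILE_MAIN)
      guid = DXVA_ModeAV1_VLD_Profile0;  // _Profile1 / _Profile2 for HIGH / PRO.
    else
      return nullptr;  // Unsupported profile; Create() logs and bails out.
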
diff --git a/chromium/media/gpu/windows/d3d11_decoder_configurator.h b/chromium/media/gpu/windows/d3d11_decoder_configurator.h
index b1f3c7934a4..beb99ecfb97 100644
--- a/chromium/media/gpu/windows/d3d11_decoder_configurator.h
+++ b/chromium/media/gpu/windows/d3d11_decoder_configurator.h
@@ -36,6 +36,7 @@ class MEDIA_GPU_EXPORT D3D11DecoderConfigurator {
const gpu::GpuPreferences& gpu_preferences,
const gpu::GpuDriverBugWorkarounds& workarounds,
const VideoDecoderConfig& config,
+ uint8_t bit_depth,
MediaLog* media_log);
bool SupportsDevice(ComD3D11VideoDevice video_device);
diff --git a/chromium/media/gpu/windows/d3d11_decoder_configurator_unittest.cc b/chromium/media/gpu/windows/d3d11_decoder_configurator_unittest.cc
index eee52186b3f..4bc4976933e 100644
--- a/chromium/media/gpu/windows/d3d11_decoder_configurator_unittest.cc
+++ b/chromium/media/gpu/windows/d3d11_decoder_configurator_unittest.cc
@@ -6,6 +6,7 @@
#include "media/base/media_util.h"
#include "media/base/win/d3d11_mocks.h"
+#include "media/gpu/windows/av1_guids.h"
#include "media/gpu/windows/d3d11_decoder_configurator.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -35,14 +36,15 @@ class D3D11DecoderConfiguratorUnittest : public ::testing::Test {
std::unique_ptr<D3D11DecoderConfigurator> CreateWithDefaultGPUInfo(
const VideoDecoderConfig& config,
- bool zero_copy_enabled = true) {
+ bool zero_copy_enabled = true,
+ uint8_t bit_depth = 8) {
gpu::GpuPreferences prefs;
prefs.enable_zero_copy_dxgi_video = zero_copy_enabled;
gpu::GpuDriverBugWorkarounds workarounds;
workarounds.disable_dxgi_zero_copy_video = false;
auto media_log = std::make_unique<NullMediaLog>();
return D3D11DecoderConfigurator::Create(prefs, workarounds, config,
- media_log.get());
+ bit_depth, media_log.get());
}
};
@@ -57,13 +59,25 @@ TEST_F(D3D11DecoderConfiguratorUnittest, VP9Profile0RightFormats) {
TEST_F(D3D11DecoderConfiguratorUnittest, VP9Profile2RightFormats) {
auto configurator = CreateWithDefaultGPUInfo(
- CreateDecoderConfig(VP9PROFILE_PROFILE2, {0, 0}, false), false);
+ CreateDecoderConfig(VP9PROFILE_PROFILE2, {0, 0}, false), false, 10);
EXPECT_EQ(configurator->DecoderGuid(),
D3D11_DECODER_PROFILE_VP9_VLD_10BIT_PROFILE2);
EXPECT_EQ(configurator->DecoderDescriptor()->OutputFormat, DXGI_FORMAT_P010);
}
+TEST_F(D3D11DecoderConfiguratorUnittest, AV1ProfileRightFormats) {
+ auto configurator = CreateWithDefaultGPUInfo(
+ CreateDecoderConfig(AV1PROFILE_PROFILE_MAIN, {0, 0}, false), false, 8);
+ EXPECT_EQ(configurator->DecoderGuid(), DXVA_ModeAV1_VLD_Profile0);
+ EXPECT_EQ(configurator->DecoderDescriptor()->OutputFormat, DXGI_FORMAT_NV12);
+
+ configurator = CreateWithDefaultGPUInfo(
+ CreateDecoderConfig(AV1PROFILE_PROFILE_MAIN, {0, 0}, false), false, 10);
+ EXPECT_EQ(configurator->DecoderGuid(), DXVA_ModeAV1_VLD_Profile0);
+ EXPECT_EQ(configurator->DecoderDescriptor()->OutputFormat, DXGI_FORMAT_P010);
+}
+
TEST_F(D3D11DecoderConfiguratorUnittest, SupportsDeviceNoProfiles) {
auto configurator = CreateWithDefaultGPUInfo(
CreateDecoderConfig(VP9PROFILE_PROFILE0, {0, 0}, false));
diff --git a/chromium/media/gpu/windows/d3d11_h264_accelerator.cc b/chromium/media/gpu/windows/d3d11_h264_accelerator.cc
index e87c1ece44f..20acf54e7cc 100644
--- a/chromium/media/gpu/windows/d3d11_h264_accelerator.cc
+++ b/chromium/media/gpu/windows/d3d11_h264_accelerator.cc
@@ -7,6 +7,8 @@
#include <windows.h>
#include "base/memory/ptr_util.h"
+#include "base/metrics/histogram_functions.h"
+#include "base/strings/string_number_conversions.h"
#include "base/trace_event/trace_event.h"
#include "media/base/media_log.h"
#include "media/base/win/mf_helpers.h"
@@ -95,7 +97,8 @@ DecoderStatus D3D11H264Accelerator::SubmitFrameMetadata(
scoped_refptr<H264Picture> pic) {
const bool is_encrypted = pic->decrypt_config();
if (is_encrypted) {
- RecordFailure("Cannot find decrypt context for the frame.");
+ RecordFailure("Cannot find decrypt context for the frame.",
+ StatusCode::kCryptoConfigFailed);
return DecoderStatus::kFail;
}
@@ -111,7 +114,8 @@ DecoderStatus D3D11H264Accelerator::SubmitFrameMetadata(
// TODO(liberato): For now, just busy wait.
;
} else if (!SUCCEEDED(hr)) {
- RecordFailure("DecoderBeginFrame failed", hr);
+ RecordFailure("DecoderBeginFrame failed",
+ StatusCode::kDecoderBeginFrameFailed, hr);
return DecoderStatus::kFail;
} else {
break;
@@ -161,7 +165,8 @@ bool D3D11H264Accelerator::RetrieveBitstreamBuffer() {
video_decoder_.Get(), D3D11_VIDEO_DECODER_BUFFER_BITSTREAM, &buffer_size,
&buffer);
if (!SUCCEEDED(hr)) {
- RecordFailure("GetDecoderBuffer (Bitstream) failed", hr);
+ RecordFailure("GetDecoderBuffer (Bitstream) failed",
+ StatusCode::kGetBitstreamBufferFailed, hr);
return false;
}
bitstream_buffer_bytes_ = (uint8_t*)buffer;
@@ -333,7 +338,8 @@ DecoderStatus D3D11H264Accelerator::SubmitSlice(
video_decoder_.Get(), D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS,
&buffer_size, &buffer);
if (!SUCCEEDED(hr)) {
- RecordFailure("ReleaseDecoderBuffer (PictureParams) failed", hr);
+ RecordFailure("GetDecoderBuffer (PictureParams) failed",
+ StatusCode::kGetPicParamBufferFailed, hr);
return DecoderStatus::kFail;
}
@@ -341,7 +347,8 @@ DecoderStatus D3D11H264Accelerator::SubmitSlice(
hr = video_context_->ReleaseDecoderBuffer(
video_decoder_.Get(), D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS);
if (!SUCCEEDED(hr)) {
- RecordFailure("ReleaseDecoderBuffer (PictureParams) failed", hr);
+ RecordFailure("ReleaseDecoderBuffer (PictureParams) failed",
+ StatusCode::kReleasePicParamBufferFailed, hr);
return DecoderStatus::kFail;
}
@@ -373,7 +380,8 @@ DecoderStatus D3D11H264Accelerator::SubmitSlice(
D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX, &buffer_size,
&buffer);
if (!SUCCEEDED(hr)) {
- RecordFailure("GetDecoderBuffer (QuantMatrix) failed", hr);
+ RecordFailure("GetDecoderBuffer (QuantMatrix) failed",
+ StatusCode::kGetQuantBufferFailed, hr);
return DecoderStatus::kFail;
}
memcpy(buffer, &iq_matrix_buf, sizeof(iq_matrix_buf));
@@ -381,7 +389,8 @@ DecoderStatus D3D11H264Accelerator::SubmitSlice(
video_decoder_.Get(),
D3D11_VIDEO_DECODER_BUFFER_INVERSE_QUANTIZATION_MATRIX);
if (!SUCCEEDED(hr)) {
- RecordFailure("ReleaseDecoderBuffer (QuantMatrix) failed", hr);
+ RecordFailure("ReleaseDecoderBuffer (QuantMatrix) failed",
+ StatusCode::kReleaseQuantBufferFailed, hr);
return DecoderStatus::kFail;
}
@@ -399,8 +408,9 @@ DecoderStatus D3D11H264Accelerator::SubmitSlice(
// the subsample ClearSize adjustment below should work.
if (bitstream_buffer_size_ < remaining_bitstream) {
RecordFailure("Input slice NALU (" + std::to_string(remaining_bitstream) +
- ") too big to fit in the bistream buffer (" +
- std::to_string(bitstream_buffer_size_) + ").");
+ ") too big to fit in the bistream buffer (" +
+ base::NumberToString(bitstream_buffer_size_) + ").",
+ StatusCode::kBitstreamBufferSliceTooBig);
return DecoderStatus::kFail;
}
@@ -420,15 +430,11 @@ DecoderStatus D3D11H264Accelerator::SubmitSlice(
while (remaining_bitstream > 0) {
if (bitstream_buffer_size_ < remaining_bitstream &&
slice_info_.size() > 0) {
- if (!SubmitSliceData()) {
- RecordFailure("SubmitSliceData failed");
+ if (!SubmitSliceData())
return DecoderStatus::kFail;
- }
- if (!RetrieveBitstreamBuffer()) {
- RecordFailure("RetrieveBitstreamBuffer failed");
+ if (!RetrieveBitstreamBuffer())
return DecoderStatus::kFail;
- }
}
size_t bytes_to_copy = remaining_bitstream;
@@ -482,7 +488,8 @@ bool D3D11H264Accelerator::SubmitSliceData() {
video_decoder_.Get(), D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL,
&buffer_size, &buffer);
if (!SUCCEEDED(hr)) {
- RecordFailure("GetDecoderBuffer (SliceControl) failed", hr);
+ RecordFailure("GetDecoderBuffer (SliceControl) failed",
+ StatusCode::kGetSliceControlBufferFailed, hr);
return false;
}
@@ -491,14 +498,16 @@ bool D3D11H264Accelerator::SubmitSliceData() {
hr = video_context_->ReleaseDecoderBuffer(
video_decoder_.Get(), D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL);
if (!SUCCEEDED(hr)) {
- RecordFailure("ReleaseDecoderBuffer (SliceControl) failed", hr);
+ RecordFailure("ReleaseDecoderBuffer (SliceControl) failed",
+ StatusCode::kReleaseSliceControlBufferFailed, hr);
return false;
}
hr = video_context_->ReleaseDecoderBuffer(
video_decoder_.Get(), D3D11_VIDEO_DECODER_BUFFER_BITSTREAM);
if (!SUCCEEDED(hr)) {
- RecordFailure("ReleaseDecoderBuffer (BitStream) failed", hr);
+ RecordFailure("ReleaseDecoderBuffer (BitStream) failed",
+ StatusCode::kReleaseBitstreamBufferFailed, hr);
return false;
}
@@ -536,7 +545,8 @@ bool D3D11H264Accelerator::SubmitSliceData() {
frame_iv_.clear();
subsamples_.clear();
if (!SUCCEEDED(hr)) {
- RecordFailure("SubmitDecoderBuffers failed", hr);
+ RecordFailure("SubmitDecoderBuffers failed",
+ StatusCode::kSubmitDecoderBuffersFailed, hr);
return false;
}
@@ -545,14 +555,13 @@ bool D3D11H264Accelerator::SubmitSliceData() {
DecoderStatus D3D11H264Accelerator::SubmitDecode(
scoped_refptr<H264Picture> pic) {
- if (!SubmitSliceData()) {
- RecordFailure("SubmitSliceData failed");
+ if (!SubmitSliceData())
return DecoderStatus::kFail;
- }
HRESULT hr = video_context_->DecoderEndFrame(video_decoder_.Get());
if (!SUCCEEDED(hr)) {
- RecordFailure("DecoderEndFrame failed", hr);
+ RecordFailure("DecoderEndFrame failed", StatusCode::kDecoderEndFrameFailed,
+ hr);
return DecoderStatus::kFail;
}
@@ -579,6 +588,7 @@ bool D3D11H264Accelerator::OutputPicture(scoped_refptr<H264Picture> pic) {
}
void D3D11H264Accelerator::RecordFailure(const std::string& reason,
+ StatusCode code,
HRESULT hr) const {
std::string hr_string;
if (!SUCCEEDED(hr))
@@ -586,6 +596,7 @@ void D3D11H264Accelerator::RecordFailure(const std::string& reason,
DLOG(ERROR) << reason << hr_string;
MEDIA_LOG(ERROR, media_log_) << hr_string << ": " << reason;
+ base::UmaHistogramSparse("Media.D3D11.H264Status", static_cast<int>(code));
}
void D3D11H264Accelerator::SetVideoDecoder(ComD3D11VideoDecoder video_decoder) {
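
Every failure path in the accelerator now names a StatusCode, which RecordFailure reports through a sparse UMA histogram in addition to the MediaLog entry. A minimal sketch of that reporting pattern (the histogram name and codes are taken from the hunks above):

    #include "base/metrics/histogram_functions.h"
    #include "media/base/status_codes.h"

    // Sparse histograms accept raw int samples, so new StatusCode values are
    // recorded without declaring a bucket range in advance.
    void ReportH264Failure(media::StatusCode code) {
      base::UmaHistogramSparse("Media.D3D11.H264Status", static_cast<int>(code));
    }
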
diff --git a/chromium/media/gpu/windows/d3d11_h264_accelerator.h b/chromium/media/gpu/windows/d3d11_h264_accelerator.h
index 00e2bd5cecd..39f25322566 100644
--- a/chromium/media/gpu/windows/d3d11_h264_accelerator.h
+++ b/chromium/media/gpu/windows/d3d11_h264_accelerator.h
@@ -13,6 +13,7 @@
#include <vector>
#include "gpu/command_buffer/service/texture_manager.h"
+#include "media/base/status_codes.h"
#include "media/base/video_frame.h"
#include "media/base/win/mf_helpers.h"
#include "media/gpu/h264_decoder.h"
@@ -84,7 +85,9 @@ class D3D11H264Accelerator : public H264Decoder::H264Accelerator {
bool RetrieveBitstreamBuffer();
// Record a failure to DVLOG and |media_log_|.
- void RecordFailure(const std::string& reason, HRESULT hr = S_OK) const;
+ void RecordFailure(const std::string& reason,
+ StatusCode code,
+ HRESULT hr = S_OK) const;
D3D11VideoDecoderClient* client_;
MediaLog* media_log_ = nullptr;
diff --git a/chromium/media/gpu/windows/d3d11_texture_selector.cc b/chromium/media/gpu/windows/d3d11_texture_selector.cc
index 422a69b91c6..7a92a8f5280 100644
--- a/chromium/media/gpu/windows/d3d11_texture_selector.cc
+++ b/chromium/media/gpu/windows/d3d11_texture_selector.cc
@@ -9,6 +9,7 @@
#include "base/feature_list.h"
#include "media/base/media_log.h"
#include "media/base/media_switches.h"
+#include "media/base/win/mf_helpers.h"
#include "media/gpu/windows/d3d11_copying_texture_wrapper.h"
#include "media/gpu/windows/d3d11_video_device_format_support.h"
#include "ui/gfx/geometry/size.h"
@@ -111,6 +112,7 @@ std::unique_ptr<TextureSelector> TextureSelector::Create(
DXGI_FORMAT_R16G16B16A16_FLOAT)) {
MEDIA_LOG(INFO, media_log) << "D3D11VideoDecoder: fp16 scRGBLinear";
output_dxgi_format = DXGI_FORMAT_R16G16B16A16_FLOAT;
+ output_pixel_format = PIXEL_FORMAT_RGBAF16;
output_color_space = gfx::ColorSpace::CreateSCRGBLinear();
} else if (format_checker->CheckOutputFormatSupport(
DXGI_FORMAT_R10G10B10A2_UNORM)) {
@@ -218,7 +220,11 @@ std::unique_ptr<Texture2DWrapper> CopyTextureSelector::CreateTextureWrapper(
texture_desc.Height = size.height();
ComD3D11Texture2D out_texture;
- if (!SUCCEEDED(device->CreateTexture2D(&texture_desc, nullptr, &out_texture)))
+ if (FAILED(device->CreateTexture2D(&texture_desc, nullptr, &out_texture)))
+ return nullptr;
+
+ if (FAILED(
+ SetDebugName(out_texture.Get(), "D3D11Decoder_CopyTextureSelector")))
return nullptr;
return std::make_unique<CopyingTexture2DWrapper>(
diff --git a/chromium/media/gpu/windows/d3d11_texture_selector_unittest.cc b/chromium/media/gpu/windows/d3d11_texture_selector_unittest.cc
index 1793fdfe5cd..12bd030c458 100644
--- a/chromium/media/gpu/windows/d3d11_texture_selector_unittest.cc
+++ b/chromium/media/gpu/windows/d3d11_texture_selector_unittest.cc
@@ -116,7 +116,7 @@ TEST_F(D3D11TextureSelectorUnittest, P010CopiesToFP16InHDR) {
CreateWithDefaultGPUInfo(DXGI_FORMAT_P010, ZeroCopyEnabled::kTrue,
TextureSelector::HDRMode::kSDROrHDR);
- EXPECT_EQ(tex_sel->PixelFormat(), PIXEL_FORMAT_ARGB);
+ EXPECT_EQ(tex_sel->PixelFormat(), PIXEL_FORMAT_RGBAF16);
EXPECT_EQ(tex_sel->OutputDXGIFormat(), DXGI_FORMAT_R16G16B16A16_FLOAT);
EXPECT_TRUE(tex_sel->WillCopyForTesting());
// TODO(liberato): Check output color space, somehow.
diff --git a/chromium/media/gpu/windows/d3d11_texture_wrapper.cc b/chromium/media/gpu/windows/d3d11_texture_wrapper.cc
index ad97d4c5695..ed2ede8d1eb 100644
--- a/chromium/media/gpu/windows/d3d11_texture_wrapper.cc
+++ b/chromium/media/gpu/windows/d3d11_texture_wrapper.cc
@@ -51,9 +51,8 @@ bool DXGIFormatToVizFormat(
return true;
case DXGI_FORMAT_R16G16B16A16_FLOAT:
DCHECK_EQ(textures_per_picture, 1u);
- if (pixel_format != PIXEL_FORMAT_ARGB) {
+ if (pixel_format != PIXEL_FORMAT_RGBAF16)
return false;
- }
texture_formats[0] = viz::RGBA_F16;
return true;
default: // Unsupported
@@ -118,8 +117,7 @@ Status DefaultTexture2DWrapper::ProcessTexture(
// from some previous operation.
// TODO(liberato): Return the error.
if (received_error_)
- return Status(StatusCode::kProcessTextureFailed)
- .AddCause(std::move(*received_error_));
+ return Status(StatusCode::kProcessTextureFailed);
// TODO(liberato): make sure that |mailbox_holders_| is zero-initialized in
// case we don't use all the planes.
@@ -166,10 +164,10 @@ Status DefaultTexture2DWrapper::Init(
// device for decoding. Sharing seems not to work very well. Otherwise, we
// would create the texture with KEYED_MUTEX and NTHANDLE, then send along
// a handle that we get from |texture| as an IDXGIResource1.
- gpu_resources_.Post(FROM_HERE, &GpuResources::Init, std::move(get_helper_cb),
- std::move(mailboxes), GL_TEXTURE_EXTERNAL_OES, size_,
- textures_per_picture, texture_formats, pixel_format_,
- texture, array_slice);
+ gpu_resources_.AsyncCall(&GpuResources::Init)
+ .WithArgs(std::move(get_helper_cb), std::move(mailboxes),
+ GL_TEXTURE_EXTERNAL_OES, size_, textures_per_picture,
+ texture_formats, pixel_format_, texture, array_slice);
return OkStatus();
}
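
Both this file and d3d11_video_decoder.cc migrate base::SequenceBound from the Post() form to the AsyncCall().WithArgs() builder. A toy sketch of the new API (Worker is illustrative, not a class from this patch; AsyncCall captures FROM_HERE by default):

    #include "base/sequenced_task_runner.h"
    #include "base/threading/sequence_bound.h"

    class Worker {
     public:
      void Init(int id) { /* Runs on the bound task runner. */ }
    };

    void Example(scoped_refptr<base::SequencedTaskRunner> task_runner) {
      base::SequenceBound<Worker> worker(task_runner);
      // Before: worker.Post(FROM_HERE, &Worker::Init, 42);
      worker.AsyncCall(&Worker::Init).WithArgs(42);
    }
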
diff --git a/chromium/media/gpu/windows/d3d11_texture_wrapper_unittest.cc b/chromium/media/gpu/windows/d3d11_texture_wrapper_unittest.cc
index 33e03d828e6..5c28dd1cd20 100644
--- a/chromium/media/gpu/windows/d3d11_texture_wrapper_unittest.cc
+++ b/chromium/media/gpu/windows/d3d11_texture_wrapper_unittest.cc
@@ -114,7 +114,7 @@ TEST_F(D3D11TextureWrapperUnittest, BGRA8InitSucceeds) {
TEST_F(D3D11TextureWrapperUnittest, FP16InitSucceeds) {
STOP_IF_WIN7();
const DXGI_FORMAT dxgi_format = DXGI_FORMAT_R16G16B16A16_FLOAT;
- const VideoPixelFormat pixel_format = PIXEL_FORMAT_ARGB;
+ const VideoPixelFormat pixel_format = PIXEL_FORMAT_RGBAF16;
auto wrapper = std::make_unique<DefaultTexture2DWrapper>(size_, dxgi_format,
pixel_format);
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder.cc b/chromium/media/gpu/windows/d3d11_video_decoder.cc
index 98bbf65a59d..46103d4470e 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder.cc
+++ b/chromium/media/gpu/windows/d3d11_video_decoder.cc
@@ -28,6 +28,7 @@
#include "media/base/video_frame.h"
#include "media/base/video_util.h"
#include "media/base/win/hresult_status_helper.h"
+#include "media/gpu/windows/d3d11_av1_accelerator.h"
#include "media/gpu/windows/d3d11_picture_buffer.h"
#include "media/gpu/windows/d3d11_video_context_wrapper.h"
#include "media/gpu/windows/d3d11_video_decoder_impl.h"
@@ -149,6 +150,10 @@ std::string D3D11VideoDecoder::GetDisplayName() const {
return "D3D11VideoDecoder";
}
+VideoDecoderType D3D11VideoDecoder::GetDecoderType() const {
+ return VideoDecoderType::kD3D11;
+}
+
HRESULT D3D11VideoDecoder::InitializeAcceleratedDecoder(
const VideoDecoderConfig& config,
ComD3D11VideoDecoder video_decoder) {
@@ -176,6 +181,11 @@ HRESULT D3D11VideoDecoder::InitializeAcceleratedDecoder(
std::make_unique<D3D11H264Accelerator>(
this, media_log_.get(), video_device_, std::move(video_context)),
profile_, config.color_space_info());
+ } else if (config.codec() == kCodecAV1) {
+ accelerated_video_decoder_ = std::make_unique<AV1Decoder>(
+ std::make_unique<D3D11AV1Accelerator>(
+ this, media_log_.get(), video_device_, std::move(video_context)),
+ profile_, config.color_space_info());
} else {
return E_FAIL;
}
@@ -187,14 +197,23 @@ HRESULT D3D11VideoDecoder::InitializeAcceleratedDecoder(
return hr;
}
-StatusOr<std::tuple<ComD3D11VideoDecoder>>
-D3D11VideoDecoder::CreateD3D11Decoder() {
- HRESULT hr;
+StatusOr<ComD3D11VideoDecoder> D3D11VideoDecoder::CreateD3D11Decoder() {
+ // By default we assume outputs are 8-bit for SDR color spaces and 10-bit for
+ // HDR color spaces (or VP9.2). We'll get a config change once we know the
+ // real bit depth if this turns out to be wrong.
+ bit_depth_ =
+ accelerated_video_decoder_
+ ? accelerated_video_decoder_->GetBitDepth()
+ : (config_.profile() == VP9PROFILE_PROFILE2 ||
+ config_.color_space_info().ToGfxColorSpace().IsHDR()
+ ? 10
+ : 8);
// TODO: supported check?
- decoder_configurator_ = D3D11DecoderConfigurator::Create(
- gpu_preferences_, gpu_workarounds_, config_, media_log_.get());
+ decoder_configurator_ =
+ D3D11DecoderConfigurator::Create(gpu_preferences_, gpu_workarounds_,
+ config_, bit_depth_, media_log_.get());
if (!decoder_configurator_)
return StatusCode::kDecoderUnsupportedProfile;
@@ -220,7 +239,7 @@ D3D11VideoDecoder::CreateD3D11Decoder() {
return StatusCode::kCreateTextureSelectorFailed;
UINT config_count = 0;
- hr = video_device_->GetVideoDecoderConfigCount(
+ auto hr = video_device_->GetVideoDecoderConfigCount(
decoder_configurator_->DecoderDescriptor(), &config_count);
if (FAILED(hr)) {
return Status(StatusCode::kGetDecoderConfigCountFailed)
@@ -241,8 +260,9 @@ D3D11VideoDecoder::CreateD3D11Decoder() {
.AddCause(HresultToStatus(hr));
}
- if (config_.codec() == kCodecVP9 && dec_config.ConfigBitstreamRaw == 1) {
- // DXVA VP9 specification mentions ConfigBitstreamRaw "shall be 1".
+ if ((config_.codec() == kCodecVP9 || config_.codec() == kCodecAV1) &&
+ dec_config.ConfigBitstreamRaw == 1) {
+ // DXVA VP9 and AV1 specifications say ConfigBitstreamRaw "shall be 1".
found = true;
break;
}
@@ -394,12 +414,12 @@ void D3D11VideoDecoder::Initialize(const VideoDecoderConfig& config,
auto video_decoder_or_error = CreateD3D11Decoder();
if (video_decoder_or_error.has_error()) {
- NotifyError(video_decoder_or_error.error());
+ NotifyError(std::move(video_decoder_or_error).error());
return;
}
- hr = InitializeAcceleratedDecoder(
- config, std::move(std::get<0>(video_decoder_or_error.value())));
+ hr = InitializeAcceleratedDecoder(config,
+ std::move(video_decoder_or_error).value());
if (!SUCCEEDED(hr)) {
NotifyError("Failed to get device context");
@@ -433,8 +453,8 @@ void D3D11VideoDecoder::Initialize(const VideoDecoderConfig& config,
// the originals on some other thread.
// Important but subtle note: base::Bind will copy |config_| since it's a
// const ref.
- impl_.Post(FROM_HERE, &D3D11VideoDecoderImpl::Initialize,
- BindToCurrentLoop(std::move(impl_init_cb)));
+ impl_.AsyncCall(&D3D11VideoDecoderImpl::Initialize)
+ .WithArgs(BindToCurrentLoop(std::move(impl_init_cb)));
}
void D3D11VideoDecoder::AddLifetimeProgressionStage(
@@ -537,7 +557,7 @@ void D3D11VideoDecoder::DoDecode() {
current_buffer_ = nullptr;
if (!accelerated_video_decoder_->Flush()) {
// This will also signal error |current_decode_cb_|.
- NotifyError("Flush failed");
+ NotifyError(StatusCode::kAcceleratorFlushFailed);
return;
}
// Pictures are output synchronously during Flush. Signal the decode
@@ -593,6 +613,7 @@ void D3D11VideoDecoder::DoDecode() {
// Otherwise, stop here. We'll restart when a picture comes back.
if (picture_buffers_.size())
return;
+
CreatePictureBuffers();
} else if (result == media::AcceleratedVideoDecoder::kConfigChange) {
// Before the first frame, we get a config change that we should ignore.
@@ -601,10 +622,12 @@ void D3D11VideoDecoder::DoDecode() {
// don't, so that init can fail rather than decoding if there's a problem
// creating it. We could also unconditionally re-allocate the decoder,
// but we keep it if it's ready to go.
+ const auto new_bit_depth = accelerated_video_decoder_->GetBitDepth();
const auto new_profile = accelerated_video_decoder_->GetProfile();
const auto new_coded_size = accelerated_video_decoder_->GetPicSize();
if (new_profile == config_.profile() &&
- new_coded_size == config_.coded_size()) {
+ new_coded_size == config_.coded_size() &&
+ new_bit_depth == bit_depth_ && !picture_buffers_.size()) {
continue;
}
@@ -622,20 +645,21 @@ void D3D11VideoDecoder::DoDecode() {
// accelerated decoder asked for any.
auto video_decoder_or_error = CreateD3D11Decoder();
if (video_decoder_or_error.has_error()) {
- NotifyError(video_decoder_or_error.error());
+ NotifyError(std::move(video_decoder_or_error).error());
return;
}
DCHECK(set_accelerator_decoder_cb_);
set_accelerator_decoder_cb_.Run(
- std::move(std::get<0>(video_decoder_or_error.value())));
+ std::move(video_decoder_or_error).value());
picture_buffers_.clear();
} else if (result == media::AcceleratedVideoDecoder::kTryAgain) {
LOG(ERROR) << "Try again is not supported";
- NotifyError("Try again is not supported");
+ NotifyError(StatusCode::kTryAgainNotSupported);
return;
} else {
- LOG(ERROR) << "VDA Error " << result;
- NotifyError("Accelerated decode failed");
+ std::ostringstream message;
+ message << "VDA Error " << result;
+ NotifyError(Status(StatusCode::kDecoderFailedDecode, message.str()));
return;
}
}
@@ -723,9 +747,9 @@ void D3D11VideoDecoder::CreatePictureBuffers() {
? 1
: D3D11DecoderConfigurator::BUFFER_COUNT);
if (result.has_value()) {
- in_texture = std::move(result.value());
+ in_texture = std::move(result).value();
} else {
- NotifyError(std::move(result.error()).AddHere());
+ NotifyError(std::move(result).error().AddHere());
return;
}
}
@@ -842,7 +866,7 @@ bool D3D11VideoDecoder::OutputResult(const CodecPicture* picture,
frame->SetReleaseMailboxCB(
base::BindOnce(release_mailbox_cb_, std::move(wait_complete_cb)));
- frame->metadata()->power_efficient = true;
+ frame->metadata().power_efficient = true;
// For NV12, overlay is allowed by default. If the decoder is going to support
// non-NV12 textures, then this may have to be conditionally set. Also note
// that ALLOW_OVERLAY is required for encrypted video path.
@@ -857,7 +881,7 @@ bool D3D11VideoDecoder::OutputResult(const CodecPicture* picture,
// presenter decide if it wants to.
const bool allow_overlay =
base::FeatureList::IsEnabled(kD3D11VideoDecoderAllowOverlay);
- frame->metadata()->allow_overlay = allow_overlay;
+ frame->metadata().allow_overlay = allow_overlay;
frame->set_color_space(output_color_space);
frame->set_hdr_metadata(config_.hdr_metadata());
@@ -878,13 +902,19 @@ void D3D11VideoDecoder::NotifyError(const Status& reason) {
TRACE_EVENT0("gpu", "D3D11VideoDecoder::NotifyError");
state_ = State::kError;
- // TODO(tmathmeyer) - Remove this after plumbing Status through the
- // decode_cb and input_buffer_queue cb's.
- MEDIA_LOG(ERROR, media_log_)
- << "D3D11VideoDecoder error: " << std::hex << reason.code();
+ // Log why this failed.
+ base::UmaHistogramSparse("Media.D3D11.NotifyErrorStatus",
+ static_cast<int>(reason.code()));
- if (init_cb_)
+ if (init_cb_) {
std::move(init_cb_).Run(reason);
+ } else {
+ // TODO(tmathmeyer) - Remove this after plumbing Status through the
+ // decode_cb and input_buffer_queue cb's.
+ // Let the init handler set the error string if this is an init failure.
+ MEDIA_LOG(ERROR, media_log_) << "D3D11VideoDecoder error: 0x" << std::hex
+ << reason.code() << reason.message();
+ }
current_buffer_ = nullptr;
if (current_decode_cb_)
@@ -955,8 +985,10 @@ D3D11VideoDecoder::GetSupportedVideoDecoderConfigs(
return {};
}
- const auto supported_resolutions =
- GetSupportedD3D11VideoDecoderResolutions(d3d11_device, gpu_workarounds);
+ const auto supported_resolutions = GetSupportedD3D11VideoDecoderResolutions(
+ d3d11_device, gpu_workarounds,
+ base::FeatureList::IsEnabled(kD3D11VideoDecoderAV1) &&
+ !gpu_workarounds.disable_accelerated_av1_decode_d3d11);
std::vector<SupportedVideoDecoderConfig> configs;
for (const auto& kv : supported_resolutions) {
@@ -966,11 +998,9 @@ D3D11VideoDecoder::GetSupportedVideoDecoderConfigs(
continue;
}
- // TODO(liberato): Add VP8 and AV1 support to D3D11VideoDecoder.
- if (profile == VP8PROFILE_ANY ||
- (profile >= AV1PROFILE_MIN && profile <= AV1PROFILE_MAX)) {
+ // TODO(liberato): Add VP8 support to D3D11VideoDecoder.
+ if (profile == VP8PROFILE_ANY)
continue;
- }
const auto& resolution_range = kv.second;
configs.emplace_back(profile, profile, resolution_range.min_resolution,
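
With the std::tuple workaround gone, callers consume the StatusOr from CreateD3D11Decoder() exactly once, through its rvalue-qualified accessors. The pattern, condensed from the two call sites in this file:

    auto decoder_or_error = CreateD3D11Decoder();
    if (decoder_or_error.has_error()) {
      NotifyError(std::move(decoder_or_error).error());
      return;
    }
    // value() is also rvalue-qualified, so the ComPtr is moved out, not copied.
    ComD3D11VideoDecoder decoder = std::move(decoder_or_error).value();
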
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder.h b/chromium/media/gpu/windows/d3d11_video_decoder.h
index ba30775a443..c99f61925c5 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder.h
+++ b/chromium/media/gpu/windows/d3d11_video_decoder.h
@@ -19,6 +19,7 @@
#include "gpu/config/gpu_driver_bug_workarounds.h"
#include "gpu/config/gpu_preferences.h"
#include "media/base/callback_registry.h"
+#include "media/base/supported_video_decoder_config.h"
#include "media/base/video_decoder.h"
#include "media/gpu/command_buffer_helper.h"
#include "media/gpu/media_gpu_export.h"
@@ -29,7 +30,6 @@
#include "media/gpu/windows/d3d11_video_decoder_client.h"
#include "media/gpu/windows/d3d11_video_decoder_impl.h"
#include "media/gpu/windows/d3d11_vp9_accelerator.h"
-#include "media/video/supported_video_decoder_config.h"
namespace gpu {
class CommandBufferStub;
@@ -68,6 +68,7 @@ class MEDIA_GPU_EXPORT D3D11VideoDecoder : public VideoDecoder,
// VideoDecoder implementation:
std::string GetDisplayName() const override;
+ VideoDecoderType GetDecoderType() const override;
void Initialize(const VideoDecoderConfig& config,
bool low_delay,
CdmContext* cdm_context,
@@ -144,10 +145,7 @@ class MEDIA_GPU_EXPORT D3D11VideoDecoder : public VideoDecoder,
void CreatePictureBuffers();
// Create a D3D11VideoDecoder, if possible, based on the current config.
- // TODO(liberato): we use a tuple only because StatusOr<ComD3D111VideoDecoder>
- // doesn't work. Something about base::Optional trying to convert to void*,
- // but the conversion is ambiguous.
- StatusOr<std::tuple<ComD3D11VideoDecoder>> CreateD3D11Decoder();
+ StatusOr<ComD3D11VideoDecoder> CreateD3D11Decoder();
enum class NotSupportedReason {
kVideoIsSupported = 0,
@@ -300,6 +298,10 @@ class MEDIA_GPU_EXPORT D3D11VideoDecoder : public VideoDecoder,
// accelerator. Needed for config changes.
SetAcceleratorDecoderCB set_accelerator_decoder_cb_;
+ // The currently configured bit depth for the decoder. When this changes we
+ // need to recreate the decoder.
+ uint8_t bit_depth_ = 8u;
+
base::WeakPtrFactory<D3D11VideoDecoder> weak_factory_{this};
DISALLOW_COPY_AND_ASSIGN(D3D11VideoDecoder);
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc b/chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc
index fc25f8e53f6..bf6792fb877 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc
+++ b/chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc
@@ -202,27 +202,25 @@ class D3D11VideoDecoderTest : public ::testing::Test {
get_device_cb, *supported_configs, is_hdr_supported_));
}
- void InitializeDecoder(const VideoDecoderConfig& config,
- StatusCode expectation = StatusCode::kOk) {
+ void InitializeDecoder(const VideoDecoderConfig& config, bool expectSuccess) {
const bool low_delay = false;
CdmContext* cdm_context = nullptr;
- if (expectation == StatusCode::kOk) {
+ if (expectSuccess) {
EXPECT_CALL(*this, MockInitCB(_)).Times(0);
EXPECT_CALL(*impl_, MockInitialize());
} else {
EXPECT_CALL(*this, MockInitCB(_)).Times(1);
}
- decoder_->Initialize(
- config, low_delay, cdm_context,
- base::BindOnce(&D3D11VideoDecoderTest::CheckExpectedStatus,
- base::Unretained(this), expectation),
- base::DoNothing(), base::DoNothing());
+ decoder_->Initialize(config, low_delay, cdm_context,
+ base::BindOnce(&D3D11VideoDecoderTest::CheckStatus,
+ base::Unretained(this), expectSuccess),
+ base::DoNothing(), base::DoNothing());
base::RunLoop().RunUntilIdle();
}
- void CheckExpectedStatus(Status expected, Status actual) {
- ASSERT_EQ(expected.code(), actual.code());
+ void CheckStatus(bool expectSuccess, Status actual) {
+ ASSERT_EQ(expectSuccess, actual.is_ok());
MockInitCB(actual);
}
@@ -263,10 +261,9 @@ TEST_F(D3D11VideoDecoderTest, SupportsVP9Profile0WithDecoderEnabled) {
CreateDecoder();
// We don't support vp9 on windows 7 and below.
if (base::win::GetVersion() <= base::win::Version::WIN7) {
- InitializeDecoder(configuration,
- StatusCode::kDecoderInitializeNeverCompleted);
+ InitializeDecoder(configuration, false);
} else {
- InitializeDecoder(configuration);
+ InitializeDecoder(configuration, true);
}
}
@@ -277,8 +274,7 @@ TEST_F(D3D11VideoDecoderTest, DoesNotSupportVP9WithLegacyGPU) {
EnableDecoder(D3D11_DECODER_PROFILE_VP9_VLD_PROFILE0);
CreateDecoder();
- InitializeDecoder(configuration,
- StatusCode::kDecoderInitializeNeverCompleted);
+ InitializeDecoder(configuration, false);
}
TEST_F(D3D11VideoDecoderTest, DoesNotSupportVP9WithGPUWorkaroundDisableVPX) {
@@ -288,8 +284,7 @@ TEST_F(D3D11VideoDecoderTest, DoesNotSupportVP9WithGPUWorkaroundDisableVPX) {
EnableDecoder(D3D11_DECODER_PROFILE_VP9_VLD_PROFILE0);
CreateDecoder();
- InitializeDecoder(configuration,
- StatusCode::kDecoderInitializeNeverCompleted);
+ InitializeDecoder(configuration, false);
}
TEST_F(D3D11VideoDecoderTest, DoesNotSupportVP9WithoutDecoderEnabled) {
@@ -299,8 +294,7 @@ TEST_F(D3D11VideoDecoderTest, DoesNotSupportVP9WithoutDecoderEnabled) {
// Enable a non-VP9 decoder.
EnableDecoder(D3D11_DECODER_PROFILE_H264_VLD_NOFGT); // Paranoia, not VP9.
CreateDecoder();
- InitializeDecoder(configuration,
- StatusCode::kDecoderInitializeNeverCompleted);
+ InitializeDecoder(configuration, false);
}
TEST_F(D3D11VideoDecoderTest, DoesNotSupportsH264HIGH10Profile) {
@@ -309,7 +303,7 @@ TEST_F(D3D11VideoDecoderTest, DoesNotSupportsH264HIGH10Profile) {
VideoDecoderConfig high10 = TestVideoConfig::NormalCodecProfile(
kCodecH264, H264PROFILE_HIGH10PROFILE);
- InitializeDecoder(high10, StatusCode::kDecoderInitializeNeverCompleted);
+ InitializeDecoder(high10, false);
}
TEST_F(D3D11VideoDecoderTest, SupportsH264WithAutodetectedConfig) {
@@ -318,7 +312,7 @@ TEST_F(D3D11VideoDecoderTest, SupportsH264WithAutodetectedConfig) {
VideoDecoderConfig normal =
TestVideoConfig::NormalCodecProfile(kCodecH264, H264PROFILE_MAIN);
- InitializeDecoder(normal);
+ InitializeDecoder(normal, true);
// TODO(liberato): Check |last_video_decoder_desc_| for sanity.
}
@@ -336,7 +330,7 @@ TEST_F(D3D11VideoDecoderTest, DoesNotSupportH264IfNoSupportedConfig) {
VideoDecoderConfig normal =
TestVideoConfig::NormalCodecProfile(kCodecH264, H264PROFILE_MAIN);
- InitializeDecoder(normal, StatusCode::kDecoderInitializeNeverCompleted);
+ InitializeDecoder(normal, false);
}
TEST_F(D3D11VideoDecoderTest, DoesNotSupportEncryptionWithoutFlag) {
@@ -346,8 +340,7 @@ TEST_F(D3D11VideoDecoderTest, DoesNotSupportEncryptionWithoutFlag) {
encrypted_config.SetIsEncrypted(true);
DisableFeature(kHardwareSecureDecryption);
- InitializeDecoder(encrypted_config,
- StatusCode::kDecoderInitializeNeverCompleted);
+ InitializeDecoder(encrypted_config, false);
}
TEST_F(D3D11VideoDecoderTest, IgnoreWorkaroundsIgnoresWorkaround) {
@@ -357,7 +350,7 @@ TEST_F(D3D11VideoDecoderTest, IgnoreWorkaroundsIgnoresWorkaround) {
gpu_workarounds_.disable_d3d11_video_decoder = true;
CreateDecoder();
InitializeDecoder(
- TestVideoConfig::NormalCodecProfile(kCodecH264, H264PROFILE_MAIN));
+ TestVideoConfig::NormalCodecProfile(kCodecH264, H264PROFILE_MAIN), true);
}
TEST_F(D3D11VideoDecoderTest, WorkaroundTurnsOffDecoder) {
@@ -365,8 +358,7 @@ TEST_F(D3D11VideoDecoderTest, WorkaroundTurnsOffDecoder) {
gpu_workarounds_.disable_d3d11_video_decoder = true;
CreateDecoder();
InitializeDecoder(
- TestVideoConfig::NormalCodecProfile(kCodecH264, H264PROFILE_MAIN),
- StatusCode::kDecoderInitializeNeverCompleted);
+ TestVideoConfig::NormalCodecProfile(kCodecH264, H264PROFILE_MAIN), false);
}
TEST_F(D3D11VideoDecoderTest, DoesNotSupportEncryptionWithFlagOn11_0) {
@@ -377,8 +369,7 @@ TEST_F(D3D11VideoDecoderTest, DoesNotSupportEncryptionWithFlagOn11_0) {
// 11.1 version, except for the D3D11 version.
EnableFeature(kHardwareSecureDecryption);
- InitializeDecoder(encrypted_config,
- StatusCode::kDecoderInitializeNeverCompleted);
+ InitializeDecoder(encrypted_config, false);
}
TEST_F(D3D11VideoDecoderTest, DISABLED_SupportsEncryptionWithFlagOn11_1) {
@@ -390,7 +381,7 @@ TEST_F(D3D11VideoDecoderTest, DISABLED_SupportsEncryptionWithFlagOn11_1) {
ON_CALL(*mock_d3d11_device_.Get(), GetFeatureLevel)
.WillByDefault(Return(D3D_FEATURE_LEVEL_11_1));
EnableFeature(kHardwareSecureDecryption);
- InitializeDecoder(encrypted_config);
+ InitializeDecoder(encrypted_config, true);
}
// TODO(xhwang): Add tests to cover kWaitingForNewKey and kWaitingForReset.
diff --git a/chromium/media/gpu/windows/d3d11_vp9_accelerator.cc b/chromium/media/gpu/windows/d3d11_vp9_accelerator.cc
index 9fbfcd255af..43315d7c660 100644
--- a/chromium/media/gpu/windows/d3d11_vp9_accelerator.cc
+++ b/chromium/media/gpu/windows/d3d11_vp9_accelerator.cc
@@ -9,17 +9,21 @@
#include <utility>
#include "base/memory/ptr_util.h"
+#include "base/metrics/histogram_functions.h"
#include "media/gpu/windows/d3d11_vp9_picture.h"
namespace media {
-#define RETURN_ON_HR_FAILURE(expr_name, expr) \
- do { \
- HRESULT expr_value = (expr); \
- if (FAILED(expr_value)) { \
- RecordFailure(#expr_name, logging::SystemErrorCodeToString(expr_value)); \
- return false; \
- } \
+using DecodeStatus = VP9Decoder::VP9Accelerator::Status;
+
+#define RETURN_ON_HR_FAILURE(expr_name, expr, code) \
+ do { \
+ HRESULT expr_value = (expr); \
+ if (FAILED(expr_value)) { \
+ RecordFailure(#expr_name, logging::SystemErrorCodeToString(expr_value), \
+ code); \
+ return false; \
+ } \
} while (0)
std::vector<D3D11_VIDEO_DECODER_SUB_SAMPLE_MAPPING_BLOCK>
@@ -52,9 +56,11 @@ D3D11VP9Accelerator::D3D11VP9Accelerator(
D3D11VP9Accelerator::~D3D11VP9Accelerator() {}
void D3D11VP9Accelerator::RecordFailure(const std::string& fail_type,
- const std::string& reason) {
+ const std::string& reason,
+ StatusCode code) {
MEDIA_LOG(ERROR, media_log_)
<< "DX11VP9Failure(" << fail_type << ")=" << reason;
+ base::UmaHistogramSparse("Media.D3D11.VP9Status", static_cast<int>(code));
}
scoped_refptr<VP9Picture> D3D11VP9Accelerator::CreateVP9Picture() {
@@ -68,7 +74,8 @@ bool D3D11VP9Accelerator::BeginFrame(const D3D11VP9Picture& pic) {
const bool is_encrypted = pic.decrypt_config();
if (is_encrypted) {
RecordFailure("crypto_config",
- "Cannot find the decrypt context for the frame.");
+ "Cannot find the decrypt context for the frame.",
+ StatusCode::kCryptoConfigFailed);
return false;
}
@@ -80,7 +87,8 @@ bool D3D11VP9Accelerator::BeginFrame(const D3D11VP9Picture& pic) {
} while (hr == E_PENDING || hr == D3DERR_WASSTILLDRAWING);
if (FAILED(hr)) {
- RecordFailure("DecoderBeginFrame", logging::SystemErrorCodeToString(hr));
+ RecordFailure("DecoderBeginFrame", logging::SystemErrorCodeToString(hr),
+ StatusCode::kDecoderBeginFrameFailed);
return false;
}
@@ -251,25 +259,29 @@ void D3D11VP9Accelerator::CopyHeaderSizeAndID(DXVA_PicParams_VP9* pic_params,
bool D3D11VP9Accelerator::SubmitDecoderBuffer(
const DXVA_PicParams_VP9& pic_params,
const D3D11VP9Picture& pic) {
-#define GET_BUFFER(type) \
- RETURN_ON_HR_FAILURE(GetDecoderBuffer, \
- video_context_->GetDecoderBuffer( \
- video_decoder_.Get(), type, &buffer_size, &buffer))
-#define RELEASE_BUFFER(type) \
- RETURN_ON_HR_FAILURE( \
- ReleaseDecoderBuffer, \
- video_context_->ReleaseDecoderBuffer(video_decoder_.Get(), type))
+#define GET_BUFFER(type, code) \
+ RETURN_ON_HR_FAILURE(GetDecoderBuffer, \
+ video_context_->GetDecoderBuffer( \
+ video_decoder_.Get(), type, &buffer_size, &buffer), \
+ code)
+#define RELEASE_BUFFER(type, code) \
+ RETURN_ON_HR_FAILURE( \
+ ReleaseDecoderBuffer, \
+ video_context_->ReleaseDecoderBuffer(video_decoder_.Get(), type), code)
UINT buffer_size;
void* buffer;
- GET_BUFFER(D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS);
+ GET_BUFFER(D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS,
+ StatusCode::kGetPicParamBufferFailed);
memcpy(buffer, &pic_params, sizeof(pic_params));
- RELEASE_BUFFER(D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS);
+ RELEASE_BUFFER(D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS,
+ StatusCode::kReleasePicParamBufferFailed);
size_t buffer_offset = 0;
while (buffer_offset < pic.frame_hdr->frame_size) {
- GET_BUFFER(D3D11_VIDEO_DECODER_BUFFER_BITSTREAM);
+ GET_BUFFER(D3D11_VIDEO_DECODER_BUFFER_BITSTREAM,
+ StatusCode::kGetBitstreamBufferFailed);
size_t copy_size = pic.frame_hdr->frame_size - buffer_offset;
bool contains_end = true;
if (copy_size > buffer_size) {
@@ -277,11 +289,13 @@ bool D3D11VP9Accelerator::SubmitDecoderBuffer(
contains_end = false;
}
memcpy(buffer, pic.frame_hdr->data + buffer_offset, copy_size);
- RELEASE_BUFFER(D3D11_VIDEO_DECODER_BUFFER_BITSTREAM);
+ RELEASE_BUFFER(D3D11_VIDEO_DECODER_BUFFER_BITSTREAM,
+ StatusCode::kReleaseBitstreamBufferFailed);
DXVA_Slice_VPx_Short slice_info;
- GET_BUFFER(D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL);
+ GET_BUFFER(D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL,
+ StatusCode::kGetSliceControlBufferFailed);
slice_info.BSNALunitDataLocation = 0;
slice_info.SliceBytesInBuffer = (UINT)copy_size;
@@ -297,7 +311,8 @@ bool D3D11VP9Accelerator::SubmitDecoderBuffer(
slice_info.wBadSliceChopping = 3;
memcpy(buffer, &slice_info, sizeof(slice_info));
- RELEASE_BUFFER(D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL);
+ RELEASE_BUFFER(D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL,
+ StatusCode::kReleaseSliceControlBufferFailed);
constexpr int buffers_count = 3;
VideoContextWrapper::VideoBufferWrapper buffers[buffers_count] = {};
@@ -325,7 +340,8 @@ bool D3D11VP9Accelerator::SubmitDecoderBuffer(
RETURN_ON_HR_FAILURE(SubmitDecoderBuffers,
video_context_->SubmitDecoderBuffers(
- video_decoder_.Get(), buffers_count, buffers));
+ video_decoder_.Get(), buffers_count, buffers),
+ StatusCode::kSubmitDecoderBuffersFailed);
buffer_offset += copy_size;
}
@@ -334,7 +350,7 @@ bool D3D11VP9Accelerator::SubmitDecoderBuffer(
#undef RELEASE_BUFFER
}
-bool D3D11VP9Accelerator::SubmitDecode(
+DecodeStatus D3D11VP9Accelerator::SubmitDecode(
scoped_refptr<VP9Picture> picture,
const Vp9SegmentationParams& segmentation_params,
const Vp9LoopFilterParams& loop_filter_params,
@@ -343,7 +359,7 @@ bool D3D11VP9Accelerator::SubmitDecode(
D3D11VP9Picture* pic = static_cast<D3D11VP9Picture*>(picture.get());
if (!BeginFrame(*pic))
- return false;
+ return DecodeStatus::kFail;
DXVA_PicParams_VP9 pic_params = {};
CopyFrameParams(*pic, &pic_params);
@@ -355,13 +371,18 @@ bool D3D11VP9Accelerator::SubmitDecode(
CopyHeaderSizeAndID(&pic_params, *pic);
if (!SubmitDecoderBuffer(pic_params, *pic))
- return false;
+ return DecodeStatus::kFail;
+
+ HRESULT hr = video_context_->DecoderEndFrame(video_decoder_.Get());
+ if (FAILED(hr)) {
+ RecordFailure("DecoderEndFrame", logging::SystemErrorCodeToString(hr),
+ StatusCode::kDecoderEndFrameFailed);
+ return DecodeStatus::kFail;
+ }
- RETURN_ON_HR_FAILURE(DecoderEndFrame,
- video_context_->DecoderEndFrame(video_decoder_.Get()));
if (on_finished_cb)
std::move(on_finished_cb).Run();
- return true;
+ return DecodeStatus::kOk;
}
bool D3D11VP9Accelerator::OutputPicture(scoped_refptr<VP9Picture> picture) {
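
Because the layered macros are hard to read in diff form, this is approximately what a single GET_BUFFER invocation expands to after the patch (whitespace adjusted):

    do {
      HRESULT expr_value = video_context_->GetDecoderBuffer(
          video_decoder_.Get(), D3D11_VIDEO_DECODER_BUFFER_PICTURE_PARAMETERS,
          &buffer_size, &buffer);
      if (FAILED(expr_value)) {
        RecordFailure("GetDecoderBuffer",
                      logging::SystemErrorCodeToString(expr_value),
                      StatusCode::kGetPicParamBufferFailed);
        return false;
      }
    } while (0);
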
diff --git a/chromium/media/gpu/windows/d3d11_vp9_accelerator.h b/chromium/media/gpu/windows/d3d11_vp9_accelerator.h
index 43c2c26e595..07dd99796f0 100644
--- a/chromium/media/gpu/windows/d3d11_vp9_accelerator.h
+++ b/chromium/media/gpu/windows/d3d11_vp9_accelerator.h
@@ -12,6 +12,7 @@
#include <wrl/client.h>
#include "media/base/media_log.h"
+#include "media/base/status_codes.h"
#include "media/gpu/vp9_decoder.h"
#include "media/gpu/windows/d3d11_com_defs.h"
#include "media/gpu/windows/d3d11_video_context_wrapper.h"
@@ -30,11 +31,11 @@ class D3D11VP9Accelerator : public VP9Decoder::VP9Accelerator {
scoped_refptr<VP9Picture> CreateVP9Picture() override;
- bool SubmitDecode(scoped_refptr<VP9Picture> picture,
- const Vp9SegmentationParams& segmentation_params,
- const Vp9LoopFilterParams& loop_filter_params,
- const Vp9ReferenceFrameVector& reference_frames,
- base::OnceClosure on_finished_cb) override;
+ Status SubmitDecode(scoped_refptr<VP9Picture> picture,
+ const Vp9SegmentationParams& segmentation_params,
+ const Vp9LoopFilterParams& loop_filter_params,
+ const Vp9ReferenceFrameVector& reference_frames,
+ base::OnceClosure on_finished_cb) override;
bool OutputPicture(scoped_refptr<VP9Picture> picture) override;
@@ -66,7 +67,9 @@ class D3D11VP9Accelerator : public VP9Decoder::VP9Accelerator {
bool SubmitDecoderBuffer(const DXVA_PicParams_VP9& pic_params,
const D3D11VP9Picture& pic);
- void RecordFailure(const std::string& fail_type, const std::string& reason);
+ void RecordFailure(const std::string& fail_type,
+ const std::string& reason,
+ StatusCode code);
void SetVideoDecoder(ComD3D11VideoDecoder video_decoder);
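
A hedged sketch of how a Status-returning SubmitDecode is consumed by a caller; RetryLater() is a hypothetical hook, and this D3D11 implementation only ever returns kOk or kFail:

    using DecodeStatus = VP9Decoder::VP9Accelerator::Status;

    DecodeStatus status = accelerator_->SubmitDecode(
        pic, seg_params, lf_params, ref_frames, std::move(on_finished_cb));
    if (status == DecodeStatus::kFail)
      return false;         // Hard error: the caller tears down decoding.
    if (status == DecodeStatus::kTryAgain)
      return RetryLater();  // Hypothetical; e.g. waiting on a decryption key.
    // kOk: the frame was submitted; output arrives via OutputPicture().
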
diff --git a/chromium/media/gpu/windows/dxva_picture_buffer_win.cc b/chromium/media/gpu/windows/dxva_picture_buffer_win.cc
index e3452d2017c..7cb7ba02c0c 100644
--- a/chromium/media/gpu/windows/dxva_picture_buffer_win.cc
+++ b/chromium/media/gpu/windows/dxva_picture_buffer_win.cc
@@ -248,6 +248,9 @@ bool PbufferPictureBuffer::InitializeTexture(
HRESULT hr = decoder.d3d11_device_->CreateTexture2D(
&desc, nullptr, &dx11_decoding_texture_);
RETURN_ON_HR_FAILURE(hr, "Failed to create texture", false);
+ RETURN_ON_HR_FAILURE(
+ SetDebugName(dx11_decoding_texture_.Get(), "DXVADecoder_PictureBuffer"),
+ "SetDebugNameFail", false);
if (decoder.use_keyed_mutex_) {
hr = dx11_decoding_texture_.As(&dx11_keyed_mutex_);
RETURN_ON_HR_FAILURE(hr, "Failed to get keyed mutex", false);
@@ -494,6 +497,8 @@ bool EGLStreamPictureBuffer::BindSampleToTexture(
DCHECK_EQ(BOUND, state_);
state_ = IN_CLIENT;
+ shared_images_.resize(picture_buffer_.service_texture_ids().size());
+
current_d3d_sample_ = sample;
EGLDisplay egl_display = gl::GLSurfaceEGL::GetHardwareDisplay();
@@ -722,6 +727,9 @@ bool EGLStreamCopyPictureBuffer::Initialize(
HRESULT hr = decoder.d3d11_device_->CreateTexture2D(&desc, nullptr,
&decoder_copy_texture_);
RETURN_ON_HR_FAILURE(hr, "Failed to create texture", false);
+ RETURN_ON_HR_FAILURE(SetDebugName(decoder_copy_texture_.Get(),
+ "DXVADecoder_EGLStreamCopyPictureBuffer"),
+ "SetDebugNameFail", false);
DCHECK(decoder.use_keyed_mutex_);
hr = decoder_copy_texture_.As(&dx11_keyed_mutex_);
RETURN_ON_HR_FAILURE(hr, "Failed to get keyed mutex", false);
diff --git a/chromium/media/gpu/windows/dxva_picture_buffer_win.h b/chromium/media/gpu/windows/dxva_picture_buffer_win.h
index 891e763653e..c14b3c8a25d 100644
--- a/chromium/media/gpu/windows/dxva_picture_buffer_win.h
+++ b/chromium/media/gpu/windows/dxva_picture_buffer_win.h
@@ -68,6 +68,31 @@ class DXVAPictureBuffer {
color_space_ = color_space;
}
+ const std::vector<scoped_refptr<Picture::ScopedSharedImage>>& shared_images()
+ const {
+ return shared_images_;
+ }
+
+ void set_shared_image(
+ size_t plane,
+ scoped_refptr<Picture::ScopedSharedImage> shared_image) {
+ DCHECK(plane < shared_images_.size());
+ shared_images_[plane] = std::move(shared_image);
+ }
+
+ // Picture buffer data used to create a shared image backing.
+ const PictureBuffer::TextureIds& service_texture_ids() const {
+ return picture_buffer_.service_texture_ids();
+ }
+
+ gfx::Size texture_size(size_t plane) {
+ return picture_buffer_.texture_size(plane);
+ }
+
+ VideoPixelFormat pixel_format() const {
+ return picture_buffer_.pixel_format();
+ }
+
// Returns true if these could in theory be used as an overlay. May
// still be drawn using GL depending on the scene and precise hardware
// support.
@@ -96,6 +121,8 @@ class DXVAPictureBuffer {
gfx::ColorSpace color_space_;
scoped_refptr<gl::GLImage> gl_image_;
+ std::vector<scoped_refptr<Picture::ScopedSharedImage>> shared_images_;
+
DISALLOW_COPY_AND_ASSIGN(DXVAPictureBuffer);
};
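
Usage sketch for the new per-plane storage; MakeImageForPlane() is a hypothetical helper, and the plane count follows service_texture_ids() (for example, Y and UV planes for NV12):

    const size_t planes = picture_buffer->service_texture_ids().size();
    for (size_t plane = 0; plane < planes; ++plane) {
      picture_buffer->set_shared_image(
          plane, MakeImageForPlane(picture_buffer->texture_size(plane),
                                   picture_buffer->pixel_format()));
    }
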
diff --git a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc
index 753b70533c2..be7e2ec2a6a 100644
--- a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc
+++ b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc
@@ -21,6 +21,7 @@
#include "base/atomicops.h"
#include "base/base_paths_win.h"
#include "base/bind.h"
+#include "base/bind_post_task.h"
#include "base/callback.h"
#include "base/command_line.h"
#include "base/file_version_info.h"
@@ -38,12 +39,19 @@
#include "base/win/scoped_co_mem.h"
#include "base/win/windows_version.h"
#include "build/build_config.h"
+#include "components/viz/common/resources/resource_format_utils.h"
+#include "gpu/command_buffer/common/shared_image_usage.h"
+#include "gpu/command_buffer/service/shared_image_backing_d3d.h"
+#include "gpu/command_buffer/service/shared_image_factory.h"
#include "gpu/config/gpu_driver_bug_workarounds.h"
#include "gpu/config/gpu_preferences.h"
+#include "gpu/ipc/service/shared_image_stub.h"
#include "media/base/media_log.h"
#include "media/base/media_switches.h"
+#include "media/base/video_frame.h"
#include "media/base/win/mf_helpers.h"
#include "media/filters/vp9_parser.h"
+#include "media/gpu/command_buffer_helper.h"
#include "media/gpu/windows/d3d11_video_device_format_support.h"
#include "media/gpu/windows/dxva_picture_buffer_win.h"
#include "media/gpu/windows/supported_profile_helpers.h"
@@ -59,6 +67,7 @@
#include "ui/gl/gl_bindings.h"
#include "ui/gl/gl_context.h"
#include "ui/gl/gl_fence.h"
+#include "ui/gl/gl_image_dxgi.h"
#include "ui/gl/gl_surface_egl.h"
#include "ui/gl/gl_switches.h"
@@ -172,6 +181,36 @@ HRESULT g_last_device_removed_reason;
namespace media {
+bool VideoPixelFormatToVizFormat(
+ VideoPixelFormat pixel_format,
+ size_t textures_per_picture,
+ std::array<viz::ResourceFormat, VideoFrame::kMaxPlanes>& texture_formats) {
+ switch (pixel_format) {
+ case PIXEL_FORMAT_ARGB:
+ case PIXEL_FORMAT_XRGB:
+ case PIXEL_FORMAT_ABGR:
+ case PIXEL_FORMAT_BGRA:
+ DCHECK_EQ(textures_per_picture, 1u);
+ texture_formats[0] =
+ (pixel_format == PIXEL_FORMAT_ABGR) ? viz::RGBA_8888 : viz::BGRA_8888;
+ return true;
+ case PIXEL_FORMAT_NV12:
+ DCHECK_EQ(textures_per_picture, 2u);
+ texture_formats[0] = viz::RED_8; // Y
+ texture_formats[1] = viz::RG_88; // UV
+ return true;
+ case PIXEL_FORMAT_P016LE:
+ // TODO(crbug.com/1011555): P010 formats are not fully supported.
+ // The required Viz formats (viz::R16_EXT and viz::RG16_EXT) are not yet
+ // supported.
+ DCHECK_EQ(textures_per_picture, 2u);
+ return false;
+ default: // Unsupported
+ NOTREACHED();
+ return false;
+ }
+}
+
constexpr VideoCodecProfile kSupportedProfiles[] = {
H264PROFILE_BASELINE, H264PROFILE_MAIN, H264PROFILE_HIGH,
VP8PROFILE_ANY, VP9PROFILE_PROFILE0, VP9PROFILE_PROFILE2,
@@ -638,6 +677,9 @@ bool DXVAVideoDecodeAccelerator::Initialize(const Config& config,
if (media_log_)
MEDIA_LOG(INFO, media_log_) << "Starting Initialization of DXVAVDA";
+ AddPlaybackSucceededLifetimeStageIfNeeded();
+ AddLifetimeProgressionStage(DXVALifetimeProgression::kInitializeStarted);
+
if (!get_gl_context_cb_ || !make_context_current_cb_) {
NOTREACHED() << "GL callbacks are required for this VDA";
return false;
@@ -776,6 +818,10 @@ bool DXVAVideoDecodeAccelerator::Initialize(const Config& config,
UMA_HISTOGRAM_ENUMERATION("Media.DXVAVDA.PictureBufferMechanism",
GetPictureBufferMechanism());
+ AddLifetimeProgressionStage(
+ use_dx11_ ? DXVALifetimeProgression::kDX11InitializeSucceeded
+ : DXVALifetimeProgression::kDX9InitializeSucceeded);
+
return StartDecoderThread();
}
@@ -1319,6 +1365,9 @@ void DXVAVideoDecodeAccelerator::Reset() {
void DXVAVideoDecodeAccelerator::Destroy() {
DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
+
+ AddPlaybackSucceededLifetimeStageIfNeeded();
+
Invalidate();
delete this;
}
@@ -1333,6 +1382,13 @@ GLenum DXVAVideoDecodeAccelerator::GetSurfaceInternalFormat() const {
return GL_BGRA_EXT;
}
+bool DXVAVideoDecodeAccelerator::SupportsSharedImagePictureBuffers() const {
+ // Shared image is needed to display overlays which can be used directly
+ // by the video processor.
+ // TODO(crbug.com/1011555): Support for non-bind cases.
+ return GetPictureBufferMechanism() == PictureBufferMechanism::BIND;
+}
+
// static
VideoDecodeAccelerator::SupportedProfiles
DXVAVideoDecodeAccelerator::GetSupportedProfiles(
@@ -1423,8 +1479,9 @@ bool DXVAVideoDecodeAccelerator::InitDecoder(VideoCodecProfile profile) {
RETURN_ON_FAILURE(version_info, "unable to get version of msmpeg2vdec.dll",
false);
base::string16 file_version = version_info->file_version();
- RETURN_ON_FAILURE(file_version.find(L"6.1.7140") == base::string16::npos,
- "blocked version of msmpeg2vdec.dll 6.1.7140", false);
+ RETURN_ON_FAILURE(
+ file_version.find(STRING16_LITERAL("6.1.7140")) == base::string16::npos,
+ "blocked version of msmpeg2vdec.dll 6.1.7140", false);
codec_ = kCodecH264;
clsid = __uuidof(CMSH264DecoderMFT);
} else if ((profile >= VP9PROFILE_PROFILE0 &&
@@ -1814,7 +1871,7 @@ void DXVAVideoDecodeAccelerator::DoDecode(const gfx::Rect& visible_rect,
}
TRACE_EVENT_ASYNC_END0("gpu", "DXVAVideoDecodeAccelerator.Decoding", this);
- TRACE_COUNTER1("DXVA Decoding", "TotalPacketsBeforeDecode",
+ TRACE_COUNTER1("DXVA_Decoding", "TotalPacketsBeforeDecode",
inputs_before_decode_);
inputs_before_decode_ = 0;
@@ -1967,6 +2024,20 @@ void DXVAVideoDecodeAccelerator::StopOnError(
return;
}
+ DXVALifetimeProgression result;
+ if (use_dx11_) {
+ if (decoded_any_frames_)
+ result = DXVALifetimeProgression::kDX11PlaybackFailedAfterFirstFrame;
+ else
+ result = DXVALifetimeProgression::kDX11PlaybackFailedBeforeFirstFrame;
+ } else {
+ if (decoded_any_frames_)
+ result = DXVALifetimeProgression::kDX9PlaybackFailedAfterFirstFrame;
+ else
+ result = DXVALifetimeProgression::kDX9PlaybackFailedBeforeFirstFrame;
+ }
+ AddLifetimeProgressionStage(result);
+
if (client_)
client_->NotifyError(error);
client_ = nullptr;
@@ -2126,12 +2197,19 @@ void DXVAVideoDecodeAccelerator::NotifyPictureReady(
int input_buffer_id,
const gfx::Rect& visible_rect,
const gfx::ColorSpace& color_space,
- bool allow_overlay) {
+ bool allow_overlay,
+ std::vector<scoped_refptr<Picture::ScopedSharedImage>> shared_images) {
DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
// This task could execute after the decoder has been torn down.
if (GetState() != kUninitialized && client_) {
Picture picture(picture_buffer_id, input_buffer_id, visible_rect,
color_space, allow_overlay);
+
+ for (uint32_t i = 0; i < shared_images.size(); i++) {
+ picture.set_scoped_shared_image(shared_images[i], i);
+ }
+
+ decoded_any_frames_ = true;
client_->PictureReady(picture);
}
}
@@ -2604,13 +2682,100 @@ void DXVAVideoDecodeAccelerator::BindPictureBufferToSample(
DCHECK(!output_picture_buffers_.empty());
+ // BindSampleToTexture configures GLImage with the DX11 output texture.
+ // The DX11 texture is then accessed through the GLImage to create a shared
+ // image backing below.
bool result = picture_buffer->BindSampleToTexture(this, sample);
- RETURN_AND_NOTIFY_ON_FAILURE(result, "Failed to complete copying surface",
+ RETURN_AND_NOTIFY_ON_FAILURE(result, "Failed to bind sample to texture",
PLATFORM_FAILURE, );
+ // Create the DX11 texture backed shared images (texture per plane).
+ std::vector<scoped_refptr<Picture::ScopedSharedImage>> scoped_shared_images;
+ if (SupportsSharedImagePictureBuffers()) {
+ gl::GLImageDXGI* gl_image_dxgi =
+ gl::GLImageDXGI::FromGLImage(picture_buffer->gl_image().get());
+ DCHECK(gl_image_dxgi);
+
+ const size_t textures_per_picture =
+ picture_buffer->service_texture_ids().size();
+
+ // Get the viz resource format per texture.
+ std::array<viz::ResourceFormat, VideoFrame::kMaxPlanes> viz_formats;
+ {
+ const bool result = VideoPixelFormatToVizFormat(
+ picture_buffer->pixel_format(), textures_per_picture, viz_formats);
+ RETURN_AND_NOTIFY_ON_FAILURE(
+ result, "Could not convert pixel format to viz format",
+ PLATFORM_FAILURE, );
+ }
+
+ CommandBufferHelper* helper = client_->GetCommandBufferHelper();
+ DCHECK(helper);
+
+ for (uint32_t texture_idx = 0; texture_idx < textures_per_picture;
+ texture_idx++) {
+ // Usage flags to allow the display compositor to draw from it, video
+ // to decode, and allow webgl/canvas access.
+ constexpr uint32_t shared_image_usage =
+ gpu::SHARED_IMAGE_USAGE_VIDEO_DECODE | gpu::SHARED_IMAGE_USAGE_GLES2 |
+ gpu::SHARED_IMAGE_USAGE_RASTER | gpu::SHARED_IMAGE_USAGE_DISPLAY |
+ gpu::SHARED_IMAGE_USAGE_SCANOUT;
+
+ // Create a shared image
+ // TODO(crbug.com/1011555): Need key shared mutex if shared image is ever
+ // used by another device.
+ scoped_refptr<gpu::gles2::TexturePassthrough> gl_texture =
+ gpu::gles2::TexturePassthrough::CheckedCast(helper->GetTexture(
+ picture_buffer->service_texture_ids()[texture_idx]));
+
+ // Create a new shared image mailbox. The existing mailbox belonging to
+ // this |picture_buffer| will be updated when the video frame is created.
+ const auto& mailbox = gpu::Mailbox::GenerateForSharedImage();
+
+ auto shared_image = std::make_unique<gpu::SharedImageBackingD3D>(
+ mailbox, viz_formats[texture_idx],
+ picture_buffer->texture_size(texture_idx),
+ picture_buffer->color_space(), kTopLeft_GrSurfaceOrigin,
+ kPremul_SkAlphaType, shared_image_usage,
+ /*swap_chain=*/nullptr, std::move(gl_texture),
+ picture_buffer->gl_image(),
+ /*buffer_index=*/0, gl_image_dxgi->texture(),
+ base::win::ScopedHandle(),
+ /*dxgi_keyed_mutex=*/nullptr);
+
+ // Caller is assumed to provide cleared d3d textures.
+ shared_image->SetCleared();
+
+ gpu::SharedImageStub* shared_image_stub = client_->GetSharedImageStub();
+ DCHECK(shared_image_stub);
+ const bool success = shared_image_stub->factory()->RegisterBacking(
+ std::move(shared_image), /* legacy_mailbox */ true);
+ if (!success) {
+ RETURN_AND_NOTIFY_ON_FAILURE(false, "Failed to register shared image",
+ PLATFORM_FAILURE, );
+ }
+
+ auto destroy_shared_image_callback = base::BindPostTask(
+ main_thread_task_runner_,
+ base::BindOnce(
+ shared_image_stub->GetSharedImageDestructionCallback(mailbox),
+ gpu::SyncToken()));
+
+ // Wrap the factory ref with a scoped shared image. The factory ref
+ // is used instead of requiring a destruction callback.
+ auto scoped_shared_image =
+ base::MakeRefCounted<Picture::ScopedSharedImage>(
+ mailbox, GetTextureTarget(),
+ std::move(destroy_shared_image_callback));
+
+ scoped_shared_images.push_back(std::move(scoped_shared_image));
+ }
+ }
+
NotifyPictureReady(
picture_buffer->id(), input_buffer_id, picture_buffer->visible_rect(),
- picture_buffer->color_space(), picture_buffer->AllowOverlay());
+ picture_buffer->color_space(), picture_buffer->AllowOverlay(),
+ std::move(scoped_shared_images));
{
base::AutoLock lock(decoder_lock_);
@@ -3127,8 +3292,37 @@ bool DXVAVideoDecodeAccelerator::ShouldUseANGLEDevice() const {
NOTREACHED();
return false;
}
+
ID3D11Device* DXVAVideoDecodeAccelerator::D3D11Device() const {
return ShouldUseANGLEDevice() ? angle_device_.Get() : d3d11_device_.Get();
}
+void DXVAVideoDecodeAccelerator::AddLifetimeProgressionStage(
+ DXVALifetimeProgression stage) {
+ // If we're starting init, then forget about any previously output frames.
+ if (stage == DXVALifetimeProgression::kInitializeStarted)
+ decoded_any_frames_ = false;
+
+ // If init has succeeded, then we can output a playback success / failure when
+ // we fail / re-init / are destroyed, as needed.
+ already_initialized_ =
+ (stage == DXVALifetimeProgression::kDX11InitializeSucceeded ||
+ stage == DXVALifetimeProgression::kDX9InitializeSucceeded);
+
+ base::UmaHistogramEnumeration("Media.DXVAVDA.DecoderLifetimeProgression",
+ stage);
+}
+
+void DXVAVideoDecodeAccelerator::AddPlaybackSucceededLifetimeStageIfNeeded() {
+ // If we didn't complete initialization, then we didn't complete playback.
+ // This will also prevent us from sending "playback succeeded" more than once
+ // per init, or after a playback error.
+ if (!already_initialized_)
+ return;
+
+ AddLifetimeProgressionStage(
+ use_dx11_ ? DXVALifetimeProgression::kDX11PlaybackSucceeded
+ : DXVALifetimeProgression::kDX9PlaybackSucceeded);
+}
+
} // namespace media
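As a reference for the histogram added above, a minimal sketch of the stable-enum UMA convention (enum and histogram name are hypothetical):

    #include "base/metrics/histogram_functions.h"

    // Values are persisted to logs: never renumber or reuse entries, and keep
    // kMaxValue equal to the numerically largest entry so the bucket count is
    // derived automatically.
    enum class MyStage {
      kStarted = 0,
      kSucceeded = 1,
      kMaxValue = kSucceeded,
    };

    void LogStage(MyStage stage) {
      base::UmaHistogramEnumeration("Media.MyFeature.Stage", stage);
    }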
diff --git a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h
index 0f5bfa44d04..034abba80a8 100644
--- a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h
+++ b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h
@@ -113,6 +113,7 @@ class MEDIA_GPU_EXPORT DXVAVideoDecodeAccelerator
const scoped_refptr<base::SingleThreadTaskRunner>& decode_task_runner)
override;
GLenum GetSurfaceInternalFormat() const override;
+ bool SupportsSharedImagePictureBuffers() const override;
static VideoDecodeAccelerator::SupportedProfiles GetSupportedProfiles(
const gpu::GpuPreferences& gpu_preferences,
@@ -154,20 +155,41 @@ class MEDIA_GPU_EXPORT DXVAVideoDecodeAccelerator
kMaxValue = BIND
};
+ // These values are persisted to logs. Entries should not be renumbered and
+ // numeric values should never be reused.
enum class DXVALifetimeProgression {
kInitializeStarted = 0,
- kInitializeSucceeded = 1,
- kPlaybackSucceeded = 2,
+
+ // DX11 init completed successfully.
+ kDX11InitializeSucceeded = 1,
+
+ // An error occurred after successful init, split up by whether a frame was
+ // delivered to the client yet or not.
+ kDX11PlaybackFailedBeforeFirstFrame = 2,
+ kDX11PlaybackFailedAfterFirstFrame = 3,
+
+ // Playback succeeded, which requires successful init.
+ kDX11PlaybackSucceeded = 4,
+
+ // DX9 variants of the above.
+ kDX9InitializeSucceeded = 5,
+ kDX9PlaybackFailedBeforeFirstFrame = 6,
+ kDX9PlaybackFailedAfterFirstFrame = 7,
+ kDX9PlaybackSucceeded = 8,
// For UMA. Must be the last entry. It should be initialized to the
// numerically largest value above; if you add more entries, then please
// update this to the last one.
- kMaxValue = kPlaybackSucceeded
+ kMaxValue = kDX9PlaybackSucceeded
};
// Log UMA progression state.
void AddLifetimeProgressionStage(DXVALifetimeProgression stage);
+ // Logs the appropriate PlaybackSucceeded lifetime stage, if we've completed
+ // init successfully and not logged an error or playback success since then.
+ void AddPlaybackSucceededLifetimeStageIfNeeded();
+
// Creates and initializes an instance of the D3D device and the
// corresponding device manager. The device manager instance is eventually
// passed to the IMFTransform interface implemented by the decoder.
@@ -249,7 +271,9 @@ class MEDIA_GPU_EXPORT DXVAVideoDecodeAccelerator
int input_buffer_id,
const gfx::Rect& visible_rect,
const gfx::ColorSpace& color_space,
- bool allow_overlay);
+ bool allow_overlay,
+ std::vector<scoped_refptr<Picture::ScopedSharedImage>>
+ shared_images = {});
// Sends pending input buffer processed acks to the client if we don't have
// output samples waiting to be processed.
@@ -419,6 +443,9 @@ class MEDIA_GPU_EXPORT DXVAVideoDecodeAccelerator
int processor_width_ = 0;
int processor_height_ = 0;
+
+ // Used for lifetime progression logging. Have we logged that initialization
+ // was successful, and nothing since?
bool already_initialized_ = false;
Microsoft::WRL::ComPtr<IDirectXVideoProcessorService>
@@ -610,6 +637,9 @@ class MEDIA_GPU_EXPORT DXVAVideoDecodeAccelerator
base::Optional<gl::HDRMetadataHelperWin> hdr_metadata_helper_;
bool use_empty_video_hdr_metadata_ = false;
+ // Have we delivered any decoded frames since the last call to Initialize()?
+ bool decoded_any_frames_ = false;
+
// WeakPtrFactory for posting tasks back to |this|.
base::WeakPtrFactory<DXVAVideoDecodeAccelerator> weak_this_factory_{this};
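The placement of the WeakPtrFactory as the last member above follows the usual Chromium rule; a minimal sketch (class and member are hypothetical):

    // Members are destroyed in reverse declaration order, so declaring the
    // factory last invalidates outstanding weak pointers before any other
    // member is torn down.
    class Widget {
     public:
      base::WeakPtr<Widget> AsWeakPtr() { return weak_factory_.GetWeakPtr(); }

     private:
      std::unique_ptr<Helper> helper_;  // Hypothetical member.
      base::WeakPtrFactory<Widget> weak_factory_{this};  // Keep last.
    };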
diff --git a/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.cc b/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.cc
index 7d297a0924c..849fec1d921 100644
--- a/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.cc
+++ b/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.cc
@@ -8,6 +8,7 @@
#pragma warning(disable : 4800) // Disable warning for added padding.
#include <codecapi.h>
+#include <d3d11_1.h>
#include <mferror.h>
#include <mftransform.h>
#include <objbase.h>
@@ -23,9 +24,11 @@
#include "base/win/scoped_co_mem.h"
#include "base/win/scoped_variant.h"
#include "base/win/windows_version.h"
+#include "media/base/media_switches.h"
#include "media/base/win/mf_helpers.h"
#include "media/base/win/mf_initializer.h"
#include "third_party/libyuv/include/libyuv.h"
+#include "ui/gfx/gpu_memory_buffer.h"
using media::MediaBufferScopedPointer;
@@ -45,7 +48,8 @@ const size_t kOneMicrosecondInMFSampleTimeUnits = 10;
const size_t kOutputSampleBufferSizeRatio = 4;
constexpr const wchar_t* const kMediaFoundationVideoEncoderDLLs[] = {
- L"mf.dll", L"mfplat.dll",
+ L"mf.dll",
+ L"mfplat.dll",
};
eAVEncH264VProfile GetH264VProfile(VideoCodecProfile profile,
@@ -67,7 +71,6 @@ eAVEncH264VProfile GetH264VProfile(VideoCodecProfile profile,
return eAVEncH264VProfile_unknown;
}
}
-
} // namespace
class MediaFoundationVideoEncodeAccelerator::EncodeOutput {
@@ -181,7 +184,8 @@ bool MediaFoundationVideoEncodeAccelerator::Initialize(const Config& config,
DVLOG(3) << __func__ << ": " << config.AsHumanReadableString();
DCHECK(main_client_task_runner_->BelongsToCurrentThread());
- if (PIXEL_FORMAT_I420 != config.input_format) {
+ if (PIXEL_FORMAT_I420 != config.input_format &&
+ PIXEL_FORMAT_NV12 != config.input_format) {
DLOG(ERROR) << "Input format not supported= "
<< VideoPixelFormatToString(config.input_format);
return false;
@@ -244,7 +248,10 @@ bool MediaFoundationVideoEncodeAccelerator::Initialize(const Config& config,
main_client_weak_factory_.reset(new base::WeakPtrFactory<Client>(client));
main_client_ = main_client_weak_factory_->GetWeakPtr();
input_visible_size_ = config.input_visible_size;
- frame_rate_ = kMaxFrameRateNumerator / kMaxFrameRateDenominator;
+ if (config.initial_framerate.has_value())
+ frame_rate_ = config.initial_framerate.value();
+ else
+ frame_rate_ = kMaxFrameRateNumerator / kMaxFrameRateDenominator;
target_bitrate_ = config.initial_bitrate;
bitstream_buffer_size_ = config.input_visible_size.GetArea();
@@ -259,18 +266,29 @@ bool MediaFoundationVideoEncodeAccelerator::Initialize(const Config& config,
return false;
}
- MFT_INPUT_STREAM_INFO input_stream_info;
- HRESULT hr =
- encoder_->GetInputStreamInfo(input_stream_id_, &input_stream_info);
- RETURN_ON_HR_FAILURE(hr, "Couldn't get input stream info", false);
- input_sample_ = CreateEmptySampleWithBuffer(
- input_stream_info.cbSize
- ? input_stream_info.cbSize
- : VideoFrame::AllocationSize(PIXEL_FORMAT_NV12, input_visible_size_),
- input_stream_info.cbAlignment);
+ HRESULT hr = MFCreateSample(&input_sample_);
+ RETURN_ON_HR_FAILURE(hr, "Failed to create sample", false);
+
+ if (config.input_format == PIXEL_FORMAT_NV12 &&
+ base::FeatureList::IsEnabled(media::kMediaFoundationD3D11VideoCapture)) {
+ dxgi_device_manager_ = DXGIDeviceManager::Create();
+ if (!dxgi_device_manager_) {
+ DLOG(ERROR) << "Failed to create DXGIDeviceManager";
+ return false;
+ }
+ }
if (is_async_mft_) {
// Start the asynchronous processing model
+ if (dxgi_device_manager_) {
+ auto mf_dxgi_device_manager =
+ dxgi_device_manager_->GetMFDXGIDeviceManager();
+ hr = encoder_->ProcessMessage(
+ MFT_MESSAGE_SET_D3D_MANAGER,
+ reinterpret_cast<ULONG_PTR>(mf_dxgi_device_manager.Get()));
+ RETURN_ON_HR_FAILURE(
+ hr, "Couldn't set ProcessMessage MFT_MESSAGE_SET_D3D_MANAGER", false);
+ }
hr = encoder_->ProcessMessage(MFT_MESSAGE_COMMAND_FLUSH, 0);
RETURN_ON_HR_FAILURE(
hr, "Couldn't set ProcessMessage MFT_MESSAGE_COMMAND_FLUSH", false);
@@ -682,9 +700,9 @@ void MediaFoundationVideoEncodeAccelerator::EncodeTask(
DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
if (is_async_mft_) {
- AsyncEncodeTask(frame, force_keyframe);
+ AsyncEncodeTask(std::move(frame), force_keyframe);
} else {
- SyncEncodeTask(frame, force_keyframe);
+ SyncEncodeTask(std::move(frame), force_keyframe);
}
}
@@ -695,7 +713,7 @@ void MediaFoundationVideoEncodeAccelerator::AsyncEncodeTask(
HRESULT hr = E_FAIL;
if (input_required_) {
// Hardware MFT is waiting for this coming input.
- hr = ProcessInput(frame, force_keyframe);
+ hr = ProcessInput(std::move(frame), force_keyframe);
if (FAILED(hr)) {
NotifyError(kPlatformFailureError);
RETURN_ON_HR_FAILURE(hr, "Couldn't encode", );
@@ -721,7 +739,7 @@ void MediaFoundationVideoEncodeAccelerator::AsyncEncodeTask(
// Always deliver the current input into HMFT.
if (event_type == METransformNeedInput) {
- hr = ProcessInput(frame, force_keyframe);
+ hr = ProcessInput(std::move(frame), force_keyframe);
if (FAILED(hr)) {
NotifyError(kPlatformFailureError);
RETURN_ON_HR_FAILURE(hr, "Couldn't encode", );
@@ -748,7 +766,7 @@ void MediaFoundationVideoEncodeAccelerator::SyncEncodeTask(
scoped_refptr<VideoFrame> frame,
bool force_keyframe) {
HRESULT hr = E_FAIL;
- hr = ProcessInput(frame, force_keyframe);
+ hr = ProcessInput(std::move(frame), force_keyframe);
// According to MSDN, if encoder returns MF_E_NOTACCEPTING, we need to try
// processing the output. This error indicates that encoder does not accept
@@ -775,42 +793,17 @@ HRESULT MediaFoundationVideoEncodeAccelerator::ProcessInput(
bool force_keyframe) {
DVLOG(3) << __func__;
DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
- DCHECK_EQ(frame->format(), PIXEL_FORMAT_I420);
- // Convert I420 to NV12 as input.
- Microsoft::WRL::ComPtr<IMFMediaBuffer> input_buffer;
- input_sample_->GetBufferByIndex(0, &input_buffer);
-
- {
- MediaBufferScopedPointer scoped_buffer(input_buffer.Get());
- DCHECK(scoped_buffer.get());
- int dst_stride_y = frame->stride(VideoFrame::kYPlane);
- uint8_t* dst_uv =
- scoped_buffer.get() +
- frame->stride(VideoFrame::kYPlane) * frame->rows(VideoFrame::kYPlane);
- int dst_stride_uv = frame->stride(VideoFrame::kUPlane) * 2;
- libyuv::I420ToNV12(frame->visible_data(VideoFrame::kYPlane),
- frame->stride(VideoFrame::kYPlane),
- frame->visible_data(VideoFrame::kUPlane),
- frame->stride(VideoFrame::kUPlane),
- frame->visible_data(VideoFrame::kVPlane),
- frame->stride(VideoFrame::kVPlane), scoped_buffer.get(),
- dst_stride_y, dst_uv, dst_stride_uv,
- input_visible_size_.width(),
- input_visible_size_.height());
- }
+ HRESULT hr = PopulateInputSampleBuffer(frame);
+ RETURN_ON_HR_FAILURE(hr, "Couldn't populate input sample buffer", hr);
input_sample_->SetSampleTime(frame->timestamp().InMicroseconds() *
kOneMicrosecondInMFSampleTimeUnits);
UINT64 sample_duration = 0;
- HRESULT hr =
- MFFrameRateToAverageTimePerFrame(frame_rate_, 1, &sample_duration);
+ hr = MFFrameRateToAverageTimePerFrame(frame_rate_, 1, &sample_duration);
RETURN_ON_HR_FAILURE(hr, "Couldn't calculate sample duration", E_FAIL);
input_sample_->SetSampleDuration(sample_duration);
- // Release frame after input is copied.
- frame = nullptr;
-
if (force_keyframe) {
VARIANT var;
var.vt = VT_UI4;
@@ -818,13 +811,121 @@ HRESULT MediaFoundationVideoEncodeAccelerator::ProcessInput(
hr = codec_api_->SetValue(&CODECAPI_AVEncVideoForceKeyFrame, &var);
if (!compatible_with_win7_ && FAILED(hr)) {
LOG(WARNING) << "Failed to set CODECAPI_AVEncVideoForceKeyFrame, "
- "HRESULT: 0x" << std::hex << hr;
+ "HRESULT: 0x"
+ << std::hex << hr;
}
}
return encoder_->ProcessInput(input_stream_id_, input_sample_.Get(), 0);
}
+HRESULT MediaFoundationVideoEncodeAccelerator::PopulateInputSampleBuffer(
+ scoped_refptr<VideoFrame> frame) {
+ // Handle case where video frame is backed by a GPU texture
+ if (frame->storage_type() ==
+ VideoFrame::StorageType::STORAGE_GPU_MEMORY_BUFFER) {
+ DCHECK_EQ(frame->format(), PIXEL_FORMAT_NV12);
+
+ gfx::GpuMemoryBuffer* gmb = frame->GetGpuMemoryBuffer();
+ if (!gmb) {
+ DLOG(ERROR) << "Failed to get GMB for input frame";
+ return MF_E_INVALID_STREAM_DATA;
+ }
+
+ gfx::GpuMemoryBufferHandle buffer_handle = gmb->CloneHandle();
+ DCHECK_EQ(gmb->GetType(), gfx::GpuMemoryBufferType::DXGI_SHARED_HANDLE);
+
+ auto d3d_device = dxgi_device_manager_->GetDevice();
+ if (!d3d_device) {
+ DLOG(ERROR) << "Failed to get device from MF DXGI device manager";
+ return E_HANDLE;
+ }
+
+ Microsoft::WRL::ComPtr<ID3D11Device1> device1;
+ HRESULT hr = d3d_device.As(&device1);
+ RETURN_ON_HR_FAILURE(hr, "Failed to query ID3D11Device1", hr);
+
+ Microsoft::WRL::ComPtr<ID3D11Texture2D> texture;
+ hr = device1->OpenSharedResource1(buffer_handle.dxgi_handle.Get(),
+ IID_PPV_ARGS(&texture));
+ RETURN_ON_HR_FAILURE(hr, "Failed to open shared GMB D3D texture", hr);
+
+ Microsoft::WRL::ComPtr<IMFMediaBuffer> input_buffer;
+ hr = MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D), texture.Get(), 0,
+ FALSE, &input_buffer);
+ RETURN_ON_HR_FAILURE(hr, "Failed to create MF DXGI surface buffer", hr);
+
+ hr = input_sample_->RemoveAllBuffers();
+ RETURN_ON_HR_FAILURE(hr, "Failed remove buffers from sample", hr);
+ hr = input_sample_->AddBuffer(input_buffer.Get());
+ RETURN_ON_HR_FAILURE(hr, "Failed to add buffer to sample", hr);
+ return S_OK;
+ }
+
+ Microsoft::WRL::ComPtr<IMFMediaBuffer> input_buffer;
+ HRESULT hr = input_sample_->GetBufferByIndex(0, &input_buffer);
+ if (FAILED(hr)) {
+ // Allocate a new buffer.
+ MFT_INPUT_STREAM_INFO input_stream_info;
+ hr = encoder_->GetInputStreamInfo(input_stream_id_, &input_stream_info);
+ RETURN_ON_HR_FAILURE(hr, "Couldn't get input stream info", hr);
+
+ hr = MFCreateAlignedMemoryBuffer(
+ input_stream_info.cbSize ? input_stream_info.cbSize
+ : VideoFrame::AllocationSize(
+ PIXEL_FORMAT_NV12, input_visible_size_),
+ input_stream_info.cbAlignment == 0 ? input_stream_info.cbAlignment
+ : input_stream_info.cbAlignment - 1,
+ &input_buffer);
+ RETURN_ON_HR_FAILURE(hr, "Failed to create memory buffer", hr);
+ hr = input_sample_->AddBuffer(input_buffer.Get());
+ RETURN_ON_HR_FAILURE(hr, "Failed to add buffer to sample", hr);
+ }
+
+ MediaBufferScopedPointer scoped_buffer(input_buffer.Get());
+ DCHECK(scoped_buffer.get());
+ uint8_t* dst_y = scoped_buffer.get();
+ uint8_t* dst_uv =
+ scoped_buffer.get() +
+ frame->row_bytes(VideoFrame::kYPlane) * frame->rows(VideoFrame::kYPlane);
+ uint8_t* end = dst_uv + frame->row_bytes(VideoFrame::kUVPlane) *
+ frame->rows(VideoFrame::kUVPlane);
+ DCHECK_GE(std::ptrdiff_t{scoped_buffer.max_length()},
+ end - scoped_buffer.get());
+
+ if (frame->format() == PIXEL_FORMAT_NV12) {
+ // Copy NV12 pixel data from |frame| to |input_buffer|.
+ int error = libyuv::NV12Copy(frame->visible_data(VideoFrame::kYPlane),
+ frame->stride(VideoFrame::kYPlane),
+ frame->visible_data(VideoFrame::kUVPlane),
+ frame->stride(VideoFrame::kUVPlane), dst_y,
+ frame->row_bytes(VideoFrame::kYPlane), dst_uv,
+ frame->row_bytes(VideoFrame::kUPlane),
+ input_visible_size_.width(),
+ input_visible_size_.height());
+ if (error)
+ return E_FAIL;
+ } else if (frame->format() == PIXEL_FORMAT_I420) {
+ // Convert I420 to NV12 as input.
+ int error = libyuv::I420ToNV12(
+ frame->visible_data(VideoFrame::kYPlane),
+ frame->stride(VideoFrame::kYPlane),
+ frame->visible_data(VideoFrame::kUPlane),
+ frame->stride(VideoFrame::kUPlane),
+ frame->visible_data(VideoFrame::kVPlane),
+ frame->stride(VideoFrame::kVPlane), dst_y,
+ frame->row_bytes(VideoFrame::kYPlane), dst_uv,
+ frame->row_bytes(VideoFrame::kUPlane) * 2, input_visible_size_.width(),
+ input_visible_size_.height());
+ if (error)
+ return E_FAIL;
+ } else {
+ NOTREACHED();
+ }
+
+ return S_OK;
+}
+
void MediaFoundationVideoEncodeAccelerator::ProcessOutputAsync() {
DVLOG(3) << __func__;
DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread());
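For reference, a condensed sketch of the GPU-frame input path added in PopulateInputSampleBuffer() above, assuming a valid shared NT |handle| and a ComPtr<ID3D11Device> |device| (error handling elided):

    // OpenSharedResource1 (NT handles) lives on ID3D11Device1, hence the QI.
    Microsoft::WRL::ComPtr<ID3D11Device1> device1;
    HRESULT hr = device.As(&device1);
    Microsoft::WRL::ComPtr<ID3D11Texture2D> texture;
    hr = device1->OpenSharedResource1(handle, IID_PPV_ARGS(&texture));
    // Wrap the texture so Media Foundation can consume it without a CPU copy.
    Microsoft::WRL::ComPtr<IMFMediaBuffer> buffer;
    hr = MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D), texture.Get(),
                                   /*uSubresourceIndex=*/0,
                                   /*fBottomUpWhenLinear=*/FALSE, &buffer);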
@@ -1014,7 +1115,7 @@ bool MediaFoundationVideoEncodeAccelerator::TryToDeliverInputFrame(
continue;
}
case METransformNeedInput: {
- hr = ProcessInput(frame, force_keyframe);
+ hr = ProcessInput(std::move(frame), force_keyframe);
if (FAILED(hr)) {
NotifyError(kPlatformFailureError);
RETURN_ON_HR_FAILURE(hr, "Couldn't encode", false);
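The dst_y/dst_uv arithmetic in PopulateInputSampleBuffer() relies on the standard NV12 layout; a short sketch of the assumed geometry (stride == width, even dimensions):

    // NV12: a W x H Y plane followed by one W x (H/2) interleaved UV plane.
    //   dst_y  = base;              // W bytes per row, H rows.
    //   dst_uv = base + W * H;      // W bytes per row (U,V pairs), H/2 rows.
    size_t NV12AllocationSize(size_t width, size_t height) {
      return width * height * 3 / 2;
    }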
diff --git a/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.h b/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.h
index 8d0b33d6500..31a4d4db930 100644
--- a/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.h
+++ b/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.h
@@ -18,6 +18,7 @@
#include "base/memory/weak_ptr.h"
#include "base/single_thread_task_runner.h"
#include "base/threading/thread.h"
+#include "media/base/win/dxgi_device_manager.h"
#include "media/base/win/mf_initializer.h"
#include "media/gpu/media_gpu_export.h"
#include "media/video/video_encode_accelerator.h"
@@ -89,6 +90,9 @@ class MEDIA_GPU_EXPORT MediaFoundationVideoEncodeAccelerator
// Processes the input video frame for the encoder.
HRESULT ProcessInput(scoped_refptr<VideoFrame> frame, bool force_keyframe);
+ // Populates the input sample buffer with the contents of a video frame.
+ HRESULT PopulateInputSampleBuffer(scoped_refptr<VideoFrame> frame);
+
// Checks for and copies encoded output on |encoder_thread_|.
void ProcessOutputAsync();
void ProcessOutputSync();
@@ -164,6 +168,9 @@ class MEDIA_GPU_EXPORT MediaFoundationVideoEncodeAccelerator
base::Thread encoder_thread_;
scoped_refptr<base::SingleThreadTaskRunner> encoder_thread_task_runner_;
+ // DXGI device manager for handling hardware input textures.
+ scoped_refptr<DXGIDeviceManager> dxgi_device_manager_;
+
// Declared last to ensure that all weak pointers are invalidated before
// other destructors run.
base::WeakPtrFactory<MediaFoundationVideoEncodeAccelerator>
diff --git a/chromium/media/gpu/windows/supported_profile_helpers.cc b/chromium/media/gpu/windows/supported_profile_helpers.cc
index 76a557d1825..fb3b532dadd 100644
--- a/chromium/media/gpu/windows/supported_profile_helpers.cc
+++ b/chromium/media/gpu/windows/supported_profile_helpers.cc
@@ -203,7 +203,8 @@ namespace media {
SupportedResolutionRangeMap GetSupportedD3D11VideoDecoderResolutions(
ComD3D11Device device,
- const gpu::GpuDriverBugWorkarounds& workarounds) {
+ const gpu::GpuDriverBugWorkarounds& workarounds,
+ bool provide_av1_resolutions) {
TRACE_EVENT0("gpu,startup", "GetSupportedD3D11VideoDecoderResolutions");
SupportedResolutionRangeMap supported_resolutions;
@@ -245,7 +246,7 @@ SupportedResolutionRangeMap GetSupportedD3D11VideoDecoderResolutions(
const bool should_test_for_av1_support =
base::FeatureList::IsEnabled(kMediaFoundationAV1Decoding) &&
- !workarounds.disable_accelerated_av1_decode;
+ !workarounds.disable_accelerated_av1_decode && provide_av1_resolutions;
// Enumerate supported video profiles and look for the known profile for each
// codec. We first look through the the decoder profiles so we don't run N
diff --git a/chromium/media/gpu/windows/supported_profile_helpers.h b/chromium/media/gpu/windows/supported_profile_helpers.h
index 6e521d08ff6..98983db43e3 100644
--- a/chromium/media/gpu/windows/supported_profile_helpers.h
+++ b/chromium/media/gpu/windows/supported_profile_helpers.h
@@ -33,7 +33,8 @@ using SupportedResolutionRangeMap =
MEDIA_GPU_EXPORT
SupportedResolutionRangeMap GetSupportedD3D11VideoDecoderResolutions(
ComD3D11Device device,
- const gpu::GpuDriverBugWorkarounds& workarounds);
+ const gpu::GpuDriverBugWorkarounds& workarounds,
+ bool provide_av1_resolutions = true);
} // namespace media
diff --git a/chromium/media/gpu/windows/supported_profile_helpers_unittest.cc b/chromium/media/gpu/windows/supported_profile_helpers_unittest.cc
index 31a4fc6b08b..f93d3b60d2e 100644
--- a/chromium/media/gpu/windows/supported_profile_helpers_unittest.cc
+++ b/chromium/media/gpu/windows/supported_profile_helpers_unittest.cc
@@ -170,6 +170,25 @@ class SupportedResolutionResolverTest : public ::testing::Test {
base::flat_map<GUID, gfx::Size, GUIDComparison> max_size_for_guids_;
};
+TEST_F(SupportedResolutionResolverTest, CanDisableAV1) {
+ DONT_RUN_ON_WIN_7();
+
+ // Do all the things to normally enable AV1:
+ base::test::ScopedFeatureList scoped_feature_list;
+ scoped_feature_list.InitAndEnableFeature(kMediaFoundationAV1Decoding);
+
+ // Enable the av1 decoder.
+ EnableDecoders({DXVA_ModeAV1_VLD_Profile0});
+ SetMaxResolution(DXVA_ModeAV1_VLD_Profile0, kSquare8k);
+
+ const auto supported_resolutions = GetSupportedD3D11VideoDecoderResolutions(
+ mock_d3d11_device_, gpu_workarounds_, false);
+ auto av1_supported_res = supported_resolutions.find(AV1PROFILE_PROFILE_MAIN);
+
+ // There should be no supported av1 resolutions.
+ ASSERT_EQ(av1_supported_res, supported_resolutions.end());
+}
+
TEST_F(SupportedResolutionResolverTest, HasH264SupportByDefault) {
DONT_RUN_ON_WIN_7();
AssertDefaultSupport(
diff --git a/chromium/media/media_options.gni b/chromium/media/media_options.gni
index 2137fa508ca..ad57a1da525 100644
--- a/chromium/media/media_options.gni
+++ b/chromium/media/media_options.gni
@@ -9,7 +9,6 @@ import("//build/config/chromeos/ui_mode.gni")
import("//build/config/features.gni")
import("//media/gpu/args.gni")
import("//testing/libfuzzer/fuzzer_test.gni")
-import("//third_party/libaom/options.gni")
import("//third_party/libgav1/options.gni")
# Do not expand this list without double-checking with OWNERS, this is a list of
@@ -94,10 +93,12 @@ declare_args() {
# Enable HLS with SAMPLE-AES decryption.
enable_hls_sample_aes = proprietary_codecs && (is_chromecast || is_fuchsia)
- # Enable logging override, e.g. enable DVLOGs at build time.
- enable_logging_override = is_chromecast
+ # Enable logging override, e.g. enable DVLOGs through level 2 at build time.
+ # On Chromecast, these are logged as INFO.
+ # On Fuchsia, these are logged as VLOGs.
+ enable_logging_override = is_chromecast || is_fuchsia
- enable_dav1d_decoder = !is_android && !is_ios
+ enable_dav1d_decoder = !is_ios
# Enable browser managed persistent metadata storage for EME persistent
# session and persistent usage record session.
@@ -158,8 +159,8 @@ declare_args() {
# will be hosted in the mojo CDM service running in the CDM (utility) process.
# On Fuchsia, this is only enabled to build libclearkeycdm.so, the mojo CDM
# service is not used.
-enable_library_cdms = (is_linux && !is_chromecast) || is_chromeos || is_mac ||
- is_win || is_fuchsia
+enable_library_cdms = (is_linux && !is_chromecast) || is_chromeos_lacros ||
+ is_chromeos_ash || is_mac || is_win || is_fuchsia
declare_args() {
# When enabled, this feature allows developers to use a runtime flag to
@@ -177,8 +178,8 @@ declare_args() {
# Enable Storage ID which is used by CDMs. This is only available with chrome
# branding, but may be overridden by other embedders.
- enable_cdm_storage_id =
- enable_library_cdms && is_chrome_branded && (is_win || is_mac || is_ash)
+ enable_cdm_storage_id = enable_library_cdms && is_chrome_branded &&
+ (is_win || is_mac || is_chromeos_ash)
# If |enable_cdm_storage_id| is set, then an implementation specific key
# must also be provided. It can be provided by defining CDM_STORAGE_ID_KEY
@@ -206,7 +207,8 @@ if (is_chromecast) {
"video_decoder",
]
_default_mojo_media_host = "gpu"
-} else if (is_ash || is_mac || is_win || (is_linux && use_vaapi)) {
+} else if (is_chromeos_ash || is_mac || is_win ||
+ ((is_linux || is_chromeos_lacros) && use_vaapi)) {
_default_mojo_media_services = [ "video_decoder" ]
_default_mojo_media_host = "gpu"
}
diff --git a/chromium/media/midi/BUILD.gn b/chromium/media/midi/BUILD.gn
index 52329075c07..506d0b0c8c4 100644
--- a/chromium/media/midi/BUILD.gn
+++ b/chromium/media/midi/BUILD.gn
@@ -220,7 +220,7 @@ test("midi_unittests") {
# On LaCrOS, tests use ash-chrome as a window manager, thus the dependency.
# On other platforms, this target should not require Chrome to run.
- if (!is_lacros) {
+ if (!is_chromeos_lacros) {
assert_no_deps = [ "//chrome" ]
}
}
diff --git a/chromium/media/midi/java/src/org/chromium/midi/UsbMidiDeviceFactoryAndroid.java b/chromium/media/midi/java/src/org/chromium/midi/UsbMidiDeviceFactoryAndroid.java
index 9511ab17c51..4b66609678e 100644
--- a/chromium/media/midi/java/src/org/chromium/midi/UsbMidiDeviceFactoryAndroid.java
+++ b/chromium/media/midi/java/src/org/chromium/midi/UsbMidiDeviceFactoryAndroid.java
@@ -16,6 +16,7 @@ import android.hardware.usb.UsbManager;
import android.os.Parcelable;
import org.chromium.base.ContextUtils;
+import org.chromium.base.IntentUtils;
import org.chromium.base.annotations.CalledByNative;
import org.chromium.base.annotations.JNINamespace;
import org.chromium.base.annotations.NativeMethods;
@@ -150,7 +151,8 @@ class UsbMidiDeviceFactoryAndroid {
// There is at least one interface supporting MIDI.
mUsbManager.requestPermission(device,
PendingIntent.getBroadcast(ContextUtils.getApplicationContext(), 0,
- new Intent(ACTION_USB_PERMISSION), 0));
+ new Intent(ACTION_USB_PERMISSION),
+ IntentUtils.getPendingIntentMutabilityFlag(true)));
mRequestedDevices.add(device);
break;
}
diff --git a/chromium/media/midi/midi_manager_win.cc b/chromium/media/midi/midi_manager_win.cc
index 1b41d22d5f6..7397cdf554a 100644
--- a/chromium/media/midi/midi_manager_win.cc
+++ b/chromium/media/midi/midi_manager_win.cc
@@ -352,8 +352,7 @@ class MidiManagerWin::InPort final : public Port {
caps.wMid,
caps.wPid,
caps.vDriverVersion,
- base::WideToUTF8(
- base::string16(caps.szPname, wcslen(caps.szPname))),
+ base::WideToUTF8(std::wstring(caps.szPname, wcslen(caps.szPname))),
caps.ManufacturerGuid),
manager_(manager),
in_handle_(kInvalidInHandle),
@@ -471,8 +470,7 @@ class MidiManagerWin::OutPort final : public Port {
caps.wMid,
caps.wPid,
caps.vDriverVersion,
- base::WideToUTF8(
- base::string16(caps.szPname, wcslen(caps.szPname))),
+ base::WideToUTF8(std::wstring(caps.szPname, wcslen(caps.szPname))),
caps.ManufacturerGuid),
software_(caps.wTechnology == MOD_SWSYNTH),
out_handle_(kInvalidOutHandle) {}
diff --git a/chromium/media/midi/midi_manager_winrt.cc b/chromium/media/midi/midi_manager_winrt.cc
index 891e1d22a60..9ec6bad7047 100644
--- a/chromium/media/midi/midi_manager_winrt.cc
+++ b/chromium/media/midi/midi_manager_winrt.cc
@@ -143,7 +143,7 @@ void GetDevPropString(DEVINST handle,
if (cr != CR_SUCCESS)
VLOG(1) << "CM_Get_DevNode_Property failed: CONFIGRET 0x" << std::hex << cr;
else
- *out = base::WideToUTF8(reinterpret_cast<base::char16*>(buffer.get()));
+ *out = base::WideToUTF8(reinterpret_cast<wchar_t*>(buffer.get()));
}
// Retrieves manufacturer (provider) and version information of underlying
@@ -163,7 +163,7 @@ void GetDevPropString(DEVINST handle,
void GetDriverInfoFromDeviceId(const std::string& dev_id,
std::string* out_manufacturer,
std::string* out_driver_version) {
- base::string16 dev_instance_id =
+ std::wstring dev_instance_id =
base::UTF8ToWide(dev_id.substr(4, dev_id.size() - 43));
base::ReplaceChars(dev_instance_id, L"#", L"\\", &dev_instance_id);
@@ -208,7 +208,7 @@ struct MidiPort {
template <typename InterfaceType,
typename RuntimeType,
typename StaticsInterfaceType,
- base::char16 const* runtime_class_id>
+ wchar_t const* runtime_class_id>
class MidiManagerWinrt::MidiPortManager {
public:
// MidiPortManager instances should be constructed on the kComTaskRunner.
diff --git a/chromium/media/midi/midi_manager_winrt.h b/chromium/media/midi/midi_manager_winrt.h
index e059dbc00b3..accf60fc7e9 100644
--- a/chromium/media/midi/midi_manager_winrt.h
+++ b/chromium/media/midi/midi_manager_winrt.h
@@ -35,7 +35,7 @@ class MIDI_EXPORT MidiManagerWinrt final : public MidiManager {
template <typename InterfaceType,
typename RuntimeType,
typename StaticsInterfaceType,
- base::char16 const* runtime_class_id>
+ wchar_t const* runtime_class_id>
class MidiPortManager;
// Callbacks on kComTaskRunner.
diff --git a/chromium/media/mojo/DEPS b/chromium/media/mojo/DEPS
index 63fab4423b5..65aa40597bf 100644
--- a/chromium/media/mojo/DEPS
+++ b/chromium/media/mojo/DEPS
@@ -9,6 +9,7 @@ include_rules = [
# remove this dependency.
"+sandbox/mac",
+ "+services/media_session",
"+services/metrics",
"+services/service_manager",
diff --git a/chromium/media/mojo/clients/BUILD.gn b/chromium/media/mojo/clients/BUILD.gn
index 994a5f13c59..a95b2688bee 100644
--- a/chromium/media/mojo/clients/BUILD.gn
+++ b/chromium/media/mojo/clients/BUILD.gn
@@ -88,6 +88,14 @@ source_set("clients") {
"//ui/gl:gl",
]
}
+ if (is_win) {
+ sources += [
+ "win/media_foundation_renderer_client.cc",
+ "win/media_foundation_renderer_client.h",
+ "win/media_foundation_renderer_client_factory.cc",
+ "win/media_foundation_renderer_client_factory.h",
+ ]
+ }
}
source_set("unit_tests") {
diff --git a/chromium/media/mojo/clients/mojo_android_overlay.cc b/chromium/media/mojo/clients/mojo_android_overlay.cc
index 4a3cc4ef02d..5652af5f442 100644
--- a/chromium/media/mojo/clients/mojo_android_overlay.cc
+++ b/chromium/media/mojo/clients/mojo_android_overlay.cc
@@ -81,6 +81,15 @@ void MojoAndroidOverlay::OnDestroyed() {
// signal that we should do that, since it still might be in use.
}
+void MojoAndroidOverlay::OnSynchronouslyDestroyed(
+ OnSynchronouslyDestroyedCallback done_cb) {
+ // Do what we normally do, but do so synchronously.
+ OnDestroyed();
+ // On completion of RunSurfaceDestroyedCallbacks, the surface must no
+ // longer be in use.
+ std::move(done_cb).Run();
+}
+
void MojoAndroidOverlay::OnPowerEfficientState(bool is_power_efficient) {
if (config_.power_cb)
config_.power_cb.Run(this, is_power_efficient);
diff --git a/chromium/media/mojo/clients/mojo_android_overlay.h b/chromium/media/mojo/clients/mojo_android_overlay.h
index 851870e4348..333a537e8b1 100644
--- a/chromium/media/mojo/clients/mojo_android_overlay.h
+++ b/chromium/media/mojo/clients/mojo_android_overlay.h
@@ -33,6 +33,8 @@ class MojoAndroidOverlay : public AndroidOverlay,
// mojom::AndroidOverlayClient
void OnSurfaceReady(uint64_t surface_key) override;
void OnDestroyed() override;
+ void OnSynchronouslyDestroyed(
+ OnSynchronouslyDestroyedCallback done_cb) override;
void OnPowerEfficientState(bool is_power_efficient) override;
private:
diff --git a/chromium/media/mojo/clients/mojo_audio_decoder.cc b/chromium/media/mojo/clients/mojo_audio_decoder.cc
index 092c50fe4db..2bf1b1a59b9 100644
--- a/chromium/media/mojo/clients/mojo_audio_decoder.cc
+++ b/chromium/media/mojo/clients/mojo_audio_decoder.cc
@@ -47,6 +47,10 @@ bool MojoAudioDecoder::SupportsDecryption() const {
#endif
}
+AudioDecoderType MojoAudioDecoder::GetDecoderType() const {
+ return decoder_type_;
+}
+
std::string MojoAudioDecoder::GetDisplayName() const {
return "MojoAudioDecoder";
}
@@ -199,11 +203,13 @@ void MojoAudioDecoder::OnConnectionError() {
}
void MojoAudioDecoder::OnInitialized(const Status& status,
- bool needs_bitstream_conversion) {
+ bool needs_bitstream_conversion,
+ AudioDecoderType decoder_type) {
DVLOG(1) << __func__ << ": success:" << status.is_ok();
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
needs_bitstream_conversion_ = needs_bitstream_conversion;
+ decoder_type_ = decoder_type;
if (status.is_ok() && !mojo_decoder_buffer_writer_) {
mojo::ScopedDataPipeConsumerHandle remote_consumer_handle;
diff --git a/chromium/media/mojo/clients/mojo_audio_decoder.h b/chromium/media/mojo/clients/mojo_audio_decoder.h
index ded8cd2ef76..fbb85ab5f72 100644
--- a/chromium/media/mojo/clients/mojo_audio_decoder.h
+++ b/chromium/media/mojo/clients/mojo_audio_decoder.h
@@ -36,7 +36,8 @@ class MojoAudioDecoder final : public AudioDecoder,
// Decoder implementation
bool IsPlatformDecoder() const final;
bool SupportsDecryption() const final;
- std::string GetDisplayName() const final;
+ std::string GetDisplayName() const override;
+ AudioDecoderType GetDecoderType() const override;
// AudioDecoder implementation.
void Initialize(const AudioDecoderConfig& config,
@@ -66,7 +67,9 @@ class MojoAudioDecoder final : public AudioDecoder,
void FailInit(InitCB init_cb, Status err);
// Called when |remote_decoder_| finished initialization.
- void OnInitialized(const Status& status, bool needs_bitstream_conversion);
+ void OnInitialized(const Status& status,
+ bool needs_bitstream_conversion,
+ AudioDecoderType decoder_type);
// Called when |remote_decoder_| accepted or rejected DecoderBuffer.
void OnDecodeStatus(const Status& decode_status);
@@ -103,6 +106,7 @@ class MojoAudioDecoder final : public AudioDecoder,
// Flag telling whether this decoder requires bitstream conversion.
// Passed from |remote_decoder_| as a result of its initialization.
bool needs_bitstream_conversion_ = false;
+ AudioDecoderType decoder_type_ = AudioDecoderType::kUnknown;
DISALLOW_COPY_AND_ASSIGN(MojoAudioDecoder);
};
diff --git a/chromium/media/mojo/clients/mojo_cdm.cc b/chromium/media/mojo/clients/mojo_cdm.cc
index 5ed0040ef79..403c19f040f 100644
--- a/chromium/media/mojo/clients/mojo_cdm.cc
+++ b/chromium/media/mojo/clients/mojo_cdm.cc
@@ -58,6 +58,12 @@ MojoCdm::MojoCdm(mojo::Remote<mojom::ContentDecryptionModule> remote_cdm,
DCHECK(session_keys_change_cb_);
DCHECK(session_expiration_update_cb_);
+#if defined(OS_WIN)
+ // TODO(xhwang): Need a way to implement RequiresMediaFoundationRenderer().
+ // The plan is to pass back this info when we create the CDM, e.g. in the
+ // `cdm_created_cb` of `MojoCdmFactory::Create()`.
+#endif // defined(OS_WIN)
+
remote_cdm_->SetClient(client_receiver_.BindNewEndpointAndPassRemote());
// Report a false event here as a baseline.
@@ -83,7 +89,7 @@ MojoCdm::~MojoCdm() {
}
// Reject any outstanding promises and close all the existing sessions.
- cdm_promise_adapter_.Clear();
+ cdm_promise_adapter_.Clear(CdmPromiseAdapter::ClearReason::kDestruction);
cdm_session_tracker_.CloseRemainingSessions(session_closed_cb_);
}
@@ -103,7 +109,7 @@ void MojoCdm::OnConnectionError(uint32_t custom_reason,
// As communication with the remote CDM is broken, reject any outstanding
// promises and close all the existing sessions.
- cdm_promise_adapter_.Clear();
+ cdm_promise_adapter_.Clear(CdmPromiseAdapter::ClearReason::kConnectionError);
cdm_session_tracker_.CloseRemainingSessions(session_closed_cb_);
}
@@ -272,6 +278,15 @@ base::Optional<base::UnguessableToken> MojoCdm::GetCdmId() const {
return cdm_id_;
}
+#if defined(OS_WIN)
+bool MojoCdm::RequiresMediaFoundationRenderer() {
+ DVLOG(2) << __func__ << " this:" << this
+ << " is_mf_renderer_content_:" << is_mf_renderer_content_;
+
+ return is_mf_renderer_content_;
+}
+#endif // defined(OS_WIN)
+
void MojoCdm::OnSessionMessage(const std::string& session_id,
MessageType message_type,
const std::vector<uint8_t>& message) {
diff --git a/chromium/media/mojo/clients/mojo_cdm.h b/chromium/media/mojo/clients/mojo_cdm.h
index 830edc13cb0..b0f88e5c01c 100644
--- a/chromium/media/mojo/clients/mojo_cdm.h
+++ b/chromium/media/mojo/clients/mojo_cdm.h
@@ -15,6 +15,7 @@
#include "base/memory/weak_ptr.h"
#include "base/synchronization/lock.h"
#include "base/threading/thread_checker.h"
+#include "build/build_config.h"
#include "media/base/callback_registry.h"
#include "media/base/cdm_context.h"
#include "media/base/cdm_initialized_promise.h"
@@ -78,6 +79,9 @@ class MojoCdm final : public ContentDecryptionModule,
std::unique_ptr<CallbackRegistration> RegisterEventCB(EventCB event_cb) final;
Decryptor* GetDecryptor() final;
base::Optional<base::UnguessableToken> GetCdmId() const final;
+#if defined(OS_WIN)
+ bool RequiresMediaFoundationRenderer() final;
+#endif // defined(OS_WIN)
private:
~MojoCdm() final;
@@ -148,6 +152,11 @@ class MojoCdm final : public ContentDecryptionModule,
CallbackRegistry<EventCB::RunType> event_callbacks_;
+#if defined(OS_WIN)
+ // Whether the current content is rendered by MediaFoundationRenderer.
+ bool is_mf_renderer_content_ = false;
+#endif // defined(OS_WIN)
+
// This must be the last member.
base::WeakPtrFactory<MojoCdm> weak_factory_{this};
diff --git a/chromium/media/mojo/clients/mojo_decoder_factory.h b/chromium/media/mojo/clients/mojo_decoder_factory.h
index 61283c60546..9bce9003f1d 100644
--- a/chromium/media/mojo/clients/mojo_decoder_factory.h
+++ b/chromium/media/mojo/clients/mojo_decoder_factory.h
@@ -26,6 +26,8 @@ class MojoDecoderFactory final : public DecoderFactory {
MediaLog* media_log,
std::vector<std::unique_ptr<AudioDecoder>>* audio_decoders) final;
+ // TODO(crbug.com/1173503): Implement GetSupportedVideoDecoderConfigs.
+
void CreateVideoDecoders(
scoped_refptr<base::SequencedTaskRunner> task_runner,
GpuVideoAcceleratorFactories* gpu_factories,
diff --git a/chromium/media/mojo/clients/mojo_decryptor.cc b/chromium/media/mojo/clients/mojo_decryptor.cc
index 11888924e53..33f472fb0c0 100644
--- a/chromium/media/mojo/clients/mojo_decryptor.cc
+++ b/chromium/media/mojo/clients/mojo_decryptor.cc
@@ -31,13 +31,6 @@ void ReleaseFrameResource(
releaser.reset();
}
-// Converts a repeating callback to a once callback with the same signature so
-// that it can be used with mojo::WrapCallbackWithDefaultInvokeIfNotRun.
-template <typename T>
-base::OnceCallback<T> ToOnceCallback(const base::RepeatingCallback<T>& cb) {
- return static_cast<base::OnceCallback<T>>(cb);
-}
-
} // namespace
// TODO(xhwang): Consider adding an Initialize() to reduce the amount of work
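The now-removed ToOnceCallback() shim existed only to feed mojo::WrapCallbackWithDefaultInvokeIfNotRun; a minimal sketch of what that wrapper guarantees (the OnDecoded callback is hypothetical):

    #include "mojo/public/cpp/bindings/callback_helpers.h"

    void OnDecoded(media::Decryptor::Status status,
                   const media::Decryptor::AudioFrames& frames);  // Hypothetical.

    // If |guarded| is destroyed without being run (e.g. the message pipe is
    // closed mid-call), it still fires once with (kError, empty frames).
    auto guarded = mojo::WrapCallbackWithDefaultInvokeIfNotRun(
        base::BindOnce(&OnDecoded), media::Decryptor::kError,
        media::Decryptor::AudioFrames());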
@@ -140,35 +133,34 @@ void MojoDecryptor::InitializeVideoDecoder(const VideoDecoderConfig& config,
void MojoDecryptor::DecryptAndDecodeAudio(
scoped_refptr<DecoderBuffer> encrypted,
- const AudioDecodeCB& audio_decode_cb) {
+ AudioDecodeCB audio_decode_cb) {
DVLOG(3) << __func__ << ": " << encrypted->AsHumanReadableString();
DCHECK(thread_checker_.CalledOnValidThread());
mojom::DecoderBufferPtr mojo_buffer =
audio_buffer_writer_->WriteDecoderBuffer(std::move(encrypted));
if (!mojo_buffer) {
- audio_decode_cb.Run(kError, AudioFrames());
+ std::move(audio_decode_cb).Run(kError, AudioFrames());
return;
}
remote_decryptor_->DecryptAndDecodeAudio(
std::move(mojo_buffer),
- base::BindOnce(
- &MojoDecryptor::OnAudioDecoded, weak_factory_.GetWeakPtr(),
- mojo::WrapCallbackWithDefaultInvokeIfNotRun(
- ToOnceCallback(audio_decode_cb), kError, AudioFrames())));
+ base::BindOnce(&MojoDecryptor::OnAudioDecoded, weak_factory_.GetWeakPtr(),
+ mojo::WrapCallbackWithDefaultInvokeIfNotRun(
+ std::move(audio_decode_cb), kError, AudioFrames())));
}
void MojoDecryptor::DecryptAndDecodeVideo(
scoped_refptr<DecoderBuffer> encrypted,
- const VideoDecodeCB& video_decode_cb) {
+ VideoDecodeCB video_decode_cb) {
DVLOG(3) << __func__ << ": " << encrypted->AsHumanReadableString();
DCHECK(thread_checker_.CalledOnValidThread());
mojom::DecoderBufferPtr mojo_buffer =
video_buffer_writer_->WriteDecoderBuffer(std::move(encrypted));
if (!mojo_buffer) {
- video_decode_cb.Run(kError, nullptr);
+ std::move(video_decode_cb).Run(kError, nullptr);
return;
}
@@ -176,7 +168,7 @@ void MojoDecryptor::DecryptAndDecodeVideo(
std::move(mojo_buffer),
base::BindOnce(&MojoDecryptor::OnVideoDecoded, weak_factory_.GetWeakPtr(),
mojo::WrapCallbackWithDefaultInvokeIfNotRun(
- ToOnceCallback(video_decode_cb), kError, nullptr)));
+ std::move(video_decode_cb), kError, nullptr)));
}
void MojoDecryptor::ResetDecoder(StreamType stream_type) {
@@ -223,7 +215,7 @@ void MojoDecryptor::OnBufferRead(DecryptCB decrypt_cb,
}
void MojoDecryptor::OnAudioDecoded(
- AudioDecodeOnceCB audio_decode_cb,
+ AudioDecodeCB audio_decode_cb,
Status status,
std::vector<mojom::AudioBufferPtr> audio_buffers) {
DVLOG_IF(1, status != kSuccess) << __func__ << "(" << status << ")";
@@ -238,7 +230,7 @@ void MojoDecryptor::OnAudioDecoded(
}
void MojoDecryptor::OnVideoDecoded(
- VideoDecodeOnceCB video_decode_cb,
+ VideoDecodeCB video_decode_cb,
Status status,
const scoped_refptr<VideoFrame>& video_frame,
mojo::PendingRemote<mojom::FrameResourceReleaser> releaser) {
diff --git a/chromium/media/mojo/clients/mojo_decryptor.h b/chromium/media/mojo/clients/mojo_decryptor.h
index 944c6005614..932e9456ee9 100644
--- a/chromium/media/mojo/clients/mojo_decryptor.h
+++ b/chromium/media/mojo/clients/mojo_decryptor.h
@@ -43,23 +43,13 @@ class MojoDecryptor final : public Decryptor {
void InitializeVideoDecoder(const VideoDecoderConfig& config,
DecoderInitCB init_cb) final;
void DecryptAndDecodeAudio(scoped_refptr<DecoderBuffer> encrypted,
- const AudioDecodeCB& audio_decode_cb) final;
+ AudioDecodeCB audio_decode_cb) final;
void DecryptAndDecodeVideo(scoped_refptr<DecoderBuffer> encrypted,
- const VideoDecodeCB& video_decode_cb) final;
+ VideoDecodeCB video_decode_cb) final;
void ResetDecoder(StreamType stream_type) final;
void DeinitializeDecoder(StreamType stream_type) final;
private:
- // These are once callbacks corresponding to repeating callbacks DecryptCB,
- // DecoderInitCB, AudioDecodeCB and VideoDecodeCB. They are needed so that we
- // can use WrapCallbackWithDefaultInvokeIfNotRun to make sure callbacks always
- // run.
- // TODO(xhwang): Update Decryptor to use OnceCallback. The change is easy,
- // but updating tests is hard given gmock doesn't support move-only types.
- // See http://crbug.com/751838
- using AudioDecodeOnceCB = base::OnceCallback<AudioDecodeCB::RunType>;
- using VideoDecodeOnceCB = base::OnceCallback<VideoDecodeCB::RunType>;
-
// Called when a buffer is decrypted.
void OnBufferDecrypted(DecryptCB decrypt_cb,
Status status,
@@ -67,11 +57,11 @@ class MojoDecryptor final : public Decryptor {
void OnBufferRead(DecryptCB decrypt_cb,
Status status,
scoped_refptr<DecoderBuffer> buffer);
- void OnAudioDecoded(AudioDecodeOnceCB audio_decode_cb,
+ void OnAudioDecoded(AudioDecodeCB audio_decode_cb,
Status status,
std::vector<mojom::AudioBufferPtr> audio_buffers);
void OnVideoDecoded(
- VideoDecodeOnceCB video_decode_cb,
+ VideoDecodeCB video_decode_cb,
Status status,
const scoped_refptr<VideoFrame>& video_frame,
mojo::PendingRemote<mojom::FrameResourceReleaser> releaser);
diff --git a/chromium/media/mojo/clients/mojo_decryptor_unittest.cc b/chromium/media/mojo/clients/mojo_decryptor_unittest.cc
index 6de784b000c..fa9638f6245 100644
--- a/chromium/media/mojo/clients/mojo_decryptor_unittest.cc
+++ b/chromium/media/mojo/clients/mojo_decryptor_unittest.cc
@@ -67,9 +67,8 @@ class MojoDecryptorTest : public ::testing::Test {
mojo_decryptor_service_.reset();
}
- void ReturnSharedBufferVideoFrame(
- scoped_refptr<DecoderBuffer> encrypted,
- const Decryptor::VideoDecodeCB& video_decode_cb) {
+ void ReturnSharedBufferVideoFrame(scoped_refptr<DecoderBuffer> encrypted,
+ Decryptor::VideoDecodeCB video_decode_cb) {
// We don't care about the encrypted data, just create a simple VideoFrame.
scoped_refptr<VideoFrame> frame(
MojoSharedBufferVideoFrame::CreateDefaultForTesting(
@@ -81,11 +80,11 @@ class MojoDecryptorTest : public ::testing::Test {
// Currently freeing buffers only works for MojoSharedMemory, so make
// sure |frame| is of that type.
EXPECT_EQ(VideoFrame::STORAGE_MOJO_SHARED_BUFFER, frame->storage_type());
- video_decode_cb.Run(Decryptor::kSuccess, std::move(frame));
+ std::move(video_decode_cb).Run(Decryptor::kSuccess, std::move(frame));
}
void ReturnAudioFrames(scoped_refptr<DecoderBuffer> encrypted,
- const Decryptor::AudioDecodeCB& audio_decode_cb) {
+ Decryptor::AudioDecodeCB audio_decode_cb) {
const ChannelLayout kChannelLayout = CHANNEL_LAYOUT_4_0;
const int kSampleRate = 48000;
const base::TimeDelta start_time = base::TimeDelta::FromSecondsD(1000.0);
@@ -94,13 +93,14 @@ class MojoDecryptorTest : public ::testing::Test {
ChannelLayoutToChannelCount(kChannelLayout), kSampleRate, 0.0f, 1.0f,
kSampleRate / 10, start_time);
Decryptor::AudioFrames audio_frames = {audio_buffer};
- audio_decode_cb.Run(Decryptor::kSuccess, audio_frames);
+ std::move(audio_decode_cb).Run(Decryptor::kSuccess, audio_frames);
}
void ReturnEOSVideoFrame(scoped_refptr<DecoderBuffer> encrypted,
- const Decryptor::VideoDecodeCB& video_decode_cb) {
+ Decryptor::VideoDecodeCB video_decode_cb) {
// Simply create and return an End-Of-Stream VideoFrame.
- video_decode_cb.Run(Decryptor::kSuccess, VideoFrame::CreateEOSFrame());
+ std::move(video_decode_cb)
+ .Run(Decryptor::kSuccess, VideoFrame::CreateEOSFrame());
}
MOCK_METHOD2(AudioDecoded,
diff --git a/chromium/media/mojo/clients/mojo_renderer.cc b/chromium/media/mojo/clients/mojo_renderer.cc
index a1d3343e01c..3e3a752134b 100644
--- a/chromium/media/mojo/clients/mojo_renderer.cc
+++ b/chromium/media/mojo/clients/mojo_renderer.cc
@@ -248,16 +248,23 @@ void MojoRenderer::OnEnded() {
client_->OnEnded();
}
-void MojoRenderer::OnError() {
+void MojoRenderer::OnError(const Status& status) {
DVLOG(1) << __func__;
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK(!init_cb_);
encountered_error_ = true;
+ base::Optional<PipelineStatus> pipeline_status =
+ StatusCodeToPipelineStatus(status.code());
+
+ // If an unexpected status code is encountered, default
+ // back to a decode error.
+ if (!pipeline_status) {
+ // TODO(crbug.com/1153465): Log status code that failed to convert.
+ pipeline_status = PipelineStatus::PIPELINE_ERROR_DECODE;
+ }
- // TODO(tim): Should we plumb error code from remote renderer?
- // http://crbug.com/410451.
- client_->OnError(PIPELINE_ERROR_DECODE);
+ client_->OnError(*pipeline_status);
}
void MojoRenderer::OnVideoNaturalSizeChange(const gfx::Size& size) {
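A compact restatement of the fallback logic added to MojoRenderer::OnError() above, assuming StatusCodeToPipelineStatus() returns base::Optional<PipelineStatus>:

    PipelineStatus ToPipelineStatus(const Status& status) {
      // Unknown remote codes degrade to a decode error rather than crashing.
      return StatusCodeToPipelineStatus(status.code())
          .value_or(PipelineStatus::PIPELINE_ERROR_DECODE);
    }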
diff --git a/chromium/media/mojo/clients/mojo_renderer.h b/chromium/media/mojo/clients/mojo_renderer.h
index c1c22837447..50354d88b73 100644
--- a/chromium/media/mojo/clients/mojo_renderer.h
+++ b/chromium/media/mojo/clients/mojo_renderer.h
@@ -72,7 +72,7 @@ class MojoRenderer : public Renderer, public mojom::RendererClient {
void OnBufferingStateChange(BufferingState state,
BufferingStateChangeReason reason) override;
void OnEnded() override;
- void OnError() override;
+ void OnError(const Status& status) override;
void OnAudioConfigChange(const AudioDecoderConfig& config) override;
void OnVideoConfigChange(const VideoDecoderConfig& config) override;
void OnVideoNaturalSizeChange(const gfx::Size& size) override;
@@ -128,7 +128,7 @@ class MojoRenderer : public Renderer, public mojom::RendererClient {
// Mojo demuxer streams.
// Owned by MojoRenderer instead of remote mojom::Renderer
- // becuase these demuxer streams need to be destroyed as soon as |this| is
+ // because these demuxer streams need to be destroyed as soon as |this| is
// destroyed. The local demuxer streams returned by MediaResource cannot be
// used after |this| is destroyed.
// TODO(alokp): Add tests for MojoDemuxerStreamImpl.
diff --git a/chromium/media/mojo/clients/mojo_renderer_factory.cc b/chromium/media/mojo/clients/mojo_renderer_factory.cc
index add8975c735..3b317f7978d 100644
--- a/chromium/media/mojo/clients/mojo_renderer_factory.cc
+++ b/chromium/media/mojo/clients/mojo_renderer_factory.cc
@@ -44,6 +44,25 @@ std::unique_ptr<Renderer> MojoRendererFactory::CreateRenderer(
std::move(renderer_remote));
}
+#if defined(OS_WIN)
+std::unique_ptr<MojoRenderer>
+MojoRendererFactory::CreateMediaFoundationRenderer(
+ mojo::PendingReceiver<mojom::MediaFoundationRendererExtension>
+ renderer_extension_receiver,
+ const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner,
+ VideoRendererSink* video_renderer_sink) {
+ DCHECK(interface_factory_);
+ mojo::PendingRemote<mojom::Renderer> renderer_remote;
+ interface_factory_->CreateMediaFoundationRenderer(
+ renderer_remote.InitWithNewPipeAndPassReceiver(),
+ std::move(renderer_extension_receiver));
+
+ return std::make_unique<MojoRenderer>(
+ media_task_runner, /*video_overlay_factory=*/nullptr, video_renderer_sink,
+ std::move(renderer_remote));
+}
+#endif // defined(OS_WIN)
+
#if BUILDFLAG(ENABLE_CAST_RENDERER)
std::unique_ptr<MojoRenderer> MojoRendererFactory::CreateCastRenderer(
const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner,
diff --git a/chromium/media/mojo/clients/mojo_renderer_factory.h b/chromium/media/mojo/clients/mojo_renderer_factory.h
index 2459433e5ee..bab4060fa2b 100644
--- a/chromium/media/mojo/clients/mojo_renderer_factory.h
+++ b/chromium/media/mojo/clients/mojo_renderer_factory.h
@@ -44,6 +44,14 @@ class MojoRendererFactory final : public RendererFactory {
RequestOverlayInfoCB request_overlay_info_cb,
const gfx::ColorSpace& target_color_space) final;
+#if defined(OS_WIN)
+ std::unique_ptr<MojoRenderer> CreateMediaFoundationRenderer(
+ mojo::PendingReceiver<mojom::MediaFoundationRendererExtension>
+ renderer_extension_receiver,
+ const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner,
+ VideoRendererSink* video_renderer_sink);
+#endif // defined(OS_WIN)
+
#if BUILDFLAG(ENABLE_CAST_RENDERER)
std::unique_ptr<MojoRenderer> CreateCastRenderer(
const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner,
diff --git a/chromium/media/mojo/clients/mojo_video_decoder.cc b/chromium/media/mojo/clients/mojo_video_decoder.cc
index 78da171d12a..a093a377cd6 100644
--- a/chromium/media/mojo/clients/mojo_video_decoder.cc
+++ b/chromium/media/mojo/clients/mojo_video_decoder.cc
@@ -143,6 +143,10 @@ bool MojoVideoDecoder::SupportsDecryption() const {
#endif
}
+VideoDecoderType MojoVideoDecoder::GetDecoderType() const {
+ return decoder_type_;
+}
+
std::string MojoVideoDecoder::GetDisplayName() const {
return "MojoVideoDecoder";
}
@@ -207,12 +211,14 @@ void MojoVideoDecoder::Initialize(const VideoDecoderConfig& config,
void MojoVideoDecoder::OnInitializeDone(const Status& status,
bool needs_bitstream_conversion,
- int32_t max_decode_requests) {
+ int32_t max_decode_requests,
+ VideoDecoderType decoder_type) {
DVLOG(1) << __func__ << ": status = " << std::hex << status.code();
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
initialized_ = status.is_ok();
needs_bitstream_conversion_ = needs_bitstream_conversion;
max_decode_requests_ = max_decode_requests;
+ decoder_type_ = decoder_type;
std::move(init_cb_).Run(status);
}
@@ -352,6 +358,11 @@ int MojoVideoDecoder::GetMaxDecodeRequests() const {
return max_decode_requests_;
}
+bool MojoVideoDecoder::IsOptimizedForRTC() const {
+ DVLOG(3) << __func__;
+ return true;
+}
+
void MojoVideoDecoder::BindRemoteDecoder() {
DVLOG(3) << __func__;
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
diff --git a/chromium/media/mojo/clients/mojo_video_decoder.h b/chromium/media/mojo/clients/mojo_video_decoder.h
index 201b4ecdd74..b5fdef8375d 100644
--- a/chromium/media/mojo/clients/mojo_video_decoder.h
+++ b/chromium/media/mojo/clients/mojo_video_decoder.h
@@ -60,7 +60,8 @@ class MojoVideoDecoder final : public VideoDecoder,
// Decoder implementation
bool IsPlatformDecoder() const final;
bool SupportsDecryption() const final;
- std::string GetDisplayName() const final;
+ std::string GetDisplayName() const override;
+ VideoDecoderType GetDecoderType() const final;
// VideoDecoder implementation.
void Initialize(const VideoDecoderConfig& config,
@@ -74,6 +75,7 @@ class MojoVideoDecoder final : public VideoDecoder,
bool NeedsBitstreamConversion() const final;
bool CanReadWithoutStalling() const final;
int GetMaxDecodeRequests() const final;
+ bool IsOptimizedForRTC() const final;
// mojom::VideoDecoderClient implementation.
void OnVideoFrameDecoded(
@@ -91,7 +93,8 @@ class MojoVideoDecoder final : public VideoDecoder,
void FailInit(InitCB init_cb, Status err);
void OnInitializeDone(const Status& status,
bool needs_bitstream_conversion,
- int32_t max_decode_requests);
+ int32_t max_decode_requests,
+ VideoDecoderType decoder_type);
void OnDecodeDone(uint64_t decode_id, const Status& status);
void OnResetDone();
@@ -146,6 +149,7 @@ class MojoVideoDecoder final : public VideoDecoder,
bool initialized_ = false;
bool needs_bitstream_conversion_ = false;
bool can_read_without_stalling_ = true;
+ VideoDecoderType decoder_type_ = VideoDecoderType::kUnknown;
// True if UMA metrics of success/failure after first few seconds of playback
// have been already reported.
diff --git a/chromium/media/mojo/clients/win/media_foundation_renderer_client.cc b/chromium/media/mojo/clients/win/media_foundation_renderer_client.cc
new file mode 100644
index 00000000000..3bb5f2d15a7
--- /dev/null
+++ b/chromium/media/mojo/clients/win/media_foundation_renderer_client.cc
@@ -0,0 +1,403 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/mojo/clients/win/media_foundation_renderer_client.h"
+
+#include <utility>
+
+#include "base/callback_helpers.h"
+#include "base/metrics/histogram_functions.h"
+#include "base/metrics/histogram_macros.h"
+#include "media/base/win/mf_helpers.h"
+
+namespace media {
+
+MediaFoundationRendererClient::MediaFoundationRendererClient(
+ mojo::PendingRemote<RendererExtension> renderer_extension_remote,
+ scoped_refptr<base::SingleThreadTaskRunner> media_task_runner,
+ scoped_refptr<base::SingleThreadTaskRunner> compositor_task_runner,
+ std::unique_ptr<media::MojoRenderer> mojo_renderer,
+ media::VideoRendererSink* sink)
+ : mojo_renderer_(std::move(mojo_renderer)),
+ sink_(sink),
+ media_task_runner_(std::move(media_task_runner)),
+ compositor_task_runner_(std::move(compositor_task_runner)),
+ delayed_bind_renderer_extension_remote_(
+ std::move(renderer_extension_remote)) {
+ DVLOG_FUNC(1);
+}
+
+MediaFoundationRendererClient::~MediaFoundationRendererClient() {
+ DVLOG_FUNC(1);
+ if (video_rendering_started_) {
+ sink_->Stop();
+ }
+}
+
+void MediaFoundationRendererClient::Initialize(MediaResource* media_resource,
+ RendererClient* client,
+ PipelineStatusCallback init_cb) {
+ DVLOG_FUNC(1);
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+ DCHECK(!init_cb_);
+
+ // Consume and bind the delayed PendingRemote now that we
+ // are on |media_task_runner_|.
+ renderer_extension_remote_.Bind(
+ std::move(delayed_bind_renderer_extension_remote_), media_task_runner_);
+
+ // Handle unexpected mojo pipe disconnection, such as the "mf_cdm" utility
+ // process crashing or being killed via the browser task manager.
+ renderer_extension_remote_.set_disconnect_handler(
+ base::BindOnce(&MediaFoundationRendererClient::OnConnectionError,
+ base::Unretained(this)));
+
+ client_ = client;
+ init_cb_ = std::move(init_cb);
+
+ const std::vector<media::DemuxerStream*> media_streams =
+ media_resource->GetAllStreams();
+ for (const media::DemuxerStream* stream : media_streams) {
+ if (stream->type() == media::DemuxerStream::Type::VIDEO) {
+ has_video_ = true;
+ break;
+ }
+ }
+
+ mojo_renderer_->Initialize(
+ media_resource, this,
+ base::BindOnce(
+ &MediaFoundationRendererClient::OnRemoteRendererInitialized,
+ weak_factory_.GetWeakPtr()));
+}
+
+void MediaFoundationRendererClient::OnConnectionError() {
+ DVLOG_FUNC(1);
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ if (waiting_for_dcomp_surface_handle_) {
+ OnReceivedRemoteDCOMPSurface(mojo::ScopedHandle());
+ }
+}
+
+void MediaFoundationRendererClient::OnRemoteRendererInitialized(
+ PipelineStatus status) {
+ DVLOG_FUNC(1) << "status=" << status;
+
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+ if (status != media::PipelineStatus::PIPELINE_OK) {
+ DCHECK(!init_cb_.is_null());
+ std::move(init_cb_).Run(status);
+ return;
+ }
+
+ if (has_video_) {
+ // TODO(frankli): Add code to init DCOMPTextureWrapper.
+ } else {
+ std::move(init_cb_).Run(status);
+ }
+}
+
+void MediaFoundationRendererClient::OnDCOMPSurfaceHandleCreated(bool success) {
+ if (!media_task_runner_->BelongsToCurrentThread()) {
+ media_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &MediaFoundationRendererClient::OnDCOMPSurfaceHandleCreated,
+ weak_factory_.GetWeakPtr(), success));
+ return;
+ }
+
+ DVLOG_FUNC(1);
+ DCHECK(has_video_);
+
+ dcomp_surface_handle_bound_ = true;
+ return;
+}
+
+void MediaFoundationRendererClient::OnReceivedRemoteDCOMPSurface(
+ mojo::ScopedHandle surface_handle) {
+ DVLOG_FUNC(1);
+ DCHECK(has_video_);
+ DCHECK(surface_handle.is_valid());
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+
+ waiting_for_dcomp_surface_handle_ = false;
+ base::win::ScopedHandle local_handle =
+ mojo::UnwrapPlatformHandle(std::move(surface_handle)).TakeHandle();
+ RegisterDCOMPSurfaceHandleInGPUProcess(std::move(local_handle));
+}
+
+void MediaFoundationRendererClient::RegisterDCOMPSurfaceHandleInGPUProcess(
+ base::win::ScopedHandle surface_handle) {
+ if (!media_task_runner_->BelongsToCurrentThread()) {
+ media_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&MediaFoundationRendererClient::
+ RegisterDCOMPSurfaceHandleInGPUProcess,
+ weak_factory_.GetWeakPtr(), std::move(surface_handle)));
+ return;
+ }
+
+ DVLOG_FUNC(1) << "surface_handle=" << surface_handle.Get();
+ DCHECK(has_video_);
+
+ mojo::ScopedHandle mojo_surface_handle =
+ mojo::WrapPlatformHandle(mojo::PlatformHandle(std::move(surface_handle)));
+
+ // TODO(frankli): Pass the |mojo_surface_handle| to Gpu process.
+}
+
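For reference, a minimal sketch of the mojo platform-handle round trip used by the two functions above (|scoped_handle| is a hypothetical valid Windows handle):

    // Sending side: wrap the OS handle for transport over mojo.
    mojo::ScopedHandle wrapped = mojo::WrapPlatformHandle(
        mojo::PlatformHandle(std::move(scoped_handle)));
    // Receiving side: unwrap back into an owned Windows handle.
    base::win::ScopedHandle unwrapped =
        mojo::UnwrapPlatformHandle(std::move(wrapped)).TakeHandle();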
+void MediaFoundationRendererClient::OnDCOMPSurfaceRegisteredInGPUProcess(
+ const base::UnguessableToken& token) {
+ DVLOG_FUNC(1);
+ DCHECK(has_video_);
+
+ return;
+}
+
+void MediaFoundationRendererClient::OnDCOMPSurfaceTextureReleased() {
+ DCHECK(has_video_);
+}
+
+void MediaFoundationRendererClient::OnDCOMPStreamTextureInitialized(
+ bool success) {
+ DVLOG_FUNC(1) << "success=" << success;
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+ DCHECK(!init_cb_.is_null());
+ DCHECK(has_video_);
+
+ media::PipelineStatus status = media::PipelineStatus::PIPELINE_OK;
+ if (!success) {
+ status = media::PipelineStatus::PIPELINE_ERROR_INITIALIZATION_FAILED;
+ }
+ if (natural_size_.width() != 0 || natural_size_.height() != 0) {
+ InitializeDCOMPRendering();
+ }
+ std::move(init_cb_).Run(status);
+}
+
+void MediaFoundationRendererClient::OnVideoFrameCreated(
+ scoped_refptr<media::VideoFrame> video_frame) {
+ if (!media_task_runner_->BelongsToCurrentThread()) {
+ media_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&MediaFoundationRendererClient::OnVideoFrameCreated,
+ weak_factory_.GetWeakPtr(), video_frame));
+ return;
+ }
+
+ DVLOG_FUNC(1);
+ DCHECK(has_video_);
+
+ video_frame->metadata().protected_video = true;
+ video_frame->metadata().allow_overlay = true;
+
+ dcomp_frame_ = video_frame;
+
+ sink_->PaintSingleFrame(dcomp_frame_, true);
+}
+
+void MediaFoundationRendererClient::OnCompositionParamsReceived(
+ gfx::Rect output_rect) {
+ DVLOG_FUNC(1) << "output_rect=" << output_rect.ToString();
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+ DCHECK(has_video_);
+
+ renderer_extension_remote_->SetOutputParams(output_rect);
+}
+
+bool MediaFoundationRendererClient::MojoSetDCOMPMode(bool enabled) {
+ DVLOG_FUNC(1) << "enabled=" << enabled;
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+ DCHECK(renderer_extension_remote_.is_bound());
+
+ bool success = false;
+ if (!renderer_extension_remote_->SetDCOMPMode(enabled, &success)) {
+ return false;
+ }
+ return success;
+}
+
+void MediaFoundationRendererClient::MojoGetDCOMPSurface() {
+ DVLOG_FUNC(1);
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+ DCHECK(renderer_extension_remote_.is_bound());
+
+ if (!renderer_extension_remote_.is_connected()) {
+ media_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &MediaFoundationRendererClient::OnReceivedRemoteDCOMPSurface,
+ weak_factory_.GetWeakPtr(), mojo::ScopedHandle()));
+ return;
+ }
+ waiting_for_dcomp_surface_handle_ = true;
+ renderer_extension_remote_->GetDCOMPSurface(base::BindOnce(
+ &MediaFoundationRendererClient::OnReceivedRemoteDCOMPSurface,
+ weak_factory_.GetWeakPtr()));
+}
+
+void MediaFoundationRendererClient::InitializeDCOMPRendering() {
+ DVLOG_FUNC(1);
+ DCHECK(has_video_);
+
+ if (dcomp_rendering_initialized_) {
+ return;
+ }
+
+ if (!MojoSetDCOMPMode(true)) {
+ DLOG(ERROR) << "Failed to initialize DCOMP mode on remote renderer. this="
+ << this;
+ return;
+ }
+ MojoGetDCOMPSurface();
+
+ dcomp_rendering_initialized_ = true;
+}
+
+void MediaFoundationRendererClient::SetCdm(CdmContext* cdm_context,
+ CdmAttachedCB cdm_attached_cb) {
+ DVLOG_FUNC(1) << "cdm_context=" << cdm_context;
+ DCHECK(cdm_context);
+
+ if (cdm_context_) {
+ DLOG(ERROR) << "Switching CDM not supported. this=" << this;
+ std::move(cdm_attached_cb).Run(false);
+ return;
+ }
+
+ cdm_context_ = cdm_context;
+ DCHECK(cdm_attached_cb_.is_null());
+ cdm_attached_cb_ = std::move(cdm_attached_cb);
+ mojo_renderer_->SetCdm(
+ cdm_context_,
+ base::BindOnce(&MediaFoundationRendererClient::OnCdmAttached,
+ weak_factory_.GetWeakPtr()));
+}
+
+void MediaFoundationRendererClient::SetLatencyHint(
+ base::Optional<base::TimeDelta> /*latency_hint*/) {
+  // The latency hint is not currently used.
+}
+
+void MediaFoundationRendererClient::OnCdmAttached(bool success) {
+ DCHECK(cdm_attached_cb_);
+ std::move(cdm_attached_cb_).Run(success);
+}
+
+void MediaFoundationRendererClient::Flush(base::OnceClosure flush_cb) {
+ mojo_renderer_->Flush(std::move(flush_cb));
+}
+
+void MediaFoundationRendererClient::StartPlayingFrom(base::TimeDelta time) {
+ mojo_renderer_->StartPlayingFrom(time);
+}
+
+void MediaFoundationRendererClient::SetPlaybackRate(double playback_rate) {
+ mojo_renderer_->SetPlaybackRate(playback_rate);
+}
+
+void MediaFoundationRendererClient::SetVolume(float volume) {
+ mojo_renderer_->SetVolume(volume);
+}
+
+base::TimeDelta MediaFoundationRendererClient::GetMediaTime() {
+ return mojo_renderer_->GetMediaTime();
+}
+
+void MediaFoundationRendererClient::OnSelectedVideoTracksChanged(
+ const std::vector<media::DemuxerStream*>& enabled_tracks,
+ base::OnceClosure change_completed_cb) {
+  bool video_track_selected = !enabled_tracks.empty();
+ DVLOG_FUNC(1) << "video_track_selected=" << video_track_selected;
+ renderer_extension_remote_->SetVideoStreamEnabled(video_track_selected);
+ std::move(change_completed_cb).Run();
+}
+
+void MediaFoundationRendererClient::OnError(PipelineStatus status) {
+ DVLOG_FUNC(1) << "status=" << status;
+ client_->OnError(status);
+}
+
+void MediaFoundationRendererClient::OnEnded() {
+ client_->OnEnded();
+}
+
+void MediaFoundationRendererClient::OnStatisticsUpdate(
+ const media::PipelineStatistics& stats) {
+ client_->OnStatisticsUpdate(stats);
+}
+
+void MediaFoundationRendererClient::OnBufferingStateChange(
+ media::BufferingState state,
+ media::BufferingStateChangeReason reason) {
+ client_->OnBufferingStateChange(state, reason);
+}
+
+void MediaFoundationRendererClient::OnWaiting(WaitingReason reason) {
+ client_->OnWaiting(reason);
+}
+
+void MediaFoundationRendererClient::OnAudioConfigChange(
+ const media::AudioDecoderConfig& config) {
+ client_->OnAudioConfigChange(config);
+}
+
+void MediaFoundationRendererClient::OnVideoConfigChange(
+ const media::VideoDecoderConfig& config) {
+ client_->OnVideoConfigChange(config);
+}
+
+void MediaFoundationRendererClient::OnVideoNaturalSizeChange(
+ const gfx::Size& size) {
+ DVLOG_FUNC(1) << "size=" << size.ToString();
+ DCHECK(media_task_runner_->BelongsToCurrentThread());
+ DCHECK(has_video_);
+
+ natural_size_ = size;
+  // Skip creating a new video frame if the DCOMP surface has not yet been
+  // bound to the DCOMP texture; a new frame will be created once binding
+  // completes.
+ if (dcomp_surface_handle_bound_) {
+ // TODO(frankli): Add code to call DCOMPTextureWrapper::CreateVideoFrame().
+ }
+ InitializeDCOMPRendering();
+ client_->OnVideoNaturalSizeChange(natural_size_);
+}
+
+void MediaFoundationRendererClient::OnVideoOpacityChange(bool opaque) {
+ DVLOG_FUNC(1) << "opaque=" << opaque;
+ DCHECK(has_video_);
+ client_->OnVideoOpacityChange(opaque);
+}
+
+void MediaFoundationRendererClient::OnVideoFrameRateChange(
+ base::Optional<int> fps) {
+ DVLOG_FUNC(1) << "fps=" << (fps ? *fps : -1);
+ DCHECK(has_video_);
+ client_->OnVideoFrameRateChange(fps);
+}
+
+scoped_refptr<media::VideoFrame> MediaFoundationRendererClient::Render(
+ base::TimeTicks deadline_min,
+ base::TimeTicks deadline_max,
+ bool background_rendering) {
+  // No video frame is returned here; the video is rendered independently by
+  // Windows Direct Composition.
+ return nullptr;
+}
+
+void MediaFoundationRendererClient::OnFrameDropped() {
+}
+
+base::TimeDelta MediaFoundationRendererClient::GetPreferredRenderInterval() {
+ // TODO(frankli): use 'viz::BeginFrameArgs::MinInterval()'.
+ return base::TimeDelta::FromSeconds(0);
+}
+
+} // namespace media
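
Several of the methods above (OnDCOMPSurfaceHandleCreated,
RegisterDCOMPSurfaceHandleInGPUProcess, OnVideoFrameCreated) share the same
thread-hopping idiom: if invoked off |media_task_runner_|, they re-post
themselves to it and return. A minimal standalone sketch of that idiom
follows; the class and member names are illustrative, not part of this patch.

    #include "base/bind.h"
    #include "base/memory/weak_ptr.h"
    #include "base/single_thread_task_runner.h"

    class ThreadHoppingClient {
     public:
      explicit ThreadHoppingClient(
          scoped_refptr<base::SingleThreadTaskRunner> media_task_runner)
          : media_task_runner_(std::move(media_task_runner)) {}

      void OnEvent(int value) {
        // Hop to |media_task_runner_| if called on any other thread. The
        // WeakPtr drops the posted task if |this| is destroyed first.
        if (!media_task_runner_->BelongsToCurrentThread()) {
          media_task_runner_->PostTask(
              FROM_HERE, base::BindOnce(&ThreadHoppingClient::OnEvent,
                                        weak_factory_.GetWeakPtr(), value));
          return;
        }
        // From here on we are guaranteed to be on |media_task_runner_|.
      }

     private:
      scoped_refptr<base::SingleThreadTaskRunner> media_task_runner_;
      base::WeakPtrFactory<ThreadHoppingClient> weak_factory_{this};
    };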
diff --git a/chromium/media/mojo/clients/win/media_foundation_renderer_client.h b/chromium/media/mojo/clients/win/media_foundation_renderer_client.h
new file mode 100644
index 00000000000..7ba6d844dea
--- /dev/null
+++ b/chromium/media/mojo/clients/win/media_foundation_renderer_client.h
@@ -0,0 +1,152 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_MOJO_CLIENTS_WIN_MEDIA_FOUNDATION_RENDERER_CLIENT_H_
+#define MEDIA_MOJO_CLIENTS_WIN_MEDIA_FOUNDATION_RENDERER_CLIENT_H_
+
+#include "base/callback.h"
+#include "base/macros.h"
+#include "base/memory/weak_ptr.h"
+#include "base/single_thread_task_runner.h"
+#include "media/base/media_resource.h"
+#include "media/base/renderer.h"
+#include "media/base/renderer_client.h"
+#include "media/base/video_renderer_sink.h"
+#include "media/mojo/clients/mojo_renderer.h"
+#include "media/mojo/mojom/renderer_extensions.mojom.h"
+#include "mojo/public/cpp/bindings/pending_receiver.h"
+#include "mojo/public/cpp/bindings/receiver.h"
+
+namespace media {
+
+// MediaFoundationRendererClient lives in the Renderer process and mirrors a
+// MediaFoundationRenderer living in the MF_CDM LPAC Utility process.
+//
+// It is responsible for forwarding media::Renderer calls from WMPI to the
+// MediaFoundationRenderer, using |mojo_renderer_|. It also manages a
+// DCOMPTexture (via |dcomp_texture_wrapper_|) and notifies the
+// VideoRendererSink when new frames are available.
+//
+// This class handles all calls on |media_task_runner_|, except for
+// OnFrameAvailable(), which is called on |compositor_task_runner_|.
+//
+// N.B.: This class implements media::RendererClient in order to intercept
+// OnVideoNaturalSizeChange() events and update DCOMPTextureWrapper. All
+// events (including OnVideoNaturalSizeChange()) are bubbled up to |client_|.
+class MediaFoundationRendererClient
+ : public media::Renderer,
+ public media::RendererClient,
+ public media::VideoRendererSink::RenderCallback {
+ public:
+ using RendererExtension = media::mojom::MediaFoundationRendererExtension;
+
+ MediaFoundationRendererClient(
+ mojo::PendingRemote<RendererExtension> renderer_extension_remote,
+ scoped_refptr<base::SingleThreadTaskRunner> media_task_runner,
+ scoped_refptr<base::SingleThreadTaskRunner> compositor_task_runner,
+ std::unique_ptr<media::MojoRenderer> mojo_renderer,
+ media::VideoRendererSink* sink);
+
+ ~MediaFoundationRendererClient() override;
+
+ // media::Renderer implementation.
+ void Initialize(MediaResource* media_resource,
+ RendererClient* client,
+ PipelineStatusCallback init_cb) override;
+ void SetCdm(CdmContext* cdm_context, CdmAttachedCB cdm_attached_cb) override;
+ void SetLatencyHint(base::Optional<base::TimeDelta> latency_hint) override;
+ void Flush(base::OnceClosure flush_cb) override;
+ void StartPlayingFrom(base::TimeDelta time) override;
+ void SetPlaybackRate(double playback_rate) override;
+ void SetVolume(float volume) override;
+ base::TimeDelta GetMediaTime() override;
+ void OnSelectedVideoTracksChanged(
+ const std::vector<media::DemuxerStream*>& enabled_tracks,
+ base::OnceClosure change_completed_cb) override;
+
+ // media::RendererClient implementation.
+ void OnError(PipelineStatus status) override;
+ void OnEnded() override;
+ void OnStatisticsUpdate(const media::PipelineStatistics& stats) override;
+ void OnBufferingStateChange(media::BufferingState state,
+ media::BufferingStateChangeReason) override;
+ void OnWaiting(media::WaitingReason reason) override;
+ void OnAudioConfigChange(const media::AudioDecoderConfig& config) override;
+ void OnVideoConfigChange(const media::VideoDecoderConfig& config) override;
+ void OnVideoNaturalSizeChange(const gfx::Size& size) override;
+ void OnVideoOpacityChange(bool opaque) override;
+ void OnVideoFrameRateChange(base::Optional<int>) override;
+
+ // media::VideoRendererSink::RenderCallback implementation.
+ scoped_refptr<media::VideoFrame> Render(base::TimeTicks deadline_min,
+ base::TimeTicks deadline_max,
+ bool background_rendering) override;
+ void OnFrameDropped() override;
+ base::TimeDelta GetPreferredRenderInterval() override;
+
+ private:
+ void OnConnectionError();
+ void OnRemoteRendererInitialized(media::PipelineStatus status);
+ void OnVideoFrameCreated(scoped_refptr<media::VideoFrame> video_frame);
+ void OnDCOMPStreamTextureInitialized(bool success);
+ void OnDCOMPSurfaceTextureReleased();
+ void OnDCOMPSurfaceHandleCreated(bool success);
+ void OnReceivedRemoteDCOMPSurface(mojo::ScopedHandle surface_handle);
+ void OnDCOMPSurfaceRegisteredInGPUProcess(
+ const base::UnguessableToken& token);
+ void OnCompositionParamsReceived(gfx::Rect output_rect);
+
+ void InitializeDCOMPRendering();
+ void RegisterDCOMPSurfaceHandleInGPUProcess(
+ base::win::ScopedHandle surface_handle);
+ void OnCdmAttached(bool success);
+ void InitializeMojoCdmTelemetryPtrServer();
+ void OnCDMTelemetryPtrConnectionError();
+
+ bool MojoSetDCOMPMode(bool enabled);
+ void MojoGetDCOMPSurface();
+
+ // Used to forward calls to the MediaFoundationRenderer living in the MF_CDM
+ // LPAC Utility process.
+ std::unique_ptr<media::MojoRenderer> mojo_renderer_;
+
+ RendererClient* client_ = nullptr;
+
+ VideoRendererSink* sink_;
+ bool video_rendering_started_ = false;
+ bool dcomp_rendering_initialized_ = false;
+  // The video's natural size.
+ gfx::Size natural_size_;
+
+ scoped_refptr<base::SingleThreadTaskRunner> media_task_runner_;
+ scoped_refptr<base::SingleThreadTaskRunner> compositor_task_runner_;
+ scoped_refptr<media::VideoFrame> dcomp_frame_;
+ bool dcomp_surface_handle_bound_ = false;
+ bool has_video_ = false;
+
+ PipelineStatusCallback init_cb_;
+ CdmContext* cdm_context_ = nullptr;
+ CdmAttachedCB cdm_attached_cb_;
+
+  // Holds the PendingRemote so that binding to |renderer_extension_remote_|
+  // is delayed until Initialize() is called on the right sequence.
+ mojo::PendingRemote<RendererExtension>
+ delayed_bind_renderer_extension_remote_;
+
+  // Used to call methods on the MediaFoundationRenderer in the MF_CDM LPAC
+ // Utility process.
+ mojo::Remote<RendererExtension> renderer_extension_remote_;
+
+ bool waiting_for_dcomp_surface_handle_ = false;
+
+ // NOTE: Weak pointers must be invalidated before all other member variables.
+ base::WeakPtrFactory<MediaFoundationRendererClient> weak_factory_{this};
+
+ DISALLOW_COPY_AND_ASSIGN(MediaFoundationRendererClient);
+};
+
+} // namespace media
+
+#endif // MEDIA_MOJO_CLIENTS_WIN_MEDIA_FOUNDATION_RENDERER_CLIENT_H_
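
The |delayed_bind_renderer_extension_remote_| member above captures a common
mojo pattern: a PendingRemote is created on whatever sequence is convenient,
stored unbound, and only bound to a mojo::Remote once the owner reaches its
target sequence. A hedged sketch of the pattern in isolation (variable names
are illustrative):

    // Created on an arbitrary sequence; |receiver| is shipped to the service.
    mojo::PendingRemote<media::mojom::MediaFoundationRendererExtension> pending;
    auto receiver = pending.InitWithNewPipeAndPassReceiver();

    // Later, on the media sequence (e.g. inside Initialize()):
    mojo::Remote<media::mojom::MediaFoundationRendererExtension> remote;
    remote.Bind(std::move(pending), media_task_runner);
    remote.set_disconnect_handler(base::BindOnce(
        [] { DLOG(ERROR) << "mf_cdm pipe disconnected"; }));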
diff --git a/chromium/media/mojo/clients/win/media_foundation_renderer_client_factory.cc b/chromium/media/mojo/clients/win/media_foundation_renderer_client_factory.cc
new file mode 100644
index 00000000000..694fba80909
--- /dev/null
+++ b/chromium/media/mojo/clients/win/media_foundation_renderer_client_factory.cc
@@ -0,0 +1,61 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/mojo/clients/win/media_foundation_renderer_client_factory.h"
+
+#include "media/base/win/mf_helpers.h"
+#include "media/mojo/clients/mojo_renderer.h"
+#include "media/mojo/clients/mojo_renderer_factory.h"
+#include "media/mojo/clients/win/media_foundation_renderer_client.h"
+#include "media/mojo/mojom/renderer_extensions.mojom.h"
+
+namespace media {
+
+MediaFoundationRendererClientFactory::MediaFoundationRendererClientFactory(
+ scoped_refptr<base::SingleThreadTaskRunner> compositor_task_runner,
+ std::unique_ptr<media::MojoRendererFactory> mojo_renderer_factory)
+ : compositor_task_runner_(std::move(compositor_task_runner)),
+ mojo_renderer_factory_(std::move(mojo_renderer_factory)) {
+ DVLOG_FUNC(1);
+}
+
+MediaFoundationRendererClientFactory::~MediaFoundationRendererClientFactory() {
+ DVLOG_FUNC(1);
+}
+
+std::unique_ptr<media::Renderer>
+MediaFoundationRendererClientFactory::CreateRenderer(
+ const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner,
+ const scoped_refptr<base::TaskRunner>& /*worker_task_runner*/,
+ media::AudioRendererSink* /*audio_renderer_sink*/,
+ media::VideoRendererSink* video_renderer_sink,
+ media::RequestOverlayInfoCB /*request_overlay_info_cb*/,
+ const gfx::ColorSpace& /*target_color_space*/) {
+ DVLOG_FUNC(1);
+
+  // Used to send messages from the MediaFoundationRendererClient (Renderer
+  // process) to the MediaFoundationRenderer (MF_CDM LPAC Utility process).
+ // The |renderer_extension_receiver| will be bound in MediaFoundationRenderer.
+ mojo::PendingRemote<media::mojom::MediaFoundationRendererExtension>
+ renderer_extension_remote;
+ auto renderer_extension_receiver =
+ renderer_extension_remote.InitWithNewPipeAndPassReceiver();
+
+ std::unique_ptr<media::MojoRenderer> mojo_renderer =
+ mojo_renderer_factory_->CreateMediaFoundationRenderer(
+ std::move(renderer_extension_receiver), media_task_runner,
+ video_renderer_sink);
+
+  // Ownership of |mojo_renderer| is passed to the
+  // MediaFoundationRendererClient.
+ return std::make_unique<MediaFoundationRendererClient>(
+ std::move(renderer_extension_remote), media_task_runner,
+ compositor_task_runner_, std::move(mojo_renderer), video_renderer_sink);
+}
+
+media::MediaResource::Type
+MediaFoundationRendererClientFactory::GetRequiredMediaResourceType() {
+ return media::MediaResource::Type::STREAM;
+}
+
+} // namespace media
\ No newline at end of file
diff --git a/chromium/media/mojo/clients/win/media_foundation_renderer_client_factory.h b/chromium/media/mojo/clients/win/media_foundation_renderer_client_factory.h
new file mode 100644
index 00000000000..dc3e8e4d701
--- /dev/null
+++ b/chromium/media/mojo/clients/win/media_foundation_renderer_client_factory.h
@@ -0,0 +1,45 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_MOJO_CLIENTS_WIN_MEDIA_FOUNDATION_RENDERER_CLIENT_FACTORY_H_
+#define MEDIA_MOJO_CLIENTS_WIN_MEDIA_FOUNDATION_RENDERER_CLIENT_FACTORY_H_
+
+#include "base/callback.h"
+#include "base/macros.h"
+#include "base/single_thread_task_runner.h"
+#include "media/base/renderer_factory.h"
+#include "media/mojo/clients/mojo_renderer_factory.h"
+#include "mojo/public/cpp/bindings/interface_request.h"
+
+namespace media {
+
+// The default class for creating a MediaFoundationRendererClient
+// and its associated MediaFoundationRenderer.
+class MediaFoundationRendererClientFactory : public media::RendererFactory {
+ public:
+ MediaFoundationRendererClientFactory(
+ scoped_refptr<base::SingleThreadTaskRunner> compositor_task_runner,
+ std::unique_ptr<media::MojoRendererFactory> mojo_renderer_factory);
+ ~MediaFoundationRendererClientFactory() override;
+
+ std::unique_ptr<media::Renderer> CreateRenderer(
+ const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner,
+ const scoped_refptr<base::TaskRunner>& worker_task_runner,
+ media::AudioRendererSink* audio_renderer_sink,
+ media::VideoRendererSink* video_renderer_sink,
+ media::RequestOverlayInfoCB request_surface_cb,
+ const gfx::ColorSpace& target_color_space) override;
+
+  // The MediaFoundationRenderer uses Type::STREAM.
+ media::MediaResource::Type GetRequiredMediaResourceType() override;
+
+ private:
+ scoped_refptr<base::SingleThreadTaskRunner> compositor_task_runner_;
+
+ std::unique_ptr<media::MojoRendererFactory> mojo_renderer_factory_;
+};
+
+} // namespace media
+
+#endif // MEDIA_MOJO_CLIENTS_WIN_MEDIA_FOUNDATION_RENDERER_CLIENT_FACTORY_H_
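
For context, a hedged sketch of how this factory is meant to be driven through
the generic media::RendererFactory interface; the task runners, sinks, and
|mojo_renderer_factory| are assumed to come from the surrounding renderer
code:

    auto factory =
        std::make_unique<media::MediaFoundationRendererClientFactory>(
            compositor_task_runner, std::move(mojo_renderer_factory));

    std::unique_ptr<media::Renderer> renderer = factory->CreateRenderer(
        media_task_runner,
        /*worker_task_runner=*/nullptr,
        /*audio_renderer_sink=*/nullptr,
        video_renderer_sink,
        /*request_overlay_info_cb=*/{},
        gfx::ColorSpace());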
diff --git a/chromium/media/mojo/common/mojo_data_pipe_read_write.cc b/chromium/media/mojo/common/mojo_data_pipe_read_write.cc
index c3faa4a7c86..10cdb3694a3 100644
--- a/chromium/media/mojo/common/mojo_data_pipe_read_write.cc
+++ b/chromium/media/mojo/common/mojo_data_pipe_read_write.cc
@@ -171,7 +171,7 @@ void MojoDataPipeWriter::Write(const uint8_t* buffer,
// Cannot write if the pipe is already closed.
if (!producer_handle_.is_valid()) {
DVLOG(1) << __func__
- << ": Failed to write buffer becuase the pipe is already closed";
+ << ": Failed to write buffer because the pipe is already closed";
std::move(done_cb).Run(false);
return;
}
diff --git a/chromium/media/mojo/common/mojo_data_pipe_read_write_unittest.cc b/chromium/media/mojo/common/mojo_data_pipe_read_write_unittest.cc
index b4978d10a5c..69bad8b0465 100644
--- a/chromium/media/mojo/common/mojo_data_pipe_read_write_unittest.cc
+++ b/chromium/media/mojo/common/mojo_data_pipe_read_write_unittest.cc
@@ -26,12 +26,14 @@ class MojoDataPipeReadWrite {
public:
MojoDataPipeReadWrite(
uint32_t data_pipe_capacity_bytes = kDefaultDataPipeCapacityBytes) {
- mojo::DataPipe data_pipe(data_pipe_capacity_bytes);
-
- writer_ = std::make_unique<MojoDataPipeWriter>(
- std::move(data_pipe.producer_handle));
- reader_ = std::make_unique<MojoDataPipeReader>(
- std::move(data_pipe.consumer_handle));
+ mojo::ScopedDataPipeProducerHandle producer_handle;
+ mojo::ScopedDataPipeConsumerHandle consumer_handle;
+ CHECK_EQ(mojo::CreateDataPipe(data_pipe_capacity_bytes, producer_handle,
+ consumer_handle),
+ MOJO_RESULT_OK);
+
+ writer_ = std::make_unique<MojoDataPipeWriter>(std::move(producer_handle));
+ reader_ = std::make_unique<MojoDataPipeReader>(std::move(consumer_handle));
}
void WriteAndRead(const uint8_t* buffer,
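
The fixture change above reflects the removal of the mojo::DataPipe
convenience struct: pipes are now created via mojo::CreateDataPipe(), which
returns a MojoResult and fills in the two scoped handles. A minimal sketch of
the new call as used in this patch:

    mojo::ScopedDataPipeProducerHandle producer;
    mojo::ScopedDataPipeConsumerHandle consumer;
    // The uint32_t overload applies default options with the given capacity.
    MojoResult result =
        mojo::CreateDataPipe(/*capacity_num_bytes=*/512, producer, consumer);
    CHECK_EQ(result, MOJO_RESULT_OK);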
diff --git a/chromium/media/mojo/common/mojo_decoder_buffer_converter.cc b/chromium/media/mojo/common/mojo_decoder_buffer_converter.cc
index 9d8e66858be..2fd33ef6369 100644
--- a/chromium/media/mojo/common/mojo_decoder_buffer_converter.cc
+++ b/chromium/media/mojo/common/mojo_decoder_buffer_converter.cc
@@ -8,6 +8,7 @@
#include "base/bind.h"
#include "base/logging.h"
+#include "base/macros.h"
#include "base/memory/ptr_util.h"
#include "base/single_thread_task_runner.h"
#include "base/threading/thread_task_runner_handle.h"
@@ -21,6 +22,30 @@ using media::mojo_pipe_read_write_util::IsPipeReadWriteError;
namespace media {
+// Creates a mojo data pipe and sets `producer_handle` and `consumer_handle`.
+// Returns true on success. Otherwise returns false and resets the handles.
+bool CreateDataPipe(uint32_t capacity,
+ mojo::ScopedDataPipeProducerHandle* producer_handle,
+ mojo::ScopedDataPipeConsumerHandle* consumer_handle) {
+ MojoCreateDataPipeOptions options;
+ options.struct_size = sizeof(MojoCreateDataPipeOptions);
+ options.flags = MOJO_CREATE_DATA_PIPE_FLAG_NONE;
+ options.element_num_bytes = 1;
+ options.capacity_num_bytes = capacity;
+
+ auto result =
+ mojo::CreateDataPipe(&options, *producer_handle, *consumer_handle);
+
+ if (result != MOJO_RESULT_OK) {
+ DLOG(ERROR) << "DataPipe creation failed with " << result;
+ producer_handle->reset();
+ consumer_handle->reset();
+ return false;
+ }
+
+ return true;
+}
+
uint32_t GetDefaultDecoderBufferConverterCapacity(DemuxerStream::Type type) {
uint32_t capacity = 0;
@@ -50,10 +75,12 @@ std::unique_ptr<MojoDecoderBufferReader> MojoDecoderBufferReader::Create(
DVLOG(1) << __func__;
DCHECK_GT(capacity, 0u);
- auto data_pipe = std::make_unique<mojo::DataPipe>(capacity);
- *producer_handle = std::move(data_pipe->producer_handle);
- return std::make_unique<MojoDecoderBufferReader>(
- std::move(data_pipe->consumer_handle));
+  // Create a MojoDecoderBufferReader even in the failure case;
+  // `ReadDecoderBuffer()` will then fail.
+ // TODO(xhwang): Update callers to handle failure so we can return null.
+ mojo::ScopedDataPipeConsumerHandle consumer_handle;
+ ignore_result(CreateDataPipe(capacity, producer_handle, &consumer_handle));
+ return std::make_unique<MojoDecoderBufferReader>(std::move(consumer_handle));
}
MojoDecoderBufferReader::MojoDecoderBufferReader(
@@ -66,14 +93,19 @@ MojoDecoderBufferReader::MojoDecoderBufferReader(
bytes_read_(0) {
DVLOG(1) << __func__;
+ if (!consumer_handle_.is_valid()) {
+ DLOG(ERROR) << __func__ << ": Invalid consumer handle";
+ return;
+ }
+
MojoResult result = pipe_watcher_.Watch(
consumer_handle_.get(), MOJO_HANDLE_SIGNAL_READABLE,
MOJO_WATCH_CONDITION_SATISFIED,
base::BindRepeating(&MojoDecoderBufferReader::OnPipeReadable,
base::Unretained(this)));
if (result != MOJO_RESULT_OK) {
- DVLOG(1) << __func__
- << ": Failed to start watching the pipe. result=" << result;
+ DLOG(ERROR) << __func__
+ << ": Failed to start watching the pipe. result=" << result;
consumer_handle_.reset();
}
}
@@ -275,10 +307,12 @@ std::unique_ptr<MojoDecoderBufferWriter> MojoDecoderBufferWriter::Create(
DVLOG(1) << __func__;
DCHECK_GT(capacity, 0u);
- auto data_pipe = std::make_unique<mojo::DataPipe>(capacity);
- *consumer_handle = std::move(data_pipe->consumer_handle);
- return std::make_unique<MojoDecoderBufferWriter>(
- std::move(data_pipe->producer_handle));
+  // Create a MojoDecoderBufferWriter even in the failure case;
+  // `WriteDecoderBuffer()` will then fail.
+ // TODO(xhwang): Update callers to handle failure so we can return null.
+ mojo::ScopedDataPipeProducerHandle producer_handle;
+ ignore_result(CreateDataPipe(capacity, &producer_handle, consumer_handle));
+ return std::make_unique<MojoDecoderBufferWriter>(std::move(producer_handle));
}
MojoDecoderBufferWriter::MojoDecoderBufferWriter(
@@ -291,14 +325,19 @@ MojoDecoderBufferWriter::MojoDecoderBufferWriter(
bytes_written_(0) {
DVLOG(1) << __func__;
- MojoResult result =
- pipe_watcher_.Watch(producer_handle_.get(), MOJO_HANDLE_SIGNAL_WRITABLE,
- MOJO_WATCH_CONDITION_SATISFIED,
- base::Bind(&MojoDecoderBufferWriter::OnPipeWritable,
- base::Unretained(this)));
+ if (!producer_handle_.is_valid()) {
+ DLOG(ERROR) << __func__ << ": Invalid producer handle";
+ return;
+ }
+
+ MojoResult result = pipe_watcher_.Watch(
+ producer_handle_.get(), MOJO_HANDLE_SIGNAL_WRITABLE,
+ MOJO_WATCH_CONDITION_SATISFIED,
+ base::BindRepeating(&MojoDecoderBufferWriter::OnPipeWritable,
+ base::Unretained(this)));
if (result != MOJO_RESULT_OK) {
- DVLOG(1) << __func__
- << ": Failed to start watching the pipe. result=" << result;
+ DLOG(ERROR) << __func__
+ << ": Failed to start watching the pipe. result=" << result;
producer_handle_.reset();
}
}
diff --git a/chromium/media/mojo/common/mojo_decoder_buffer_converter.h b/chromium/media/mojo/common/mojo_decoder_buffer_converter.h
index 9f780d84a92..7c8ca3d99e7 100644
--- a/chromium/media/mojo/common/mojo_decoder_buffer_converter.h
+++ b/chromium/media/mojo/common/mojo_decoder_buffer_converter.h
@@ -19,6 +19,12 @@ namespace media {
class DecoderBuffer;
+// Creates a mojo data pipe and sets `producer_handle` and `consumer_handle`.
+// Returns true on success. Otherwise returns false and resets the handles.
+bool CreateDataPipe(uint32_t capacity,
+ mojo::ScopedDataPipeProducerHandle* producer_handle,
+ mojo::ScopedDataPipeConsumerHandle* consumer_handle);
+
// Returns the default capacity to be used with MojoDecoderBufferReader and
// MojoDecoderBufferWriter for |type|.
uint32_t GetDefaultDecoderBufferConverterCapacity(DemuxerStream::Type type);
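
Combined with the media::CreateDataPipe() helper declared above, a typical
writer/reader pairing now looks like the following sketch, mirroring the
unit-test fixture below; |capacity| is assumed to be chosen by the caller:

    mojo::ScopedDataPipeProducerHandle producer_handle;
    mojo::ScopedDataPipeConsumerHandle consumer_handle;
    if (!media::CreateDataPipe(capacity, &producer_handle, &consumer_handle)) {
      // Both handles were reset; the converters below would still be created,
      // but every read/write on them would fail.
    }
    auto writer = std::make_unique<media::MojoDecoderBufferWriter>(
        std::move(producer_handle));
    auto reader = std::make_unique<media::MojoDecoderBufferReader>(
        std::move(consumer_handle));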
diff --git a/chromium/media/mojo/common/mojo_decoder_buffer_converter_unittest.cc b/chromium/media/mojo/common/mojo_decoder_buffer_converter_unittest.cc
index 98f53ee4539..c1917f38015 100644
--- a/chromium/media/mojo/common/mojo_decoder_buffer_converter_unittest.cc
+++ b/chromium/media/mojo/common/mojo_decoder_buffer_converter_unittest.cc
@@ -32,12 +32,14 @@ class MojoDecoderBufferConverter {
public:
MojoDecoderBufferConverter(
uint32_t data_pipe_capacity_bytes = kDefaultDataPipeCapacityBytes) {
- mojo::DataPipe data_pipe(data_pipe_capacity_bytes);
-
- writer = std::make_unique<MojoDecoderBufferWriter>(
- std::move(data_pipe.producer_handle));
- reader = std::make_unique<MojoDecoderBufferReader>(
- std::move(data_pipe.consumer_handle));
+ mojo::ScopedDataPipeProducerHandle producer_handle;
+ mojo::ScopedDataPipeConsumerHandle consumer_handle;
+ EXPECT_TRUE(CreateDataPipe(data_pipe_capacity_bytes, &producer_handle,
+ &consumer_handle));
+ writer =
+ std::make_unique<MojoDecoderBufferWriter>(std::move(producer_handle));
+ reader =
+ std::make_unique<MojoDecoderBufferReader>(std::move(consumer_handle));
}
void ConvertAndVerify(scoped_refptr<DecoderBuffer> media_buffer) {
@@ -401,4 +403,36 @@ TEST(MojoDecoderBufferConverterTest, FlushDuringConcurrentReads) {
run_loop.Run();
}
+TEST(MojoDecoderBufferConverterTest, WriterWithInvalidHandle) {
+ base::test::SingleThreadTaskEnvironment task_environment;
+ const uint8_t kData[] = "Hello, world";
+ auto media_buffer = DecoderBuffer::CopyFrom(kData, base::size(kData));
+
+ auto writer = std::make_unique<MojoDecoderBufferWriter>(
+ mojo::ScopedDataPipeProducerHandle());
+ EXPECT_FALSE(writer->WriteDecoderBuffer(media_buffer));
+}
+
+TEST(MojoDecoderBufferConverterTest, ReaderWithInvalidHandle) {
+ base::test::SingleThreadTaskEnvironment task_environment;
+ base::RunLoop run_loop;
+
+ // Write a real buffer for testing.
+ const uint8_t kData[] = "Hello, world";
+ auto media_buffer = DecoderBuffer::CopyFrom(kData, base::size(kData));
+ MojoDecoderBufferConverter converter;
+ auto mojo_buffer = converter.writer->WriteDecoderBuffer(media_buffer);
+ DCHECK(mojo_buffer);
+
+ // Read with an invalid handle.
+ base::MockCallback<MojoDecoderBufferReader::ReadCB> mock_cb;
+ EXPECT_CALL(mock_cb, Run(testing::IsNull()))
+ .WillOnce(testing::InvokeWithoutArgs(&run_loop, &base::RunLoop::Quit));
+ auto reader = std::make_unique<MojoDecoderBufferReader>(
+ mojo::ScopedDataPipeConsumerHandle());
+ reader->ReadDecoderBuffer(std::move(mojo_buffer), mock_cb.Get());
+
+ run_loop.Run();
+}
+
} // namespace media
diff --git a/chromium/media/mojo/mojom/BUILD.gn b/chromium/media/mojo/mojom/BUILD.gn
index 05942bd5ea3..44e9f738a81 100644
--- a/chromium/media/mojo/mojom/BUILD.gn
+++ b/chromium/media/mojo/mojom/BUILD.gn
@@ -29,6 +29,7 @@ mojom("mojom") {
"key_system_support.mojom",
"media_log.mojom",
"media_metrics_provider.mojom",
+ "media_player.mojom",
"media_service.mojom",
"media_types.mojom",
"output_protection.mojom",
@@ -63,6 +64,7 @@ mojom("mojom") {
"//gpu/ipc/common:interfaces",
"//media/learning/mojo/public/mojom",
"//mojo/public/mojom/base",
+ "//services/media_session/public/mojom",
"//services/network/public/mojom",
"//services/service_manager/public/mojom",
"//ui/gfx/geometry/mojom",
@@ -99,6 +101,29 @@ mojom("mojom") {
{
types = [
{
+ mojom = "media.mojom.FullscreenVideoStatus"
+ cpp = "::blink::WebFullscreenVideoStatus"
+ },
+ ]
+ traits_headers = [ "//media/base/ipc/media_param_traits_macros.h" ]
+ },
+ {
+ types = [
+ {
+ mojom = "media.mojom.MediaContentType"
+ cpp = "::media::MediaContentType"
+ },
+ ]
+ traits_headers = [ "//media/base/ipc/media_param_traits_macros.h" ]
+ traits_public_deps = [
+ "//base",
+ "//media",
+ "//media/base/ipc",
+ ]
+ },
+ {
+ types = [
+ {
mojom = "media.mojom.VideoFrameMetadata"
cpp = "::media::VideoFrameMetadata"
},
@@ -115,6 +140,10 @@ mojom("mojom") {
mojom = "media.mojom.AudioDecoderConfig"
cpp = "::media::AudioDecoderConfig"
},
+ {
+ mojom = "media.mojom.AudioDecoderType"
+ cpp = "::media::AudioDecoderType"
+ },
]
traits_headers = [ "audio_decoder_config_mojom_traits.h" ]
traits_sources = [ "audio_decoder_config_mojom_traits.cc" ]
@@ -143,15 +172,25 @@ mojom("mojom") {
{
types = [
{
+ mojom = "media.mojom.CopyMode"
+ cpp = "::media::VideoFrameMetadata::CopyMode"
+ },
+ ]
+ traits_headers = [ "media_types_enum_mojom_traits.h" ]
+ },
+ {
+ types = [
+ {
mojom = "media.mojom.VideoRotation"
cpp = "::media::VideoRotation"
},
{
- mojom = "media.mojom.CopyMode"
- cpp = "::media::VideoFrameMetadata::CopyMode"
+ mojom = "media.mojom.VideoTransformation"
+ cpp = "::media::VideoTransformation"
},
]
- traits_headers = [ "media_types_enum_mojom_traits.h" ]
+ traits_headers = [ "video_transformation_mojom_traits.h" ]
+ traits_sources = [ "video_transformation_mojom_traits.cc" ]
},
{
types = [
@@ -220,7 +259,6 @@ mojom("mojom") {
"//media/base/ipc",
]
},
-
{
types = [
{
@@ -228,8 +266,20 @@ mojom("mojom") {
cpp = "::media::PipelineStatistics"
},
{
- mojom = "media.mojom.PipelineDecoderInfo"
- cpp = "::media::PipelineDecoderInfo"
+ mojom = "media.mojom.AudioDecoderInfo"
+ cpp = "::media::AudioDecoderInfo"
+ },
+ {
+ mojom = "media.mojom.VideoDecoderInfo"
+ cpp = "::media::VideoDecoderInfo"
+ },
+ {
+ mojom = "media.mojom.AudioDecoderType"
+ cpp = "::media::AudioDecoderType"
+ },
+ {
+ mojom = "media.mojom.VideoDecoderType"
+ cpp = "::media::VideoDecoderType"
},
]
traits_headers = [ "pipeline_status_mojom_traits.h" ]
@@ -283,6 +333,10 @@ mojom("mojom") {
mojom = "media.mojom.VideoDecoderImplementation"
cpp = "::media::VideoDecoderImplementation"
},
+ {
+ mojom = "media.mojom.VideoDecoderType"
+ cpp = "::media::VideoDecoderType"
+ },
]
traits_headers = [
"supported_video_decoder_config_mojom_traits.h",
@@ -449,10 +503,6 @@ mojom("mojom") {
cpp = "::media::VideoPixelFormat"
},
{
- mojom = "media.mojom.VideoTransformation"
- cpp = "::media::VideoTransformation"
- },
- {
mojom = "media.mojom.WaitingReason"
cpp = "::media::WaitingReason"
},
@@ -469,11 +519,7 @@ mojom("mojom") {
cpp = "::media::StatusCode"
},
]
- traits_headers = [
- "video_transformation_mojom_traits.h",
- "//media/base/ipc/media_param_traits_macros.h",
- ]
- traits_sources = [ "video_transformation_mojom_traits.cc" ]
+ traits_headers = [ "//media/base/ipc/media_param_traits_macros.h" ]
},
{
types = [
diff --git a/chromium/media/mojo/mojom/android_overlay.mojom b/chromium/media/mojo/mojom/android_overlay.mojom
index 4e861e08a11..f0e50f61cc5 100644
--- a/chromium/media/mojo/mojom/android_overlay.mojom
+++ b/chromium/media/mojo/mojom/android_overlay.mojom
@@ -38,11 +38,20 @@ interface AndroidOverlayClient {
// binder separately.
OnSurfaceReady(uint64 surface_key);
- // Indicates that this overlay has been permanently destroyed, or failed to
- // initialize. It can happen before or after OnSurfaceReady. It will be the
- // last callback from the overlay in any case.
+  // Indicates that this overlay failed to initialize before a surface was
+  // provided via OnSurfaceReady. No calls will be sent to the client after
+  // this.
OnDestroyed();
+ // Indicates that this overlay has been destroyed, after an Android Surface
+ // has been provided via OnSurfaceReady. It is required that the client
+  // complete cleanup before signaling completion. In other words, Android
+ // has told us that the surface is going away, and the API contract says
+ // that we must not use it after that callback completes. No other calls
+ // into the client will come after this.
+ [Sync]
+ OnSynchronouslyDestroyed() => ();
+
// Called to provide the current power-efficiency state. May be called more
// than once.
OnPowerEfficientState(bool is_power_efficient);
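
A [Sync] mojom method with an empty reply generates a receiver-side method
that takes a completion callback; the browser blocks until it runs. A hedged
sketch of how an AndroidOverlayClient implementation might satisfy the new
contract (the callback type name follows standard mojo codegen; the helper is
hypothetical):

    void MyOverlayClient::OnSynchronouslyDestroyed(
        OnSynchronouslyDestroyedCallback done_cb) {
      // Per the contract above, stop all use of the Android Surface before
      // replying, since the surface must not be touched afterwards.
      ReleaseSurfaceResources();  // hypothetical cleanup helper
      std::move(done_cb).Run();
    }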
diff --git a/chromium/media/mojo/mojom/audio_decoder.mojom b/chromium/media/mojo/mojom/audio_decoder.mojom
index 599834e87be..25029a8a39b 100644
--- a/chromium/media/mojo/mojom/audio_decoder.mojom
+++ b/chromium/media/mojo/mojom/audio_decoder.mojom
@@ -20,7 +20,9 @@ interface AudioDecoder {
// bitstream conversion.
Initialize(AudioDecoderConfig config,
mojo_base.mojom.UnguessableToken? cdm_id)
- => (Status success, bool needs_bitstream_conversion);
+ => (Status success,
+ bool needs_bitstream_conversion,
+ AudioDecoderType decoder_type);
// Establishes data connection. Should be called before Decode().
SetDataSource(handle<data_pipe_consumer> receive_pipe);
diff --git a/chromium/media/mojo/mojom/interface_factory.mojom b/chromium/media/mojo/mojom/interface_factory.mojom
index b478762e691..09e7328dd7c 100644
--- a/chromium/media/mojo/mojom/interface_factory.mojom
+++ b/chromium/media/mojo/mojom/interface_factory.mojom
@@ -46,6 +46,14 @@ interface InterfaceFactory {
pending_receiver<Renderer> renderer,
pending_receiver<MediaPlayerRendererExtension> renderer_extension);
+ [EnableIf=is_win]
+ // Creates a MediaFoundationRenderer (MediaFoundationRendererClientFactory).
+ // - |renderer_extension| is bound in MediaFoundationRenderer, and receives
+ // calls from MediaFoundationRendererClient.
+ CreateMediaFoundationRenderer(
+ pending_receiver<Renderer> renderer,
+ pending_receiver<MediaFoundationRendererExtension> renderer_extension);
+
[EnableIf=is_android]
// Creates a FlingingRenderer (FlingingRendererClientFactory).
// The |presentation_id| is used to find an already set-up RemotePlayback
diff --git a/chromium/media/mojo/mojom/media_metrics_provider.mojom b/chromium/media/mojo/mojom/media_metrics_provider.mojom
index 526e2b17018..3dee284ed9d 100644
--- a/chromium/media/mojo/mojom/media_metrics_provider.mojom
+++ b/chromium/media/mojo/mojom/media_metrics_provider.mojom
@@ -37,7 +37,11 @@ interface MediaMetricsProvider {
// Assigns a playback ID and sets up this provider instance with information
// needed to make UKM reports. No other methods may be called until after
// Initialize() has been called. |url_scheme| is only used if |!is_mse|.
- Initialize(bool is_mse, MediaURLScheme url_scheme);
+ // If |stream_type| is one of the MediaStream types (i.e., not kNone),
+ // UMA metrics are not recorded.
+ Initialize(bool is_mse,
+ MediaURLScheme url_scheme,
+ MediaStreamType stream_type);
// Called when a playback ends in error. The status is reported to UKM when
// the provider is destructed.
@@ -76,6 +80,6 @@ interface MediaMetricsProvider {
// Can be called multiple times to set properties about a playback.
SetHasAudio(AudioCodec codec);
SetHasVideo(VideoCodec codec);
- SetVideoPipelineInfo(PipelineDecoderInfo info);
- SetAudioPipelineInfo(PipelineDecoderInfo info);
+ SetVideoPipelineInfo(VideoDecoderInfo info);
+ SetAudioPipelineInfo(AudioDecoderInfo info);
};
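
On the caller side, the extra argument means non-MediaStream playbacks now
pass kNone explicitly. A hedged sketch (the MediaURLScheme value is an assumed
enum member, shown only for illustration):

    // |provider| is a mojo::Remote<media::mojom::MediaMetricsProvider>.
    provider->Initialize(/*is_mse=*/false,
                         media::mojom::MediaURLScheme::kHttps,  // assumed
                         media::mojom::MediaStreamType::kNone);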
diff --git a/chromium/media/mojo/mojom/media_player.mojom b/chromium/media/mojo/mojom/media_player.mojom
new file mode 100644
index 00000000000..727d298ed49
--- /dev/null
+++ b/chromium/media/mojo/mojom/media_player.mojom
@@ -0,0 +1,99 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+module media.mojom;
+
+import "media/mojo/mojom/media_types.mojom";
+import "mojo/public/mojom/base/time.mojom";
+import "services/media_session/public/mojom/media_session.mojom";
+import "ui/gfx/geometry/mojom/geometry.mojom";
+
+// Implemented by HTMLMediaElement in the renderer process.
+interface MediaPlayer {
+ // Sends |observer| to the renderer process so that it can be established a
+ // communication channel with the implementor of MediaPlayerObserver, and
+ // allows multiple observers for more observers like PictureInPictureService.
+ AddMediaPlayerObserver(
+ pending_associated_remote<MediaPlayerObserver> observer);
+
+ // Requests the media player to start or resume media playback.
+ RequestPlay();
+
+ // Requests the media player to pause media playback.
+ RequestPause(bool triggered_by_user);
+
+  // Requests the media player to move the media playback position forward.
+ RequestSeekForward(mojo_base.mojom.TimeDelta seek_time);
+
+  // Requests the media player to move the media playback position backward.
+ RequestSeekBackward(mojo_base.mojom.TimeDelta seek_time);
+
+ // Requests the media player to enter the Picture-in-Picture mode.
+ RequestEnterPictureInPicture();
+
+ // Requests the media player to exit the Picture-in-Picture mode.
+ RequestExitPictureInPicture();
+
+  // Sets the media player's audio sink ID to |sink_id|.
+ SetAudioSinkId(string sink_id);
+};
+
+// Implemented by MediaWebContentsObserver::MediaPlayerObserverHostImpl in the
+// browser process.
+interface MediaPlayerObserver {
+ // Notifies that the media player started playing content.
+ OnMediaPlaying();
+
+  // Notifies that the media player stopped playing content, indicating in
+  // |stream_ended| whether playback reached the end of the stream.
+ OnMediaPaused(bool stream_ended);
+
+ // Notifies that the muted status of the media player has changed.
+ OnMutedStatusChanged(bool muted);
+
+  // Notifies that the media player's metadata has changed, along with the
+  // kinds of tracks the media player has and the type of content.
+ OnMediaMetadataChanged(
+ bool has_audio, bool has_video, MediaContentType content_type);
+
+ // Notifies the browser process that the media playback position has changed,
+ // and reports the new current position via |media_position|.
+ OnMediaPositionStateChanged(media_session.mojom.MediaPosition media_position);
+
+ // Notifies that the player has entered fullscreen.
+  // This does not differentiate between native-controls fullscreen and
+  // custom-controls fullscreen. |status| is used by MediaWebContentsObserver
+  // to automatically trigger Picture-in-Picture for fullscreen videos.
+ OnMediaEffectivelyFullscreenChanged(FullscreenVideoStatus status);
+
+ // Notifies that the size of the media player has changed.
+ OnMediaSizeChanged(gfx.mojom.Size size);
+
+  // Notifies the browser process of Picture-in-Picture availability.
+ OnPictureInPictureAvailabilityChanged(bool available);
+
+ // Notifies that the audio output sink has changed.
+ OnAudioOutputSinkChanged(string hashed_device_id);
+
+ // Notifies the browser process that the ability to switch audio output
+ // devices for the associated media player has been disabled.
+ OnAudioOutputSinkChangingDisabled();
+
+ // Notifies that a buffer underflow event happened for the media player.
+ OnBufferUnderflow();
+
+ // Notifies that a playback seek event happened for the media player.
+ OnSeek();
+};
+
+// Implemented by MediaWebContentsObserver::MediaPlayerHostImpl in the browser
+// process.
+interface MediaPlayerHost {
+  // Sends a message to the browser notifying the render frame associated
+  // with the document owning the HTMLMediaElement that a new MediaPlayer is
+  // available, passing a pending remote (i.e. |player_remote|) that will be
+  // used in the browser process to establish a channel with the
+  // HTMLMediaElement.
+ OnMediaPlayerAdded(pending_associated_remote<MediaPlayer> player_remote,
+ int32 player_id);
+};
diff --git a/chromium/media/mojo/mojom/media_types.mojom b/chromium/media/mojo/mojom/media_types.mojom
index e54857cddec..1ae7c91c0a8 100644
--- a/chromium/media/mojo/mojom/media_types.mojom
+++ b/chromium/media/mojo/mojom/media_types.mojom
@@ -38,6 +38,10 @@ enum DecodeStatus;
[Native]
enum StatusCode;
+// See media/base/media_content_type.h for descriptions.
+[Native]
+enum MediaContentType;
+
// See media/base/media_log_record.h for description.
[Native]
struct MediaLogRecord;
@@ -66,6 +70,14 @@ enum VideoCodecProfile;
[Native]
enum VideoPixelFormat;
+// See media/base/decoder.h for descriptions.
+[Native]
+enum VideoDecoderType;
+
+// See media/base/decoder.h for descriptions.
+[Native]
+enum AudioDecoderType;
+
// See media/base/video_transformation.h for descriptions.
enum VideoRotation {
kVideoRotation0,
@@ -80,6 +92,11 @@ enum CopyMode {
kCopyMailboxesOnly,
};
+// See third_party/blink/public/platform/web_fullscreen_video_status.h for
+// descriptions.
+[Native]
+enum FullscreenVideoStatus;
+
// See media/base/video_transformation.h for descriptions.
struct VideoTransformation {
VideoRotation rotation;
@@ -280,8 +297,7 @@ struct VideoFrameMetadata {
bool read_lock_fences_enabled;
- bool has_rotation;
- VideoRotation rotation;
+ VideoTransformation? transformation;
bool texture_owner;
@@ -321,6 +337,8 @@ struct VideoFrameMetadata {
mojo_base.mojom.TimeTicks? receive_time;
mojo_base.mojom.TimeDelta? wallclock_frame_duration;
+
+ uint32 hw_protected_validation_id;
};
// This defines a mojo transport format for media::VideoFrame.
@@ -426,9 +444,16 @@ struct PredictionTargets {
uint32 frames_power_efficient = 0;
};
-// See media/base/pipeline/status.h for descriptions.
-struct PipelineDecoderInfo {
- string decoder_name;
+// See media/base/pipeline_status.h for descriptions.
+struct AudioDecoderInfo {
+ AudioDecoderType decoder_type;
+ bool is_platform_decoder = false;
+ bool has_decrypting_demuxer_stream = false;
+};
+
+// See media/base/pipeline_status.h for descriptions.
+struct VideoDecoderInfo {
+ VideoDecoderType decoder_type;
bool is_platform_decoder = false;
bool has_decrypting_demuxer_stream = false;
};
@@ -441,3 +466,15 @@ struct Status {
array<media.mojom.Status> causes;
mojo_base.mojom.Value? data;
};
+
+// Types of media stream, categorized by the media stream's source.
+// The enum values are emitted to metrics. Do not reorder.
+enum MediaStreamType {
+ kLocalElementCapture = 0, // The source is a local capture from element.
+ kLocalDeviceCapture = 1, // The source is a local device capture, e.g. webcam.
+ kLocalTabCapture = 2, // The source is a local tab capture.
+ kLocalDesktopCapture = 3, // The source is a local desktop capture.
+ kLocalDisplayCapture = 4, // The source is a local display capture.
+ kRemote = 5, // The source is a remote peer connection.
+ kNone = 6, // Not a media stream.
+};
diff --git a/chromium/media/mojo/mojom/pipeline_status_mojom_traits.h b/chromium/media/mojo/mojom/pipeline_status_mojom_traits.h
index 43cb23f6f52..d2893e209d9 100644
--- a/chromium/media/mojo/mojom/pipeline_status_mojom_traits.h
+++ b/chromium/media/mojo/mojom/pipeline_status_mojom_traits.h
@@ -47,27 +47,54 @@ struct StructTraits<media::mojom::PipelineStatisticsDataView,
};
template <>
-struct StructTraits<media::mojom::PipelineDecoderInfoDataView,
- media::PipelineDecoderInfo> {
- static std::string decoder_name(const media::PipelineDecoderInfo& input) {
- return input.decoder_name;
+struct StructTraits<media::mojom::AudioDecoderInfoDataView,
+ media::AudioDecoderInfo> {
+ static media::AudioDecoderType decoder_type(
+ const media::AudioDecoderInfo& input) {
+ return input.decoder_type;
}
- static bool is_platform_decoder(const media::PipelineDecoderInfo& input) {
+ static bool is_platform_decoder(const media::AudioDecoderInfo& input) {
return input.is_platform_decoder;
}
static bool has_decrypting_demuxer_stream(
- const media::PipelineDecoderInfo& input) {
+ const media::AudioDecoderInfo& input) {
return input.has_decrypting_demuxer_stream;
}
- static bool Read(media::mojom::PipelineDecoderInfoDataView data,
- media::PipelineDecoderInfo* output) {
+ static bool Read(media::mojom::AudioDecoderInfoDataView data,
+ media::AudioDecoderInfo* output) {
output->is_platform_decoder = data.is_platform_decoder();
output->has_decrypting_demuxer_stream =
data.has_decrypting_demuxer_stream();
- return data.ReadDecoderName(&output->decoder_name);
+ return data.ReadDecoderType(&output->decoder_type);
+ }
+};
+
+template <>
+struct StructTraits<media::mojom::VideoDecoderInfoDataView,
+ media::VideoDecoderInfo> {
+ static media::VideoDecoderType decoder_type(
+ const media::VideoDecoderInfo& input) {
+ return input.decoder_type;
+ }
+
+ static bool is_platform_decoder(const media::VideoDecoderInfo& input) {
+ return input.is_platform_decoder;
+ }
+
+ static bool has_decrypting_demuxer_stream(
+ const media::VideoDecoderInfo& input) {
+ return input.has_decrypting_demuxer_stream;
+ }
+
+ static bool Read(media::mojom::VideoDecoderInfoDataView data,
+ media::VideoDecoderInfo* output) {
+ output->is_platform_decoder = data.is_platform_decoder();
+ output->has_decrypting_demuxer_stream =
+ data.has_decrypting_demuxer_stream();
+ return data.ReadDecoderType(&output->decoder_type);
}
};
diff --git a/chromium/media/mojo/mojom/renderer.mojom b/chromium/media/mojo/mojom/renderer.mojom
index d8ac141e0b5..72ce80b1624 100644
--- a/chromium/media/mojo/mojom/renderer.mojom
+++ b/chromium/media/mojo/mojom/renderer.mojom
@@ -74,7 +74,7 @@ interface RendererClient {
// Executed if any error was encountered during decode or rendering. If
// this error happens during an operation that has a completion callback,
// OnError() will be called before firing the completion callback.
- OnError();
+ OnError(Status status);
// Executed whenever DemuxerStream status returns kConfigChange. Initial
// configs provided by OnMetadata.
diff --git a/chromium/media/mojo/mojom/renderer_extensions.mojom b/chromium/media/mojo/mojom/renderer_extensions.mojom
index 2bd84f47308..bd245a41130 100644
--- a/chromium/media/mojo/mojom/renderer_extensions.mojom
+++ b/chromium/media/mojo/mojom/renderer_extensions.mojom
@@ -47,3 +47,28 @@ interface FlingingRendererClientExtension {
// network).
OnRemotePlayStateChange(MediaStatusState state);
};
+
+[EnableIf=is_win]
+// Extension of the mojo::RendererClient communication layer for the MF-based
+// CDM Renderer.
+// This allows the media::Renderer in the Renderer process to call into the
+// MediaFoundationRenderer in the "mf_cdm" sandboxed Utility process.
+// Concretely, the MediaFoundationRendererClient uses these methods to send
+// commands to the MediaFoundationRenderer, which lives in the mf_cdm
+// LPAC-based Utility process.
+// Please refer to media/renderers/win/media_foundation_renderer_extension.h
+// for the equivalent C++ interface.
+interface MediaFoundationRendererExtension {
+  // Enables Direct Composition video rendering.
+ [Sync]
+ SetDCOMPMode(bool enabled) => (bool succeeded);
+
+  // Gets a Direct Composition Surface handle.
+ GetDCOMPSurface() => (handle? dcomp_surface);
+
+  // Notifies the renderer whether video is enabled.
+ SetVideoStreamEnabled(bool enabled);
+
+  // Notifies the renderer of output composition parameters.
+ SetOutputParams(gfx.mojom.Rect rect);
+};
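
On the service side, MediaFoundationRenderer implements the generated C++
interface. A hedged stub showing the callback-based shape of the sync and
async methods (bodies are placeholders, not the real implementation):

    class MediaFoundationRendererStub
        : public media::mojom::MediaFoundationRendererExtension {
     public:
      void SetDCOMPMode(bool enabled, SetDCOMPModeCallback cb) override {
        std::move(cb).Run(/*succeeded=*/enabled);  // placeholder logic
      }
      void GetDCOMPSurface(GetDCOMPSurfaceCallback cb) override {
        std::move(cb).Run(mojo::ScopedHandle());  // no real surface here
      }
      void SetVideoStreamEnabled(bool enabled) override {}
      void SetOutputParams(const gfx::Rect& rect) override {}
    };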
diff --git a/chromium/media/mojo/mojom/speech_recognition_service.mojom b/chromium/media/mojo/mojom/speech_recognition_service.mojom
index f85ef0e4b3f..0c8b14579ca 100644
--- a/chromium/media/mojo/mojom/speech_recognition_service.mojom
+++ b/chromium/media/mojo/mojom/speech_recognition_service.mojom
@@ -6,6 +6,7 @@ module media.mojom;
import "media/mojo/mojom/media_types.mojom";
import "mojo/public/mojom/base/file_path.mojom";
+import "mojo/public/mojom/base/time.mojom";
import "services/network/public/mojom/url_loader_factory.mojom";
// The main interface a client uses to interact with a speech recognition
@@ -58,6 +59,15 @@ interface SpeechRecognitionRecognizer {
// will return the recognition events containing the transcribed audio back
// to the originating media.
SendAudioToSpeechRecognitionService(AudioDataS16 buffer);
+
+ // Notify the speech recognition recognizer that the caption bubble was
+ // closed. Used to determine whether the caption bubble was visible when
+ // recording watch time.
+ OnCaptionBubbleClosed();
+
+ // Notify the speech recognition recognizer that audio was received by the
+ // renderer after the caption bubble was closed.
+ AudioReceivedAfterBubbleClosed(mojo_base.mojom.TimeDelta duration);
};
// The interface used to return speech recognition events from the speech
diff --git a/chromium/media/mojo/mojom/supported_video_decoder_config_mojom_traits.h b/chromium/media/mojo/mojom/supported_video_decoder_config_mojom_traits.h
index 5ef4800dc37..ac43d30ecbf 100644
--- a/chromium/media/mojo/mojom/supported_video_decoder_config_mojom_traits.h
+++ b/chromium/media/mojo/mojom/supported_video_decoder_config_mojom_traits.h
@@ -6,9 +6,9 @@
#define MEDIA_MOJO_MOJOM_SUPPORTED_VIDEO_DECODER_CONFIG_MOJOM_TRAITS_H_
#include "media/base/ipc/media_param_traits.h"
+#include "media/base/supported_video_decoder_config.h"
#include "media/mojo/mojom/media_types.mojom.h"
#include "media/mojo/mojom/video_decoder.mojom.h"
-#include "media/video/supported_video_decoder_config.h"
#include "ui/gfx/geometry/mojom/geometry_mojom_traits.h"
namespace mojo {
diff --git a/chromium/media/mojo/mojom/video_decoder.mojom b/chromium/media/mojo/mojom/video_decoder.mojom
index 1ff471e0148..1a3f89d100f 100644
--- a/chromium/media/mojo/mojom/video_decoder.mojom
+++ b/chromium/media/mojo/mojom/video_decoder.mojom
@@ -128,7 +128,8 @@ interface VideoDecoder {
mojo_base.mojom.UnguessableToken? cdm_id)
=> (Status status,
bool needs_bitstream_conversion,
- int32 max_decode_requests);
+ int32 max_decode_requests,
+ VideoDecoderType decoder_type);
// Request decoding of exactly one frame or an EOS buffer. This must not be
// called while there are pending Initialize(), Reset(), or Decode(EOS)
diff --git a/chromium/media/mojo/mojom/video_encode_accelerator.mojom b/chromium/media/mojo/mojom/video_encode_accelerator.mojom
index 661a9e70b09..3516920dd39 100644
--- a/chromium/media/mojo/mojom/video_encode_accelerator.mojom
+++ b/chromium/media/mojo/mojom/video_encode_accelerator.mojom
@@ -83,7 +83,7 @@ struct VideoEncodeAcceleratorConfig {
// See media::VideoEncodeAccelerator::Config::StorageType
enum StorageType {
kShmem,
- kDmabuf
+ kGpuMemoryBuffer,
};
VideoPixelFormat input_format;
diff --git a/chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits.cc b/chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits.cc
index cb37413d8e9..df744f70048 100644
--- a/chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits.cc
+++ b/chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits.cc
@@ -167,8 +167,9 @@ EnumTraits<media::mojom::VideoEncodeAcceleratorConfig_StorageType,
media::VideoEncodeAccelerator::Config::StorageType>::
ToMojom(media::VideoEncodeAccelerator::Config::StorageType input) {
switch (input) {
- case media::VideoEncodeAccelerator::Config::StorageType::kDmabuf:
- return media::mojom::VideoEncodeAcceleratorConfig_StorageType::kDmabuf;
+ case media::VideoEncodeAccelerator::Config::StorageType::kGpuMemoryBuffer:
+ return media::mojom::VideoEncodeAcceleratorConfig_StorageType::
+ kGpuMemoryBuffer;
case media::VideoEncodeAccelerator::Config::StorageType::kShmem:
return media::mojom::VideoEncodeAcceleratorConfig_StorageType::kShmem;
}
@@ -185,8 +186,10 @@ bool EnumTraits<media::mojom::VideoEncodeAcceleratorConfig_StorageType,
case media::mojom::VideoEncodeAcceleratorConfig_StorageType::kShmem:
*output = media::VideoEncodeAccelerator::Config::StorageType::kShmem;
return true;
- case media::mojom::VideoEncodeAcceleratorConfig_StorageType::kDmabuf:
- *output = media::VideoEncodeAccelerator::Config::StorageType::kDmabuf;
+ case media::mojom::VideoEncodeAcceleratorConfig_StorageType::
+ kGpuMemoryBuffer:
+ *output =
+ media::VideoEncodeAccelerator::Config::StorageType::kGpuMemoryBuffer;
return true;
}
NOTREACHED();
diff --git a/chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits_unittest.cc b/chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits_unittest.cc
index 2504085975d..475dfee84d3 100644
--- a/chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits_unittest.cc
+++ b/chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits_unittest.cc
@@ -33,7 +33,7 @@ TEST(VideoEncoderInfoStructTraitTest, RoundTrip) {
::media::VideoEncoderInfo output = input;
ASSERT_TRUE(mojo::test::SerializeAndDeserialize<mojom::VideoEncoderInfo>(
- &input, &output));
+ input, output));
EXPECT_EQ(input, output);
}
@@ -47,7 +47,7 @@ TEST(SpatialLayerStructTraitTest, RoundTrip) {
input_spatial_layer.num_of_temporal_layers = 3u;
::media::VideoEncodeAccelerator::Config::SpatialLayer output_spatial_layer;
ASSERT_TRUE(mojo::test::SerializeAndDeserialize<mojom::SpatialLayer>(
- &input_spatial_layer, &output_spatial_layer));
+ input_spatial_layer, output_spatial_layer));
EXPECT_EQ(input_spatial_layer, output_spatial_layer);
}
@@ -70,7 +70,7 @@ TEST(VideoEncodeAcceleratorConfigStructTraitTest, RoundTrip) {
::media::VideoEncodeAccelerator::Config input_config(
::media::PIXEL_FORMAT_NV12, kBaseSize, ::media::VP9PROFILE_PROFILE0,
kBaseBitrateBps, kBaseFramerate, base::nullopt, base::nullopt, false,
- ::media::VideoEncodeAccelerator::Config::StorageType::kDmabuf,
+ ::media::VideoEncodeAccelerator::Config::StorageType::kGpuMemoryBuffer,
::media::VideoEncodeAccelerator::Config::ContentType::kCamera,
input_spatial_layers);
DVLOG(4) << input_config.AsHumanReadableString();
@@ -78,7 +78,7 @@ TEST(VideoEncodeAcceleratorConfigStructTraitTest, RoundTrip) {
::media::VideoEncodeAccelerator::Config output_config{};
ASSERT_TRUE(
mojo::test::SerializeAndDeserialize<mojom::VideoEncodeAcceleratorConfig>(
- &input_config, &output_config));
+ input_config, output_config));
DVLOG(4) << output_config.AsHumanReadableString();
EXPECT_EQ(input_config, output_config);
}
@@ -91,7 +91,7 @@ TEST(BitstreamBufferMetadataTraitTest, RoundTrip) {
::media::BitstreamBufferMetadata output_metadata;
ASSERT_TRUE(
mojo::test::SerializeAndDeserialize<mojom::BitstreamBufferMetadata>(
- &input_metadata, &output_metadata));
+ input_metadata, output_metadata));
EXPECT_EQ(input_metadata, output_metadata);
Vp8Metadata vp8;
@@ -102,7 +102,7 @@ TEST(BitstreamBufferMetadataTraitTest, RoundTrip) {
output_metadata = ::media::BitstreamBufferMetadata();
ASSERT_TRUE(
mojo::test::SerializeAndDeserialize<mojom::BitstreamBufferMetadata>(
- &input_metadata, &output_metadata));
+ input_metadata, output_metadata));
EXPECT_EQ(input_metadata, output_metadata);
input_metadata.vp8.reset();
@@ -115,7 +115,7 @@ TEST(BitstreamBufferMetadataTraitTest, RoundTrip) {
output_metadata = ::media::BitstreamBufferMetadata();
ASSERT_TRUE(
mojo::test::SerializeAndDeserialize<mojom::BitstreamBufferMetadata>(
- &input_metadata, &output_metadata));
+ input_metadata, output_metadata));
EXPECT_EQ(input_metadata, output_metadata);
}
} // namespace media
diff --git a/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits.cc b/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits.cc
index 552167c0490..0e77001b970 100644
--- a/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits.cc
+++ b/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits.cc
@@ -54,13 +54,11 @@ bool StructTraits<media::mojom::VideoFrameMetadataDataView,
DESERIALIZE_INTO_OPT(frame_rate);
DESERIALIZE_INTO_OPT(rtp_timestamp);
- if (input.has_rotation()) {
- media::VideoRotation rotation;
- if (!input.ReadRotation(&rotation))
- return false;
+ // unsigned int.
+ output->hw_protected_validation_id = input.hw_protected_validation_id();
- output->rotation = rotation;
- }
+ READ_AND_ASSIGN_OPT(media::VideoTransformation, transformation,
+ Transformation);
if (input.has_copy_mode()) {
media::VideoFrameMetadata::CopyMode copy_mode;
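
The DESERIALIZE_INTO_OPT and READ_AND_ASSIGN_OPT macros used above wrap a recurring shape: check a has_foo() bit on the wire view, read the value, reject the whole message on a failed read, and otherwise assign into an optional output field. A hedged sketch of that shape with std::optional; WireView and its members are invented stand-ins for the generated mojom DataView:

    #include <optional>

    struct Transformation { int rotation = 0; bool mirrored = false; };

    // Invented stand-in for the generated mojom DataView.
    struct WireView {
      bool has_transformation = false;
      Transformation transformation;
      bool ReadTransformation(Transformation* out) const {
        *out = transformation;  // A real view can fail validation here.
        return true;
      }
    };

    struct Metadata { std::optional<Transformation> transformation; };

    bool Read(const WireView& input, Metadata* output) {
      if (input.has_transformation) {
        Transformation value;
        if (!input.ReadTransformation(&value))
          return false;  // Reject the message on a malformed field.
        output->transformation = value;
      }
      return true;
    }
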
diff --git a/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits.h b/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits.h
index aca92114ace..06b7cf3ebc8 100644
--- a/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits.h
+++ b/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits.h
@@ -9,8 +9,10 @@
#include "base/optional.h"
#include "media/base/ipc/media_param_traits_macros.h"
#include "media/base/video_frame_metadata.h"
+#include "media/base/video_transformation.h"
#include "media/mojo/mojom/media_types.mojom-shared.h"
#include "media/mojo/mojom/media_types_enum_mojom_traits.h"
+#include "media/mojo/mojom/video_transformation_mojom_traits.h"
#include "mojo/public/cpp/bindings/struct_traits.h"
#include "ui/gfx/geometry/mojom/geometry_mojom_traits.h"
@@ -53,6 +55,11 @@ struct StructTraits<media::mojom::VideoFrameMetadataDataView,
return input.hw_protected;
}
+ static uint32_t hw_protected_validation_id(
+ const media::VideoFrameMetadata& input) {
+ return input.hw_protected_validation_id;
+ }
+
static bool power_efficient(const media::VideoFrameMetadata& input) {
return input.power_efficient;
}
@@ -72,9 +79,10 @@ struct StructTraits<media::mojom::VideoFrameMetadataDataView,
copy_mode,
media::VideoFrameMetadata::CopyMode::kCopyToNewTexture)
- GENERATE_OPT_SERIALIZATION(media::VideoRotation,
- rotation,
- media::VideoRotation::VIDEO_ROTATION_0)
+ static base::Optional<media::VideoTransformation> transformation(
+ const media::VideoFrameMetadata& input) {
+ return input.transformation;
+ }
GENERATE_OPT_SERIALIZATION(double, device_scale_factor, 0.0)
GENERATE_OPT_SERIALIZATION(double, page_scale_factor, 0.0)
diff --git a/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits_unittest.cc b/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits_unittest.cc
index 38f528440b3..f84a39befa1 100644
--- a/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits_unittest.cc
+++ b/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits_unittest.cc
@@ -21,7 +21,7 @@ namespace {
class VideoFrameMetadataStructTraitsTest
: public testing::Test,
- public media::mojom::VideoFrameMetadataTraitsTestService {
+ public mojom::VideoFrameMetadataTraitsTestService {
public:
VideoFrameMetadataStructTraitsTest() = default;
@@ -62,7 +62,7 @@ TEST_F(VideoFrameMetadataStructTraitsTest, EmptyMetadata) {
EXPECT_FALSE(metadata_out.capture_counter.has_value());
EXPECT_FALSE(metadata_out.capture_update_rect.has_value());
- EXPECT_FALSE(metadata_out.rotation.has_value());
+ EXPECT_FALSE(metadata_out.transformation.has_value());
EXPECT_FALSE(metadata_out.allow_overlay);
EXPECT_FALSE(metadata_out.copy_mode.has_value());
EXPECT_FALSE(metadata_out.end_of_stream);
@@ -70,6 +70,7 @@ TEST_F(VideoFrameMetadataStructTraitsTest, EmptyMetadata) {
EXPECT_FALSE(metadata_out.wants_promotion_hint);
EXPECT_FALSE(metadata_out.protected_video);
EXPECT_FALSE(metadata_out.hw_protected);
+ EXPECT_FALSE(metadata_out.hw_protected_validation_id);
EXPECT_FALSE(metadata_out.power_efficient);
EXPECT_FALSE(metadata_out.read_lock_fences_enabled);
EXPECT_FALSE(metadata_out.interactive_content);
@@ -99,16 +100,16 @@ TEST_F(VideoFrameMetadataStructTraitsTest, ValidMetadata) {
// ints
metadata_in.capture_counter = 123;
+ metadata_in.hw_protected_validation_id = 456;
// gfx::Rects
metadata_in.capture_update_rect = gfx::Rect(12, 34, 360, 480);
- // media::VideoRotations
- metadata_in.rotation = media::VideoRotation::VIDEO_ROTATION_90;
+ // VideoTransformation
+ metadata_in.transformation = VideoTransformation(VIDEO_ROTATION_90, true);
- // media::VideoFrameMetadata::CopyMode
- metadata_in.copy_mode =
- media::VideoFrameMetadata::CopyMode::kCopyToNewTexture;
+ // VideoFrameMetadata::CopyMode
+ metadata_in.copy_mode = VideoFrameMetadata::CopyMode::kCopyToNewTexture;
// bools
metadata_in.allow_overlay = true;
@@ -153,7 +154,7 @@ TEST_F(VideoFrameMetadataStructTraitsTest, ValidMetadata) {
EXPECT_EQ(metadata_in.capture_counter, metadata_out.capture_counter);
EXPECT_EQ(metadata_in.capture_update_rect, metadata_out.capture_update_rect);
- EXPECT_EQ(metadata_in.rotation, metadata_out.rotation);
+ EXPECT_EQ(metadata_in.transformation, metadata_out.transformation);
EXPECT_EQ(metadata_in.allow_overlay, metadata_out.allow_overlay);
EXPECT_EQ(metadata_in.copy_mode, metadata_out.copy_mode);
EXPECT_EQ(metadata_in.end_of_stream, metadata_out.end_of_stream);
@@ -162,6 +163,8 @@ TEST_F(VideoFrameMetadataStructTraitsTest, ValidMetadata) {
metadata_out.wants_promotion_hint);
EXPECT_EQ(metadata_in.protected_video, metadata_out.protected_video);
EXPECT_EQ(metadata_in.hw_protected, metadata_out.hw_protected);
+ EXPECT_EQ(metadata_in.hw_protected_validation_id,
+ metadata_out.hw_protected_validation_id);
EXPECT_EQ(metadata_in.power_efficient, metadata_out.power_efficient);
EXPECT_EQ(metadata_in.read_lock_fences_enabled,
metadata_out.read_lock_fences_enabled);
diff --git a/chromium/media/mojo/mojom/video_frame_mojom_traits.cc b/chromium/media/mojo/mojom/video_frame_mojom_traits.cc
index 4f7bae7a300..eac5560cc57 100644
--- a/chromium/media/mojo/mojom/video_frame_mojom_traits.cc
+++ b/chromium/media/mojo/mojom/video_frame_mojom_traits.cc
@@ -31,7 +31,7 @@ namespace {
media::mojom::VideoFrameDataPtr MakeVideoFrameData(
const media::VideoFrame* input) {
- if (input->metadata()->end_of_stream) {
+ if (input->metadata().end_of_stream) {
return media::mojom::VideoFrameData::NewEosData(
media::mojom::EosVideoFrameData::New());
}
@@ -81,9 +81,10 @@ media::mojom::VideoFrameDataPtr MakeVideoFrameData(
#endif
std::vector<gpu::MailboxHolder> mailbox_holder(media::VideoFrame::kMaxPlanes);
- size_t num_planes = media::VideoFrame::NumPlanes(input->format());
- DCHECK_LE(num_planes, mailbox_holder.size());
- for (size_t i = 0; i < num_planes; i++)
+ DCHECK_LE(input->NumTextures(), mailbox_holder.size());
+ // STORAGE_GPU_MEMORY_BUFFER may carry meaningful or dummy mailboxes;
+ // we should only access them when there are textures.
+ for (size_t i = 0; i < input->NumTextures(); i++)
mailbox_holder[i] = input->mailbox_holder(i);
if (input->storage_type() == media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER) {
@@ -238,7 +239,7 @@ bool StructTraits<media::mojom::VideoFrameDataView,
std::unique_ptr<gfx::GpuMemoryBuffer> gpu_memory_buffer =
support.CreateGpuMemoryBufferImplFromHandle(
std::move(gpu_memory_buffer_handle), coded_size, *buffer_format,
- gfx::BufferUsage::SCANOUT_VEA_READ_CAMERA_AND_CPU_READ_WRITE,
+ gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE,
base::NullCallback());
if (!gpu_memory_buffer)
return false;
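
The MakeVideoFrameData hunk above replaces a loop bound derived from the pixel format with input->NumTextures(), because a STORAGE_GPU_MEMORY_BUFFER frame can report planes without backing textures; copying only the valid mailboxes avoids touching dummy entries. A small standalone sketch of that bounded copy, where Frame and MailboxHolder are simplified stand-ins:

    #include <cassert>
    #include <cstddef>
    #include <vector>

    struct MailboxHolder { int id = 0; };

    // Stand-in frame: |num_planes| comes from the pixel format, while
    // |textures| holds only the mailboxes that are actually backed.
    struct Frame {
      size_t num_planes = 0;
      std::vector<MailboxHolder> textures;
      size_t NumTextures() const { return textures.size(); }
    };

    std::vector<MailboxHolder> CopyMailboxes(const Frame& frame,
                                             size_t max_planes) {
      std::vector<MailboxHolder> out(max_planes);
      assert(frame.NumTextures() <= out.size());  // DCHECK_LE above.
      // Bound the copy by the texture count, not the plane count, so that
      // frames with dummy mailboxes are never dereferenced.
      for (size_t i = 0; i < frame.NumTextures(); ++i)
        out[i] = frame.textures[i];
      return out;
    }

    int main() {
      Frame frame{3, {{1}, {2}}};  // 3 planes, only 2 backed textures.
      return CopyMailboxes(frame, 4).size() == 4 ? 0 : 1;
    }
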
diff --git a/chromium/media/mojo/mojom/video_frame_mojom_traits.h b/chromium/media/mojo/mojom/video_frame_mojom_traits.h
index 3323ef2ee2f..9071089bd2e 100644
--- a/chromium/media/mojo/mojom/video_frame_mojom_traits.h
+++ b/chromium/media/mojo/mojom/video_frame_mojom_traits.h
@@ -79,7 +79,7 @@ struct StructTraits<media::mojom::VideoFrameDataView,
// const &.
static const media::VideoFrameMetadata& metadata(
const scoped_refptr<media::VideoFrame>& input) {
- return *(input->metadata());
+ return input->metadata();
}
static bool Read(media::mojom::VideoFrameDataView input,
diff --git a/chromium/media/mojo/mojom/video_frame_mojom_traits_unittest.cc b/chromium/media/mojo/mojom/video_frame_mojom_traits_unittest.cc
index 3c6a2c3afb6..1ac03ed08eb 100644
--- a/chromium/media/mojo/mojom/video_frame_mojom_traits_unittest.cc
+++ b/chromium/media/mojo/mojom/video_frame_mojom_traits_unittest.cc
@@ -77,7 +77,7 @@ TEST_F(VideoFrameStructTraitsTest, EOS) {
ASSERT_TRUE(RoundTrip(&frame));
ASSERT_TRUE(frame);
- EXPECT_TRUE(frame->metadata()->end_of_stream);
+ EXPECT_TRUE(frame->metadata().end_of_stream);
}
TEST_F(VideoFrameStructTraitsTest, MojoSharedBufferVideoFrame) {
@@ -86,12 +86,12 @@ TEST_F(VideoFrameStructTraitsTest, MojoSharedBufferVideoFrame) {
scoped_refptr<VideoFrame> frame =
MojoSharedBufferVideoFrame::CreateDefaultForTesting(
format, gfx::Size(100, 100), base::TimeDelta::FromSeconds(100));
- frame->metadata()->frame_rate = 42.0;
+ frame->metadata().frame_rate = 42.0;
ASSERT_TRUE(RoundTrip(&frame));
ASSERT_TRUE(frame);
- EXPECT_FALSE(frame->metadata()->end_of_stream);
- EXPECT_EQ(*frame->metadata()->frame_rate, 42.0);
+ EXPECT_FALSE(frame->metadata().end_of_stream);
+ EXPECT_EQ(*frame->metadata().frame_rate, 42.0);
EXPECT_EQ(frame->coded_size(), gfx::Size(100, 100));
EXPECT_EQ(frame->timestamp(), base::TimeDelta::FromSeconds(100));
@@ -126,7 +126,7 @@ TEST_F(VideoFrameStructTraitsTest, DmabufVideoFrame) {
ASSERT_TRUE(RoundTrip(&frame));
ASSERT_TRUE(frame);
- EXPECT_FALSE(frame->metadata()->end_of_stream);
+ EXPECT_FALSE(frame->metadata().end_of_stream);
EXPECT_EQ(frame->format(), PIXEL_FORMAT_NV12);
EXPECT_EQ(frame->coded_size(), gfx::Size(1280, 720));
EXPECT_EQ(frame->visible_rect(), gfx::Rect(0, 0, 1280, 720));
@@ -148,7 +148,7 @@ TEST_F(VideoFrameStructTraitsTest, MailboxVideoFrame) {
ASSERT_TRUE(RoundTrip(&frame));
ASSERT_TRUE(frame);
- EXPECT_FALSE(frame->metadata()->end_of_stream);
+ EXPECT_FALSE(frame->metadata().end_of_stream);
EXPECT_EQ(frame->format(), PIXEL_FORMAT_ARGB);
EXPECT_EQ(frame->coded_size(), gfx::Size(100, 100));
EXPECT_EQ(frame->visible_rect(), gfx::Rect(10, 10, 80, 80));
@@ -182,7 +182,7 @@ TEST_F(VideoFrameStructTraitsTest, GpuMemoryBufferVideoFrame) {
ASSERT_TRUE(frame);
ASSERT_EQ(frame->storage_type(), VideoFrame::STORAGE_GPU_MEMORY_BUFFER);
EXPECT_TRUE(frame->HasGpuMemoryBuffer());
- EXPECT_FALSE(frame->metadata()->end_of_stream);
+ EXPECT_FALSE(frame->metadata().end_of_stream);
EXPECT_EQ(frame->format(), PIXEL_FORMAT_NV12);
EXPECT_EQ(frame->coded_size(), coded_size);
EXPECT_EQ(frame->visible_rect(), visible_rect);
diff --git a/chromium/media/mojo/mojom/video_transformation_mojom_traits.cc b/chromium/media/mojo/mojom/video_transformation_mojom_traits.cc
index 9aebac3bf3f..14ff07017fc 100644
--- a/chromium/media/mojo/mojom/video_transformation_mojom_traits.cc
+++ b/chromium/media/mojo/mojom/video_transformation_mojom_traits.cc
@@ -4,6 +4,8 @@
#include "media/mojo/mojom/video_transformation_mojom_traits.h"
+#include "media/mojo/mojom/media_types.mojom.h"
+
namespace mojo {
// static
diff --git a/chromium/media/mojo/mojom/video_transformation_mojom_traits.h b/chromium/media/mojo/mojom/video_transformation_mojom_traits.h
index 11ddd8126c9..bd53385d302 100644
--- a/chromium/media/mojo/mojom/video_transformation_mojom_traits.h
+++ b/chromium/media/mojo/mojom/video_transformation_mojom_traits.h
@@ -7,7 +7,7 @@
#include "media/base/ipc/media_param_traits.h"
#include "media/base/video_transformation.h"
-#include "media/mojo/mojom/media_types.mojom.h"
+#include "media/mojo/mojom/media_types.mojom-shared.h"
#include "media/mojo/mojom/media_types_enum_mojom_traits.h"
namespace mojo {
diff --git a/chromium/media/mojo/mojom/watch_time_recorder.mojom b/chromium/media/mojo/mojom/watch_time_recorder.mojom
index 9f9a917ebc6..0bec65de8c0 100644
--- a/chromium/media/mojo/mojom/watch_time_recorder.mojom
+++ b/chromium/media/mojo/mojom/watch_time_recorder.mojom
@@ -18,6 +18,9 @@ struct PlaybackProperties {
bool is_mse;
bool is_eme;
bool is_embedded_media_experience; // Playback from 'Downloads' on Android.
+ // Type of played-back MediaStream, or kNone if the playback is not from a
+ // MediaStream.
+ MediaStreamType media_stream_type;
};
// Structure describing mutable properties for the current watch time report.
@@ -28,8 +31,8 @@ struct SecondaryPlaybackProperties {
VideoCodec video_codec; // playbacks (HLS, remoting, etc).
AudioCodecProfile audio_codec_profile;
VideoCodecProfile video_codec_profile;
- string audio_decoder_name;
- string video_decoder_name;
+ AudioDecoderType audio_decoder;
+ VideoDecoderType video_decoder;
EncryptionScheme audio_encryption_scheme;
EncryptionScheme video_encryption_scheme;
gfx.mojom.Size natural_size; // Size of video frame; (0, 0) if audio only.
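
Replacing the free-form audio_decoder_name / video_decoder_name strings with AudioDecoderType / VideoDecoderType enums moves the set of reportable decoders into the type system, so metrics code no longer has to recognize name strings at runtime (see the hash switch deleted from watch_time_recorder.cc further down). A sketch of the stable-enum convention; the values here are illustrative, not the real mojom values:

    #include <cstdint>

    // Recorded to metrics: append new values at the end, never reorder or
    // delete, so historical data keeps its meaning.
    enum class VideoDecoderType : int32_t {
      kUnknown = 0,
      kFFmpeg = 1,
      kMojo = 2,
      kD3D11 = 3,
      kMaxValue = kD3D11,  // Chromium-style sentinel for histogram bounds.
    };

    int32_t ToUmaSample(VideoDecoderType type) {
      return static_cast<int32_t>(type);  // Stable by construction.
    }

    int main() {
      static_assert(static_cast<int32_t>(VideoDecoderType::kD3D11) == 3,
                    "values are frozen once shipped");
      return ToUmaSample(VideoDecoderType::kUnknown);
    }
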
diff --git a/chromium/media/mojo/services/BUILD.gn b/chromium/media/mojo/services/BUILD.gn
index afdf5766a53..5565b1ffdb8 100644
--- a/chromium/media/mojo/services/BUILD.gn
+++ b/chromium/media/mojo/services/BUILD.gn
@@ -141,10 +141,20 @@ component("services") {
deps += [ "//sandbox" ]
}
}
- if (is_ash) {
+ if (is_chromeos_ash) {
deps +=
[ "//chromeos/components/cdm_factory_daemon:cdm_factory_daemon_gpu" ]
}
+
+ if (is_win) {
+ sources += [
+ "media_foundation_mojo_media_client.cc",
+ "media_foundation_mojo_media_client.h",
+ "media_foundation_renderer_wrapper.cc",
+ "media_foundation_renderer_wrapper.h",
+ ]
+ deps += [ "//media/base/win:media_foundation_util" ]
+ }
}
source_set("unit_tests") {
@@ -198,7 +208,7 @@ source_set("unit_tests") {
deps += [ "//media/cdm:cdm_api" ]
}
- if (is_ash) {
+ if (is_chromeos_ash) {
deps += [
"//components/chromeos_camera:mjpeg_decode_accelerator_service_unittest",
]
diff --git a/chromium/media/mojo/services/gpu_mojo_media_client.cc b/chromium/media/mojo/services/gpu_mojo_media_client.cc
index 88ecd898dd6..f11336e1a78 100644
--- a/chromium/media/mojo/services/gpu_mojo_media_client.cc
+++ b/chromium/media/mojo/services/gpu_mojo_media_client.cc
@@ -13,7 +13,6 @@
#include "gpu/ipc/service/gpu_channel.h"
#include "media/base/audio_decoder.h"
#include "media/base/cdm_factory.h"
-#include "media/base/fallback_video_decoder.h"
#include "media/base/media_switches.h"
#include "media/base/video_decoder.h"
#include "media/gpu/gpu_video_accelerator_util.h"
@@ -59,9 +58,9 @@ using media::android_mojo_util::CreateProvisionFetcher;
using media::android_mojo_util::CreateMediaDrmStorage;
#endif // defined(OS_ANDROID)
-#if BUILDFLAG(IS_ASH)
+#if BUILDFLAG(IS_CHROMEOS_ASH)
#include "chromeos/components/cdm_factory_daemon/chromeos_cdm_factory.h"
-#endif // BUILDFLAG(IS_ASH)
+#endif // BUILDFLAG(IS_CHROMEOS_ASH)
namespace media {
@@ -106,21 +105,12 @@ D3D11VideoDecoder::GetD3D11DeviceCB GetD3D11DeviceCallback() {
#endif
#if BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
-// Returns true if |gpu_preferences| says that the direct video decoder is
-// supported and the feature flag says so. This only applies to ChromeOS builds,
-// otherwise it returns false.
+// Returns true if |gpu_preferences| says that the direct video decoder is in
+// use.
bool ShouldUseChromeOSDirectVideoDecoder(
const gpu::GpuPreferences& gpu_preferences) {
-#if BUILDFLAG(IS_ASH)
- const bool should_use_direct_video_decoder =
- !gpu_preferences.platform_disallows_chromeos_direct_video_decoder &&
- base::FeatureList::IsEnabled(kUseChromeOSDirectVideoDecoder);
-
- // For testing purposes, the following flag allows using the "other" video
- // decoder implementation.
- if (base::FeatureList::IsEnabled(kUseAlternateVideoDecoderImplementation))
- return !should_use_direct_video_decoder;
- return should_use_direct_video_decoder;
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ return gpu_preferences.enable_chromeos_direct_video_decoder;
#else
return false;
#endif
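
The rewritten ShouldUseChromeOSDirectVideoDecoder collapses feature-flag probing into a single preference bit computed elsewhere, leaving a buildflag-guarded predicate. A standalone sketch of that shape, with a local macro standing in for BUILDFLAG(IS_CHROMEOS_ASH):

    // Toggle at compile time, e.g. -DIS_CHROMEOS_ASH_SKETCH=1.
    #ifndef IS_CHROMEOS_ASH_SKETCH
    #define IS_CHROMEOS_ASH_SKETCH 0
    #endif

    struct GpuPreferences {
      bool enable_chromeos_direct_video_decoder = false;
    };

    bool ShouldUseDirectVideoDecoder(const GpuPreferences& prefs) {
    #if IS_CHROMEOS_ASH_SKETCH
      // The decision was made once when the preferences were built; just
      // read it back here.
      return prefs.enable_chromeos_direct_video_decoder;
    #else
      (void)prefs;
      return false;  // Only meaningful on ChromeOS Ash builds.
    #endif
    }

    int main() {
      GpuPreferences prefs{true};
      return ShouldUseDirectVideoDecoder(prefs) ? 0 : 1;
    }
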
@@ -347,7 +337,7 @@ std::unique_ptr<CdmFactory> GpuMojoMediaClient::CreateCdmFactory(
return std::make_unique<AndroidCdmFactory>(
base::BindRepeating(&CreateProvisionFetcher, frame_interfaces),
base::BindRepeating(&CreateMediaDrmStorage, frame_interfaces));
-#elif BUILDFLAG(IS_ASH)
+#elif BUILDFLAG(IS_CHROMEOS_ASH)
return std::make_unique<chromeos::ChromeOsCdmFactory>(frame_interfaces);
#else
return nullptr;
diff --git a/chromium/media/mojo/services/gpu_mojo_media_client.h b/chromium/media/mojo/services/gpu_mojo_media_client.h
index 27dbb020c99..667b43a0115 100644
--- a/chromium/media/mojo/services/gpu_mojo_media_client.h
+++ b/chromium/media/mojo/services/gpu_mojo_media_client.h
@@ -17,9 +17,9 @@
#include "gpu/config/gpu_feature_info.h"
#include "gpu/config/gpu_preferences.h"
#include "media/base/android_overlay_mojo_factory.h"
+#include "media/base/supported_video_decoder_config.h"
#include "media/media_buildflags.h"
#include "media/mojo/services/mojo_media_client.h"
-#include "media/video/supported_video_decoder_config.h"
namespace gpu {
class GpuMemoryBufferFactory;
diff --git a/chromium/media/mojo/services/interface_factory_impl.cc b/chromium/media/mojo/services/interface_factory_impl.cc
index bc04bef061e..fbfbbd24cbc 100644
--- a/chromium/media/mojo/services/interface_factory_impl.cc
+++ b/chromium/media/mojo/services/interface_factory_impl.cc
@@ -34,6 +34,11 @@
#include "media/mojo/services/mojo_cdm_service.h"
#endif // BUILDFLAG(ENABLE_MOJO_CDM)
+#if defined(OS_WIN)
+#include "media/mojo/services/media_foundation_renderer_wrapper.h"
+#include "media/mojo/services/mojo_renderer_service.h"
+#endif // defined(OS_WIN)
+
namespace media {
InterfaceFactoryImpl::InterfaceFactoryImpl(
@@ -101,16 +106,8 @@ void InterfaceFactoryImpl::CreateDefaultRenderer(
std::make_unique<MojoRendererService>(&cdm_service_context_,
std::move(renderer));
- MojoRendererService* mojo_renderer_service_ptr = mojo_renderer_service.get();
-
- mojo::ReceiverId receiver_id = renderer_receivers_.Add(
- std::move(mojo_renderer_service), std::move(receiver));
-
- // base::Unretained() is safe because the callback will be fired by
- // |mojo_renderer_service|, which is owned by |renderer_receivers_|.
- mojo_renderer_service_ptr->set_bad_message_cb(base::BindRepeating(
- base::IgnoreResult(&mojo::UniqueReceiverSet<mojom::Renderer>::Remove),
- base::Unretained(&renderer_receivers_), receiver_id));
+ renderer_receivers_.Add(std::move(mojo_renderer_service),
+ std::move(receiver));
#endif // BUILDFLAG(ENABLE_MOJO_RENDERER)
}
@@ -131,16 +128,8 @@ void InterfaceFactoryImpl::CreateCastRenderer(
std::make_unique<MojoRendererService>(&cdm_service_context_,
std::move(renderer));
- MojoRendererService* mojo_renderer_service_ptr = mojo_renderer_service.get();
-
- mojo::ReceiverId receiver_id = renderer_receivers_.Add(
- std::move(mojo_renderer_service), std::move(receiver));
-
- // base::Unretained() is safe because the callback will be fired by
- // |mojo_renderer_service|, which is owned by |renderer_receivers_|.
- mojo_renderer_service_ptr->set_bad_message_cb(base::BindRepeating(
- base::IgnoreResult(&mojo::UniqueReceiverSet<mojom::Renderer>::Remove),
- base::Unretained(&renderer_receivers_), receiver_id));
+ renderer_receivers_.Add(std::move(mojo_renderer_service),
+ std::move(receiver));
}
#endif
@@ -163,6 +152,21 @@ void InterfaceFactoryImpl::CreateFlingingRenderer(
}
#endif // defined(OS_ANDROID)
+#if defined(OS_WIN)
+void InterfaceFactoryImpl::CreateMediaFoundationRenderer(
+ mojo::PendingReceiver<media::mojom::Renderer> receiver,
+ mojo::PendingReceiver<media::mojom::MediaFoundationRendererExtension>
+ renderer_extension_receiver) {
+ DVLOG(1) << __func__ << ": this=" << this;
+
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner =
+ base::ThreadTaskRunnerHandle::Get();
+ CreateMediaFoundationRendererOnTaskRunner(
+ std::move(task_runner), std::move(receiver),
+ std::move(renderer_extension_receiver));
+}
+#endif // defined(OS_WIN)
+
void InterfaceFactoryImpl::CreateCdm(const std::string& key_system,
const CdmConfig& cdm_config,
CreateCdmCallback callback) {
@@ -282,4 +286,33 @@ void InterfaceFactoryImpl::OnCdmServiceCreated(
#endif // BUILDFLAG(ENABLE_MOJO_CDM)
+#if defined(OS_WIN)
+void InterfaceFactoryImpl::CreateMediaFoundationRendererOnTaskRunner(
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ mojo::PendingReceiver<media::mojom::Renderer> receiver,
+ mojo::PendingReceiver<media::mojom::MediaFoundationRendererExtension>
+ renderer_extension_receiver) {
+ DVLOG(1) << __func__ << ": this=" << this;
+
+ if (!task_runner->RunsTasksInCurrentSequence()) {
+ task_runner->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &InterfaceFactoryImpl::CreateMediaFoundationRendererOnTaskRunner,
+ base::Unretained(this), task_runner, std::move(receiver),
+ std::move(renderer_extension_receiver)));
+ return;
+ }
+
+ auto renderer = std::make_unique<media::MediaFoundationRendererWrapper>(
+ /*muted=*/false, std::move(task_runner),
+ std::move(renderer_extension_receiver));
+
+ media::MojoRendererService::Create(&cdm_service_context_, std::move(renderer),
+ std::move(receiver));
+}
+#endif // defined(OS_WIN)
+
} // namespace media
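
CreateMediaFoundationRendererOnTaskRunner hops onto the right sequence by re-posting itself when called from elsewhere, then does the real work once it is running on the target sequence. A minimal single-threaded sketch of that bounce pattern; Executor is an invented stand-in for base::SingleThreadTaskRunner:

    #include <functional>
    #include <iostream>
    #include <queue>

    // Invented stand-in for a task runner: queues closures and drains them.
    struct Executor {
      std::queue<std::function<void()>> tasks;
      bool running_task = false;
      bool RunsTasksInCurrentSequence() const { return running_task; }
      void PostTask(std::function<void()> task) { tasks.push(std::move(task)); }
      void Drain() {
        while (!tasks.empty()) {
          running_task = true;
          auto task = std::move(tasks.front());
          tasks.pop();
          task();
          running_task = false;
        }
      }
    };

    void CreateRendererOnExecutor(Executor* executor, int request_id) {
      if (!executor->RunsTasksInCurrentSequence()) {
        // Bounce onto the executor and retry; mirrors the PostTask +
        // early-return shape in the hunk above.
        executor->PostTask([executor, request_id] {
          CreateRendererOnExecutor(executor, request_id);
        });
        return;
      }
      std::cout << "creating renderer for request " << request_id << "\n";
    }

    int main() {
      Executor executor;
      CreateRendererOnExecutor(&executor, 7);  // Not on the sequence yet.
      executor.Drain();                        // Runs the re-posted call.
    }
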
diff --git a/chromium/media/mojo/services/interface_factory_impl.h b/chromium/media/mojo/services/interface_factory_impl.h
index c8341e6a35b..be55cc55dd4 100644
--- a/chromium/media/mojo/services/interface_factory_impl.h
+++ b/chromium/media/mojo/services/interface_factory_impl.h
@@ -68,6 +68,13 @@ class InterfaceFactoryImpl final
client_extension,
mojo::PendingReceiver<mojom::Renderer> receiver) final;
#endif // defined(OS_ANDROID)
+#if defined(OS_WIN)
+ void CreateMediaFoundationRenderer(
+ mojo::PendingReceiver<media::mojom::Renderer> receiver,
+ mojo::PendingReceiver<media::mojom::MediaFoundationRendererExtension>
+ renderer_extension_receiver) final;
+#endif // defined(OS_WIN)
+
void CreateCdm(const std::string& key_system,
const CdmConfig& cdm_config,
CreateCdmCallback callback) final;
@@ -91,6 +98,14 @@ class InterfaceFactoryImpl final
const std::string& error_message);
#endif // BUILDFLAG(ENABLE_MOJO_CDM)
+#if defined(OS_WIN)
+ void CreateMediaFoundationRendererOnTaskRunner(
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ mojo::PendingReceiver<media::mojom::Renderer> receiver,
+ mojo::PendingReceiver<media::mojom::MediaFoundationRendererExtension>
+ renderer_extension_receiver);
+#endif // defined(OS_WIN)
+
// Must be declared before the receivers below because the bound objects might
// take a raw pointer to |cdm_service_context_| and assume it's always
// available.
diff --git a/chromium/media/mojo/services/media_foundation_mojo_media_client.cc b/chromium/media/mojo/services/media_foundation_mojo_media_client.cc
new file mode 100644
index 00000000000..aa427c267db
--- /dev/null
+++ b/chromium/media/mojo/services/media_foundation_mojo_media_client.cc
@@ -0,0 +1,44 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/mojo/services/media_foundation_mojo_media_client.h"
+
+#include "media/base/audio_decoder.h"
+#include "media/base/win/mf_helpers.h"
+#include "media/cdm/cdm_adapter_factory.h"
+#include "media/mojo/services/mojo_cdm_helper.h"
+
+namespace media {
+
+namespace {
+
+std::unique_ptr<media::CdmAuxiliaryHelper> CreateCdmHelper(
+ mojom::FrameInterfaceFactory* frame_interfaces) {
+ return std::make_unique<media::MojoCdmHelper>(frame_interfaces);
+}
+
+} // namespace
+
+MediaFoundationMojoMediaClient::MediaFoundationMojoMediaClient() {
+ DVLOG_FUNC(1);
+}
+
+MediaFoundationMojoMediaClient::~MediaFoundationMojoMediaClient() {
+ DVLOG_FUNC(1);
+}
+
+// MojoMediaClient overrides.
+
+std::unique_ptr<media::CdmFactory>
+MediaFoundationMojoMediaClient::CreateCdmFactory(
+ mojom::FrameInterfaceFactory* frame_interfaces) {
+ DVLOG_FUNC(1);
+
+ // TODO(frankli): consider using MediaFoundationCdmFactory instead of
+ // CdmAdapterFactory.
+ return std::make_unique<media::CdmAdapterFactory>(
+ base::BindRepeating(&CreateCdmHelper, frame_interfaces));
+}
+
+} // namespace media
diff --git a/chromium/media/mojo/services/media_foundation_mojo_media_client.h b/chromium/media/mojo/services/media_foundation_mojo_media_client.h
new file mode 100644
index 00000000000..e476f0be1b8
--- /dev/null
+++ b/chromium/media/mojo/services/media_foundation_mojo_media_client.h
@@ -0,0 +1,32 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_MOJO_SERVICES_MEDIA_FOUNDATION_MOJO_MEDIA_CLIENT_H_
+#define MEDIA_MOJO_SERVICES_MEDIA_FOUNDATION_MOJO_MEDIA_CLIENT_H_
+
+#include "base/macros.h"
+#include "base/single_thread_task_runner.h"
+#include "media/mojo/services/mojo_media_client.h"
+
+namespace media {
+
+// This class is the |mojo_media_client| parameter used to create
+// media::MediaService. The MediaService itself runs in the mf_cdm utility
+// process to host MediaFoundationRenderer/Cdm.
+class MediaFoundationMojoMediaClient : public media::MojoMediaClient {
+ public:
+ MediaFoundationMojoMediaClient();
+ ~MediaFoundationMojoMediaClient() final;
+
+ // MojoMediaClient implementation.
+ std::unique_ptr<media::CdmFactory> CreateCdmFactory(
+ mojom::FrameInterfaceFactory* frame_interfaces) final;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(MediaFoundationMojoMediaClient);
+};
+
+} // namespace media
+
+#endif // MEDIA_MOJO_SERVICES_MEDIA_FOUNDATION_MOJO_MEDIA_CLIENT_H_
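
MediaFoundationMojoMediaClient overrides only the one factory hook it cares about; the base class supplies defaults for everything else. A stripped-down sketch of that plug-in shape, where the class and method names are stand-ins rather than the real MojoMediaClient surface:

    #include <iostream>
    #include <memory>

    struct CdmFactory { virtual ~CdmFactory() = default; };

    // Base client: hooks default to "not supported".
    class MediaClient {
     public:
      virtual ~MediaClient() = default;
      virtual std::unique_ptr<CdmFactory> CreateCdmFactory() { return nullptr; }
    };

    // Platform client: overrides just the CDM hook, like the class above.
    class WindowsMediaClient final : public MediaClient {
     public:
      std::unique_ptr<CdmFactory> CreateCdmFactory() override {
        return std::make_unique<CdmFactory>();
      }
    };

    int main() {
      std::unique_ptr<MediaClient> client =
          std::make_unique<WindowsMediaClient>();
      std::cout << (client->CreateCdmFactory() ? "cdm ok" : "no cdm") << "\n";
    }
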
diff --git a/chromium/media/mojo/services/media_foundation_renderer_wrapper.cc b/chromium/media/mojo/services/media_foundation_renderer_wrapper.cc
new file mode 100644
index 00000000000..21953a23dfe
--- /dev/null
+++ b/chromium/media/mojo/services/media_foundation_renderer_wrapper.cc
@@ -0,0 +1,101 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/mojo/services/media_foundation_renderer_wrapper.h"
+
+#include "base/callback_helpers.h"
+#include "media/base/win/mf_helpers.h"
+#include "media/mojo/mojom/renderer_extensions.mojom.h"
+#include "mojo/public/cpp/system/platform_handle.h"
+
+namespace media {
+
+MediaFoundationRendererWrapper::MediaFoundationRendererWrapper(
+ bool web_contents_muted,
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
+ mojo::PendingReceiver<RendererExtension> renderer_extension_receiver)
+ : renderer_(std::make_unique<media::MediaFoundationRenderer>(
+ web_contents_muted,
+ std::move(task_runner))),
+ renderer_extension_receiver_(this,
+ std::move(renderer_extension_receiver)) {
+ DVLOG_FUNC(1);
+}
+
+MediaFoundationRendererWrapper::~MediaFoundationRendererWrapper() {
+ DVLOG_FUNC(1);
+}
+
+void MediaFoundationRendererWrapper::Initialize(
+ media::MediaResource* media_resource,
+ media::RendererClient* client,
+ media::PipelineStatusCallback init_cb) {
+ renderer_->Initialize(media_resource, client, std::move(init_cb));
+}
+
+void MediaFoundationRendererWrapper::SetCdm(CdmContext* cdm_context,
+ CdmAttachedCB cdm_attached_cb) {
+ renderer_->SetCdm(cdm_context, std::move(cdm_attached_cb));
+}
+
+void MediaFoundationRendererWrapper::SetLatencyHint(
+ base::Optional<base::TimeDelta> latency_hint) {
+ renderer_->SetLatencyHint(latency_hint);
+}
+
+void MediaFoundationRendererWrapper::Flush(base::OnceClosure flush_cb) {
+ renderer_->Flush(std::move(flush_cb));
+}
+
+void MediaFoundationRendererWrapper::StartPlayingFrom(base::TimeDelta time) {
+ renderer_->StartPlayingFrom(time);
+}
+
+void MediaFoundationRendererWrapper::SetPlaybackRate(double playback_rate) {
+ renderer_->SetPlaybackRate(playback_rate);
+}
+
+void MediaFoundationRendererWrapper::SetVolume(float volume) {
+ return renderer_->SetVolume(volume);
+}
+
+base::TimeDelta MediaFoundationRendererWrapper::GetMediaTime() {
+ return renderer_->GetMediaTime();
+}
+
+void MediaFoundationRendererWrapper::SetDCOMPMode(
+ bool enabled,
+ SetDCOMPModeCallback callback) {
+ renderer_->SetDCompMode(enabled, std::move(callback));
+}
+
+void MediaFoundationRendererWrapper::GetDCOMPSurface(
+ GetDCOMPSurfaceCallback callback) {
+ get_decomp_surface_cb_ = std::move(callback);
+ renderer_->GetDCompSurface(
+ base::BindOnce(&MediaFoundationRendererWrapper::OnReceiveDCOMPSurface,
+ weak_factory_.GetWeakPtr()));
+}
+
+void MediaFoundationRendererWrapper::SetVideoStreamEnabled(bool enabled) {
+ renderer_->SetVideoStreamEnabled(enabled);
+}
+
+void MediaFoundationRendererWrapper::SetOutputParams(
+ const gfx::Rect& output_rect) {
+ renderer_->SetOutputParams(output_rect);
+}
+
+void MediaFoundationRendererWrapper::OnReceiveDCOMPSurface(HANDLE handle) {
+ base::win::ScopedHandle local_surface_handle;
+ local_surface_handle.Set(handle);
+ if (get_decomp_surface_cb_) {
+ mojo::ScopedHandle surface_handle;
+ surface_handle = mojo::WrapPlatformHandle(
+ mojo::PlatformHandle(std::move(local_surface_handle)));
+ std::move(get_decomp_surface_cb_).Run(std::move(surface_handle));
+ }
+}
+
+} // namespace media
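
OnReceiveDCOMPSurface is bound with weak_factory_.GetWeakPtr(), so a surface handle that arrives after the wrapper is destroyed is dropped rather than delivered to freed memory. The same guard can be sketched portably with std::weak_ptr; Wrapper and the int handle are stand-ins for the wrapper class and its HANDLE:

    #include <functional>
    #include <iostream>
    #include <memory>

    class Wrapper : public std::enable_shared_from_this<Wrapper> {
     public:
      // Returns a callback that is safe to run after |this| is gone.
      std::function<void(int)> MakeSurfaceCallback() {
        std::weak_ptr<Wrapper> weak = weak_from_this();
        return [weak](int handle) {
          if (auto self = weak.lock())
            self->OnReceiveSurface(handle);
          // Otherwise the wrapper died first; drop the handle, as the
          // weak pointer bound in GetDCOMPSurface() would.
        };
      }

     private:
      void OnReceiveSurface(int handle) {
        std::cout << "got surface " << handle << "\n";
      }
    };

    int main() {
      auto wrapper = std::make_shared<Wrapper>();
      auto cb = wrapper->MakeSurfaceCallback();
      cb(42);          // Wrapper alive: handled.
      wrapper.reset();
      cb(43);          // Wrapper destroyed: the callback quietly drops it.
    }
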
diff --git a/chromium/media/mojo/services/media_foundation_renderer_wrapper.h b/chromium/media/mojo/services/media_foundation_renderer_wrapper.h
new file mode 100644
index 00000000000..01f8fa0ea06
--- /dev/null
+++ b/chromium/media/mojo/services/media_foundation_renderer_wrapper.h
@@ -0,0 +1,70 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_MOJO_SERVICES_MEDIA_FOUNDATION_RENDERER_WRAPPER_H_
+#define MEDIA_MOJO_SERVICES_MEDIA_FOUNDATION_RENDERER_WRAPPER_H_
+
+#include "base/callback.h"
+#include "base/macros.h"
+#include "base/memory/weak_ptr.h"
+#include "media/base/media_resource.h"
+#include "media/base/pipeline_status.h"
+#include "media/base/renderer.h"
+#include "media/base/renderer_client.h"
+#include "media/mojo/mojom/renderer_extensions.mojom.h"
+#include "media/renderers/win/media_foundation_renderer.h"
+#include "mojo/public/cpp/bindings/pending_receiver.h"
+#include "mojo/public/cpp/bindings/receiver.h"
+
+namespace media {
+
+// Wraps media::MediaFoundationRenderer to remove its dependence on the
+// media::mojom::MediaFoundationRendererExtension interface.
+class MediaFoundationRendererWrapper
+ : public media::Renderer,
+ public media::mojom::MediaFoundationRendererExtension {
+ public:
+ using RendererExtension = media::mojom::MediaFoundationRendererExtension;
+
+ MediaFoundationRendererWrapper(
+ bool web_contents_muted,
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
+ mojo::PendingReceiver<RendererExtension> renderer_extension_receiver);
+
+ ~MediaFoundationRendererWrapper() final;
+
+ // media::Renderer implementation.
+ void Initialize(media::MediaResource* media_resource,
+ media::RendererClient* client,
+ media::PipelineStatusCallback init_cb) override;
+ void SetCdm(CdmContext* cdm_context, CdmAttachedCB cdm_attached_cb) override;
+ void SetLatencyHint(base::Optional<base::TimeDelta> latency_hint) override;
+ void Flush(base::OnceClosure flush_cb) override;
+ void StartPlayingFrom(base::TimeDelta time) override;
+ void SetPlaybackRate(double playback_rate) override;
+ void SetVolume(float volume) override;
+ base::TimeDelta GetMediaTime() override;
+
+ // media::mojom::MediaFoundationRendererExtension implementation.
+ void SetDCOMPMode(bool enabled, SetDCOMPModeCallback callback) final;
+ void GetDCOMPSurface(GetDCOMPSurfaceCallback callback) final;
+ void SetVideoStreamEnabled(bool enabled) final;
+ void SetOutputParams(const gfx::Rect& output_rect) final;
+
+ private:
+ void OnReceiveDCOMPSurface(HANDLE handle);
+
+ std::unique_ptr<media::MediaFoundationRenderer> renderer_;
+ mojo::Receiver<MediaFoundationRendererExtension> renderer_extension_receiver_;
+ GetDCOMPSurfaceCallback get_decomp_surface_cb_;
+
+ base::WeakPtrFactory<MediaFoundationRendererWrapper> weak_factory_{this};
+
+ DISALLOW_COPY_AND_ASSIGN(MediaFoundationRendererWrapper);
+};
+
+} // namespace media
+
+#endif // MEDIA_MOJO_SERVICES_MEDIA_FOUNDATION_RENDERER_WRAPPER_H_
diff --git a/chromium/media/mojo/services/media_metrics_provider.cc b/chromium/media/mojo/services/media_metrics_provider.cc
index 3b641080dde..1798f0dd518 100644
--- a/chromium/media/mojo/services/media_metrics_provider.cc
+++ b/chromium/media/mojo/services/media_metrics_provider.cc
@@ -56,6 +56,10 @@ MediaMetricsProvider::MediaMetricsProvider(
uma_info_(is_incognito == BrowsingMode::kIncognito) {}
MediaMetricsProvider::~MediaMetricsProvider() {
+ // These UKM and UMA metrics do not apply to MediaStreams.
+ if (media_stream_type_ != mojom::MediaStreamType::kNone)
+ return;
+
// UKM may be unavailable in content_shell or other non-chrome/ builds; it
// may also be unavailable if browser shutdown has started; so this may be a
// nullptr. If it's unavailable, UKM reporting will be skipped.
@@ -102,10 +106,9 @@ std::string MediaMetricsProvider::GetUMANameForAVStream(
return uma_name + "Other";
#if !defined(OS_ANDROID)
- if (player_info.video_pipeline_info.decoder_name ==
- DecryptingVideoDecoder::kDecoderName) {
+ if (player_info.video_pipeline_info.decoder_type ==
+ VideoDecoderType::kDecrypting)
return uma_name + "DVD";
- }
#endif
if (player_info.video_pipeline_info.has_decrypting_demuxer_stream)
@@ -142,7 +145,8 @@ void MediaMetricsProvider::ReportPipelineUMA() {
// Report whether video decoder fallback happened, but only if a video decoder
// was reported.
- if (!uma_info_.video_pipeline_info.decoder_name.empty()) {
+ if (uma_info_.video_pipeline_info.decoder_type !=
+ VideoDecoderType::kUnknown) {
base::UmaHistogramBoolean("Media.VideoDecoderFallback",
uma_info_.video_decoder_changed);
}
@@ -195,21 +199,22 @@ void MediaMetricsProvider::SetHaveEnough() {
uma_info_.has_reached_have_enough = true;
}
-void MediaMetricsProvider::SetVideoPipelineInfo(
- const PipelineDecoderInfo& info) {
- auto old_name = uma_info_.video_pipeline_info.decoder_name;
- if (!old_name.empty() && old_name != info.decoder_name)
+void MediaMetricsProvider::SetVideoPipelineInfo(const VideoDecoderInfo& info) {
+ auto old_decoder = uma_info_.video_pipeline_info.decoder_type;
+ if (old_decoder != VideoDecoderType::kUnknown &&
+ old_decoder != info.decoder_type)
uma_info_.video_decoder_changed = true;
uma_info_.video_pipeline_info = info;
}
-void MediaMetricsProvider::SetAudioPipelineInfo(
- const PipelineDecoderInfo& info) {
+void MediaMetricsProvider::SetAudioPipelineInfo(const AudioDecoderInfo& info) {
uma_info_.audio_pipeline_info = info;
}
-void MediaMetricsProvider::Initialize(bool is_mse,
- mojom::MediaURLScheme url_scheme) {
+void MediaMetricsProvider::Initialize(
+ bool is_mse,
+ mojom::MediaURLScheme url_scheme,
+ mojom::MediaStreamType media_stream_type) {
if (initialized_) {
mojo::ReportBadMessage(kInvalidInitialize);
return;
@@ -218,6 +223,7 @@ void MediaMetricsProvider::Initialize(bool is_mse,
is_mse_ = is_mse;
initialized_ = true;
url_scheme_ = url_scheme;
+ media_stream_type_ = media_stream_type;
}
void MediaMetricsProvider::OnError(PipelineStatus status) {
diff --git a/chromium/media/mojo/services/media_metrics_provider.h b/chromium/media/mojo/services/media_metrics_provider.h
index ef0ce4b523a..e10715732bd 100644
--- a/chromium/media/mojo/services/media_metrics_provider.h
+++ b/chromium/media/mojo/services/media_metrics_provider.h
@@ -95,15 +95,17 @@ class MEDIA_MOJO_EXPORT MediaMetricsProvider
bool video_decoder_changed = false;
AudioCodec audio_codec;
VideoCodec video_codec;
- PipelineDecoderInfo video_pipeline_info;
- PipelineDecoderInfo audio_pipeline_info;
+ VideoDecoderInfo video_pipeline_info;
+ AudioDecoderInfo audio_pipeline_info;
PipelineStatus last_pipeline_status = PIPELINE_OK;
};
// mojom::MediaMetricsProvider implementation:
- void Initialize(bool is_mse, mojom::MediaURLScheme url_scheme) override;
+ void Initialize(bool is_mse,
+ mojom::MediaURLScheme url_scheme,
+ mojom::MediaStreamType media_stream_type) override;
void OnError(PipelineStatus status) override;
- void SetAudioPipelineInfo(const PipelineDecoderInfo& info) override;
+ void SetAudioPipelineInfo(const AudioDecoderInfo& info) override;
void SetContainerName(
container_names::MediaContainerName container_name) override;
void SetHasAudio(AudioCodec audio_codec) override;
@@ -114,7 +116,7 @@ class MEDIA_MOJO_EXPORT MediaMetricsProvider
void SetTimeToMetadata(base::TimeDelta elapsed) override;
void SetTimeToFirstFrame(base::TimeDelta elapsed) override;
void SetTimeToPlayReady(base::TimeDelta elapsed) override;
- void SetVideoPipelineInfo(const PipelineDecoderInfo& info) override;
+ void SetVideoPipelineInfo(const VideoDecoderInfo& info) override;
void AcquireWatchTimeRecorder(
mojom::PlaybackPropertiesPtr properties,
@@ -152,6 +154,7 @@ class MEDIA_MOJO_EXPORT MediaMetricsProvider
bool initialized_ = false;
bool is_mse_;
mojom::MediaURLScheme url_scheme_;
+ mojom::MediaStreamType media_stream_type_;
base::TimeDelta time_to_metadata_ = kNoTimestamp;
base::TimeDelta time_to_first_frame_ = kNoTimestamp;
diff --git a/chromium/media/mojo/services/media_metrics_provider_unittest.cc b/chromium/media/mojo/services/media_metrics_provider_unittest.cc
index af54242353c..d22d954904e 100644
--- a/chromium/media/mojo/services/media_metrics_provider_unittest.cc
+++ b/chromium/media/mojo/services/media_metrics_provider_unittest.cc
@@ -35,7 +35,9 @@ class MediaMetricsProviderTest : public testing::Test {
bool is_incognito,
bool is_top_frame,
const std::string& origin,
- mojom::MediaURLScheme scheme) {
+ mojom::MediaURLScheme scheme,
+ mojom::MediaStreamType media_stream_type =
+ mojom::MediaStreamType::kNone) {
source_id_ = test_recorder_->GetNewSourceID();
test_recorder_->UpdateSourceURL(source_id_, GURL(origin));
@@ -53,7 +55,7 @@ class MediaMetricsProviderTest : public testing::Test {
&MediaMetricsProviderTest::GetRecordAggregateWatchTimeCallback,
base::Unretained(this)),
provider_.BindNewPipeAndPassReceiver());
- provider_->Initialize(is_mse, scheme);
+ provider_->Initialize(is_mse, scheme, media_stream_type);
}
ukm::SourceId GetSourceId() { return source_id_; }
@@ -154,11 +156,48 @@ TEST_F(MediaMetricsProviderTest, TestUkm) {
}
}
+TEST_F(MediaMetricsProviderTest, TestUkmMediaStream) {
+ Initialize(true, false, true, kTestOrigin, mojom::MediaURLScheme::kMissing,
+ mojom::MediaStreamType::kRemote);
+ provider_.reset();
+ base::RunLoop().RunUntilIdle();
+
+ {
+ const auto& entries =
+ test_recorder_->GetEntriesByName(UkmEntry::kEntryName);
+ EXPECT_EQ(0u, entries.size());
+ }
+
+ // Now try one with different values and optional parameters set.
+ const std::string kTestOrigin2 = "https://test2.google.com/";
+ const base::TimeDelta kMetadataTime = base::TimeDelta::FromSeconds(1);
+ const base::TimeDelta kFirstFrameTime = base::TimeDelta::FromSeconds(2);
+ const base::TimeDelta kPlayReadyTime = base::TimeDelta::FromSeconds(3);
+
+ ResetMetricRecorders();
+ Initialize(false, false, false, kTestOrigin2, mojom::MediaURLScheme::kMissing,
+ mojom::MediaStreamType::kLocalDeviceCapture);
+ provider_->SetIsEME();
+ provider_->SetTimeToMetadata(kMetadataTime);
+ provider_->SetTimeToFirstFrame(kFirstFrameTime);
+ provider_->SetTimeToPlayReady(kPlayReadyTime);
+ provider_->SetContainerName(container_names::CONTAINER_MOV);
+ provider_->OnError(PIPELINE_ERROR_DECODE);
+ provider_.reset();
+ base::RunLoop().RunUntilIdle();
+
+ {
+ const auto& entries =
+ test_recorder_->GetEntriesByName(UkmEntry::kEntryName);
+ EXPECT_EQ(0u, entries.size());
+ }
+}
+
TEST_F(MediaMetricsProviderTest, TestPipelineUMA) {
base::HistogramTester histogram_tester;
Initialize(false, false, false, kTestOrigin, mojom::MediaURLScheme::kHttps);
- provider_->SetAudioPipelineInfo({false, false, "TestAudioDecoder"});
- provider_->SetVideoPipelineInfo({false, false, "TestVideoDecoder"});
+ provider_->SetAudioPipelineInfo({false, false, AudioDecoderType::kMojo});
+ provider_->SetVideoPipelineInfo({false, false, VideoDecoderType::kMojo});
provider_->SetHasAudio(AudioCodec::kCodecVorbis);
provider_->SetHasVideo(VideoCodec::kCodecVP9);
provider_->SetHasPlayed();
@@ -171,11 +210,29 @@ TEST_F(MediaMetricsProviderTest, TestPipelineUMA) {
histogram_tester.ExpectBucketCount("Media.HasEverPlayed", true, 1);
}
+TEST_F(MediaMetricsProviderTest, TestPipelineUMAMediaStream) {
+ base::HistogramTester histogram_tester;
+ Initialize(false, false, false, kTestOrigin, mojom::MediaURLScheme::kHttps,
+ mojom::MediaStreamType::kRemote);
+ provider_->SetAudioPipelineInfo({false, false, AudioDecoderType::kMojo});
+ provider_->SetVideoPipelineInfo({false, false, VideoDecoderType::kMojo});
+ provider_->SetHasAudio(AudioCodec::kCodecVorbis);
+ provider_->SetHasVideo(VideoCodec::kCodecVP9);
+ provider_->SetHasPlayed();
+ provider_->SetHaveEnough();
+ provider_.reset();
+ base::RunLoop().RunUntilIdle();
+ histogram_tester.ExpectBucketCount("Media.PipelineStatus.AudioVideo.VP9.SW",
+ PIPELINE_OK, 0);
+ histogram_tester.ExpectBucketCount("Media.VideoDecoderFallback", false, 0);
+ histogram_tester.ExpectBucketCount("Media.HasEverPlayed", true, 0);
+}
+
TEST_F(MediaMetricsProviderTest, TestPipelineUMANoAudioEMEHW) {
base::HistogramTester histogram_tester;
Initialize(false, false, false, kTestOrigin, mojom::MediaURLScheme::kHttps);
provider_->SetIsEME();
- provider_->SetVideoPipelineInfo({true, true, "TestEMEVideoDecoder"});
+ provider_->SetVideoPipelineInfo({true, true, VideoDecoderType::kMojo});
provider_->SetHasVideo(VideoCodec::kCodecAV1);
provider_->SetHasPlayed();
provider_->SetHaveEnough();
@@ -192,13 +249,13 @@ TEST_F(MediaMetricsProviderTest, TestPipelineUMADecoderFallback) {
base::HistogramTester histogram_tester;
Initialize(false, false, false, kTestOrigin, mojom::MediaURLScheme::kHttps);
provider_->SetIsEME();
- provider_->SetAudioPipelineInfo({false, false, "TestAudioDecoder"});
- provider_->SetVideoPipelineInfo({true, false, "D3D11VideoDecoder"});
+ provider_->SetAudioPipelineInfo({false, false, AudioDecoderType::kMojo});
+ provider_->SetVideoPipelineInfo({true, false, VideoDecoderType::kD3D11});
provider_->SetHasVideo(VideoCodec::kCodecVP9);
provider_->SetHasAudio(AudioCodec::kCodecVorbis);
provider_->SetHasPlayed();
provider_->SetHaveEnough();
- provider_->SetVideoPipelineInfo({true, false, "DXVAVideoDecoder"});
+ provider_->SetVideoPipelineInfo({true, false, VideoDecoderType::kFFmpeg});
provider_.reset();
base::RunLoop().RunUntilIdle();
histogram_tester.ExpectBucketCount("Media.PipelineStatus.AudioVideo.VP9.HW",
diff --git a/chromium/media/mojo/services/media_resource_shim.cc b/chromium/media/mojo/services/media_resource_shim.cc
index 2523f28b52d..6c90819fded 100644
--- a/chromium/media/mojo/services/media_resource_shim.cc
+++ b/chromium/media/mojo/services/media_resource_shim.cc
@@ -13,8 +13,8 @@ namespace media {
MediaResourceShim::MediaResourceShim(
std::vector<mojo::PendingRemote<mojom::DemuxerStream>> streams,
- const base::Closure& demuxer_ready_cb)
- : demuxer_ready_cb_(demuxer_ready_cb), streams_ready_(0) {
+ base::OnceClosure demuxer_ready_cb)
+ : demuxer_ready_cb_(std::move(demuxer_ready_cb)), streams_ready_(0) {
DCHECK(!streams.empty());
DCHECK(demuxer_ready_cb_);
diff --git a/chromium/media/mojo/services/media_resource_shim.h b/chromium/media/mojo/services/media_resource_shim.h
index 796b8f4988e..a042fc50a4e 100644
--- a/chromium/media/mojo/services/media_resource_shim.h
+++ b/chromium/media/mojo/services/media_resource_shim.h
@@ -24,7 +24,7 @@ class MediaResourceShim : public MediaResource {
// initialized. Calling any method before then is an error.
MediaResourceShim(
std::vector<mojo::PendingRemote<mojom::DemuxerStream>> streams,
- const base::Closure& demuxer_ready_cb);
+ base::OnceClosure demuxer_ready_cb);
~MediaResourceShim() override;
// MediaResource interface.
@@ -38,7 +38,7 @@ class MediaResourceShim : public MediaResource {
// Stored copy of the ready callback provided during construction; cleared once
// all streams are ready.
- base::Closure demuxer_ready_cb_;
+ base::OnceClosure demuxer_ready_cb_;
// Container for demuxer stream adapters which interface with the mojo level
// demuxer streams. |streams_ready_| tracks how many streams are ready and is
diff --git a/chromium/media/mojo/services/media_service_factory.cc b/chromium/media/mojo/services/media_service_factory.cc
index af044a25cf7..27ebebc83e6 100644
--- a/chromium/media/mojo/services/media_service_factory.cc
+++ b/chromium/media/mojo/services/media_service_factory.cc
@@ -17,6 +17,10 @@
#include "media/mojo/services/android_mojo_media_client.h" // nogncheck
#endif
+#if defined(OS_WIN)
+#include "media/mojo/services/media_foundation_mojo_media_client.h"
+#endif
+
namespace media {
std::unique_ptr<MediaService> CreateMediaService(
@@ -24,6 +28,11 @@ std::unique_ptr<MediaService> CreateMediaService(
#if defined(OS_ANDROID)
return std::make_unique<MediaService>(
std::make_unique<AndroidMojoMediaClient>(), std::move(receiver));
+#elif defined(OS_WIN)
+ DVLOG(1) << "Create MediaService with MediaFoundationMojoMediaClient";
+ return std::make_unique<MediaService>(
+ std::make_unique<media::MediaFoundationMojoMediaClient>(),
+ std::move(receiver));
#else
NOTREACHED() << "No MediaService implementation available.";
return nullptr;
diff --git a/chromium/media/mojo/services/media_service_unittest.cc b/chromium/media/mojo/services/media_service_unittest.cc
index 893608eb3b2..9f8855a991e 100644
--- a/chromium/media/mojo/services/media_service_unittest.cc
+++ b/chromium/media/mojo/services/media_service_unittest.cc
@@ -71,7 +71,7 @@ class MockRendererClient : public mojom::RendererClient {
MOCK_METHOD2(OnBufferingStateChange,
void(BufferingState state, BufferingStateChangeReason reason));
MOCK_METHOD0(OnEnded, void());
- MOCK_METHOD0(OnError, void());
+ MOCK_METHOD1(OnError, void(const Status& status));
MOCK_METHOD1(OnVideoOpacityChange, void(bool opaque));
MOCK_METHOD1(OnAudioConfigChange, void(const AudioDecoderConfig&));
MOCK_METHOD1(OnVideoConfigChange, void(const VideoDecoderConfig&));
diff --git a/chromium/media/mojo/services/mojo_audio_decoder_service.cc b/chromium/media/mojo/services/mojo_audio_decoder_service.cc
index f73a6f00d2d..589bce46a40 100644
--- a/chromium/media/mojo/services/mojo_audio_decoder_service.cc
+++ b/chromium/media/mojo/services/mojo_audio_decoder_service.cc
@@ -74,8 +74,9 @@ void MojoAudioDecoderService::Initialize(
config, cdm_context,
base::BindOnce(&MojoAudioDecoderService::OnInitialized, weak_this_,
std::move(callback)),
- base::Bind(&MojoAudioDecoderService::OnAudioBufferReady, weak_this_),
- base::Bind(&MojoAudioDecoderService::OnWaiting, weak_this_));
+ base::BindRepeating(&MojoAudioDecoderService::OnAudioBufferReady,
+ weak_this_),
+ base::BindRepeating(&MojoAudioDecoderService::OnWaiting, weak_this_));
}
void MojoAudioDecoderService::SetDataSource(
@@ -109,12 +110,14 @@ void MojoAudioDecoderService::OnInitialized(InitializeCallback callback,
if (!status.is_ok()) {
// Do not call decoder_->NeedsBitstreamConversion() if init failed.
- std::move(callback).Run(std::move(status), false);
+ std::move(callback).Run(std::move(status), false,
+ AudioDecoderType::kUnknown);
return;
}
std::move(callback).Run(std::move(status),
- decoder_->NeedsBitstreamConversion());
+ decoder_->NeedsBitstreamConversion(),
+ decoder_->GetDecoderType());
}
// The following methods are needed so that we can bind them with a weak pointer
@@ -130,8 +133,9 @@ void MojoAudioDecoderService::OnReadDone(DecodeCallback callback,
return;
}
- decoder_->Decode(buffer, base::Bind(&MojoAudioDecoderService::OnDecodeStatus,
- weak_this_, base::Passed(&callback)));
+ decoder_->Decode(buffer,
+ base::BindOnce(&MojoAudioDecoderService::OnDecodeStatus,
+ weak_this_, std::move(callback)));
}
void MojoAudioDecoderService::OnReaderFlushDone(ResetCallback callback) {
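
The base::Bind to base::BindOnce / base::BindRepeating moves throughout this diff make callback arity explicit: a Decode completion runs exactly once and may own move-only state, while OnAudioBufferReady can fire repeatedly. A small sketch of the once/repeating distinction in plain C++; OnceClosure here is a local toy, not base::OnceClosure:

    #include <cassert>
    #include <functional>
    #include <utility>

    // Toy one-shot closure: consumes its state when run, like a once callback.
    class OnceClosure {
     public:
      explicit OnceClosure(std::function<void()> fn) : fn_(std::move(fn)) {}
      void Run() && {  // && forces std::move(cb).Run() at the call site.
        auto fn = std::move(fn_);
        fn_ = nullptr;
        fn();
      }
     private:
      std::function<void()> fn_;
    };

    int main() {
      int events = 0;
      OnceClosure on_decode_done([&] { ++events; });
      std::function<void()> on_buffer_ready = [&] { ++events; };

      std::move(on_decode_done).Run();  // Exactly once.
      on_buffer_ready();
      on_buffer_ready();                // Repeating: fine to call again.
      assert(events == 3);
    }
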
diff --git a/chromium/media/mojo/services/mojo_decryptor_service.cc b/chromium/media/mojo/services/mojo_decryptor_service.cc
index 8af8a7fe06c..d113b425667 100644
--- a/chromium/media/mojo/services/mojo_decryptor_service.cc
+++ b/chromium/media/mojo/services/mojo_decryptor_service.cc
@@ -197,7 +197,6 @@ void MojoDecryptorService::OnDecryptDone(DecryptCallback callback,
DVLOG_IF(3, status == Status::kSuccess) << __func__;
if (!buffer) {
- DCHECK_NE(status, Status::kSuccess);
std::move(callback).Run(status, nullptr);
return;
}
@@ -235,8 +234,8 @@ void MojoDecryptorService::OnAudioRead(DecryptAndDecodeAudioCallback callback,
}
decryptor_->DecryptAndDecodeAudio(
- std::move(buffer), base::Bind(&MojoDecryptorService::OnAudioDecoded,
- weak_this_, base::Passed(&callback)));
+ std::move(buffer), base::BindOnce(&MojoDecryptorService::OnAudioDecoded,
+ weak_this_, std::move(callback)));
}
void MojoDecryptorService::OnVideoRead(DecryptAndDecodeVideoCallback callback,
@@ -247,8 +246,8 @@ void MojoDecryptorService::OnVideoRead(DecryptAndDecodeVideoCallback callback,
}
decryptor_->DecryptAndDecodeVideo(
- std::move(buffer), base::Bind(&MojoDecryptorService::OnVideoDecoded,
- weak_this_, base::Passed(&callback)));
+ std::move(buffer), base::BindOnce(&MojoDecryptorService::OnVideoDecoded,
+ weak_this_, std::move(callback)));
}
void MojoDecryptorService::OnReaderFlushDone(StreamType stream_type) {
diff --git a/chromium/media/mojo/services/mojo_media_client.h b/chromium/media/mojo/services/mojo_media_client.h
index 0888eb397d1..8d0ff976a04 100644
--- a/chromium/media/mojo/services/mojo_media_client.h
+++ b/chromium/media/mojo/services/mojo_media_client.h
@@ -13,12 +13,12 @@
#include "base/memory/ref_counted.h"
#include "base/unguessable_token.h"
#include "media/base/overlay_info.h"
+#include "media/base/supported_video_decoder_config.h"
#include "media/media_buildflags.h"
#include "media/mojo/buildflags.h"
#include "media/mojo/mojom/frame_interface_factory.mojom.h"
#include "media/mojo/mojom/video_decoder.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
-#include "media/video/supported_video_decoder_config.h"
namespace base {
class SingleThreadTaskRunner;
diff --git a/chromium/media/mojo/services/mojo_renderer_service.cc b/chromium/media/mojo/services/mojo_renderer_service.cc
index 4de989317ba..102932c7b5e 100644
--- a/chromium/media/mojo/services/mojo_renderer_service.cc
+++ b/chromium/media/mojo/services/mojo_renderer_service.cc
@@ -18,17 +18,6 @@
namespace media {
-namespace {
-
-void CloseReceiverOnBadMessage(
- mojo::SelfOwnedReceiverRef<mojom::Renderer> receiver) {
- LOG(ERROR) << __func__;
- DCHECK(receiver);
- receiver->Close();
-}
-
-} // namespace
-
// Time interval to update media time.
const int kTimeUpdateIntervalMs = 50;
@@ -44,9 +33,6 @@ mojo::SelfOwnedReceiverRef<mojom::Renderer> MojoRendererService::Create(
mojo::MakeSelfOwnedReceiver<mojom::Renderer>(base::WrapUnique(service),
std::move(receiver));
- service->set_bad_message_cb(
- base::BindRepeating(&CloseReceiverOnBadMessage, self_owned_receiver));
-
return self_owned_receiver;
}
@@ -79,9 +65,10 @@ void MojoRendererService::Initialize(
if (!media_url_params) {
DCHECK(streams.has_value());
- media_resource_.reset(new MediaResourceShim(
- std::move(*streams), base::Bind(&MojoRendererService::OnStreamReady,
- weak_this_, base::Passed(&callback))));
+ media_resource_ = std::make_unique<MediaResourceShim>(
+ std::move(*streams),
+ base::BindOnce(&MojoRendererService::OnAllStreamsReady, weak_this_,
+ std::move(callback)));
return;
}
@@ -166,7 +153,9 @@ void MojoRendererService::SetCdm(
void MojoRendererService::OnError(PipelineStatus error) {
DVLOG(1) << __func__ << "(" << error << ")";
state_ = STATE_ERROR;
- client_->OnError();
+ StatusCode status_code = PipelineStatusToStatusCode(error);
+ auto status = Status(status_code, PipelineStatusToString(error));
+ client_->OnError(status);
}
void MojoRendererService::OnEnded() {
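
OnError now converts the internal PipelineStatus into a Status carrying both a code and a human-readable message before crossing the mojo boundary, matching the new OnError(const Status&) client signature. A standalone sketch of that enrichment step; the names mirror the hunk but the types are simplified stand-ins:

    #include <iostream>
    #include <string>

    enum class PipelineStatus { kOk, kDecodeError, kNetworkError };

    struct Status {
      int code = 0;
      std::string message;
    };

    // Stand-ins for PipelineStatusToStatusCode / PipelineStatusToString.
    int ToStatusCode(PipelineStatus s) { return static_cast<int>(s); }
    std::string ToString(PipelineStatus s) {
      switch (s) {
        case PipelineStatus::kOk: return "PIPELINE_OK";
        case PipelineStatus::kDecodeError: return "PIPELINE_ERROR_DECODE";
        case PipelineStatus::kNetworkError: return "PIPELINE_ERROR_NETWORK";
      }
      return "UNKNOWN";
    }

    void OnError(PipelineStatus error) {
      Status status{ToStatusCode(error), ToString(error)};
      // client_->OnError(status) in the hunk above; print instead.
      std::cout << status.code << ": " << status.message << "\n";
    }

    int main() { OnError(PipelineStatus::kDecodeError); }
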
@@ -219,7 +208,7 @@ void MojoRendererService::OnVideoFrameRateChange(base::Optional<int> fps) {
// TODO(liberato): plumb to |client_|.
}
-void MojoRendererService::OnStreamReady(
+void MojoRendererService::OnAllStreamsReady(
base::OnceCallback<void(bool)> callback) {
DCHECK_EQ(state_, STATE_INITIALIZING);
@@ -272,7 +261,8 @@ void MojoRendererService::SchedulePeriodicMediaTimeUpdates() {
UpdateMediaTime(true);
time_update_timer_.Start(
FROM_HERE, base::TimeDelta::FromMilliseconds(kTimeUpdateIntervalMs),
- base::Bind(&MojoRendererService::UpdateMediaTime, weak_this_, false));
+ base::BindRepeating(&MojoRendererService::UpdateMediaTime, weak_this_,
+ false));
}
void MojoRendererService::OnFlushCompleted(FlushCallback callback) {
diff --git a/chromium/media/mojo/services/mojo_renderer_service.h b/chromium/media/mojo/services/mojo_renderer_service.h
index 6bff14670e6..08953954f11 100644
--- a/chromium/media/mojo/services/mojo_renderer_service.h
+++ b/chromium/media/mojo/services/mojo_renderer_service.h
@@ -67,12 +67,6 @@ class MEDIA_MOJO_EXPORT MojoRendererService final : public mojom::Renderer,
void SetCdm(const base::Optional<base::UnguessableToken>& cdm_id,
SetCdmCallback callback) final;
- // TODO(tguilbert): Get rid of |bad_message_cb_|, now that it's no longer
- // needed.
- void set_bad_message_cb(base::Closure bad_message_cb) {
- bad_message_cb_ = bad_message_cb;
- }
-
private:
enum State {
STATE_UNINITIALIZED,
@@ -97,7 +91,7 @@ class MEDIA_MOJO_EXPORT MojoRendererService final : public mojom::Renderer,
// Called when the MediaResourceShim is ready to go (has a config,
// pipe handle, etc) and can be handed off to a renderer for use.
- void OnStreamReady(base::OnceCallback<void(bool)> callback);
+ void OnAllStreamsReady(base::OnceCallback<void(bool)> callback);
// Called when |audio_renderer_| initialization has completed.
void OnRendererInitializeDone(base::OnceCallback<void(bool)> callback,
@@ -138,11 +132,6 @@ class MEDIA_MOJO_EXPORT MojoRendererService final : public mojom::Renderer,
// Must use "media::" because "Renderer" is ambiguous.
std::unique_ptr<media::Renderer> renderer_;
- // Callback to be called when an invalid or unexpected message is received.
- // TODO(tguilbert): Revisit how to do InitiateScopedSurfaceRequest() so that
- // we can eliminate this callback. See http://crbug.com/669606
- base::Closure bad_message_cb_;
-
base::WeakPtr<MojoRendererService> weak_this_;
base::WeakPtrFactory<MojoRendererService> weak_factory_{this};
diff --git a/chromium/media/mojo/services/mojo_video_decoder_service.cc b/chromium/media/mojo/services/mojo_video_decoder_service.cc
index 15a1f7249a5..450d27ecd64 100644
--- a/chromium/media/mojo/services/mojo_video_decoder_service.cc
+++ b/chromium/media/mojo/services/mojo_video_decoder_service.cc
@@ -280,9 +280,13 @@ void MojoVideoDecoderService::OnDecoderInitialized(Status status) {
TRACE_EVENT_ASYNC_END1("media", kInitializeTraceName, this, "success",
status.code());
- std::move(init_cb_).Run(
- status, status.is_ok() ? decoder_->NeedsBitstreamConversion() : false,
- status.is_ok() ? decoder_->GetMaxDecodeRequests() : 1);
+ if (!status.is_ok()) {
+ std::move(init_cb_).Run(status, false, 1, VideoDecoderType::kUnknown);
+ return;
+ }
+ std::move(init_cb_).Run(status, decoder_->NeedsBitstreamConversion(),
+ decoder_->GetMaxDecodeRequests(),
+ decoder_->GetDecoderType());
}
void MojoVideoDecoderService::OnReaderRead(
@@ -339,12 +343,12 @@ void MojoVideoDecoderService::OnDecoderOutput(scoped_refptr<VideoFrame> frame) {
DCHECK(client_);
DCHECK(decoder_);
TRACE_EVENT1("media", "MojoVideoDecoderService::OnDecoderOutput",
- "video_frame", frame->AsHumanReadableString())
+ "video_frame", frame->AsHumanReadableString());
// All MojoVideoDecoder-based decoders are hardware decoders. If you're the
// first to implement an out-of-process decoder that is not power efficient,
// you can remove this DCHECK.
- DCHECK(frame->metadata()->power_efficient);
+ DCHECK(frame->metadata().power_efficient);
base::Optional<base::UnguessableToken> release_token;
if (frame->HasReleaseMailboxCB() && video_frame_handle_releaser_) {
diff --git a/chromium/media/mojo/services/watch_time_recorder.cc b/chromium/media/mojo/services/watch_time_recorder.cc
index 4b7935640b8..6cb630ed5d0 100644
--- a/chromium/media/mojo/services/watch_time_recorder.cc
+++ b/chromium/media/mojo/services/watch_time_recorder.cc
@@ -26,84 +26,6 @@ namespace media {
constexpr base::TimeDelta kMinimumElapsedWatchTime =
base::TimeDelta::FromSeconds(limits::kMinimumElapsedWatchTimeSecs);
-// List of known AudioDecoder implementations; recorded to UKM, always add new
-// values to the end and do not reorder or delete values from this list.
-enum class AudioDecoderName : int {
- kUnknown = 0, // Decoder name string is not recognized or n/a.
- kFFmpeg = 1, // FFmpegAudioDecoder
- kMojo = 2, // MojoAudioDecoder
- kDecrypting = 3, // DecryptingAudioDecoder
- kMediaPlayer = 4, // MediaPlayer
-};
-
-// List of known VideoDecoder implementations; recorded to UKM, always add new
-// values to the end and do not reorder or delete values from this list.
-enum class VideoDecoderName : int {
- kUnknown = 0, // Decoder name string is not recognized or n/a.
- kGpu = 1, // GpuVideoDecoder
- kFFmpeg = 2, // FFmpegVideoDecoder
- kVpx = 3, // VpxVideoDecoder
- kAom = 4, // AomVideoDecoder
- kMojo = 5, // MojoVideoDecoder
- kDecrypting = 6, // DecryptingVideoDecoder
- kDav1d = 7, // Dav1dVideoDecoder
- kFuchsia = 8, // FuchsiaVideoDecoder
- kMediaPlayer = 9, // MediaPlayer
- kLibgav1 = 10, // Gav1VideoDecoder
-};
-
-static AudioDecoderName ConvertAudioDecoderNameToEnum(const std::string& name) {
- // See the unittest DISABLED_PrintExpectedDecoderNameHashes() for how these
- // values are computed.
- switch (base::PersistentHash(name)) {
- case 0xd39e0c2d:
- return AudioDecoderName::kFFmpeg;
- case 0xdaceafdb:
- return AudioDecoderName::kMojo;
- case 0xd39a2eda:
- return AudioDecoderName::kDecrypting;
- case 0x667dc202:
- return AudioDecoderName::kMediaPlayer;
- default:
- DLOG_IF(WARNING, !name.empty())
- << "Unknown decoder name encountered; metrics need updating: "
- << name;
- }
- return AudioDecoderName::kUnknown;
-}
-
-static VideoDecoderName ConvertVideoDecoderNameToEnum(const std::string& name) {
- // See the unittest DISABLED_PrintExpectedDecoderNameHashes() for how these
- // values are computed.
- switch (base::PersistentHash(name)) {
- case 0xacdee563:
- return VideoDecoderName::kFFmpeg;
- case 0x943f016f:
- return VideoDecoderName::kMojo;
- case 0xf66241b8:
- return VideoDecoderName::kGpu;
- case 0xb3802adb:
- return VideoDecoderName::kVpx;
- case 0xcff23b85:
- return VideoDecoderName::kAom;
- case 0xb52d52f5:
- return VideoDecoderName::kDecrypting;
- case 0xcd46efa0:
- return VideoDecoderName::kDav1d;
- case 0x27b31c6a:
- return VideoDecoderName::kFuchsia;
- case 0x667dc202:
- return VideoDecoderName::kMediaPlayer;
- case 0x0cd14d5b:
- return VideoDecoderName::kLibgav1;
- default:
- DLOG_IF(WARNING, !name.empty())
- << "Unknown decoder name encountered; metrics need updating: "
- << name;
- }
- return VideoDecoderName::kUnknown;
-}
-
static void RecordWatchTimeInternal(
base::StringPiece key,
base::TimeDelta value,
@@ -200,7 +122,7 @@ void WatchTimeRecorder::FinalizeWatchTime(
// watch time requirement. Otherwise, for SRC/MSE/EME keys, log them to the
// discard metric.
base::StringPiece key_str = ConvertWatchTimeKeyToStringForUma(kv.first);
- if (!key_str.empty()) {
+ if (ShouldRecordUma() && !key_str.empty()) {
if (kv.second >= kMinimumElapsedWatchTime) {
RecordWatchTimeInternal(key_str, kv.second);
} else if (kv.second > base::TimeDelta()) {
@@ -229,7 +151,8 @@ void WatchTimeRecorder::FinalizeWatchTime(
// Check for watch times entries that have corresponding MTBR entries and
// report the MTBR value using watch_time / |underflow_count|. Do this only
// for foreground reporters since we only have UMA keys for foreground.
- if (!properties_->is_background && !properties_->is_muted) {
+ if (ShouldRecordUma() && !properties_->is_background &&
+ !properties_->is_muted) {
for (auto& mapping : extended_metrics_keys_) {
auto it = watch_time_info_.find(mapping.watch_time_key);
if (it == watch_time_info_.end() || it->second < kMinimumElapsedWatchTime)
@@ -284,8 +207,10 @@ void WatchTimeRecorder::UpdateSecondaryProperties(
AudioCodecProfile::kUnknown ||
last_record.secondary_properties->video_codec_profile ==
VIDEO_CODEC_PROFILE_UNKNOWN ||
- last_record.secondary_properties->audio_decoder_name.empty() ||
- last_record.secondary_properties->video_decoder_name.empty()) {
+ last_record.secondary_properties->audio_decoder ==
+ AudioDecoderType::kUnknown ||
+ last_record.secondary_properties->video_decoder ==
+ VideoDecoderType::kUnknown) {
auto temp_props = last_record.secondary_properties.Clone();
if (last_record.secondary_properties->audio_codec == kUnknownAudioCodec)
temp_props->audio_codec = secondary_properties->audio_codec;
@@ -301,13 +226,13 @@ void WatchTimeRecorder::UpdateSecondaryProperties(
temp_props->video_codec_profile =
secondary_properties->video_codec_profile;
}
- if (last_record.secondary_properties->audio_decoder_name.empty()) {
- temp_props->audio_decoder_name =
- secondary_properties->audio_decoder_name;
+ if (last_record.secondary_properties->audio_decoder ==
+ AudioDecoderType::kUnknown) {
+ temp_props->audio_decoder = secondary_properties->audio_decoder;
}
- if (last_record.secondary_properties->video_decoder_name.empty()) {
- temp_props->video_decoder_name =
- secondary_properties->video_decoder_name;
+ if (last_record.secondary_properties->video_decoder ==
+ VideoDecoderType::kUnknown) {
+ temp_props->video_decoder = secondary_properties->video_decoder;
}
if (temp_props->Equals(*secondary_properties)) {
last_record.secondary_properties = std::move(temp_props);
@@ -512,30 +437,18 @@ void WatchTimeRecorder::RecordUkmPlaybackData() {
if (ukm_record.secondary_properties->audio_codec == kCodecAAC)
aac_profiles.insert(ukm_record.secondary_properties->audio_codec_profile);
- // We convert decoder names to a hash and then translate that hash to a zero
- // valued enum to avoid burdening the rest of the decoder code base. This
- // was the simplest and most effective solution for the following reasons:
- //
- // - We can't report hashes to UKM since the privacy team worries they may
- // end up as hashes of user data.
- // - Given that decoders are defined and implemented all over the code base
- // it's unwieldly to have a single location which defines all decoder
- // names.
- // - Due to the above, no single media/ location has access to all names.
- //
builder.SetAudioDecoderName(
- static_cast<int64_t>(ConvertAudioDecoderNameToEnum(
- ukm_record.secondary_properties->audio_decoder_name)));
+ static_cast<int64_t>(ukm_record.secondary_properties->audio_decoder));
builder.SetVideoDecoderName(
- static_cast<int64_t>(ConvertVideoDecoderNameToEnum(
- ukm_record.secondary_properties->video_decoder_name)));
-
+ static_cast<int64_t>(ukm_record.secondary_properties->video_decoder));
builder.SetAudioEncryptionScheme(static_cast<int64_t>(
ukm_record.secondary_properties->audio_encryption_scheme));
builder.SetVideoEncryptionScheme(static_cast<int64_t>(
ukm_record.secondary_properties->video_encryption_scheme));
builder.SetIsEME(properties_->is_eme);
builder.SetIsMSE(properties_->is_mse);
+ builder.SetMediaStreamType(
+ static_cast<int64_t>(properties_->media_stream_type));
builder.SetLastPipelineStatus(pipeline_status_);
builder.SetRebuffersCount(ukm_record.total_underflow_count);
builder.SetCompletedRebuffersCount(
@@ -552,7 +465,7 @@ void WatchTimeRecorder::RecordUkmPlaybackData() {
builder.Record(ukm_recorder);
}
- if (!aac_profiles.empty()) {
+ if (ShouldRecordUma() && !aac_profiles.empty()) {
for (auto profile : aac_profiles)
base::UmaHistogramEnumeration("Media.AudioCodecProfile.AAC", profile);
}
@@ -566,6 +479,10 @@ void WatchTimeRecorder::RecordUkmPlaybackData() {
ukm_records_.clear();
}
+bool WatchTimeRecorder::ShouldRecordUma() const {
+ return properties_->media_stream_type == mojom::MediaStreamType::kNone;
+}
+
WatchTimeRecorder::ExtendedMetricsKeyMap::ExtendedMetricsKeyMap(
const ExtendedMetricsKeyMap& copy)
: ExtendedMetricsKeyMap(copy.watch_time_key,
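
The ShouldRecordUma() guard threaded through the hunks above means capture and remote playbacks (any media_stream_type other than mojom::MediaStreamType::kNone) keep their UKM reporting but skip all UMA histograms. A minimal standalone sketch of the gate, with the mojo-generated enum reduced to a stand-in:

```cpp
#include <cstdio>

// Stand-in for mojom::MediaStreamType; the real enum is mojo-generated.
enum class MediaStreamType { kNone, kLocalDeviceCapture, kRemote };

// Mirrors WatchTimeRecorder::ShouldRecordUma(): UMA is recorded only for
// regular element playback, never for MediaStream-backed playback.
bool ShouldRecordUma(MediaStreamType type) {
  return type == MediaStreamType::kNone;
}

int main() {
  std::printf("kNone -> %d\n", ShouldRecordUma(MediaStreamType::kNone));
  std::printf("capture -> %d\n",
              ShouldRecordUma(MediaStreamType::kLocalDeviceCapture));
}
```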
diff --git a/chromium/media/mojo/services/watch_time_recorder.h b/chromium/media/mojo/services/watch_time_recorder.h
index 837bc114211..3b373f3b19a 100644
--- a/chromium/media/mojo/services/watch_time_recorder.h
+++ b/chromium/media/mojo/services/watch_time_recorder.h
@@ -58,6 +58,7 @@ class MEDIA_MOJO_EXPORT WatchTimeRecorder : public mojom::WatchTimeRecorder {
// with a complete finalize (destruction or empty FinalizeWatchTime call).
// Clears |aggregate_watch_time_info_| upon completion.
void RecordUkmPlaybackData();
+ bool ShouldRecordUma() const;
const mojom::PlaybackPropertiesPtr properties_;
diff --git a/chromium/media/mojo/services/watch_time_recorder_unittest.cc b/chromium/media/mojo/services/watch_time_recorder_unittest.cc
index bf1e96c8cbe..82597c9dd1d 100644
--- a/chromium/media/mojo/services/watch_time_recorder_unittest.cc
+++ b/chromium/media/mojo/services/watch_time_recorder_unittest.cc
@@ -83,7 +83,8 @@ class WatchTimeRecorderTest : public testing::Test {
void Initialize(mojom::PlaybackPropertiesPtr properties) {
provider_->Initialize(properties->is_mse,
properties->is_mse ? mojom::MediaURLScheme::kUnknown
- : mojom::MediaURLScheme::kHttp);
+ : mojom::MediaURLScheme::kHttp,
+ properties->media_stream_type);
provider_->AcquireWatchTimeRecorder(std::move(properties),
wtr_.BindNewPipeAndPassReceiver());
}
@@ -91,9 +92,12 @@ class WatchTimeRecorderTest : public testing::Test {
void Initialize(bool has_audio,
bool has_video,
bool is_mse,
- bool is_encrypted) {
- Initialize(mojom::PlaybackProperties::New(
- has_audio, has_video, false, false, is_mse, is_encrypted, false));
+ bool is_encrypted,
+ mojom::MediaStreamType media_stream_type =
+ mojom::MediaStreamType::kNone) {
+ Initialize(mojom::PlaybackProperties::New(has_audio, has_video, false,
+ false, is_mse, is_encrypted,
+ false, media_stream_type));
}
void ExpectWatchTime(const std::vector<base::StringPiece>& keys,
@@ -177,7 +181,8 @@ class WatchTimeRecorderTest : public testing::Test {
mojom::SecondaryPlaybackPropertiesPtr CreateSecondaryProperties() {
return mojom::SecondaryPlaybackProperties::New(
kCodecAAC, kCodecH264, AudioCodecProfile::kUnknown, H264PROFILE_MAIN,
- "", "", EncryptionScheme::kUnencrypted, EncryptionScheme::kUnencrypted,
+ AudioDecoderType::kUnknown, VideoDecoderType::kUnknown,
+ EncryptionScheme::kUnencrypted, EncryptionScheme::kUnencrypted,
gfx::Size(800, 600));
}
@@ -362,6 +367,157 @@ TEST_F(WatchTimeRecorderTest, TestBasicReporting) {
}
}
+TEST_F(WatchTimeRecorderTest, TestBasicReportingMediaStream) {
+ constexpr base::TimeDelta kWatchTime1 = base::TimeDelta::FromSeconds(25);
+ constexpr base::TimeDelta kWatchTime2 = base::TimeDelta::FromSeconds(50);
+
+ for (int i = 0; i <= static_cast<int>(WatchTimeKey::kWatchTimeKeyMax); ++i) {
+ const WatchTimeKey key = static_cast<WatchTimeKey>(i);
+
+ auto key_str = ConvertWatchTimeKeyToStringForUma(key);
+ SCOPED_TRACE(key_str.empty() ? base::NumberToString(i)
+ : key_str.as_string());
+
+ // Values for |is_background| and |is_muted| don't matter in this test since
+ // they don't prevent the muted or background keys from being recorded.
+ Initialize(true, false, true, true,
+ mojom::MediaStreamType::kLocalDeviceCapture);
+ wtr_->UpdateSecondaryProperties(CreateSecondaryProperties());
+
+ wtr_->RecordWatchTime(WatchTimeKey::kWatchTimeKeyMax, kWatchTime1);
+ wtr_->RecordWatchTime(key, kWatchTime1);
+ wtr_->RecordWatchTime(key, kWatchTime2);
+ base::RunLoop().RunUntilIdle();
+
+ // Nothing should be recorded yet since we haven't finalized.
+ ExpectWatchTime({}, base::TimeDelta());
+
+ // Only the requested key should be finalized.
+ wtr_->FinalizeWatchTime({key});
+ base::RunLoop().RunUntilIdle();
+
+ if (!key_str.empty())
+ ExpectWatchTime({}, base::TimeDelta());
+
+ ExpectMtbrTime({}, base::TimeDelta());
+ ExpectZeroRebuffers({});
+ ExpectNoUkmWatchTime();
+
+ ResetMetricRecorders();
+ wtr_.reset();
+ base::RunLoop().RunUntilIdle();
+ ExpectWatchTime({}, base::TimeDelta());
+ ExpectMtbrTime({}, base::TimeDelta());
+ ExpectZeroRebuffers({});
+
+ // UKM watch time should be recorded even with no UMA.
+ switch (key) {
+ case WatchTimeKey::kAudioAll:
+ case WatchTimeKey::kAudioBackgroundAll:
+ case WatchTimeKey::kAudioVideoAll:
+ case WatchTimeKey::kAudioVideoBackgroundAll:
+ case WatchTimeKey::kAudioVideoMutedAll:
+ case WatchTimeKey::kVideoAll:
+ case WatchTimeKey::kVideoBackgroundAll:
+ ExpectUkmWatchTime({UkmEntry::kWatchTimeName}, kWatchTime2);
+ break;
+
+      // These keys are not reported; instead we report boolean flags for each type.
+ case WatchTimeKey::kAudioMse:
+ case WatchTimeKey::kAudioEme:
+ case WatchTimeKey::kAudioSrc:
+ case WatchTimeKey::kAudioEmbeddedExperience:
+ case WatchTimeKey::kAudioBackgroundMse:
+ case WatchTimeKey::kAudioBackgroundEme:
+ case WatchTimeKey::kAudioBackgroundSrc:
+ case WatchTimeKey::kAudioBackgroundEmbeddedExperience:
+ case WatchTimeKey::kAudioVideoMse:
+ case WatchTimeKey::kAudioVideoEme:
+ case WatchTimeKey::kAudioVideoSrc:
+ case WatchTimeKey::kAudioVideoEmbeddedExperience:
+ case WatchTimeKey::kAudioVideoMutedMse:
+ case WatchTimeKey::kAudioVideoMutedEme:
+ case WatchTimeKey::kAudioVideoMutedSrc:
+ case WatchTimeKey::kAudioVideoMutedEmbeddedExperience:
+ case WatchTimeKey::kAudioVideoBackgroundMse:
+ case WatchTimeKey::kAudioVideoBackgroundEme:
+ case WatchTimeKey::kAudioVideoBackgroundSrc:
+ case WatchTimeKey::kAudioVideoBackgroundEmbeddedExperience:
+ case WatchTimeKey::kVideoMse:
+ case WatchTimeKey::kVideoEme:
+ case WatchTimeKey::kVideoSrc:
+ case WatchTimeKey::kVideoEmbeddedExperience:
+ case WatchTimeKey::kVideoBackgroundMse:
+ case WatchTimeKey::kVideoBackgroundEme:
+ case WatchTimeKey::kVideoBackgroundSrc:
+ case WatchTimeKey::kVideoBackgroundEmbeddedExperience:
+ ExpectUkmWatchTime({}, base::TimeDelta());
+ break;
+
+ // These keys roll up into the battery watch time field.
+ case WatchTimeKey::kAudioBattery:
+ case WatchTimeKey::kAudioBackgroundBattery:
+ case WatchTimeKey::kAudioVideoBattery:
+ case WatchTimeKey::kAudioVideoMutedBattery:
+ case WatchTimeKey::kAudioVideoBackgroundBattery:
+ case WatchTimeKey::kVideoBattery:
+ case WatchTimeKey::kVideoBackgroundBattery:
+ ExpectUkmWatchTime({UkmEntry::kWatchTime_BatteryName}, kWatchTime2);
+ break;
+
+ // These keys roll up into the AC watch time field.
+ case WatchTimeKey::kAudioAc:
+ case WatchTimeKey::kAudioBackgroundAc:
+ case WatchTimeKey::kAudioVideoAc:
+ case WatchTimeKey::kAudioVideoBackgroundAc:
+ case WatchTimeKey::kAudioVideoMutedAc:
+ case WatchTimeKey::kVideoAc:
+ case WatchTimeKey::kVideoBackgroundAc:
+ ExpectUkmWatchTime({UkmEntry::kWatchTime_ACName}, kWatchTime2);
+ break;
+
+ case WatchTimeKey::kAudioVideoDisplayFullscreen:
+ case WatchTimeKey::kAudioVideoMutedDisplayFullscreen:
+ case WatchTimeKey::kVideoDisplayFullscreen:
+ ExpectUkmWatchTime({UkmEntry::kWatchTime_DisplayFullscreenName},
+ kWatchTime2);
+ break;
+
+ case WatchTimeKey::kAudioVideoDisplayInline:
+ case WatchTimeKey::kAudioVideoMutedDisplayInline:
+ case WatchTimeKey::kVideoDisplayInline:
+ ExpectUkmWatchTime({UkmEntry::kWatchTime_DisplayInlineName},
+ kWatchTime2);
+ break;
+
+ case WatchTimeKey::kAudioVideoDisplayPictureInPicture:
+ case WatchTimeKey::kAudioVideoMutedDisplayPictureInPicture:
+ case WatchTimeKey::kVideoDisplayPictureInPicture:
+ ExpectUkmWatchTime({UkmEntry::kWatchTime_DisplayPictureInPictureName},
+ kWatchTime2);
+ break;
+
+ case WatchTimeKey::kAudioNativeControlsOn:
+ case WatchTimeKey::kAudioVideoNativeControlsOn:
+ case WatchTimeKey::kAudioVideoMutedNativeControlsOn:
+ case WatchTimeKey::kVideoNativeControlsOn:
+ ExpectUkmWatchTime({UkmEntry::kWatchTime_NativeControlsOnName},
+ kWatchTime2);
+ break;
+
+ case WatchTimeKey::kAudioNativeControlsOff:
+ case WatchTimeKey::kAudioVideoNativeControlsOff:
+ case WatchTimeKey::kAudioVideoMutedNativeControlsOff:
+ case WatchTimeKey::kVideoNativeControlsOff:
+ ExpectUkmWatchTime({UkmEntry::kWatchTime_NativeControlsOffName},
+ kWatchTime2);
+ break;
+ }
+
+ ResetMetricRecorders();
+ }
+}
+
TEST_F(WatchTimeRecorderTest, TestRebufferingMetrics) {
Initialize(true, false, true, true);
@@ -405,6 +561,34 @@ TEST_F(WatchTimeRecorderTest, TestRebufferingMetrics) {
histogram_tester_->ExpectTotalCount(key.as_string(), 0);
}
+TEST_F(WatchTimeRecorderTest, TestRebufferingMetricsMediaStream) {
+ Initialize(true, false, true, true,
+ mojom::MediaStreamType::kLocalDeviceCapture);
+
+ constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(50);
+ for (auto key : computation_keys_)
+ wtr_->RecordWatchTime(key, kWatchTime);
+ wtr_->UpdateUnderflowCount(1);
+ wtr_->UpdateUnderflowCount(2);
+
+ // Trigger finalization of everything.
+ wtr_->FinalizeWatchTime({});
+ base::RunLoop().RunUntilIdle();
+
+ ExpectMtbrTime({}, base::TimeDelta());
+ ExpectRebuffers({}, 0);
+
+ // Now rerun the test without any rebuffering.
+ ResetMetricRecorders();
+ for (auto key : computation_keys_)
+ wtr_->RecordWatchTime(key, kWatchTime);
+ wtr_->FinalizeWatchTime({});
+ base::RunLoop().RunUntilIdle();
+
+ ExpectMtbrTime({}, base::TimeDelta());
+ ExpectRebuffers({}, 0);
+}
+
TEST_F(WatchTimeRecorderTest, TestDiscardMetrics) {
Initialize(true, false, true, true);
wtr_->UpdateSecondaryProperties(CreateSecondaryProperties());
@@ -431,6 +615,29 @@ TEST_F(WatchTimeRecorderTest, TestDiscardMetrics) {
ExpectUkmWatchTime({}, base::TimeDelta());
}
+TEST_F(WatchTimeRecorderTest, TestDiscardMetricsMediaStream) {
+ Initialize(true, false, true, true,
+ mojom::MediaStreamType::kLocalDeviceCapture);
+ wtr_->UpdateSecondaryProperties(CreateSecondaryProperties());
+
+ constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(5);
+ for (auto key : computation_keys_)
+ wtr_->RecordWatchTime(key, kWatchTime);
+
+ // Trigger finalization of everything.
+ wtr_.reset();
+ base::RunLoop().RunUntilIdle();
+
+ // No watch time and no discard metrics should be logged.
+ ExpectWatchTime({}, base::TimeDelta());
+ for (auto key : discard_keys_) {
+ histogram_tester_->ExpectTotalCount(key.as_string(), 0);
+ }
+
+ // UKM watch time won't be logged because we aren't sending "All" keys.
+ ExpectUkmWatchTime({}, base::TimeDelta());
+}
+
#define EXPECT_UKM(name, value) \
test_recorder_->ExpectEntryMetric(entry, name, value)
#define EXPECT_NO_UKM(name) \
@@ -439,8 +646,9 @@ TEST_F(WatchTimeRecorderTest, TestDiscardMetrics) {
EXPECT_TRUE(test_recorder_->EntryHasMetric(entry, name));
TEST_F(WatchTimeRecorderTest, TestFinalizeNoDuplication) {
- mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
- true, true, false, false, false, false, false);
+ mojom::PlaybackPropertiesPtr properties =
+ mojom::PlaybackProperties::New(true, true, false, false, false, false,
+ false, mojom::MediaStreamType::kNone);
mojom::SecondaryPlaybackPropertiesPtr secondary_properties =
CreateSecondaryProperties();
Initialize(properties.Clone());
@@ -493,6 +701,8 @@ TEST_F(WatchTimeRecorderTest, TestFinalizeNoDuplication) {
static_cast<int64_t>(secondary_properties->video_encryption_scheme));
EXPECT_UKM(UkmEntry::kIsEMEName, properties->is_eme);
EXPECT_UKM(UkmEntry::kIsMSEName, properties->is_mse);
+ EXPECT_UKM(UkmEntry::kMediaStreamTypeName,
+ static_cast<int64_t>(properties->media_stream_type));
EXPECT_UKM(UkmEntry::kLastPipelineStatusName, PIPELINE_OK);
EXPECT_UKM(UkmEntry::kRebuffersCountName, 0);
EXPECT_UKM(UkmEntry::kCompletedRebuffersCountName, 0);
@@ -519,8 +729,9 @@ TEST_F(WatchTimeRecorderTest, TestFinalizeNoDuplication) {
}
TEST_F(WatchTimeRecorderTest, FinalizeWithoutWatchTime) {
- mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
- true, true, false, false, false, false, false);
+ mojom::PlaybackPropertiesPtr properties =
+ mojom::PlaybackProperties::New(true, true, false, false, false, false,
+ false, mojom::MediaStreamType::kNone);
mojom::SecondaryPlaybackPropertiesPtr secondary_properties =
CreateSecondaryProperties();
Initialize(properties.Clone());
@@ -570,6 +781,8 @@ TEST_F(WatchTimeRecorderTest, FinalizeWithoutWatchTime) {
static_cast<int64_t>(secondary_properties->video_encryption_scheme));
EXPECT_UKM(UkmEntry::kIsEMEName, properties->is_eme);
EXPECT_UKM(UkmEntry::kIsMSEName, properties->is_mse);
+ EXPECT_UKM(UkmEntry::kMediaStreamTypeName,
+ static_cast<int64_t>(properties->media_stream_type));
EXPECT_UKM(UkmEntry::kLastPipelineStatusName, PIPELINE_OK);
EXPECT_UKM(UkmEntry::kRebuffersCountName, 0);
EXPECT_UKM(UkmEntry::kCompletedRebuffersCountName, 0);
@@ -596,12 +809,14 @@ TEST_F(WatchTimeRecorderTest, FinalizeWithoutWatchTime) {
}
TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideo) {
- mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
- true, true, false, false, false, false, false);
+ mojom::PlaybackPropertiesPtr properties =
+ mojom::PlaybackProperties::New(true, true, false, false, false, false,
+ false, mojom::MediaStreamType::kNone);
mojom::SecondaryPlaybackPropertiesPtr secondary_properties =
mojom::SecondaryPlaybackProperties::New(
kCodecAAC, kCodecH264, AudioCodecProfile::kXHE_AAC, H264PROFILE_MAIN,
- "", "", EncryptionScheme::kCenc, EncryptionScheme::kCbcs,
+ AudioDecoderType::kUnknown, VideoDecoderType::kUnknown,
+ EncryptionScheme::kCenc, EncryptionScheme::kCbcs,
gfx::Size(800, 600));
Initialize(properties.Clone());
wtr_->UpdateSecondaryProperties(secondary_properties.Clone());
@@ -638,6 +853,8 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideo) {
static_cast<int64_t>(secondary_properties->video_encryption_scheme));
EXPECT_UKM(UkmEntry::kIsEMEName, properties->is_eme);
EXPECT_UKM(UkmEntry::kIsMSEName, properties->is_mse);
+ EXPECT_UKM(UkmEntry::kMediaStreamTypeName,
+ static_cast<int64_t>(properties->media_stream_type));
EXPECT_UKM(UkmEntry::kLastPipelineStatusName, PIPELINE_OK);
EXPECT_UKM(UkmEntry::kRebuffersCountName, 0);
EXPECT_UKM(UkmEntry::kCompletedRebuffersCountName, 0);
@@ -663,12 +880,14 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideo) {
}
TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoWithExtras) {
- mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
- true, true, false, false, true, true, false);
+ mojom::PlaybackPropertiesPtr properties =
+ mojom::PlaybackProperties::New(true, true, false, false, true, true,
+ false, mojom::MediaStreamType::kNone);
mojom::SecondaryPlaybackPropertiesPtr secondary_properties =
mojom::SecondaryPlaybackProperties::New(
kCodecOpus, kCodecVP9, AudioCodecProfile::kUnknown,
- VP9PROFILE_PROFILE0, "", "", EncryptionScheme::kUnencrypted,
+ VP9PROFILE_PROFILE0, AudioDecoderType::kUnknown,
+ VideoDecoderType::kUnknown, EncryptionScheme::kUnencrypted,
EncryptionScheme::kUnencrypted, gfx::Size(800, 600));
Initialize(properties.Clone());
wtr_->UpdateSecondaryProperties(secondary_properties.Clone());
@@ -699,8 +918,8 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoWithExtras) {
wtr_->UpdateVideoDecodeStats(10, 2);
wtr_->OnError(PIPELINE_ERROR_DECODE);
- secondary_properties->audio_decoder_name = "MojoAudioDecoder";
- secondary_properties->video_decoder_name = "MojoVideoDecoder";
+ secondary_properties->audio_decoder = AudioDecoderType::kMojo;
+ secondary_properties->video_decoder = VideoDecoderType::kMojo;
wtr_->UpdateSecondaryProperties(secondary_properties.Clone());
wtr_->SetAutoplayInitiated(true);
@@ -756,6 +975,8 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoWithExtras) {
static_cast<int64_t>(secondary_properties->video_encryption_scheme));
EXPECT_UKM(UkmEntry::kIsEMEName, properties->is_eme);
EXPECT_UKM(UkmEntry::kIsMSEName, properties->is_mse);
+ EXPECT_UKM(UkmEntry::kMediaStreamTypeName,
+ static_cast<int64_t>(properties->media_stream_type));
EXPECT_UKM(UkmEntry::kLastPipelineStatusName, PIPELINE_ERROR_DECODE);
EXPECT_UKM(UkmEntry::kRebuffersCountName, 3);
EXPECT_UKM(UkmEntry::kCompletedRebuffersCountName, 2);
@@ -772,8 +993,9 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoWithExtras) {
}
TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoBackgroundMuted) {
- mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
- true, true, true, true, false, false, false);
+ mojom::PlaybackPropertiesPtr properties =
+ mojom::PlaybackProperties::New(true, true, true, true, false, false,
+ false, mojom::MediaStreamType::kNone);
mojom::SecondaryPlaybackPropertiesPtr secondary_properties =
CreateSecondaryProperties();
Initialize(properties.Clone());
@@ -813,6 +1035,8 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoBackgroundMuted) {
static_cast<int64_t>(secondary_properties->video_encryption_scheme));
EXPECT_UKM(UkmEntry::kIsEMEName, properties->is_eme);
EXPECT_UKM(UkmEntry::kIsMSEName, properties->is_mse);
+ EXPECT_UKM(UkmEntry::kMediaStreamTypeName,
+ static_cast<int64_t>(properties->media_stream_type));
EXPECT_UKM(UkmEntry::kLastPipelineStatusName, PIPELINE_OK);
EXPECT_UKM(UkmEntry::kRebuffersCountName, 0);
EXPECT_UKM(UkmEntry::kCompletedRebuffersCountName, 0);
@@ -839,8 +1063,9 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoBackgroundMuted) {
}
TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoDuration) {
- mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
- true, true, false, false, false, false, false);
+ mojom::PlaybackPropertiesPtr properties =
+ mojom::PlaybackProperties::New(true, true, false, false, false, false,
+ false, mojom::MediaStreamType::kNone);
mojom::SecondaryPlaybackPropertiesPtr secondary_properties =
CreateSecondaryProperties();
Initialize(properties.Clone());
@@ -878,6 +1103,8 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoDuration) {
static_cast<int64_t>(secondary_properties->video_encryption_scheme));
EXPECT_UKM(UkmEntry::kIsEMEName, properties->is_eme);
EXPECT_UKM(UkmEntry::kIsMSEName, properties->is_mse);
+ EXPECT_UKM(UkmEntry::kMediaStreamTypeName,
+ static_cast<int64_t>(properties->media_stream_type));
EXPECT_UKM(UkmEntry::kLastPipelineStatusName, PIPELINE_OK);
EXPECT_UKM(UkmEntry::kRebuffersCountName, 0);
EXPECT_UKM(UkmEntry::kCompletedRebuffersCountName, 0);
@@ -906,8 +1133,9 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoDuration) {
}
TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoDurationInfinite) {
- mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
- true, true, false, false, false, false, false);
+ mojom::PlaybackPropertiesPtr properties =
+ mojom::PlaybackProperties::New(true, true, false, false, false, false,
+ false, mojom::MediaStreamType::kNone);
mojom::SecondaryPlaybackPropertiesPtr secondary_properties =
CreateSecondaryProperties();
Initialize(properties.Clone());
@@ -945,6 +1173,8 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoDurationInfinite) {
static_cast<int64_t>(secondary_properties->video_encryption_scheme));
EXPECT_UKM(UkmEntry::kIsEMEName, properties->is_eme);
EXPECT_UKM(UkmEntry::kIsMSEName, properties->is_mse);
+ EXPECT_UKM(UkmEntry::kMediaStreamTypeName,
+ static_cast<int64_t>(properties->media_stream_type));
EXPECT_UKM(UkmEntry::kLastPipelineStatusName, PIPELINE_OK);
EXPECT_UKM(UkmEntry::kRebuffersCountName, 0);
EXPECT_UKM(UkmEntry::kCompletedRebuffersCountName, 0);
@@ -972,10 +1202,46 @@ TEST_F(WatchTimeRecorderTest, BasicUkmAudioVideoDurationInfinite) {
}
}
+TEST_F(WatchTimeRecorderTest, BasicUkmMediaStreamType) {
+ std::vector<mojom::MediaStreamType> media_stream_types{
+ mojom::MediaStreamType::kLocalElementCapture,
+ mojom::MediaStreamType::kLocalDeviceCapture,
+ mojom::MediaStreamType::kLocalTabCapture,
+ mojom::MediaStreamType::kLocalDesktopCapture,
+ mojom::MediaStreamType::kLocalDisplayCapture,
+ mojom::MediaStreamType::kRemote,
+ mojom::MediaStreamType::kNone,
+ };
+
+ for (const auto& media_stream_type : media_stream_types) {
+ mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
+ true, true, false, false, false, false, false, media_stream_type);
+ Initialize(properties.Clone());
+ wtr_->UpdateSecondaryProperties(CreateSecondaryProperties());
+
+ constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(1);
+ wtr_->RecordWatchTime(WatchTimeKey::kAudioVideoAll, kWatchTime);
+ wtr_.reset();
+ base::RunLoop().RunUntilIdle();
+
+ const auto& entries =
+ test_recorder_->GetEntriesByName(UkmEntry::kEntryName);
+ ASSERT_EQ(1u, entries.size());
+
+ // Check that the media stream type is set correctly.
+ for (const auto* entry : entries) {
+ EXPECT_UKM(UkmEntry::kMediaStreamTypeName,
+ static_cast<int64_t>(media_stream_type));
+ }
+ ResetMetricRecorders();
+ }
+}
+
// Might happen due to timing issues, so ensure no crashes.
TEST_F(WatchTimeRecorderTest, NoSecondaryProperties) {
- mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
- true, true, false, false, true, true, false);
+ mojom::PlaybackPropertiesPtr properties =
+ mojom::PlaybackProperties::New(true, true, false, false, true, true,
+ false, mojom::MediaStreamType::kNone);
Initialize(properties.Clone());
constexpr base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(54);
@@ -987,12 +1253,14 @@ TEST_F(WatchTimeRecorderTest, NoSecondaryProperties) {
}
TEST_F(WatchTimeRecorderTest, SingleSecondaryPropertiesUnknownToKnown) {
- mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
- true, true, false, false, true, true, false);
+ mojom::PlaybackPropertiesPtr properties =
+ mojom::PlaybackProperties::New(true, true, false, false, true, true,
+ false, mojom::MediaStreamType::kNone);
mojom::SecondaryPlaybackPropertiesPtr secondary_properties1 =
mojom::SecondaryPlaybackProperties::New(
kUnknownAudioCodec, kUnknownVideoCodec, AudioCodecProfile::kUnknown,
- VIDEO_CODEC_PROFILE_UNKNOWN, "", "", EncryptionScheme::kUnencrypted,
+ VIDEO_CODEC_PROFILE_UNKNOWN, AudioDecoderType::kUnknown,
+ VideoDecoderType::kUnknown, EncryptionScheme::kUnencrypted,
EncryptionScheme::kUnencrypted, gfx::Size(800, 600));
Initialize(properties.Clone());
wtr_->UpdateSecondaryProperties(secondary_properties1.Clone());
@@ -1003,7 +1271,7 @@ TEST_F(WatchTimeRecorderTest, SingleSecondaryPropertiesUnknownToKnown) {
mojom::SecondaryPlaybackPropertiesPtr secondary_properties2 =
mojom::SecondaryPlaybackProperties::New(
kCodecAAC, kCodecH264, AudioCodecProfile::kXHE_AAC, H264PROFILE_MAIN,
- "FFmpegAudioDecoder", "FFmpegVideoDecoder",
+ AudioDecoderType::kFFmpeg, VideoDecoderType::kFFmpeg,
EncryptionScheme::kUnencrypted, EncryptionScheme::kUnencrypted,
gfx::Size(800, 600));
wtr_->UpdateSecondaryProperties(secondary_properties2.Clone());
@@ -1023,6 +1291,8 @@ TEST_F(WatchTimeRecorderTest, SingleSecondaryPropertiesUnknownToKnown) {
EXPECT_UKM(UkmEntry::kHasVideoName, properties->has_video);
EXPECT_UKM(UkmEntry::kIsEMEName, properties->is_eme);
EXPECT_UKM(UkmEntry::kIsMSEName, properties->is_mse);
+ EXPECT_UKM(UkmEntry::kMediaStreamTypeName,
+ static_cast<int64_t>(properties->media_stream_type));
EXPECT_UKM(UkmEntry::kAutoplayInitiatedName, false);
EXPECT_UKM(UkmEntry::kLastPipelineStatusName, PIPELINE_OK);
EXPECT_HAS_UKM(UkmEntry::kPlayerIDName);
@@ -1054,12 +1324,13 @@ TEST_F(WatchTimeRecorderTest, SingleSecondaryPropertiesUnknownToKnown) {
}
TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesNoFinalize) {
- mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
- true, true, false, false, true, true, false);
+ mojom::PlaybackPropertiesPtr properties =
+ mojom::PlaybackProperties::New(true, true, false, false, true, true,
+ false, mojom::MediaStreamType::kNone);
mojom::SecondaryPlaybackPropertiesPtr secondary_properties1 =
mojom::SecondaryPlaybackProperties::New(
kCodecOpus, kCodecVP9, AudioCodecProfile::kUnknown,
- VP9PROFILE_PROFILE0, "MojoAudioDecoder", "MojoVideoDecoder",
+ VP9PROFILE_PROFILE0, AudioDecoderType::kMojo, VideoDecoderType::kMojo,
EncryptionScheme::kUnencrypted, EncryptionScheme::kUnencrypted,
gfx::Size(400, 300));
Initialize(properties.Clone());
@@ -1080,8 +1351,9 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesNoFinalize) {
mojom::SecondaryPlaybackPropertiesPtr secondary_properties2 =
mojom::SecondaryPlaybackProperties::New(
kCodecAAC, kCodecH264, AudioCodecProfile::kUnknown, H264PROFILE_MAIN,
- "FFmpegAudioDecoder", "FFmpegVideoDecoder", EncryptionScheme::kCenc,
- EncryptionScheme::kCenc, gfx::Size(800, 600));
+ AudioDecoderType::kFFmpeg, VideoDecoderType::kFFmpeg,
+ EncryptionScheme::kCenc, EncryptionScheme::kCenc,
+ gfx::Size(800, 600));
wtr_->UpdateSecondaryProperties(secondary_properties2.Clone());
constexpr base::TimeDelta kWatchTime2 = base::TimeDelta::FromSeconds(25);
@@ -1114,6 +1386,8 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesNoFinalize) {
EXPECT_UKM(UkmEntry::kHasVideoName, properties->has_video);
EXPECT_UKM(UkmEntry::kIsEMEName, properties->is_eme);
EXPECT_UKM(UkmEntry::kIsMSEName, properties->is_mse);
+ EXPECT_UKM(UkmEntry::kMediaStreamTypeName,
+ static_cast<int64_t>(properties->media_stream_type));
EXPECT_UKM(UkmEntry::kAutoplayInitiatedName, false);
EXPECT_UKM(UkmEntry::kDurationName, 5000000);
EXPECT_HAS_UKM(UkmEntry::kPlayerIDName);
@@ -1184,12 +1458,13 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesNoFinalize) {
}
TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesNoFinalizeNo2ndWT) {
- mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
- true, true, false, false, true, true, false);
+ mojom::PlaybackPropertiesPtr properties =
+ mojom::PlaybackProperties::New(true, true, false, false, true, true,
+ false, mojom::MediaStreamType::kNone);
mojom::SecondaryPlaybackPropertiesPtr secondary_properties1 =
mojom::SecondaryPlaybackProperties::New(
kCodecOpus, kCodecVP9, AudioCodecProfile::kUnknown,
- VP9PROFILE_PROFILE0, "MojoAudioDecoder", "MojoVideoDecoder",
+ VP9PROFILE_PROFILE0, AudioDecoderType::kMojo, VideoDecoderType::kMojo,
EncryptionScheme::kUnencrypted, EncryptionScheme::kUnencrypted,
gfx::Size(400, 300));
Initialize(properties.Clone());
@@ -1210,7 +1485,7 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesNoFinalizeNo2ndWT) {
mojom::SecondaryPlaybackPropertiesPtr secondary_properties2 =
mojom::SecondaryPlaybackProperties::New(
kCodecAAC, kCodecH264, AudioCodecProfile::kXHE_AAC, H264PROFILE_MAIN,
- "FFmpegAudioDecoder", "FFmpegVideoDecoder",
+ AudioDecoderType::kFFmpeg, VideoDecoderType::kFFmpeg,
EncryptionScheme::kUnencrypted, EncryptionScheme::kUnencrypted,
gfx::Size(800, 600));
wtr_->UpdateSecondaryProperties(secondary_properties2.Clone());
@@ -1232,6 +1507,8 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesNoFinalizeNo2ndWT) {
EXPECT_UKM(UkmEntry::kHasVideoName, properties->has_video);
EXPECT_UKM(UkmEntry::kIsEMEName, properties->is_eme);
EXPECT_UKM(UkmEntry::kIsMSEName, properties->is_mse);
+ EXPECT_UKM(UkmEntry::kMediaStreamTypeName,
+ static_cast<int64_t>(properties->media_stream_type));
EXPECT_UKM(UkmEntry::kAutoplayInitiatedName, false);
EXPECT_UKM(UkmEntry::kLastPipelineStatusName, PIPELINE_OK);
EXPECT_HAS_UKM(UkmEntry::kPlayerIDName);
@@ -1297,12 +1574,13 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesNoFinalizeNo2ndWT) {
}
TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesWithFinalize) {
- mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
- true, true, false, false, true, true, false);
+ mojom::PlaybackPropertiesPtr properties =
+ mojom::PlaybackProperties::New(true, true, false, false, true, true,
+ false, mojom::MediaStreamType::kNone);
mojom::SecondaryPlaybackPropertiesPtr secondary_properties1 =
mojom::SecondaryPlaybackProperties::New(
kCodecOpus, kCodecVP9, AudioCodecProfile::kUnknown,
- VP9PROFILE_PROFILE0, "MojoAudioDecoder", "MojoVideoDecoder",
+ VP9PROFILE_PROFILE0, AudioDecoderType::kMojo, VideoDecoderType::kMojo,
EncryptionScheme::kCbcs, EncryptionScheme::kCbcs,
gfx::Size(400, 300));
Initialize(properties.Clone());
@@ -1327,7 +1605,7 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesWithFinalize) {
mojom::SecondaryPlaybackPropertiesPtr secondary_properties2 =
mojom::SecondaryPlaybackProperties::New(
kCodecAAC, kCodecH264, AudioCodecProfile::kXHE_AAC, H264PROFILE_MAIN,
- "FFmpegAudioDecoder", "FFmpegVideoDecoder",
+ AudioDecoderType::kFFmpeg, VideoDecoderType::kFFmpeg,
EncryptionScheme::kUnencrypted, EncryptionScheme::kUnencrypted,
gfx::Size(800, 600));
wtr_->UpdateSecondaryProperties(secondary_properties2.Clone());
@@ -1353,6 +1631,8 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesWithFinalize) {
EXPECT_UKM(UkmEntry::kHasVideoName, properties->has_video);
EXPECT_UKM(UkmEntry::kIsEMEName, properties->is_eme);
EXPECT_UKM(UkmEntry::kIsMSEName, properties->is_mse);
+ EXPECT_UKM(UkmEntry::kMediaStreamTypeName,
+ static_cast<int64_t>(properties->media_stream_type));
EXPECT_UKM(UkmEntry::kAutoplayInitiatedName, false);
EXPECT_HAS_UKM(UkmEntry::kPlayerIDName);
EXPECT_NO_UKM(UkmEntry::kDurationName);
@@ -1422,12 +1702,13 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesWithFinalize) {
}
TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesRebufferCarryover) {
- mojom::PlaybackPropertiesPtr properties = mojom::PlaybackProperties::New(
- true, true, false, false, true, true, false);
+ mojom::PlaybackPropertiesPtr properties =
+ mojom::PlaybackProperties::New(true, true, false, false, true, true,
+ false, mojom::MediaStreamType::kNone);
mojom::SecondaryPlaybackPropertiesPtr secondary_properties1 =
mojom::SecondaryPlaybackProperties::New(
kCodecOpus, kCodecVP9, AudioCodecProfile::kUnknown,
- VP9PROFILE_PROFILE0, "MojoAudioDecoder", "MojoVideoDecoder",
+ VP9PROFILE_PROFILE0, AudioDecoderType::kMojo, VideoDecoderType::kMojo,
EncryptionScheme::kCbcs, EncryptionScheme::kCbcs,
gfx::Size(400, 300));
Initialize(properties.Clone());
@@ -1446,7 +1727,7 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesRebufferCarryover) {
mojom::SecondaryPlaybackPropertiesPtr secondary_properties2 =
mojom::SecondaryPlaybackProperties::New(
kCodecAAC, kCodecH264, AudioCodecProfile::kXHE_AAC, H264PROFILE_MAIN,
- "FFmpegAudioDecoder", "FFmpegVideoDecoder",
+ AudioDecoderType::kFFmpeg, VideoDecoderType::kFFmpeg,
EncryptionScheme::kUnencrypted, EncryptionScheme::kUnencrypted,
gfx::Size(800, 600));
wtr_->UpdateSecondaryProperties(secondary_properties2.Clone());
@@ -1482,6 +1763,8 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesRebufferCarryover) {
EXPECT_UKM(UkmEntry::kHasVideoName, properties->has_video);
EXPECT_UKM(UkmEntry::kIsEMEName, properties->is_eme);
EXPECT_UKM(UkmEntry::kIsMSEName, properties->is_mse);
+ EXPECT_UKM(UkmEntry::kMediaStreamTypeName,
+ static_cast<int64_t>(properties->media_stream_type));
EXPECT_UKM(UkmEntry::kAutoplayInitiatedName, false);
EXPECT_UKM(UkmEntry::kDurationName, 5000000);
EXPECT_HAS_UKM(UkmEntry::kPlayerIDName);
@@ -1551,16 +1834,4 @@ TEST_F(WatchTimeRecorderTest, MultipleSecondaryPropertiesRebufferCarryover) {
#undef EXPECT_NO_UKM
#undef EXPECT_HAS_UKM
-TEST_F(WatchTimeRecorderTest, DISABLED_PrintExpectedDecoderNameHashes) {
- const std::string kDecoderNames[] = {
- "FFmpegAudioDecoder", "FFmpegVideoDecoder", "GpuVideoDecoder",
- "MojoVideoDecoder", "MojoAudioDecoder", "VpxVideoDecoder",
- "AomVideoDecoder", "DecryptingAudioDecoder", "DecryptingVideoDecoder",
- "Dav1dVideoDecoder", "FuchsiaVideoDecoder", "MediaPlayer",
- "Gav1VideoDecoder"};
- printf("%18s = 0\n", "None");
- for (const auto& name : kDecoderNames)
- printf("%18s = 0x%08x\n", name.c_str(), base::PersistentHash(name));
-}
-
} // namespace media
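
For reference, the hash constants matched by the deleted Convert*DecoderNameToEnum() switches were generated by the DISABLED_PrintExpectedDecoderNameHashes test removed above. A standalone sketch of that derivation, assuming Chromium's base::PersistentHash() from base/hash/hash.h:

```cpp
#include <cstdio>
#include <string>

#include "base/hash/hash.h"

// Prints the run-stable hash for each decoder name; the values match the
// constants in the removed switches, e.g. "FFmpegAudioDecoder" -> 0xd39e0c2d.
int main() {
  const std::string kNames[] = {"FFmpegAudioDecoder", "MojoAudioDecoder",
                                "FFmpegVideoDecoder", "MojoVideoDecoder"};
  for (const auto& name : kNames)
    std::printf("%18s = 0x%08x\n", name.c_str(), base::PersistentHash(name));
}
```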
diff --git a/chromium/media/muxers/webm_muxer.cc b/chromium/media/muxers/webm_muxer.cc
index f4e9be0754a..4cfd31f79a2 100644
--- a/chromium/media/muxers/webm_muxer.cc
+++ b/chromium/media/muxers/webm_muxer.cc
@@ -10,6 +10,8 @@
#include "base/bind.h"
#include "base/logging.h"
#include "base/sequence_checker.h"
+#include "base/time/time.h"
+#include "base/time/time_override.h"
#include "media/base/audio_parameters.h"
#include "media/base/limits.h"
#include "media/base/video_frame.h"
@@ -19,6 +21,10 @@ namespace media {
namespace {
+// Force new clusters at a maximum rate of 10 Hz.
+constexpr base::TimeDelta kMinimumForcedClusterDuration =
+ base::TimeDelta::FromMilliseconds(100);
+
void WriteOpusHeader(const media::AudioParameters& params, uint8_t* header) {
// See https://wiki.xiph.org/OggOpus#ID_Header.
// Set magic signature.
@@ -139,7 +145,7 @@ base::Optional<mkvmuxer::Colour> ColorFromColorSpace(
WebmMuxer::VideoParameters::VideoParameters(
scoped_refptr<media::VideoFrame> frame)
: visible_rect_size(frame->visible_rect().size()),
- frame_rate(frame->metadata()->frame_rate.value_or(0.0)),
+ frame_rate(frame->metadata().frame_rate.value_or(0.0)),
codec(kUnknownVideoCodec),
color_space(frame->ColorSpace()) {}
@@ -192,12 +198,17 @@ WebmMuxer::~WebmMuxer() {
Flush();
}
+void WebmMuxer::SetMaximumDurationToForceDataOutput(base::TimeDelta interval) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ max_data_output_interval_ = std::max(interval, kMinimumForcedClusterDuration);
+}
+
bool WebmMuxer::OnEncodedVideo(const VideoParameters& params,
std::string encoded_data,
std::string encoded_alpha,
base::TimeTicks timestamp,
bool is_key_frame) {
- DVLOG(1) << __func__ << " - " << encoded_data.size() << "B";
+ DVLOG(2) << __func__ << " - " << encoded_data.size() << "B";
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(params.codec == kCodecVP8 || params.codec == kCodecVP9 ||
params.codec == kCodecH264)
@@ -244,6 +255,7 @@ bool WebmMuxer::OnEncodedAudio(const media::AudioParameters& params,
DVLOG(2) << __func__ << " - " << encoded_data.size() << "B";
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ MaybeForceNewCluster();
if (!audio_track_index_) {
AddAudioTrack(params);
if (first_frame_timestamp_audio_.is_null()) {
@@ -262,11 +274,22 @@ bool WebmMuxer::OnEncodedAudio(const media::AudioParameters& params,
return PartiallyFlushQueues();
}
+void WebmMuxer::SetLiveAndEnabled(bool track_live_and_enabled, bool is_video) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ bool& written_track_live_and_enabled =
+ is_video ? video_track_live_and_enabled_ : audio_track_live_and_enabled_;
+ if (written_track_live_and_enabled != track_live_and_enabled) {
+ DVLOG(1) << __func__ << (is_video ? " video " : " audio ")
+ << "track live-and-enabled changed to " << track_live_and_enabled;
+ }
+ written_track_live_and_enabled = track_live_and_enabled;
+}
+
void WebmMuxer::Pause() {
DVLOG(1) << __func__;
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (!elapsed_time_in_pause_)
- elapsed_time_in_pause_.reset(new base::ElapsedTimer());
+ elapsed_time_in_pause_ = std::make_unique<base::ElapsedTimer>();
}
void WebmMuxer::Resume() {
@@ -377,7 +400,9 @@ void WebmMuxer::AddAudioTrack(const media::AudioParameters& params) {
mkvmuxer::int32 WebmMuxer::Write(const void* buf, mkvmuxer::uint32 len) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DVLOG(2) << __func__ << " len " << len;
DCHECK(buf);
+ last_data_output_timestamp_ = base::TimeTicks::Now();
write_data_callback_.Run(
base::StringPiece(reinterpret_cast<const char*>(buf), len));
position_ += len;
@@ -413,9 +438,23 @@ void WebmMuxer::FlushQueues() {
bool WebmMuxer::PartiallyFlushQueues() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+
+ // Punt writing until all tracks have been created.
+ if ((has_audio_ && !audio_track_index_) ||
+ (has_video_ && !video_track_index_)) {
+ return true;
+ }
+
bool result = true;
- while (!(has_video_ && video_frames_.empty()) &&
- !(has_audio_ && audio_frames_.empty()) && result) {
+ // We strictly sort by timestamp unless a track is not live-and-enabled. In
+ // that case we relax this and allow drainage of the live-and-enabled leg.
+ while ((!has_video_ || !video_frames_.empty() ||
+ !video_track_live_and_enabled_) &&
+ (!has_audio_ || !audio_frames_.empty() ||
+ !audio_track_live_and_enabled_) &&
+ result) {
+ if (video_frames_.empty() && audio_frames_.empty())
+ return true;
result = FlushNextFrame();
}
return result;
@@ -437,7 +476,20 @@ bool WebmMuxer::FlushNextFrame() {
EncodedFrame frame = std::move(queue->front());
queue->pop_front();
- auto recorded_timestamp = frame.relative_timestamp.InMicroseconds() *
+ // The logic tracking live-and-enabled that temporarily relaxes the strict
+ // timestamp sorting allows for draining a track's queue completely in the
+ // presence of the other track being muted. When the muted track becomes
+ // live-and-enabled again the sorting recommences. However, tracks get encoded
+ // data before live-and-enabled transitions to true. This can lead to us
+ // emitting non-monotonic timestamps to the muxer, which results in an error
+ // return. Fix this by rewriting timestamps so that they remain monotonic.
+ base::TimeDelta relative_timestamp = frame.relative_timestamp;
+ DLOG_IF(WARNING, relative_timestamp < last_timestamp_written_)
+ << "Enforced a monotonically increasing timestamp. Last written "
+ << last_timestamp_written_ << " new " << relative_timestamp;
+ relative_timestamp = std::max(relative_timestamp, last_timestamp_written_);
+ last_timestamp_written_ = relative_timestamp;
+ auto recorded_timestamp = relative_timestamp.InMicroseconds() *
base::Time::kNanosecondsPerMicrosecond;
if (force_one_libwebm_error_) {
@@ -475,4 +527,15 @@ base::TimeTicks WebmMuxer::UpdateLastTimestampMonotonically(
return *last_timestamp;
}
+void WebmMuxer::MaybeForceNewCluster() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ if (has_video_ && !max_data_output_interval_.is_zero() &&
+ !last_data_output_timestamp_.is_null()) {
+ base::TimeTicks now = base::TimeTicks::Now();
+ if (now - last_data_output_timestamp_ >= max_data_output_interval_) {
+ segment_.ForceNewClusterOnNextFrame();
+ }
+ }
+}
+
} // namespace media
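
The timestamp rewrite added to FlushNextFrame() reduces to a two-line clamp. A minimal standalone sketch of the same invariant, using microsecond integers in place of base::TimeDelta:

```cpp
#include <algorithm>
#include <cassert>
#include <cstdint>

// Mirrors the monotonicity enforcement in WebmMuxer::FlushNextFrame(): a
// timestamp that would step backwards is pinned to the last value written,
// so libwebm never sees a decreasing sequence.
int64_t ClampMonotonic(int64_t timestamp_us, int64_t* last_written_us) {
  timestamp_us = std::max(timestamp_us, *last_written_us);
  *last_written_us = timestamp_us;
  return timestamp_us;
}

int main() {
  int64_t last = 0;
  assert(ClampMonotonic(10, &last) == 10);
  assert(ClampMonotonic(5, &last) == 10);  // Out-of-order input is pinned.
  assert(ClampMonotonic(20, &last) == 20);
}
```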
diff --git a/chromium/media/muxers/webm_muxer.h b/chromium/media/muxers/webm_muxer.h
index f22f3c12438..63b6dc87194 100644
--- a/chromium/media/muxers/webm_muxer.h
+++ b/chromium/media/muxers/webm_muxer.h
@@ -53,7 +53,7 @@ class MEDIA_EXPORT WebmMuxer : public mkvmuxer::IMkvWriter {
// Container for the parameters that muxer uses that is extracted from
// media::VideoFrame.
struct MEDIA_EXPORT VideoParameters {
- VideoParameters(scoped_refptr<media::VideoFrame> frame);
+ explicit VideoParameters(scoped_refptr<media::VideoFrame> frame);
VideoParameters(gfx::Size visible_rect_size,
double frame_rate,
VideoCodec codec,
@@ -73,6 +73,16 @@ class MEDIA_EXPORT WebmMuxer : public mkvmuxer::IMkvWriter {
const WriteDataCB& write_data_callback);
~WebmMuxer() override;
+ // Sets the maximum interval that may pass before data is output on
+ // |write_data_callback|, provided frames are delivered. The WebM muxer can
+ // hold on to audio frames almost indefinitely when video is being recorded
+ // but video frames are temporarily not delivered. When this method is used,
+ // a new WebM cluster is forced when the next frame arrives |interval| after
+ // the last write.
+ // The requested interval is clamped internally and never goes below 100 ms.
+ void SetMaximumDurationToForceDataOutput(base::TimeDelta interval);
+
// Functions to add video and audio frames with |encoded_data.data()|
// to WebM Segment. Either one returns true on success.
// |encoded_alpha| represents the encode output of alpha channel when
@@ -86,6 +96,12 @@ class MEDIA_EXPORT WebmMuxer : public mkvmuxer::IMkvWriter {
std::string encoded_data,
base::TimeTicks timestamp);
+ // WebmMuxer may hold on to data. Make sure it gets out on the next frame.
+ void ForceDataOutputOnNextFrame();
+
+ // Call to handle mute and tracks getting disabled.
+ void SetLiveAndEnabled(bool track_live_and_enabled, bool is_video);
+
void Pause();
void Resume();
@@ -140,6 +156,10 @@ class MEDIA_EXPORT WebmMuxer : public mkvmuxer::IMkvWriter {
base::TimeTicks UpdateLastTimestampMonotonically(
base::TimeTicks timestamp,
base::TimeTicks* last_timestamp);
+ // Forces data output from |segment_| on the next frame if recording video,
+ // and |max_data_output_interval_| was configured and has passed since the
+ // last data output.
+ void MaybeForceNewCluster();
// Audio codec configured on construction. Video codec is taken from first
// received frame.
@@ -166,6 +186,19 @@ class MEDIA_EXPORT WebmMuxer : public mkvmuxer::IMkvWriter {
const bool has_video_;
const bool has_audio_;
+ // Variables to track live and enabled state of audio and video.
+ bool video_track_live_and_enabled_ = true;
+ bool audio_track_live_and_enabled_ = true;
+
+ // Maximum interval between data output callbacks (given that frames arrive).
+ base::TimeDelta max_data_output_interval_;
+
+ // Last time data was output from |segment_|.
+ base::TimeTicks last_data_output_timestamp_;
+
+ // Last timestamp written into the segment.
+ base::TimeDelta last_timestamp_written_;
+
// Callback to dump written data as being called by libwebm.
const WriteDataCB write_data_callback_;
diff --git a/chromium/media/muxers/webm_muxer_unittest.cc b/chromium/media/muxers/webm_muxer_unittest.cc
index eb9b739a3af..3c92fa10d9a 100644
--- a/chromium/media/muxers/webm_muxer_unittest.cc
+++ b/chromium/media/muxers/webm_muxer_unittest.cc
@@ -65,16 +65,14 @@ class WebmMuxerTest : public TestWithParam<TestParams> {
GetParam().num_video_tracks,
GetParam().num_audio_tracks,
base::BindRepeating(&WebmMuxerTest::WriteCallback,
- base::Unretained(this)))),
- last_encoded_length_(0),
- accumulated_position_(0) {
+ base::Unretained(this)))) {
EXPECT_EQ(webm_muxer_->Position(), 0);
const mkvmuxer::int64 kRandomNewPosition = 333;
EXPECT_EQ(webm_muxer_->Position(kRandomNewPosition), -1);
EXPECT_FALSE(webm_muxer_->Seekable());
}
- MOCK_METHOD1(WriteCallback, void(base::StringPiece));
+ MOCK_METHOD(void, WriteCallback, (base::StringPiece));
void SaveEncodedDataLen(const base::StringPiece& encoded_data) {
last_encoded_length_ = encoded_data.size();
@@ -110,8 +108,8 @@ class WebmMuxerTest : public TestWithParam<TestParams> {
std::unique_ptr<WebmMuxer> webm_muxer_;
- size_t last_encoded_length_;
- int64_t accumulated_position_;
+ size_t last_encoded_length_ = 0;
+ int64_t accumulated_position_ = 0;
private:
DISALLOW_COPY_AND_ASSIGN(WebmMuxerTest);
@@ -504,6 +502,8 @@ class WebmMuxerTestUnparametrized : public testing::Test {
base::TimeDelta::FromMilliseconds(system_timestamp_offset_ms));
}
+ MOCK_METHOD(void, OnWrite, ());
+
base::test::TaskEnvironment environment_;
std::unique_ptr<WebmMuxer> webm_muxer_;
std::map<int, std::vector<int>> buffer_timestamps_ms_;
@@ -534,6 +534,7 @@ class WebmMuxerTestUnparametrized : public testing::Test {
static constexpr int kSentinelVideoBufferTimestampMs = 1000000;
void SaveChunkAndInvokeWriteCallback(base::StringPiece chunk) {
+ OnWrite();
std::copy(chunk.begin(), chunk.end(), std::back_inserter(muxed_data_));
}
@@ -601,4 +602,51 @@ TEST_F(WebmMuxerTestUnparametrized,
Pair(2, ElementsAre(0, 5))));
}
+TEST_F(WebmMuxerTestUnparametrized, HoldsDataUntilDurationExpiry) {
+ webm_muxer_->SetMaximumDurationToForceDataOutput(
+ base::TimeDelta::FromMilliseconds(200));
+ AddVideoAtOffset(0, /*is_key_frame=*/true);
+ AddAudioAtOffsetWithDuration(0, 10);
+ // Mute video. The muxer will hold on to audio data after this until the max
+ // data output duration has expired.
+ webm_muxer_->SetLiveAndEnabled(/*track_live_and_enabled=*/false,
+ /*is_video=*/true);
+ EXPECT_CALL(*this, OnWrite).Times(0);
+ AddAudioAtOffsetWithDuration(10, 10);
+ AddAudioAtOffsetWithDuration(20, 10);
+ AddAudioAtOffsetWithDuration(30, 10);
+ AddAudioAtOffsetWithDuration(40, 10);
+ Mock::VerifyAndClearExpectations(this);
+ environment_.FastForwardBy(base::TimeDelta::FromMilliseconds(200));
+ EXPECT_CALL(*this, OnWrite).Times(AtLeast(1));
+ AddAudioAtOffsetWithDuration(50, 10);
+ Mock::VerifyAndClearExpectations(this);
+ // Stop mock dispatch from happening too late in the WebmMuxer's destructor.
+ webm_muxer_ = nullptr;
+}
+
+TEST_F(WebmMuxerTestUnparametrized, DurationExpiryLimitedByMaxFrequency) {
+ webm_muxer_->SetMaximumDurationToForceDataOutput(
+ base::TimeDelta::FromMilliseconds(
+ 50)); // This value is below the minimum limit of 100 ms.
+ AddVideoAtOffset(0, /*is_key_frame=*/true);
+ AddAudioAtOffsetWithDuration(0, 10);
+ // Mute video. The muxer will hold on to audio data after this until the max
+ // data output duration has expired.
+ webm_muxer_->SetLiveAndEnabled(/*track_live_and_enabled=*/false,
+ /*is_video=*/true);
+ EXPECT_CALL(*this, OnWrite).Times(0);
+ AddAudioAtOffsetWithDuration(10, 10);
+ AddAudioAtOffsetWithDuration(20, 10);
+ AddAudioAtOffsetWithDuration(30, 10);
+ AddAudioAtOffsetWithDuration(40, 10);
+ Mock::VerifyAndClearExpectations(this);
+ environment_.FastForwardBy(base::TimeDelta::FromMilliseconds(100));
+ EXPECT_CALL(*this, OnWrite).Times(AtLeast(1));
+ AddAudioAtOffsetWithDuration(50, 10);
+ Mock::VerifyAndClearExpectations(this);
+ // Stop mock dispatch from happening too late in the WebmMuxer's destructor.
+ webm_muxer_ = nullptr;
+}
+
} // namespace media
diff --git a/chromium/media/remoting/BUILD.gn b/chromium/media/remoting/BUILD.gn
index b01ab5ffd6b..bbf4db8a62d 100644
--- a/chromium/media/remoting/BUILD.gn
+++ b/chromium/media/remoting/BUILD.gn
@@ -125,6 +125,7 @@ source_set("media_remoting_tests") {
"fake_media_resource.cc",
"fake_media_resource.h",
"integration_test.cc",
+ "metrics_unittest.cc",
"proto_utils_unittest.cc",
"rpc_broker_unittest.cc",
]
diff --git a/chromium/media/remoting/OWNERS b/chromium/media/remoting/OWNERS
index 124883ef14a..7d3110de24e 100644
--- a/chromium/media/remoting/OWNERS
+++ b/chromium/media/remoting/OWNERS
@@ -1,3 +1,3 @@
erickung@chromium.org
-miu@chromium.org
+jophba@chromium.org
mfoltz@chromium.org
diff --git a/chromium/media/remoting/courier_renderer.cc b/chromium/media/remoting/courier_renderer.cc
index c7d32b9cd5a..ac1dbba38d1 100644
--- a/chromium/media/remoting/courier_renderer.cc
+++ b/chromium/media/remoting/courier_renderer.cc
@@ -123,24 +123,14 @@ void CourierRenderer::Initialize(MediaResource* media_resource,
::media::DemuxerStream* video_demuxer_stream =
media_resource_->GetFirstStream(DemuxerStream::VIDEO);
- // Create audio mojo data pipe handles if audio is available.
- std::unique_ptr<mojo::DataPipe> audio_data_pipe;
- if (audio_demuxer_stream) {
- audio_data_pipe = base::WrapUnique(DemuxerStreamAdapter::CreateDataPipe());
- }
-
- // Create video mojo data pipe handles if video is available.
- std::unique_ptr<mojo::DataPipe> video_data_pipe;
- if (video_demuxer_stream) {
- video_data_pipe = base::WrapUnique(DemuxerStreamAdapter::CreateDataPipe());
- }
-
// Establish remoting data pipe connection using main thread.
+ uint32_t data_pipe_capacity =
+ DemuxerStreamAdapter::kMojoDataPipeCapacityInBytes;
main_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(
- &RendererController::StartDataPipe, controller_,
- std::move(audio_data_pipe), std::move(video_data_pipe),
+ &RendererController::StartDataPipe, controller_, data_pipe_capacity,
+ audio_demuxer_stream, video_demuxer_stream,
base::BindOnce(&CourierRenderer::OnDataPipeCreatedOnMainThread,
media_task_runner_, weak_factory_.GetWeakPtr(),
rpc_broker_)));
diff --git a/chromium/media/remoting/courier_renderer_unittest.cc b/chromium/media/remoting/courier_renderer_unittest.cc
index e4c2ea91eea..edb73477889 100644
--- a/chromium/media/remoting/courier_renderer_unittest.cc
+++ b/chromium/media/remoting/courier_renderer_unittest.cc
@@ -53,8 +53,8 @@ PipelineStatistics DefaultStats() {
stats.audio_memory_usage = 5678;
stats.video_memory_usage = 6789;
stats.video_keyframe_distance_average = base::TimeDelta::Max();
- stats.audio_decoder_info = {false, false, "Default"};
- stats.video_decoder_info = {false, false, "Default"};
+ stats.audio_decoder_info = {false, false, AudioDecoderType::kUnknown};
+ stats.video_decoder_info = {false, false, VideoDecoderType::kUnknown};
return stats;
}
@@ -419,12 +419,12 @@ class CourierRendererTest : public testing::Test {
message->set_video_memory_usage(stats.video_memory_usage);
message->mutable_audio_decoder_info()->set_is_platform_decoder(
stats.audio_decoder_info.is_platform_decoder);
- message->mutable_audio_decoder_info()->set_decoder_name(
- stats.audio_decoder_info.decoder_name);
+ message->mutable_audio_decoder_info()->set_decoder_type(
+ static_cast<int64_t>(stats.audio_decoder_info.decoder_type));
message->mutable_video_decoder_info()->set_is_platform_decoder(
stats.video_decoder_info.is_platform_decoder);
- message->mutable_video_decoder_info()->set_decoder_name(
- stats.video_decoder_info.decoder_name);
+ message->mutable_video_decoder_info()->set_decoder_type(
+ static_cast<int64_t>(stats.video_decoder_info.decoder_type));
OnReceivedRpc(std::move(rpc));
RunPendingTasks();
}
diff --git a/chromium/media/remoting/demuxer_stream_adapter.cc b/chromium/media/remoting/demuxer_stream_adapter.cc
index 63b7b9d201d..d9e3e50f74a 100644
--- a/chromium/media/remoting/demuxer_stream_adapter.cc
+++ b/chromium/media/remoting/demuxer_stream_adapter.cc
@@ -22,13 +22,6 @@
namespace media {
namespace remoting {
-// static
-mojo::DataPipe* DemuxerStreamAdapter::CreateDataPipe() {
- // Capacity in bytes for Mojo data pipe.
- constexpr int kMojoDataPipeCapacityInBytes = 512 * 1024;
- return new mojo::DataPipe(kMojoDataPipeCapacityInBytes);
-}
-
DemuxerStreamAdapter::DemuxerStreamAdapter(
scoped_refptr<base::SingleThreadTaskRunner> main_task_runner,
scoped_refptr<base::SingleThreadTaskRunner> media_task_runner,
diff --git a/chromium/media/remoting/demuxer_stream_adapter.h b/chromium/media/remoting/demuxer_stream_adapter.h
index e2e1324070b..56436cfd6ec 100644
--- a/chromium/media/remoting/demuxer_stream_adapter.h
+++ b/chromium/media/remoting/demuxer_stream_adapter.h
@@ -44,6 +44,9 @@ class DemuxerStreamAdapter {
public:
using ErrorCallback = base::OnceCallback<void(StopTrigger)>;
+ // The capacity in bytes for the Mojo data pipes used with this class.
+ static constexpr uint32_t kMojoDataPipeCapacityInBytes = 512 * 1024;
+
// |main_task_runner|: Task runner to post RPC message on main thread
// |media_task_runner|: Task runner to run whole class on media thread.
// |name|: Demuxer stream name. For troubleshooting purposes.
@@ -96,10 +99,6 @@ class DemuxerStreamAdapter {
// pipe.
bool is_data_pending() const { return !pending_frame_.empty(); }
- // Creates a Mojo data pipe configured appropriately for use with a
- // DemuxerStreamAdapter.
- static mojo::DataPipe* CreateDataPipe();
-
private:
friend class MockDemuxerStreamAdapter;
diff --git a/chromium/media/remoting/demuxer_stream_adapter_unittest.cc b/chromium/media/remoting/demuxer_stream_adapter_unittest.cc
index bdb15484f63..08452e44748 100644
--- a/chromium/media/remoting/demuxer_stream_adapter_unittest.cc
+++ b/chromium/media/remoting/demuxer_stream_adapter_unittest.cc
@@ -122,9 +122,8 @@ class DemuxerStreamAdapterTest : public ::testing::Test {
mojo::PendingRemote<mojom::RemotingDataStreamSender> stream_sender;
mojo::ScopedDataPipeProducerHandle producer_end;
mojo::ScopedDataPipeConsumerHandle consumer_end;
- CHECK_EQ(
- MOJO_RESULT_OK,
- mojo::CreateDataPipe(&data_pipe_options, &producer_end, &consumer_end));
+ CHECK_EQ(MOJO_RESULT_OK, mojo::CreateDataPipe(&data_pipe_options,
+ producer_end, consumer_end));
data_stream_sender_.reset(new FakeRemotingDataStreamSender(
stream_sender.InitWithNewPipeAndPassReceiver(),
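
For context: the removed DemuxerStreamAdapter::CreateDataPipe() helper has no
one-for-one replacement; callers now invoke the two-handle
mojo::CreateDataPipe() overload directly, as the updated test above does. A
minimal sketch, not part of this patch (the function name is illustrative):

  #include "media/remoting/demuxer_stream_adapter.h"
  #include "mojo/public/cpp/system/data_pipe.h"

  bool CreateAdapterDataPipe(mojo::ScopedDataPipeProducerHandle* producer,
                             mojo::ScopedDataPipeConsumerHandle* consumer) {
    // The capacity constant is now a public member of the adapter class.
    return mojo::CreateDataPipe(
               media::remoting::DemuxerStreamAdapter::
                   kMojoDataPipeCapacityInBytes,
               *producer, *consumer) == MOJO_RESULT_OK;
  }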
diff --git a/chromium/media/remoting/media_remoting_rpc.proto b/chromium/media/remoting/media_remoting_rpc.proto
index b194081063c..2375db483b7 100644
--- a/chromium/media/remoting/media_remoting_rpc.proto
+++ b/chromium/media/remoting/media_remoting_rpc.proto
@@ -271,8 +271,14 @@ message DecryptConfig {
optional uint32 skip_byte_block = 6 [deprecated = true];
}
-message PipelineDecoderInfo {
- optional string decoder_name = 1;
+message AudioDecoderInfo {
+ optional int64 decoder_type = 1;
+ optional bool is_platform_decoder = 2;
+ optional bool has_decrypting_demuxer_stream = 3 [deprecated = true];
+};
+
+message VideoDecoderInfo {
+ optional int64 decoder_type = 1;
optional bool is_platform_decoder = 2;
optional bool has_decrypting_demuxer_stream = 3 [deprecated = true];
};
@@ -285,8 +291,8 @@ message PipelineStatistics {
optional int64 audio_memory_usage = 5;
optional int64 video_memory_usage = 6;
optional int64 video_frame_duration_average_usec = 7;
- optional PipelineDecoderInfo audio_decoder_info = 8;
- optional PipelineDecoderInfo video_decoder_info = 9;
+ optional AudioDecoderInfo audio_decoder_info = 8;
+ optional VideoDecoderInfo video_decoder_info = 9;
};
message CdmKeyInformation {
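
A wire-compatibility note on the message split above: protobuf never serializes
message or field names, so replacing PipelineDecoderInfo with two messages is
harmless by itself, but changing field 1 from "string decoder_name" to "int64
decoder_type" changes the wire type (length-delimited to varint), so old and
new peers cannot exchange this field. Presumably that is why the send path
(courier_renderer_unittest.cc above) and the receive path (proto_utils.cc
below) migrate together in this patch.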
diff --git a/chromium/media/remoting/metrics.cc b/chromium/media/remoting/metrics.cc
index 2cec63576c6..fd3252c558a 100644
--- a/chromium/media/remoting/metrics.cc
+++ b/chromium/media/remoting/metrics.cc
@@ -46,8 +46,7 @@ SessionMetricsRecorder::SessionMetricsRecorder()
last_channel_layout_(CHANNEL_LAYOUT_NONE),
last_sample_rate_(0),
last_video_codec_(kUnknownVideoCodec),
- last_video_profile_(VIDEO_CODEC_PROFILE_UNKNOWN),
- remote_playback_is_disabled_(false) {}
+ last_video_profile_(VIDEO_CODEC_PROFILE_UNKNOWN) {}
SessionMetricsRecorder::~SessionMetricsRecorder() = default;
@@ -171,6 +170,21 @@ void SessionMetricsRecorder::OnRemotePlaybackDisabled(bool disabled) {
remote_playback_is_disabled_ = disabled;
}
+void SessionMetricsRecorder::RecordVideoPixelRateSupport(
+ PixelRateSupport support) {
+ if (did_record_pixel_rate_support_) {
+ return;
+ }
+ did_record_pixel_rate_support_ = true;
+ base::UmaHistogramEnumeration("Media.Remoting.VideoPixelRateSupport",
+ support);
+}
+
+void SessionMetricsRecorder::RecordCompatibility(
+ RemotingCompatibility compatibility) {
+ base::UmaHistogramEnumeration("Media.Remoting.Compatibility", compatibility);
+}
+
void SessionMetricsRecorder::RecordAudioConfiguration() {
UMA_HISTOGRAM_ENUMERATION("Media.Remoting.AudioCodec", last_audio_codec_,
kAudioCodecMax + 1);
@@ -219,7 +233,7 @@ void SessionMetricsRecorder::RecordTrackConfiguration() {
}
RendererMetricsRecorder::RendererMetricsRecorder()
- : start_time_(base::TimeTicks::Now()), did_record_first_playout_(false) {}
+ : start_time_(base::TimeTicks::Now()) {}
RendererMetricsRecorder::~RendererMetricsRecorder() = default;
diff --git a/chromium/media/remoting/metrics.h b/chromium/media/remoting/metrics.h
index db7f3a6d734..017ad9de60f 100644
--- a/chromium/media/remoting/metrics.h
+++ b/chromium/media/remoting/metrics.h
@@ -15,6 +15,39 @@
namespace media {
namespace remoting {
+// The compatibility of media content with remoting, and the reasons for
+// incompatibilities.
+// These values are persisted to logs. Entries should not be renumbered and
+// numeric values should never be reused.
+enum class RemotingCompatibility {
+ kCompatible = 0,
+ kNoAudioNorVideo = 1,
+ kEncryptedVideo = 2,
+ kIncompatibleVideoCodec = 3,
+ kEncryptedAudio = 4,
+ kIncompatibleAudioCodec = 5,
+ kDisabledByPage = 6,
+ kDurationBelowThreshold = 7,
+ // Add new values here. Don't re-number existing values.
+
+ kMaxValue = kDurationBelowThreshold,
+};
+
+// The rate of pixels in a video and whether the receiver supports its playback.
+// These values are persisted to logs. Entries should not be renumbered and
+// numeric values should never be reused.
+enum class PixelRateSupport {
+ // Pixels per second is at most the equivalent of 1080p 30fps.
+ k2kSupported = 0,
+ // More than 1080p 30fps and at most 2160p 30fps.
+ k4kSupported = 1,
+ k4kNotSupported = 2,
+ kOver4kNotSupported = 3,
+ // Add new values here. Don't re-number existing values.
+
+ kMaxValue = kOver4kNotSupported,
+};
+
class SessionMetricsRecorder {
public:
SessionMetricsRecorder();
@@ -32,6 +65,14 @@ class SessionMetricsRecorder {
void OnPipelineMetadataChanged(const PipelineMetadata& metadata);
void OnRemotePlaybackDisabled(bool disabled);
+ // Records the rate of pixels in a video (bucketed into FHD, 4K, etc.) and
+ // whether the receiver supports its playback. Records only on the first call
+ // for the recorder instance.
+ void RecordVideoPixelRateSupport(PixelRateSupport support);
+
+  // Records the compatibility of media content with remoting.
+ void RecordCompatibility(RemotingCompatibility compatibility);
+
private:
// Whether audio only, video only, or both were played during the session.
//
@@ -70,7 +111,9 @@ class SessionMetricsRecorder {
// Last known disabled playback state. This can change before/after a remoting
// session as well as during one.
- bool remote_playback_is_disabled_;
+ bool remote_playback_is_disabled_ = false;
+
+ bool did_record_pixel_rate_support_ = false;
DISALLOW_COPY_AND_ASSIGN(SessionMetricsRecorder);
};
@@ -94,7 +137,7 @@ class RendererMetricsRecorder {
private:
const base::TimeTicks start_time_;
- bool did_record_first_playout_;
+ bool did_record_first_playout_ = false;
DISALLOW_COPY_AND_ASSIGN(RendererMetricsRecorder);
};
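
The kMaxValue members added above are what allow base::UmaHistogramEnumeration()
to infer the bucket count without an explicit boundary argument: the template
form requires T::kMaxValue and records into static_cast<int>(T::kMaxValue) + 1
buckets. A minimal sketch of the call pattern metrics.cc uses:

  #include "base/metrics/histogram_functions.h"

  // Appending new enumerators before kMaxValue keeps previously recorded
  // samples valid, which is why the enum comments forbid renumbering.
  base::UmaHistogramEnumeration("Media.Remoting.Compatibility",
                                RemotingCompatibility::kEncryptedVideo);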
diff --git a/chromium/media/remoting/metrics_unittest.cc b/chromium/media/remoting/metrics_unittest.cc
new file mode 100644
index 00000000000..8ac9b0b6b76
--- /dev/null
+++ b/chromium/media/remoting/metrics_unittest.cc
@@ -0,0 +1,64 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/remoting/metrics.h"
+
+#include <string>
+
+#include "base/test/metrics/histogram_tester.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using base::Bucket;
+using testing::ElementsAre;
+
+namespace media {
+namespace remoting {
+
+namespace {
+
+class MediaRemotingMetricsTest : public testing::Test {
+ protected:
+ media::remoting::SessionMetricsRecorder recorder_;
+};
+
+} // namespace
+
+TEST_F(MediaRemotingMetricsTest, RecordVideoPixelRateSupport) {
+ constexpr char kPixelRateSupportHistogramName[] =
+ "Media.Remoting.VideoPixelRateSupport";
+ base::HistogramTester tester;
+ tester.ExpectTotalCount(kPixelRateSupportHistogramName, 0);
+
+ recorder_.RecordVideoPixelRateSupport(PixelRateSupport::k4kNotSupported);
+ recorder_.RecordVideoPixelRateSupport(PixelRateSupport::k2kSupported);
+ recorder_.RecordVideoPixelRateSupport(PixelRateSupport::k4kNotSupported);
+
+ // We record only for the first RecordVideoPixelRateSupport() call for the
+ // given SessionMetricsRecorder instance.
+ EXPECT_THAT(tester.GetAllSamples(kPixelRateSupportHistogramName),
+ ElementsAre(Bucket(
+ static_cast<int>(PixelRateSupport::k4kNotSupported), 1)));
+}
+
+TEST_F(MediaRemotingMetricsTest, RecordCompatibility) {
+ constexpr char kCompatibilityHistogramName[] = "Media.Remoting.Compatibility";
+ base::HistogramTester tester;
+ tester.ExpectTotalCount(kCompatibilityHistogramName, 0);
+
+ recorder_.RecordCompatibility(RemotingCompatibility::kIncompatibleVideoCodec);
+ recorder_.RecordCompatibility(RemotingCompatibility::kCompatible);
+ recorder_.RecordCompatibility(RemotingCompatibility::kIncompatibleVideoCodec);
+
+ EXPECT_THAT(
+ tester.GetAllSamples(kCompatibilityHistogramName),
+ ElementsAre(
+ Bucket(static_cast<int>(RemotingCompatibility::kCompatible), 1),
+ Bucket(
+ static_cast<int>(RemotingCompatibility::kIncompatibleVideoCodec),
+ 2)));
+}
+
+} // namespace remoting
+} // namespace media
diff --git a/chromium/media/remoting/proto_utils.cc b/chromium/media/remoting/proto_utils.cc
index 59270d9065f..17c864060ba 100644
--- a/chromium/media/remoting/proto_utils.cc
+++ b/chromium/media/remoting/proto_utils.cc
@@ -311,14 +311,16 @@ void ConvertProtoToPipelineStatistics(
// that sender provided the values.
if (stats_message.has_audio_decoder_info()) {
auto audio_info = stats_message.audio_decoder_info();
- stats->audio_decoder_info.decoder_name = audio_info.decoder_name();
+ stats->audio_decoder_info.decoder_type =
+ static_cast<AudioDecoderType>(audio_info.decoder_type());
stats->audio_decoder_info.is_platform_decoder =
audio_info.is_platform_decoder();
stats->audio_decoder_info.has_decrypting_demuxer_stream = false;
}
if (stats_message.has_video_decoder_info()) {
auto video_info = stats_message.video_decoder_info();
- stats->video_decoder_info.decoder_name = video_info.decoder_name();
+ stats->video_decoder_info.decoder_type =
+ static_cast<VideoDecoderType>(video_info.decoder_type());
stats->video_decoder_info.is_platform_decoder =
video_info.is_platform_decoder();
stats->video_decoder_info.has_decrypting_demuxer_stream = false;
diff --git a/chromium/media/remoting/proto_utils_unittest.cc b/chromium/media/remoting/proto_utils_unittest.cc
index a322fcc80d6..04e6fb3d02c 100644
--- a/chromium/media/remoting/proto_utils_unittest.cc
+++ b/chromium/media/remoting/proto_utils_unittest.cc
@@ -126,15 +126,15 @@ TEST_F(ProtoUtilsTest, PipelineStatisticsConversion) {
original.video_memory_usage = 43;
original.video_keyframe_distance_average = base::TimeDelta::Max();
original.video_frame_duration_average = base::TimeDelta::Max();
- original.audio_decoder_info = {false, false, "TestAudioDecoder"};
- original.video_decoder_info = {false, false, "TestVideoDecoder"};
+ original.audio_decoder_info = {false, false,
+ media::AudioDecoderType::kUnknown};
+ original.video_decoder_info = {false, false,
+ media::VideoDecoderType::kUnknown};
// There is no convert-to-proto function, so just do that here.
pb::PipelineStatistics pb_stats;
- pb::PipelineDecoderInfo* pb_video_info =
- pb_stats.mutable_video_decoder_info();
- pb::PipelineDecoderInfo* pb_audio_info =
- pb_stats.mutable_audio_decoder_info();
+ pb::VideoDecoderInfo* pb_video_info = pb_stats.mutable_video_decoder_info();
+ pb::AudioDecoderInfo* pb_audio_info = pb_stats.mutable_audio_decoder_info();
pb_stats.set_audio_bytes_decoded(original.audio_bytes_decoded);
pb_stats.set_video_bytes_decoded(original.video_bytes_decoded);
pb_stats.set_video_frames_decoded(original.video_frames_decoded);
@@ -144,11 +144,13 @@ TEST_F(ProtoUtilsTest, PipelineStatisticsConversion) {
pb_stats.set_video_frame_duration_average_usec(
original.video_frame_duration_average.InMicroseconds());
- pb_video_info->set_decoder_name(original.video_decoder_info.decoder_name);
+ pb_video_info->set_decoder_type(
+ static_cast<int64_t>(original.video_decoder_info.decoder_type));
pb_video_info->set_is_platform_decoder(
original.video_decoder_info.is_platform_decoder);
- pb_audio_info->set_decoder_name(original.audio_decoder_info.decoder_name);
+ pb_audio_info->set_decoder_type(
+ static_cast<int64_t>(original.audio_decoder_info.decoder_type));
pb_audio_info->set_is_platform_decoder(
original.audio_decoder_info.is_platform_decoder);
@@ -157,11 +159,11 @@ TEST_F(ProtoUtilsTest, PipelineStatisticsConversion) {
  // NOTE: fields will all be initialized with 0xcd. Forcing the conversion to
  // properly assign them. Since nested structs have strings, memsetting must
  // be done individually for them.
- memset(&converted, 0xcd, sizeof(converted) - sizeof(PipelineDecoderInfo) * 2);
- memset(&converted.audio_decoder_info, 0xcd,
- sizeof(PipelineDecoderInfo) - sizeof(std::string));
- memset(&converted.video_decoder_info, 0xcd,
- sizeof(PipelineDecoderInfo) - sizeof(std::string));
+ memset(
+ &converted, 0xcd,
+ sizeof(converted) - sizeof(AudioDecoderInfo) - sizeof(VideoDecoderInfo));
+ memset(&converted.audio_decoder_info, 0xcd, sizeof(AudioDecoderInfo));
+ memset(&converted.video_decoder_info, 0xcd, sizeof(VideoDecoderInfo));
ConvertProtoToPipelineStatistics(pb_stats, &converted);
diff --git a/chromium/media/remoting/receiver.cc b/chromium/media/remoting/receiver.cc
index b5238b4b4bc..d93e781a7ac 100644
--- a/chromium/media/remoting/receiver.cc
+++ b/chromium/media/remoting/receiver.cc
@@ -7,11 +7,11 @@
#include <utility>
#include "base/bind.h"
+#include "base/bind_post_task.h"
#include "base/callback.h"
#include "base/notreached.h"
#include "base/single_thread_task_runner.h"
#include "base/threading/thread_task_runner_handle.h"
-#include "media/base/bind_to_current_loop.h"
#include "media/base/decoder_buffer.h"
#include "media/base/renderer.h"
#include "media/remoting/proto_enum_utils.h"
@@ -53,7 +53,7 @@ Receiver::Receiver(
// Note: The constructor is running on the main thread, but will be destroyed
// on the media thread. Therefore, all weak pointers must be dereferenced on
// the media thread.
- const RpcBroker::ReceiveMessageCallback receive_callback = BindToLoop(
+ const RpcBroker::ReceiveMessageCallback receive_callback = base::BindPostTask(
media_task_runner_,
BindRepeating(&Receiver::OnReceivedRpc, weak_factory_.GetWeakPtr()));
diff --git a/chromium/media/remoting/renderer_controller.cc b/chromium/media/remoting/renderer_controller.cc
index ccbddb1b424..ae3e46b5d57 100644
--- a/chromium/media/remoting/renderer_controller.cc
+++ b/chromium/media/remoting/renderer_controller.cc
@@ -9,6 +9,7 @@
#include "base/time/default_tick_clock.h"
#include "base/time/tick_clock.h"
#include "base/time/time.h"
+#include "media/remoting/metrics.h"
#if defined(OS_ANDROID)
#include "media/base/android/media_codec_util.h"
@@ -17,14 +18,18 @@
namespace media {
namespace remoting {
+using mojom::RemotingSinkAudioCapability;
+using mojom::RemotingSinkFeature;
+using mojom::RemotingSinkVideoCapability;
+
namespace {
// The duration to delay the start of media remoting to ensure all preconditions
// are held stable before switching to media remoting.
constexpr base::TimeDelta kDelayedStart = base::TimeDelta::FromSeconds(5);
-constexpr int kPixelPerSec4K = 3840 * 2160 * 30; // 4k 30fps.
-constexpr int kPixelPerSec2K = 1920 * 1080 * 30; // 1080p 30fps.
+constexpr int kPixelsPerSec4k = 3840 * 2160 * 30; // 4k 30fps.
+constexpr int kPixelsPerSec2k = 1920 * 1080 * 30; // 1080p 30fps.
// The minimum media element duration that is allowed for media remoting.
// Frequent switching into and out of media remoting for short-duration media
@@ -125,7 +130,7 @@ void RendererController::OnSinkAvailable(
sink_metadata_ = *metadata;
- if (!HasFeatureCapability(mojom::RemotingSinkFeature::RENDERING)) {
+ if (!SinkSupportsRemoting()) {
OnSinkGone();
return;
}
@@ -214,16 +219,30 @@ base::WeakPtr<RpcBroker> RendererController::GetRpcBroker() {
}
#endif
-void RendererController::StartDataPipe(
- std::unique_ptr<mojo::DataPipe> audio_data_pipe,
- std::unique_ptr<mojo::DataPipe> video_data_pipe,
- DataPipeStartCallback done_callback) {
+void RendererController::StartDataPipe(uint32_t data_pipe_capacity,
+ bool audio,
+ bool video,
+ DataPipeStartCallback done_callback) {
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(!done_callback.is_null());
- bool audio = audio_data_pipe != nullptr;
- bool video = video_data_pipe != nullptr;
- if (!audio && !video) {
+ bool ok = audio || video;
+
+ mojo::ScopedDataPipeProducerHandle audio_producer_handle;
+ mojo::ScopedDataPipeConsumerHandle audio_consumer_handle;
+ if (ok && audio) {
+ ok &= mojo::CreateDataPipe(data_pipe_capacity, audio_producer_handle,
+ audio_consumer_handle) == MOJO_RESULT_OK;
+ }
+
+ mojo::ScopedDataPipeProducerHandle video_producer_handle;
+ mojo::ScopedDataPipeConsumerHandle video_consumer_handle;
+ if (ok && video) {
+ ok &= mojo::CreateDataPipe(data_pipe_capacity, video_producer_handle,
+ video_consumer_handle) == MOJO_RESULT_OK;
+ }
+
+ if (!ok) {
LOG(ERROR) << "No audio nor video to establish data pipe";
std::move(done_callback)
.Run(mojo::NullRemote(), mojo::NullRemote(),
@@ -231,23 +250,18 @@ void RendererController::StartDataPipe(
mojo::ScopedDataPipeProducerHandle());
return;
}
+
mojo::PendingRemote<mojom::RemotingDataStreamSender> audio_stream_sender;
mojo::PendingRemote<mojom::RemotingDataStreamSender> video_stream_sender;
remoter_->StartDataStreams(
- audio ? std::move(audio_data_pipe->consumer_handle)
- : mojo::ScopedDataPipeConsumerHandle(),
- video ? std::move(video_data_pipe->consumer_handle)
- : mojo::ScopedDataPipeConsumerHandle(),
+ std::move(audio_consumer_handle), std::move(video_consumer_handle),
audio ? audio_stream_sender.InitWithNewPipeAndPassReceiver()
: mojo::NullReceiver(),
video ? video_stream_sender.InitWithNewPipeAndPassReceiver()
: mojo::NullReceiver());
std::move(done_callback)
.Run(std::move(audio_stream_sender), std::move(video_stream_sender),
- audio ? std::move(audio_data_pipe->producer_handle)
- : mojo::ScopedDataPipeProducerHandle(),
- video ? std::move(video_data_pipe->producer_handle)
- : mojo::ScopedDataPipeProducerHandle());
+ std::move(audio_producer_handle), std::move(video_producer_handle));
}
void RendererController::OnMetadataChanged(const PipelineMetadata& metadata) {
@@ -327,41 +341,75 @@ void RendererController::UpdateRemotePlaybackAvailabilityMonitoringState() {
bool RendererController::IsVideoCodecSupported() const {
DCHECK(thread_checker_.CalledOnValidThread());
+ return GetVideoCompatibility() == RemotingCompatibility::kCompatible;
+}
+
+bool RendererController::IsAudioCodecSupported() const {
+ DCHECK(thread_checker_.CalledOnValidThread());
+ return GetAudioCompatibility() == RemotingCompatibility::kCompatible;
+}
+
+void RendererController::OnPlaying() {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ is_paused_ = false;
+ UpdateAndMaybeSwitch(PLAY_COMMAND, UNKNOWN_STOP_TRIGGER);
+}
+
+void RendererController::OnPaused() {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ is_paused_ = true;
+ // Cancel the start if in the middle of delayed start.
+ CancelDelayedStart();
+}
+
+RemotingCompatibility RendererController::GetVideoCompatibility() const {
+ DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(has_video());
// Media Remoting doesn't support encrypted media.
if (pipeline_metadata_.video_decoder_config.is_encrypted())
- return false;
+ return RemotingCompatibility::kEncryptedVideo;
+ bool compatible = false;
switch (pipeline_metadata_.video_decoder_config.codec()) {
case VideoCodec::kCodecH264:
- return HasVideoCapability(mojom::RemotingSinkVideoCapability::CODEC_H264);
+ compatible = HasVideoCapability(RemotingSinkVideoCapability::CODEC_H264);
+ break;
case VideoCodec::kCodecVP8:
- return HasVideoCapability(mojom::RemotingSinkVideoCapability::CODEC_VP8);
+ compatible = HasVideoCapability(RemotingSinkVideoCapability::CODEC_VP8);
+ break;
case VideoCodec::kCodecVP9:
- return HasVideoCapability(mojom::RemotingSinkVideoCapability::CODEC_VP9);
+ compatible = HasVideoCapability(RemotingSinkVideoCapability::CODEC_VP9);
+ break;
case VideoCodec::kCodecHEVC:
- return HasVideoCapability(mojom::RemotingSinkVideoCapability::CODEC_HEVC);
+ compatible = HasVideoCapability(RemotingSinkVideoCapability::CODEC_HEVC);
+ break;
default:
VLOG(2) << "Remoting does not support video codec: "
<< pipeline_metadata_.video_decoder_config.codec();
- return false;
}
+ return compatible ? RemotingCompatibility::kCompatible
+ : RemotingCompatibility::kIncompatibleVideoCodec;
}
-bool RendererController::IsAudioCodecSupported() const {
+RemotingCompatibility RendererController::GetAudioCompatibility() const {
DCHECK(thread_checker_.CalledOnValidThread());
DCHECK(has_audio());
// Media Remoting doesn't support encrypted media.
if (pipeline_metadata_.audio_decoder_config.is_encrypted())
- return false;
+ return RemotingCompatibility::kEncryptedAudio;
+ bool compatible = false;
switch (pipeline_metadata_.audio_decoder_config.codec()) {
case AudioCodec::kCodecAAC:
- return HasAudioCapability(mojom::RemotingSinkAudioCapability::CODEC_AAC);
+ compatible = HasAudioCapability(RemotingSinkAudioCapability::CODEC_AAC);
+ break;
case AudioCodec::kCodecOpus:
- return HasAudioCapability(mojom::RemotingSinkAudioCapability::CODEC_OPUS);
+ compatible = HasAudioCapability(RemotingSinkAudioCapability::CODEC_OPUS);
+ break;
case AudioCodec::kCodecMP3:
case AudioCodec::kCodecPCM:
case AudioCodec::kCodecVorbis:
@@ -376,49 +424,43 @@ bool RendererController::IsAudioCodecSupported() const {
case AudioCodec::kCodecPCM_ALAW:
case AudioCodec::kCodecALAC:
case AudioCodec::kCodecAC3:
- return HasAudioCapability(
- mojom::RemotingSinkAudioCapability::CODEC_BASELINE_SET);
+ compatible =
+ HasAudioCapability(RemotingSinkAudioCapability::CODEC_BASELINE_SET);
+ break;
default:
VLOG(2) << "Remoting does not support audio codec: "
<< pipeline_metadata_.audio_decoder_config.codec();
- return false;
}
+ return compatible ? RemotingCompatibility::kCompatible
+ : RemotingCompatibility::kIncompatibleAudioCodec;
}
-void RendererController::OnPlaying() {
+RemotingCompatibility RendererController::GetCompatibility() const {
DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK(client_);
- is_paused_ = false;
- UpdateAndMaybeSwitch(PLAY_COMMAND, UNKNOWN_STOP_TRIGGER);
-}
-
-void RendererController::OnPaused() {
- DCHECK(thread_checker_.CalledOnValidThread());
-
- is_paused_ = true;
- // Cancel the start if in the middle of delayed start.
- CancelDelayedStart();
-}
-
-bool RendererController::CanBeRemoting() const {
- DCHECK(thread_checker_.CalledOnValidThread());
+ if (is_remote_playback_disabled_)
+ return RemotingCompatibility::kDisabledByPage;
- if (!client_)
- return false; // No way to switch to the remoting renderer.
+ if (!has_video() && !has_audio())
+ return RemotingCompatibility::kNoAudioNorVideo;
- if (permanently_disable_remoting_)
- return false;
-
- if (!IsAudioOrVideoSupported())
- return false;
+ if (has_video()) {
+ RemotingCompatibility compatibility = GetVideoCompatibility();
+ if (compatibility != RemotingCompatibility::kCompatible)
+ return compatibility;
+ }
- if (is_remote_playback_disabled_)
- return false;
+ if (has_audio()) {
+ RemotingCompatibility compatibility = GetAudioCompatibility();
+ if (compatibility != RemotingCompatibility::kCompatible)
+ return compatibility;
+ }
if (client_->Duration() <= kMinRemotingMediaDurationInSec)
- return false;
+ return RemotingCompatibility::kDurationBelowThreshold;
- return true;
+ return RemotingCompatibility::kCompatible;
}
bool RendererController::IsAudioOrVideoSupported() const {
@@ -431,26 +473,26 @@ void RendererController::UpdateAndMaybeSwitch(StartTrigger start_trigger,
StopTrigger stop_trigger) {
DCHECK(thread_checker_.CalledOnValidThread());
- bool should_be_remoting = CanBeRemoting();
-
// Being the dominant visible content is the signal that starts remote
// rendering.
- should_be_remoting &=
- (is_dominant_content_ && !encountered_renderer_fatal_error_);
+ // Also, only switch to remoting when media is playing. Since the renderer is
+ // created when video starts loading, the receiver would display a black
+ // screen if switching to remoting while paused. Thus, the user experience is
+ // improved by not starting remoting until playback resumes.
+ bool should_be_remoting = client_ && !encountered_renderer_fatal_error_ &&
+ is_dominant_content_ && !is_paused_ &&
+ SinkSupportsRemoting();
+ if (should_be_remoting) {
+ const RemotingCompatibility compatibility = GetCompatibility();
+ metrics_recorder_.RecordCompatibility(compatibility);
+ should_be_remoting = compatibility == RemotingCompatibility::kCompatible;
+ }
if ((remote_rendering_started_ ||
delayed_start_stability_timer_.IsRunning()) == should_be_remoting) {
return;
}
- // Only switch to remoting when media is playing. Since the renderer is
- // created when video starts loading/playing, receiver will display a black
- // screen before video starts playing if switching to remoting when paused.
- // Thus, the user experience is improved by not starting remoting until
- // playback resumes.
- if (should_be_remoting && is_paused_)
- return;
-
if (should_be_remoting) {
WaitForStabilityBeforeStart(start_trigger);
} else if (delayed_start_stability_timer_.IsRunning()) {
@@ -498,13 +540,10 @@ void RendererController::OnDelayedStartTimerFired(
const double frame_rate =
(client_->DecodedFrameCount() - decoded_frame_count_before_delay) /
elapsed.InSecondsF();
- const double pixel_per_sec =
+ const double pixels_per_second =
frame_rate * pipeline_metadata_.natural_size.GetArea();
- if ((pixel_per_sec > kPixelPerSec4K) ||
- ((pixel_per_sec > kPixelPerSec2K) &&
- !HasVideoCapability(mojom::RemotingSinkVideoCapability::SUPPORT_4K))) {
- VLOG(1) << "Media remoting is not supported: frame_rate = " << frame_rate
- << " resolution = " << pipeline_metadata_.natural_size.ToString();
+ const bool supported = RecordPixelRateSupport(pixels_per_second);
+ if (!supported) {
permanently_disable_remoting_ = true;
return;
}
@@ -518,6 +557,28 @@ void RendererController::OnDelayedStartTimerFired(
remoter_->Start();
}
+bool RendererController::RecordPixelRateSupport(double pixels_per_second) {
+ if (pixels_per_second <= kPixelsPerSec2k) {
+ metrics_recorder_.RecordVideoPixelRateSupport(
+ PixelRateSupport::k2kSupported);
+ return true;
+ }
+ if (pixels_per_second <= kPixelsPerSec4k) {
+ if (HasVideoCapability(mojom::RemotingSinkVideoCapability::SUPPORT_4K)) {
+ metrics_recorder_.RecordVideoPixelRateSupport(
+ PixelRateSupport::k4kSupported);
+ return true;
+ } else {
+ metrics_recorder_.RecordVideoPixelRateSupport(
+ PixelRateSupport::k4kNotSupported);
+ return false;
+ }
+ }
+ metrics_recorder_.RecordVideoPixelRateSupport(
+ PixelRateSupport::kOver4kNotSupported);
+ return false;
+}
+
void RendererController::OnRendererFatalError(StopTrigger stop_trigger) {
DCHECK(thread_checker_.CalledOnValidThread());
@@ -560,12 +621,16 @@ bool RendererController::HasAudioCapability(
}
bool RendererController::HasFeatureCapability(
- mojom::RemotingSinkFeature capability) const {
+ RemotingSinkFeature capability) const {
return std::find(std::begin(sink_metadata_.features),
std::end(sink_metadata_.features),
capability) != std::end(sink_metadata_.features);
}
+bool RendererController::SinkSupportsRemoting() const {
+ return HasFeatureCapability(RemotingSinkFeature::RENDERING);
+}
+
void RendererController::SendMessageToSink(
std::unique_ptr<std::vector<uint8_t>> message) {
DCHECK(thread_checker_.CalledOnValidThread());
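
For a sense of scale of the renamed thresholds: kPixelsPerSec2k = 1920 * 1080 *
30 = 62,208,000 and kPixelsPerSec4k = 3840 * 2160 * 30 = 248,832,000 pixels per
second. A 1080p60 stream, for example, measures 1920 * 1080 * 60 = 124,416,000
pixels per second, above the 2k threshold but within the 4k one, so
RecordPixelRateSupport() returns true (recording k4kSupported) only when the
sink advertises SUPPORT_4K; otherwise it records k4kNotSupported and the caller
permanently disables remoting for this media element.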
diff --git a/chromium/media/remoting/renderer_controller.h b/chromium/media/remoting/renderer_controller.h
index 8c22bbf09eb..4bcc0e1c1b0 100644
--- a/chromium/media/remoting/renderer_controller.h
+++ b/chromium/media/remoting/renderer_controller.h
@@ -80,8 +80,13 @@ class RendererController final : public mojom::RemotingSource,
mojo::PendingRemote<mojom::RemotingDataStreamSender> video,
mojo::ScopedDataPipeProducerHandle audio_handle,
mojo::ScopedDataPipeProducerHandle video_handle)>;
- void StartDataPipe(std::unique_ptr<mojo::DataPipe> audio_data_pipe,
- std::unique_ptr<mojo::DataPipe> video_data_pipe,
+ // Creates up to two data pipes with a byte capacity of |data_pipe_capacity|:
+  // one for audio if |audio| is true and one for video if |video| is true. The
+ // controller then starts processing the consumer ends of the data pipes,
+ // with the producer ends supplied to the |done_callback|.
+ void StartDataPipe(uint32_t data_pipe_capacity,
+ bool audio,
+ bool video,
DataPipeStartCallback done_callback);
#if BUILDFLAG(ENABLE_MEDIA_REMOTING_RPC)
@@ -117,11 +122,14 @@ class RendererController final : public mojom::RemotingSource,
bool IsAudioCodecSupported() const;
bool IsAudioOrVideoSupported() const;
- // Returns true if all of the technical requirements for the media pipeline
- // and remote rendering are being met. This does not include environmental
- // conditions, such as the content being dominant in the viewport, available
- // network bandwidth, etc.
- bool CanBeRemoting() const;
+ // Returns |kCompatible| if all of the technical requirements for the media
+ // pipeline and remote rendering are being met, and the first detected
+ // reason if incompatible. This does not include environmental conditions,
+ // such as the content being dominant in the viewport, available network
+ // bandwidth, etc.
+ RemotingCompatibility GetVideoCompatibility() const;
+ RemotingCompatibility GetAudioCompatibility() const;
+ RemotingCompatibility GetCompatibility() const;
// Determines whether to enter or leave Remoting mode and switches if
// necessary. Each call to this method could cause a remoting session to be
@@ -146,10 +154,15 @@ class RendererController final : public mojom::RemotingSource,
unsigned decoded_frame_count_before_delay,
base::TimeTicks delayed_start_time);
+ // Records in a histogram and returns whether the receiver supports the given
+ // pixel rate.
+ bool RecordPixelRateSupport(double pixels_per_second);
+
// Queries on remoting sink capabilities.
bool HasVideoCapability(mojom::RemotingSinkVideoCapability capability) const;
bool HasAudioCapability(mojom::RemotingSinkAudioCapability capability) const;
bool HasFeatureCapability(mojom::RemotingSinkFeature capability) const;
+ bool SinkSupportsRemoting() const;
// Callback from RpcBroker when sending message to remote sink.
void SendMessageToSink(std::unique_ptr<std::vector<uint8_t>> message);
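
A caller-side sketch of the new StartDataPipe() signature (the lambda body is
hypothetical; the capacity would typically be
DemuxerStreamAdapter::kMojoDataPipeCapacityInBytes):

  controller->StartDataPipe(
      DemuxerStreamAdapter::kMojoDataPipeCapacityInBytes,
      /*audio=*/true, /*video=*/true,
      base::BindOnce(
          [](mojo::PendingRemote<mojom::RemotingDataStreamSender> audio,
             mojo::PendingRemote<mojom::RemotingDataStreamSender> video,
             mojo::ScopedDataPipeProducerHandle audio_handle,
             mojo::ScopedDataPipeProducerHandle video_handle) {
            // Feed encoded buffers into the producer handles from here.
          }));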
diff --git a/chromium/media/remoting/stream_provider.cc b/chromium/media/remoting/stream_provider.cc
index 72aeb9b7526..476d89e8ebb 100644
--- a/chromium/media/remoting/stream_provider.cc
+++ b/chromium/media/remoting/stream_provider.cc
@@ -6,6 +6,7 @@
#include <vector>
#include "base/bind.h"
+#include "base/bind_post_task.h"
#include "base/callback.h"
#include "base/callback_helpers.h"
#include "base/containers/circular_deque.h"
@@ -62,9 +63,9 @@ StreamProvider::MediaStream::MediaStream(
media_weak_this_ = media_weak_factory_.GetWeakPtr();
- const RpcBroker::ReceiveMessageCallback receive_callback =
- BindToLoop(media_task_runner_,
- BindRepeating(&MediaStream::OnReceivedRpc, media_weak_this_));
+ const RpcBroker::ReceiveMessageCallback receive_callback = base::BindPostTask(
+ media_task_runner_,
+ BindRepeating(&MediaStream::OnReceivedRpc, media_weak_this_));
rpc_broker_->RegisterMessageReceiverCallback(rpc_handle_, receive_callback);
}
@@ -424,7 +425,7 @@ StreamProvider::StreamProvider(
media_weak_this_ = media_weak_factory_.GetWeakPtr();
- auto callback = BindToLoop(
+ auto callback = base::BindPostTask(
media_task_runner_,
base::BindRepeating(&StreamProvider::OnReceivedRpc, media_weak_this_));
rpc_broker_->RegisterMessageReceiverCallback(RpcBroker::kAcquireDemuxerHandle,
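
The BindToLoop()-to-base::BindPostTask() migration in this file and in
receiver.cc follows the same shape: base::BindPostTask() wraps a callback so
that every invocation is posted to the given task runner instead of running in
place, with the target runner always explicit. A minimal sketch with
hypothetical names:

  #include "base/bind_post_task.h"

  auto receive_callback = base::BindPostTask(
      media_task_runner,
      base::BindRepeating(&Handler::OnReceivedRpc, weak_this));
  // Safe to hand to another thread: invoking |receive_callback| there posts
  // Handler::OnReceivedRpc() back onto |media_task_runner|.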
diff --git a/chromium/media/renderers/BUILD.gn b/chromium/media/renderers/BUILD.gn
index bbbc19ade69..42981ccb8b3 100644
--- a/chromium/media/renderers/BUILD.gn
+++ b/chromium/media/renderers/BUILD.gn
@@ -2,8 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import("//third_party/libaom/options.gni")
-
source_set("renderers") {
# Do not expand the visibility here without double-checking with OWNERS, this
# is a roll-up target which is part of the //media component. Most other DEPs
@@ -50,7 +48,6 @@ source_set("renderers") {
"//media/base",
"//media/filters",
"//media/video",
- "//third_party/libaom:libaom_buildflags",
"//third_party/libyuv",
"//ui/gfx:geometry_skia",
"//ui/gfx:memory_buffer",
@@ -102,6 +99,27 @@ source_set("renderers") {
]
}
+# Note: This is a roll-up only target; do not expand the visibility. DEPS should
+# depend on the //media:test_support target instead.
+static_library("test_support") {
+ visibility = [ "//media:test_support" ]
+ testonly = true
+ sources = [
+ "shared_image_video_frame_test_utils.cc",
+ "shared_image_video_frame_test_utils.h",
+ ]
+ configs += [ "//media:media_config" ]
+ deps = [
+ "//base",
+ "//components/viz/common",
+ "//gpu:test_support",
+ "//gpu/command_buffer/client:gles2_interface",
+ "//gpu/command_buffer/common",
+ "//media/base:test_support",
+ "//ui/gfx",
+ ]
+}
+
source_set("unit_tests") {
testonly = true
sources = [
diff --git a/chromium/media/renderers/audio_renderer_impl.cc b/chromium/media/renderers/audio_renderer_impl.cc
index bf96c3f98d8..44f2e48a45b 100644
--- a/chromium/media/renderers/audio_renderer_impl.cc
+++ b/chromium/media/renderers/audio_renderer_impl.cc
@@ -727,6 +727,7 @@ void AudioRendererImpl::OnWaiting(WaitingReason reason) {
void AudioRendererImpl::SetVolume(float volume) {
DCHECK(task_runner_->BelongsToCurrentThread());
+ was_unmuted_ = was_unmuted_ || volume != 0;
if (state_ == kUninitialized || state_ == kInitializing) {
volume_ = volume;
return;
@@ -795,6 +796,12 @@ void AudioRendererImpl::SetPreservesPitch(bool preserves_pitch) {
algorithm_->SetPreservesPitch(preserves_pitch);
}
+void AudioRendererImpl::SetAutoplayInitiated(bool autoplay_initiated) {
+ base::AutoLock auto_lock(lock_);
+
+ autoplay_initiated_ = autoplay_initiated;
+}
+
void AudioRendererImpl::OnSuspend() {
base::AutoLock auto_lock(lock_);
is_suspending_ = true;
@@ -810,9 +817,9 @@ void AudioRendererImpl::SetPlayDelayCBForTesting(PlayDelayCBForTesting cb) {
play_delay_cb_for_testing_ = std::move(cb);
}
-void AudioRendererImpl::DecodedAudioReady(AudioDecoderStream::ReadStatus status,
- scoped_refptr<AudioBuffer> buffer) {
- DVLOG(2) << __func__ << "(" << status << ")";
+void AudioRendererImpl::DecodedAudioReady(
+ AudioDecoderStream::ReadResult result) {
+ DVLOG(2) << __func__ << "(" << result.code() << ")";
DCHECK(task_runner_->BelongsToCurrentThread());
base::AutoLock auto_lock(lock_);
@@ -821,18 +828,14 @@ void AudioRendererImpl::DecodedAudioReady(AudioDecoderStream::ReadStatus status,
CHECK(pending_read_);
pending_read_ = false;
- if (status == AudioDecoderStream::ABORTED ||
- status == AudioDecoderStream::DEMUXER_READ_ABORTED) {
- HandleAbortedReadOrDecodeError(PIPELINE_OK);
+ if (result.has_error()) {
+ HandleAbortedReadOrDecodeError(result.code() == StatusCode::kAborted
+ ? PIPELINE_OK
+ : PIPELINE_ERROR_DECODE);
return;
}
- if (status == AudioDecoderStream::DECODE_ERROR) {
- HandleAbortedReadOrDecodeError(PIPELINE_ERROR_DECODE);
- return;
- }
-
- DCHECK_EQ(status, AudioDecoderStream::OK);
+ scoped_refptr<AudioBuffer> buffer = std::move(result).value();
DCHECK(buffer);
if (state_ == kFlushing) {
@@ -959,8 +962,11 @@ bool AudioRendererImpl::HandleDecodedBuffer_Locked(
first_packet_timestamp_ = buffer->timestamp();
#if !defined(OS_ANDROID)
- if (transcribe_audio_callback_ && volume_ > 0)
+  // Do not transcribe streams that were initiated by autoplay and have never
+  // been unmuted.
+ if (transcribe_audio_callback_ && !(autoplay_initiated_ && !was_unmuted_)) {
transcribe_audio_callback_.Run(buffer);
+ }
#endif
if (state_ != kUninitialized)
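
The new transcription gate is easier to read after De Morgan:
!(autoplay_initiated_ && !was_unmuted_) is equivalent to (!autoplay_initiated_
|| was_unmuted_), i.e. buffers are transcribed for any non-autoplay stream, and
for autoplay streams only once SetVolume() has observed a non-zero volume
(which latches was_unmuted_ above). Unlike the volume_ > 0 check it replaces, a
stream that was ever audible keeps being transcribed while temporarily muted.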
diff --git a/chromium/media/renderers/audio_renderer_impl.h b/chromium/media/renderers/audio_renderer_impl.h
index 4a4a891554f..1d3202c2e8d 100644
--- a/chromium/media/renderers/audio_renderer_impl.h
+++ b/chromium/media/renderers/audio_renderer_impl.h
@@ -101,6 +101,7 @@ class MEDIA_EXPORT AudioRendererImpl
void SetVolume(float volume) override;
void SetLatencyHint(base::Optional<base::TimeDelta> latency_hint) override;
void SetPreservesPitch(bool preserves_pitch) override;
+ void SetAutoplayInitiated(bool autoplay_initiated) override;
// base::PowerObserver implementation.
void OnSuspend() override;
@@ -138,8 +139,7 @@ class MEDIA_EXPORT AudioRendererImpl
OutputDeviceInfo output_device_info);
// Callback from the audio decoder delivering decoded audio samples.
- void DecodedAudioReady(AudioDecoderStream::ReadStatus status,
- scoped_refptr<AudioBuffer> buffer);
+ void DecodedAudioReady(AudioDecoderStream::ReadResult result);
// Handles buffers that come out of decoder (MSE: after passing through
// |buffer_converter_|).
@@ -297,6 +297,9 @@ class MEDIA_EXPORT AudioRendererImpl
// Cached volume provided by SetVolume().
float volume_;
+ // A flag indicating whether the audio stream was ever unmuted.
+ bool was_unmuted_ = false;
+
// After Initialize() has completed, all variables below must be accessed
// under |lock_|. ------------------------------------------------------------
base::Lock lock_;
@@ -313,6 +316,8 @@ class MEDIA_EXPORT AudioRendererImpl
// make pitch adjustments at playbacks other than 1.0.
bool preserves_pitch_ = true;
+ bool autoplay_initiated_ = false;
+
// Simple state tracking variable.
State state_;
diff --git a/chromium/media/renderers/audio_renderer_impl_unittest.cc b/chromium/media/renderers/audio_renderer_impl_unittest.cc
index 9b002c71159..7f25d00afee 100644
--- a/chromium/media/renderers/audio_renderer_impl_unittest.cc
+++ b/chromium/media/renderers/audio_renderer_impl_unittest.cc
@@ -28,12 +28,14 @@
#include "media/base/mock_audio_renderer_sink.h"
#include "media/base/mock_filters.h"
#include "media/base/test_helpers.h"
+#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
using ::base::TimeDelta;
using ::base::test::RunCallback;
using ::base::test::RunOnceCallback;
using ::testing::_;
+using ::testing::DoAll;
using ::testing::Return;
using ::testing::SaveArg;
diff --git a/chromium/media/renderers/decrypting_renderer.cc b/chromium/media/renderers/decrypting_renderer.cc
index eb0ca87f077..346a774f33e 100644
--- a/chromium/media/renderers/decrypting_renderer.cc
+++ b/chromium/media/renderers/decrypting_renderer.cc
@@ -115,6 +115,10 @@ void DecryptingRenderer::SetPreservesPitch(bool preserves_pitch) {
renderer_->SetPreservesPitch(preserves_pitch);
}
+void DecryptingRenderer::SetAutoplayInitiated(bool autoplay_initiated) {
+ renderer_->SetAutoplayInitiated(autoplay_initiated);
+}
+
void DecryptingRenderer::Flush(base::OnceClosure flush_cb) {
renderer_->Flush(std::move(flush_cb));
}
diff --git a/chromium/media/renderers/decrypting_renderer.h b/chromium/media/renderers/decrypting_renderer.h
index 84b9747b805..a51d2482bf3 100644
--- a/chromium/media/renderers/decrypting_renderer.h
+++ b/chromium/media/renderers/decrypting_renderer.h
@@ -47,6 +47,7 @@ class MEDIA_EXPORT DecryptingRenderer : public Renderer {
void SetCdm(CdmContext* cdm_context, CdmAttachedCB cdm_attached_cb) override;
void SetLatencyHint(base::Optional<base::TimeDelta> latency_hint) override;
void SetPreservesPitch(bool preserves_pitch) override;
+ void SetAutoplayInitiated(bool autoplay_initiated) override;
void Flush(base::OnceClosure flush_cb) override;
void StartPlayingFrom(base::TimeDelta time) override;
diff --git a/chromium/media/renderers/default_decoder_factory.cc b/chromium/media/renderers/default_decoder_factory.cc
index 6e96a691b34..41cce189495 100644
--- a/chromium/media/renderers/default_decoder_factory.cc
+++ b/chromium/media/renderers/default_decoder_factory.cc
@@ -16,7 +16,6 @@
#include "media/base/media_switches.h"
#include "media/media_buildflags.h"
#include "media/video/gpu_video_accelerator_factories.h"
-#include "third_party/libaom/libaom_buildflags.h"
#if !defined(OS_ANDROID)
#include "media/filters/decrypting_audio_decoder.h"
@@ -86,6 +85,72 @@ void DefaultDecoderFactory::CreateAudioDecoders(
}
}
+SupportedVideoDecoderConfigs
+DefaultDecoderFactory::GetSupportedVideoDecoderConfigsForWebRTC() {
+ SupportedVideoDecoderConfigs supported_configs;
+
+ {
+ base::AutoLock auto_lock(shutdown_lock_);
+ if (external_decoder_factory_) {
+ SupportedVideoDecoderConfigs external_supported_configs =
+ external_decoder_factory_->GetSupportedVideoDecoderConfigsForWebRTC();
+ supported_configs.insert(supported_configs.end(),
+ external_supported_configs.begin(),
+ external_supported_configs.end());
+ }
+ }
+
+#if defined(OS_FUCHSIA)
+ // TODO(crbug.com/1173503): Implement capabilities for fuchsia.
+ if (base::CommandLine::ForCurrentProcess()->HasSwitch(
+ switches::kDisableSoftwareVideoDecoders)) {
+ // Bypass software codec registration.
+ return supported_configs;
+ }
+#endif
+
+ if (!base::FeatureList::IsEnabled(media::kExposeSwDecodersToWebRTC))
+ return supported_configs;
+
+#if BUILDFLAG(ENABLE_LIBVPX)
+ SupportedVideoDecoderConfigs vpx_configs =
+ VpxVideoDecoder::SupportedConfigs();
+
+ for (auto& config : vpx_configs) {
+ if (config.profile_min >= VP9PROFILE_MIN &&
+ config.profile_max <= VP9PROFILE_MAX) {
+ supported_configs.emplace_back(config);
+ }
+ }
+#endif
+
+#if BUILDFLAG(ENABLE_LIBGAV1_DECODER)
+ if (base::FeatureList::IsEnabled(kGav1VideoDecoder)) {
+ SupportedVideoDecoderConfigs gav1_configs =
+ Gav1VideoDecoder::SupportedConfigs();
+ supported_configs.insert(supported_configs.end(), gav1_configs.begin(),
+ gav1_configs.end());
+ } else
+#endif
+ {
+#if BUILDFLAG(ENABLE_DAV1D_DECODER)
+ SupportedVideoDecoderConfigs dav1d_configs =
+ Dav1dVideoDecoder::SupportedConfigs();
+ supported_configs.insert(supported_configs.end(), dav1d_configs.begin(),
+ dav1d_configs.end());
+#endif
+ }
+
+#if BUILDFLAG(ENABLE_FFMPEG_VIDEO_DECODERS)
+ SupportedVideoDecoderConfigs ffmpeg_configs =
+ FFmpegVideoDecoder::SupportedConfigsForWebRTC();
+ supported_configs.insert(supported_configs.end(), ffmpeg_configs.begin(),
+ ffmpeg_configs.end());
+#endif
+
+ return supported_configs;
+}
+
void DefaultDecoderFactory::CreateVideoDecoders(
scoped_refptr<base::SequencedTaskRunner> task_runner,
GpuVideoAcceleratorFactories* gpu_factories,
diff --git a/chromium/media/renderers/default_decoder_factory.h b/chromium/media/renderers/default_decoder_factory.h
index 966a81dbfbc..9283bd9cccc 100644
--- a/chromium/media/renderers/default_decoder_factory.h
+++ b/chromium/media/renderers/default_decoder_factory.h
@@ -26,6 +26,8 @@ class MEDIA_EXPORT DefaultDecoderFactory final : public DecoderFactory {
MediaLog* media_log,
std::vector<std::unique_ptr<AudioDecoder>>* audio_decoders) final;
+ SupportedVideoDecoderConfigs GetSupportedVideoDecoderConfigsForWebRTC() final;
+
void CreateVideoDecoders(
scoped_refptr<base::SequencedTaskRunner> task_runner,
GpuVideoAcceleratorFactories* gpu_factories,
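
One subtlety in GetSupportedVideoDecoderConfigsForWebRTC() above: the "} else"
immediately before an #endif makes the following brace-enclosed block compile
either as the else-branch of the Gav1 feature check (when
ENABLE_LIBGAV1_DECODER is defined) or as a plain free-standing block (when it
is not). Reduced to its skeleton:

  #if BUILDFLAG(ENABLE_LIBGAV1_DECODER)
    if (base::FeatureList::IsEnabled(kGav1VideoDecoder)) {
      // Register Gav1 configs.
    } else
  #endif
    {
      // Register Dav1d configs (behind their own buildflag); this runs when
      // Gav1 is compiled out, or compiled in but disabled at runtime.
    }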
diff --git a/chromium/media/renderers/paint_canvas_video_renderer.cc b/chromium/media/renderers/paint_canvas_video_renderer.cc
index 21133386d89..a1923e2d963 100644
--- a/chromium/media/renderers/paint_canvas_video_renderer.cc
+++ b/chromium/media/renderers/paint_canvas_video_renderer.cc
@@ -49,6 +49,7 @@
// shown here to indicate where ideal conversions are currently missing.
#if SK_B32_SHIFT == 0 && SK_G32_SHIFT == 8 && SK_R32_SHIFT == 16 && \
SK_A32_SHIFT == 24
+#define OUTPUT_ARGB 1
#define LIBYUV_I400_TO_ARGB libyuv::I400ToARGB
#define LIBYUV_I420_TO_ARGB libyuv::I420ToARGB
#define LIBYUV_I422_TO_ARGB libyuv::I422ToARGB
@@ -86,8 +87,11 @@
// #define LIBYUV_U410_TO_ARGB libyuv::U410ToARGB
#define LIBYUV_NV12_TO_ARGB libyuv::NV12ToARGB
+
+#define LIBYUV_ABGR_TO_ARGB libyuv::ABGRToARGB
#elif SK_R32_SHIFT == 0 && SK_G32_SHIFT == 8 && SK_B32_SHIFT == 16 && \
SK_A32_SHIFT == 24
+#define OUTPUT_ARGB 0
#define LIBYUV_I400_TO_ARGB libyuv::I400ToARGB
#define LIBYUV_I420_TO_ARGB libyuv::I420ToABGR
#define LIBYUV_I422_TO_ARGB libyuv::I422ToABGR
@@ -125,6 +129,8 @@
// #define LIBYUV_U410_TO_ARGB libyuv::U410ToABGR
#define LIBYUV_NV12_TO_ARGB libyuv::NV12ToABGR
+
+#define LIBYUV_ABGR_TO_ARGB libyuv::ARGBToABGR
#else
#error Unexpected Skia ARGB_8888 layout!
#endif
@@ -243,7 +249,6 @@ sk_sp<SkImage> WrapGLTexture(
GLenum target,
GLuint texture_id,
const gfx::Size& size,
- const gfx::ColorSpace& color_space,
viz::RasterContextProvider* raster_context_provider) {
GrGLTextureInfo texture_info;
texture_info.fID = texture_id;
@@ -254,10 +259,9 @@ sk_sp<SkImage> WrapGLTexture(
texture_info.fFormat = GL_RGBA8_OES;
GrBackendTexture backend_texture(size.width(), size.height(),
GrMipMapped::kNo, texture_info);
- return SkImage::MakeFromTexture(
+ return SkImage::MakeFromAdoptedTexture(
raster_context_provider->GrContext(), backend_texture,
- kTopLeft_GrSurfaceOrigin, kRGBA_8888_SkColorType, kPremul_SkAlphaType,
- color_space.ToSkColorSpace(), nullptr, nullptr);
+ kTopLeft_GrSurfaceOrigin, kRGBA_8888_SkColorType, kPremul_SkAlphaType);
}
void VideoFrameCopyTextureOrSubTexture(gpu::gles2::GLES2Interface* gl,
@@ -314,7 +318,7 @@ void SynchronizeVideoFrameRead(scoped_refptr<VideoFrame> video_frame,
WaitAndReplaceSyncTokenClient client(ri);
video_frame->UpdateReleaseSyncToken(&client);
- if (video_frame->metadata()->read_lock_fences_enabled) {
+ if (video_frame->metadata().read_lock_fences_enabled) {
// |video_frame| must be kept alive during read operations.
DCHECK(context_support);
unsigned query_id = 0;
@@ -386,6 +390,29 @@ void ConvertVideoFrameToRGBPixelsTask(const VideoFrame* video_frame,
uint8_t* pixels = static_cast<uint8_t*>(rgb_pixels) +
row_bytes * chunk_start * rows_per_chunk;
+ if (format == PIXEL_FORMAT_ARGB || format == PIXEL_FORMAT_XRGB ||
+ format == PIXEL_FORMAT_ABGR || format == PIXEL_FORMAT_XBGR) {
+ DCHECK_LE(width, static_cast<int>(row_bytes));
+ const uint8_t* data = plane_meta[VideoFrame::kARGBPlane].data;
+
+ if ((OUTPUT_ARGB &&
+ (format == PIXEL_FORMAT_ARGB || format == PIXEL_FORMAT_XRGB)) ||
+ (!OUTPUT_ARGB &&
+ (format == PIXEL_FORMAT_ABGR || format == PIXEL_FORMAT_XBGR))) {
+ for (size_t i = 0; i < rows; i++) {
+ memcpy(pixels, data, width * 4);
+ pixels += row_bytes;
+ data += plane_meta[VideoFrame::kARGBPlane].stride;
+ }
+ } else {
+ LIBYUV_ABGR_TO_ARGB(plane_meta[VideoFrame::kARGBPlane].data,
+ plane_meta[VideoFrame::kARGBPlane].stride, pixels,
+ row_bytes, width, rows);
+ }
+ done->Run();
+ return;
+ }
+
// TODO(crbug.com/828599): This should default to BT.709 color space.
SkYUVColorSpace color_space = kRec601_SkYUVColorSpace;
video_frame->ColorSpace().ToSkYUVColorSpace(&color_space);
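
A concrete reading of the new ARGB fast path: on a Skia build with SK_R32_SHIFT
== 0 (ABGR byte order), OUTPUT_ARGB is 0, so PIXEL_FORMAT_ABGR/XBGR frames
already match the destination layout and are copied row by row with memcpy,
while PIXEL_FORMAT_ARGB/XRGB frames are swizzled through LIBYUV_ABGR_TO_ARGB,
which that configuration maps to libyuv::ARGBToABGR.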
@@ -611,6 +638,7 @@ void ConvertVideoFrameToRGBPixelsTask(const VideoFrame* video_frame,
case PIXEL_FORMAT_P016LE:
case PIXEL_FORMAT_XR30:
case PIXEL_FORMAT_XB30:
+ case PIXEL_FORMAT_RGBAF16:
case PIXEL_FORMAT_UNKNOWN:
NOTREACHED() << "Only YUV formats and Y16 are supported, got: "
<< media::VideoPixelFormatToString(format);
@@ -638,31 +666,24 @@ bool ValidFormatForDirectUploading(GrGLenum format, unsigned int type) {
}
}
-bool VideoPixelFormatAsSkYUVAInfoPlanarConfig(
- VideoPixelFormat format,
- SkYUVAInfo::PlanarConfig* config) {
- // TODO(skbug.com/10632): Add more formats, e.g. I420A, NV12, NV21 when Skia
- // equivalents are added.
+std::tuple<SkYUVAInfo::PlaneConfig, SkYUVAInfo::Subsampling>
+VideoPixelFormatAsSkYUVAInfoValues(VideoPixelFormat format) {
// The 9, 10, and 12 bit formats could be added here if GetYUVAPlanes() were
- // updated to convert data to unorm16/float16.
+ // updated to convert data to unorm16/float16. Similarly, alpha planes and
+ // formats with interleaved planes (e.g. NV12) could be supported if that
+ // function were updated to not assume 3 separate Y, U, and V planes. Also,
+  // GpuImageDecodeCache would need to be able to handle plane configurations
+ // other than 3 separate y, u, and v planes (crbug.com/910276).
switch (format) {
case PIXEL_FORMAT_I420:
- if (config) {
- *config = SkYUVAInfo::PlanarConfig::kY_U_V_420;
- }
- return true;
+ return {SkYUVAInfo::PlaneConfig::kY_U_V, SkYUVAInfo::Subsampling::k420};
case PIXEL_FORMAT_I422:
- if (config) {
- *config = SkYUVAInfo::PlanarConfig::kY_U_V_422;
- }
- return true;
+ return {SkYUVAInfo::PlaneConfig::kY_U_V, SkYUVAInfo::Subsampling::k422};
case PIXEL_FORMAT_I444:
- if (config) {
- *config = SkYUVAInfo::PlanarConfig::kY_U_V_444;
- }
- return true;
+ return {SkYUVAInfo::PlaneConfig::kY_U_V, SkYUVAInfo::Subsampling::k444};
default:
- return false;
+ return {SkYUVAInfo::PlaneConfig::kUnknown,
+ SkYUVAInfo::Subsampling::kUnknown};
}
}
@@ -704,9 +725,11 @@ class VideoImageGenerator : public cc::PaintImageGenerator {
// is added for VideoImageGenerator.
return false;
#if 0
- SkYUVAInfo::PlanarConfig planar_config;
- if (!VideoPixelFormatAsSkYUVAInfoPlanarConfig(frame_->format(),
- &planar_config)) {
+ SkYUVAInfo::PlaneConfig plane_config;
+ SkYUVAInfo::Subsampling subsampling;
+ std::tie(plane_config, subsampling) =
+ VideoPixelFormatAsSkYUVAInfoValues(frame_->format());
+ if (plane_config == SkYUVAInfo::PlaneConfig::kUnknown) {
return false;
}
if (info) {
@@ -719,11 +742,12 @@ class VideoImageGenerator : public cc::PaintImageGenerator {
// We use the Y plane size because it may get rounded up to an even size.
// Our implementation of GetYUVAPlanes expects this.
gfx::Size y_size =
- VideoFrame::PlaneSize(frame_->format(), VideoFrame::kYPlane,
- gfx::Size(frame_->visible_rect().width(),
- frame_->visible_rect().height()));
- SkYUVAInfo yuva_info = SkYUVAInfo({y_size.width(), y_size.height()},
- planar_config, yuv_color_space);
+ VideoFrame::PlaneSizeInSamples(frame_->format(), VideoFrame::kYPlane,
+ gfx::Size(frame_->visible_rect().width(),
+ frame_->visible_rect().height()));
+ SkYUVAInfo yuva_info =
+ SkYUVAInfo({y_size.width(), y_size.height()}, plane_config,
+ subsampling, yuv_color_space);
*info = SkYUVAPixmapInfo(yuva_info, SkYUVAPixmapInfo::DataType::kUnorm8,
/* row bytes */ nullptr);
}
@@ -735,21 +759,23 @@ class VideoImageGenerator : public cc::PaintImageGenerator {
size_t frame_index,
uint32_t lazy_pixel_ref) override {
DCHECK_EQ(frame_index, 0u);
-
- if (!VideoPixelFormatAsSkYUVAInfoPlanarConfig(frame_->format(), nullptr)) {
- return false;
- }
-
- if (!pixmaps.plane(3).dimensions().isEmpty()) {
- return false;
+ DCHECK_EQ(pixmaps.numPlanes(), 3);
+
+ if (DCHECK_IS_ON()) {
+ SkYUVAInfo::PlaneConfig plane_config;
+ SkYUVAInfo::Subsampling subsampling;
+ std::tie(plane_config, subsampling) =
+ VideoPixelFormatAsSkYUVAInfoValues(frame_->format());
+ DCHECK_EQ(plane_config, pixmaps.yuvaInfo().planeConfig());
+ DCHECK_EQ(subsampling, pixmaps.yuvaInfo().subsampling());
}
for (int plane = VideoFrame::kYPlane; plane <= VideoFrame::kVPlane;
++plane) {
const gfx::Size size =
- VideoFrame::PlaneSize(frame_->format(), plane,
- gfx::Size(frame_->visible_rect().width(),
- frame_->visible_rect().height()));
+ VideoFrame::PlaneSizeInSamples(frame_->format(), plane,
+ gfx::Size(frame_->visible_rect().width(),
+ frame_->visible_rect().height()));
if (size.width() != pixmaps.plane(plane).width() ||
size.height() != pixmaps.plane(plane).height()) {
return false;
@@ -789,28 +815,67 @@ class VideoImageGenerator : public cc::PaintImageGenerator {
DISALLOW_IMPLICIT_CONSTRUCTORS(VideoImageGenerator);
};
-// TODO(jochin): Add support for all OOP-R specific APIs (eg. GetMailbox() and
-// GetSkImageViaReadback())
class VideoTextureBacking : public cc::TextureBacking {
public:
explicit VideoTextureBacking(
sk_sp<SkImage> sk_image,
+ const gpu::Mailbox& mailbox,
+ bool wraps_video_frame_texture,
scoped_refptr<viz::RasterContextProvider> raster_context_provider)
- : sk_image_(std::move(sk_image)) {
+ : sk_image_(std::move(sk_image)),
+ sk_image_info_(sk_image_->imageInfo()),
+ mailbox_(mailbox),
+ wraps_video_frame_texture_(wraps_video_frame_texture) {
raster_context_provider_ = std::move(raster_context_provider);
}
- const SkImageInfo& GetSkImageInfo() override {
- return sk_image_->imageInfo();
+ explicit VideoTextureBacking(
+ const gpu::Mailbox& mailbox,
+ const SkImageInfo& info,
+ bool wraps_video_frame_texture,
+ scoped_refptr<viz::RasterContextProvider> raster_context_provider)
+ : sk_image_info_(info),
+ mailbox_(mailbox),
+ wraps_video_frame_texture_(wraps_video_frame_texture) {
+ raster_context_provider_ = std::move(raster_context_provider);
+ }
+
+ ~VideoTextureBacking() override {
+ auto* ri = raster_context_provider_->RasterInterface();
+ if (!wraps_video_frame_texture_) {
+ gpu::SyncToken sync_token;
+ ri->GenUnverifiedSyncTokenCHROMIUM(sync_token.GetData());
+ auto* sii = raster_context_provider_->SharedImageInterface();
+ sii->DestroySharedImage(sync_token, mailbox_);
+ }
}
+
+ const SkImageInfo& GetSkImageInfo() override { return sk_image_info_; }
gpu::Mailbox GetMailbox() const override { return mailbox_; }
sk_sp<SkImage> GetAcceleratedSkImage() override { return sk_image_; }
+ bool wraps_video_frame_texture() const { return wraps_video_frame_texture_; }
+ const scoped_refptr<viz::RasterContextProvider>& raster_context_provider()
+ const {
+ return raster_context_provider_;
+ }
+
sk_sp<SkImage> GetSkImageViaReadback() override {
- if (sk_image_) {
+ if (sk_image_)
return sk_image_->makeNonTextureImage();
- }
- return nullptr;
+
+ sk_sp<SkData> image_pixels =
+ SkData::MakeUninitialized(sk_image_info_.computeMinByteSize());
+ uint8_t* writable_pixels =
+ static_cast<uint8_t*>(image_pixels->writable_data());
+ gpu::raster::RasterInterface* ri =
+ raster_context_provider_->RasterInterface();
+ ri->ReadbackImagePixels(mailbox_, sk_image_info_,
+ sk_image_info_.minRowBytes(), 0, 0,
+ writable_pixels);
+ return SkImage::MakeRasterData(sk_image_info_, std::move(image_pixels),
+ sk_image_info_.minRowBytes());
}
+
bool readPixels(const SkImageInfo& dst_info,
void* dst_pixels,
size_t dst_row_bytes,
@@ -820,8 +885,13 @@ class VideoTextureBacking : public cc::TextureBacking {
return sk_image_->readPixels(dst_info, dst_pixels, dst_row_bytes, src_x,
src_y);
}
- return false;
+ gpu::raster::RasterInterface* ri =
+ raster_context_provider_->RasterInterface();
+ ri->ReadbackImagePixels(mailbox_, dst_info, dst_info.minRowBytes(), src_x,
+ src_y, dst_pixels);
+ return true;
}
+
void FlushPendingSkiaOps() override {
if (!raster_context_provider_ || !sk_image_)
return;
@@ -829,9 +899,19 @@ class VideoTextureBacking : public cc::TextureBacking {
}
private:
- const sk_sp<SkImage> sk_image_;
- const gpu::Mailbox mailbox_;
+ sk_sp<SkImage> sk_image_;
+ SkImageInfo sk_image_info_;
scoped_refptr<viz::RasterContextProvider> raster_context_provider_;
+
+ // This can be either the source VideoFrame's texture (if
+ // |wraps_video_frame_texture_| is true) or a newly allocated shared image
+ // (if |wraps_video_frame_texture_| is false) if a copy or conversion was
+ // necessary.
+ const gpu::Mailbox mailbox_;
+
+ // Whether |mailbox_| directly points to a texture of the VideoFrame
+ // (if true), or to an allocated shared image (if false).
+ const bool wraps_video_frame_texture_;
};
PaintCanvasVideoRenderer::PaintCanvasVideoRenderer()
@@ -864,7 +944,11 @@ void PaintCanvasVideoRenderer::Paint(
// frame has an unexpected format.
if (!video_frame.get() || video_frame->natural_size().IsEmpty() ||
!(media::IsYuvPlanar(video_frame->format()) ||
- video_frame->format() == media::PIXEL_FORMAT_Y16 ||
+ video_frame->format() == PIXEL_FORMAT_Y16 ||
+ video_frame->format() == PIXEL_FORMAT_ARGB ||
+ video_frame->format() == PIXEL_FORMAT_XRGB ||
+ video_frame->format() == PIXEL_FORMAT_ABGR ||
+ video_frame->format() == PIXEL_FORMAT_XBGR ||
video_frame->HasTextures())) {
cc::PaintFlags black_with_alpha_flags;
black_with_alpha_flags.setAlpha(flags.getAlpha());
@@ -883,21 +967,20 @@ void PaintCanvasVideoRenderer::Paint(
DCHECK(image);
base::Optional<ScopedSharedImageAccess> source_access;
- if (video_frame->HasTextures()) {
- DCHECK(!cache_->source_mailbox.IsZero());
- DCHECK(cache_->source_texture);
+ if (video_frame->HasTextures() && cache_->source_texture) {
+ DCHECK(cache_->texture_backing);
source_access.emplace(raster_context_provider->RasterInterface(),
- cache_->source_texture, cache_->source_mailbox);
+ cache_->source_texture,
+ cache_->texture_backing->GetMailbox());
}
cc::PaintFlags video_flags;
video_flags.setAlpha(flags.getAlpha());
video_flags.setBlendMode(flags.getBlendMode());
- video_flags.setFilterQuality(flags.getFilterQuality());
const bool need_rotation = video_transformation.rotation != VIDEO_ROTATION_0;
const bool need_scaling =
- dest_rect.size() != gfx::SizeF(image.width(), image.height());
+ dest_rect.size() != gfx::SizeF(video_frame->visible_rect().size());
const bool need_translation = !dest_rect.origin().IsOrigin();
// TODO(tmathmeyer): apply horizontal / vertical mirroring if needed.
bool need_transform = need_rotation || need_scaling || need_translation;
@@ -928,10 +1011,13 @@ void PaintCanvasVideoRenderer::Paint(
rotated_dest_size =
gfx::SizeF(rotated_dest_size.height(), rotated_dest_size.width());
}
- canvas->scale(SkFloatToScalar(rotated_dest_size.width() / image.width()),
- SkFloatToScalar(rotated_dest_size.height() / image.height()));
- canvas->translate(-SkFloatToScalar(image.width() * 0.5f),
- -SkFloatToScalar(image.height() * 0.5f));
+ canvas->scale(SkFloatToScalar(rotated_dest_size.width() /
+ video_frame->visible_rect().width()),
+ SkFloatToScalar(rotated_dest_size.height() /
+ video_frame->visible_rect().height()));
+ canvas->translate(
+ -SkFloatToScalar(video_frame->visible_rect().width() * 0.5f),
+ -SkFloatToScalar(video_frame->visible_rect().height() * 0.5f));
}
SkImageInfo info;
@@ -950,8 +1036,24 @@ void PaintCanvasVideoRenderer::Paint(
const size_t offset = info.computeOffset(origin.x(), origin.y(), row_bytes);
void* const pixels_offset = reinterpret_cast<char*>(pixels) + offset;
ConvertVideoFrameToRGBPixels(video_frame.get(), pixels_offset, row_bytes);
+ } else if (video_frame->HasTextures()) {
+ DCHECK_EQ(video_frame->coded_size(),
+ gfx::Size(image.width(), image.height()));
+ canvas->drawImageRect(
+ image, gfx::RectToSkRect(video_frame->visible_rect()),
+ SkRect::MakeWH(video_frame->visible_rect().width(),
+ video_frame->visible_rect().height()),
+ SkSamplingOptions(flags.getFilterQuality(),
+ SkSamplingOptions::kMedium_asMipmapLinear),
+ &video_flags, SkCanvas::kStrict_SrcRectConstraint);
} else {
- canvas->drawImage(image, 0, 0, &video_flags);
+ DCHECK_EQ(video_frame->visible_rect().size(),
+ gfx::Size(image.width(), image.height()));
+ canvas->drawImage(
+ image, 0, 0,
+ SkSamplingOptions(flags.getFilterQuality(),
+ SkSamplingOptions::kMedium_asMipmapLinear),
+ &video_flags);
}
if (need_transform)
@@ -968,9 +1070,9 @@ void PaintCanvasVideoRenderer::Paint(
raster_context_provider->ContextSupport());
}
// Because we are not retaining a reference to the VideoFrame, it would be
- // invalid for the cache to directly wrap its texture(s), as they will be
- // recycled.
- DCHECK(!cache_ || !cache_->wraps_video_frame_texture);
+ // invalid for the texture_backing to directly wrap its texture(s), as they
+ // will be recycled.
+ DCHECK(!CacheBackingWrapsTexture());
}
void PaintCanvasVideoRenderer::Copy(
@@ -1024,7 +1126,7 @@ scoped_refptr<VideoFrame> DownShiftHighbitVideoFrame(
ret->set_color_space(video_frame->ColorSpace());
// Copy all metadata.
// (May be enough to copy color space)
- ret->metadata()->MergeMetadataFrom(video_frame->metadata());
+ ret->metadata().MergeMetadataFrom(video_frame->metadata());
for (int plane = VideoFrame::kYPlane; plane <= VideoFrame::kVPlane; ++plane) {
int width = ret->row_bytes(plane);
@@ -1296,17 +1398,23 @@ bool PaintCanvasVideoRenderer::CopyVideoFrameTexturesToGLTexture(
DCHECK(video_frame);
DCHECK(video_frame->HasTextures());
if (video_frame->NumTextures() > 1 ||
- video_frame->metadata()->read_lock_fences_enabled) {
+ video_frame->metadata().read_lock_fences_enabled) {
if (!raster_context_provider)
return false;
GrDirectContext* gr_context = raster_context_provider->GrContext();
- if (!gr_context)
+ if (!gr_context &&
+ !raster_context_provider->ContextCapabilities().supports_oop_raster)
return false;
// TODO(crbug.com/1108154): Expand this uploading path to macOS, linux
// chromeOS after collecting perf data and resolve failure cases.
#if defined(OS_WIN)
- // Try direct uploading path
- if (premultiply_alpha && level == 0) {
+ // Since Skia always produces premultiplied-alpha output, try the direct
+ // uploading path only when the video format is opaque or premultiplied
+ // alpha was requested, and only for destination mip level 0.
+ // TODO(crbug.com/1155003): Figure out whether premultiply options here are
+ // accurate.
+ if ((media::IsOpaque(video_frame->format()) || premultiply_alpha) &&
+ level == 0) {
if (UploadVideoFrameToGLTexture(raster_context_provider, destination_gl,
video_frame, target, texture,
internal_format, format, type, flip_y)) {
@@ -1321,7 +1429,7 @@ bool PaintCanvasVideoRenderer::CopyVideoFrameTexturesToGLTexture(
}
DCHECK(cache_);
- DCHECK(!cache_->source_mailbox.IsZero());
+ DCHECK(cache_->texture_backing);
gpu::raster::RasterInterface* canvas_ri =
raster_context_provider->RasterInterface();
@@ -1331,10 +1439,10 @@ bool PaintCanvasVideoRenderer::CopyVideoFrameTexturesToGLTexture(
canvas_ri->GenUnverifiedSyncTokenCHROMIUM(sync_token.GetData());
uint32_t intermediate_texture = SynchronizeAndImportMailbox(
- destination_gl, sync_token, cache_->source_mailbox);
+ destination_gl, sync_token, cache_->texture_backing->GetMailbox());
{
ScopedSharedImageAccess access(destination_gl, intermediate_texture,
- cache_->source_mailbox);
+ cache_->texture_backing->GetMailbox());
VideoFrameCopyTextureOrSubTexture(
destination_gl, cache_->coded_size, cache_->visible_rect,
intermediate_texture, target, texture, internal_format, format, type,
@@ -1352,7 +1460,7 @@ bool PaintCanvasVideoRenderer::CopyVideoFrameTexturesToGLTexture(
// Because we are not retaining a reference to the VideoFrame, it would be
// invalid to keep the cache around if it directly wraps the VideoFrame
// texture(s), as they will be recycled.
- if (cache_->wraps_video_frame_texture)
+ if (cache_->texture_backing->wraps_video_frame_texture())
cache_.reset();
// Synchronize |video_frame| with the read operations in UpdateLastImage(),
@@ -1367,8 +1475,7 @@ bool PaintCanvasVideoRenderer::CopyVideoFrameTexturesToGLTexture(
WaitAndReplaceSyncTokenClient client(destination_gl);
video_frame->UpdateReleaseSyncToken(&client);
}
- DCHECK(!cache_ || !cache_->wraps_video_frame_texture);
-
+ DCHECK(!CacheBackingWrapsTexture());
return true;
}
@@ -1386,8 +1493,7 @@ bool PaintCanvasVideoRenderer::UploadVideoFrameToGLTexture(
DCHECK(video_frame);
DCHECK(video_frame->HasTextures());
// Support uploading for NV12 and I420 video frame only.
- if (video_frame->format() != PIXEL_FORMAT_I420 &&
- video_frame->format() != PIXEL_FORMAT_NV12) {
+ if (!VideoFrameYUVConverter::IsVideoFrameFormatSupported(*video_frame)) {
return false;
}
@@ -1402,6 +1508,7 @@ bool PaintCanvasVideoRenderer::UploadVideoFrameToGLTexture(
return false;
}
+ // TODO(nazabris): Support OOP-R code path here that does not have GrContext.
if (!raster_context_provider || !raster_context_provider->GrContext())
return false;
@@ -1420,7 +1527,7 @@ bool PaintCanvasVideoRenderer::UploadVideoFrameToGLTexture(
destination_gl->GenUnverifiedSyncTokenCHROMIUM(
mailbox_holder.sync_token.GetData());
- if (!VideoFrameYUVConverter::ConvertYUVVideoFrameWithSkSurfaceNoCaching(
+ if (!VideoFrameYUVConverter::ConvertYUVVideoFrameToDstTextureNoCaching(
video_frame.get(), raster_context_provider, mailbox_holder,
internal_format, type, flip_y, true /* use visible_rect */)) {
return false;
@@ -1461,6 +1568,7 @@ bool PaintCanvasVideoRenderer::PrepareVideoFrameForWebGL(
return false;
}
+ // TODO(nazabris): Support OOP-R code path here that does not have GrContext.
if (!raster_context_provider || !raster_context_provider->GrContext())
return false;
@@ -1498,7 +1606,7 @@ bool PaintCanvasVideoRenderer::PrepareVideoFrameForWebGL(
WaitAndReplaceSyncTokenClient client(source_ri);
video_frame->UpdateReleaseSyncToken(&client);
- DCHECK(!cache_ || !cache_->wraps_video_frame_texture);
+ DCHECK(!CacheBackingWrapsTexture());
return true;
}
@@ -1519,7 +1627,7 @@ bool PaintCanvasVideoRenderer::CopyVideoFrameYUVDataToGLTexture(
return false;
}
- if (video_frame.format() != media::PIXEL_FORMAT_I420) {
+ if (!VideoFrameYUVConverter::IsVideoFrameFormatSupported(video_frame)) {
return false;
}
// Could handle NV12 here as well. See NewSkImageFromVideoFrameYUV.
@@ -1672,38 +1780,17 @@ void PaintCanvasVideoRenderer::ResetCache() {
PaintCanvasVideoRenderer::Cache::Cache(int frame_id) : frame_id(frame_id) {}
-PaintCanvasVideoRenderer::Cache::~Cache() {
- if (!raster_context_provider)
- return;
-
- DCHECK(!source_mailbox.IsZero());
- DCHECK(source_texture);
- auto* ri = raster_context_provider->RasterInterface();
- if (!texture_ownership_in_skia)
- ri->DeleteGpuRasterTexture(source_texture);
- if (!wraps_video_frame_texture) {
- gpu::SyncToken sync_token;
- ri->GenUnverifiedSyncTokenCHROMIUM(sync_token.GetData());
- auto* sii = raster_context_provider->SharedImageInterface();
- sii->DestroySharedImage(sync_token, source_mailbox);
- }
-}
+PaintCanvasVideoRenderer::Cache::~Cache() = default;
bool PaintCanvasVideoRenderer::Cache::Recycle() {
- if (!texture_ownership_in_skia)
- return true;
+ DCHECK(!texture_backing->wraps_video_frame_texture());
- if (!paint_image.HasExclusiveTextureAccess())
+ paint_image = cc::PaintImage();
+ if (!texture_backing->unique())
return false;
// Flush any pending GPU work using this texture.
- paint_image.FlushPendingSkiaOps();
-
- paint_image = cc::PaintImage();
- // We need a new texture ID because skia will destroy the previous one with
- // the SkImage.
- texture_ownership_in_skia = false;
- source_texture = 0;
+ texture_backing->FlushPendingSkiaOps();
return true;
}
@@ -1711,9 +1798,9 @@ bool PaintCanvasVideoRenderer::UpdateLastImage(
scoped_refptr<VideoFrame> video_frame,
viz::RasterContextProvider* raster_context_provider,
bool allow_wrap_texture) {
- DCHECK(!cache_ || !cache_->wraps_video_frame_texture);
+ DCHECK(!CacheBackingWrapsTexture());
if (!cache_ || video_frame->unique_id() != cache_->frame_id ||
- cache_->source_mailbox.IsZero()) {
+ !cache_->paint_image) {
auto paint_image_builder =
cc::PaintImageBuilder::WithDefault()
.set_id(renderer_stable_id_)
@@ -1727,115 +1814,99 @@ bool PaintCanvasVideoRenderer::UpdateLastImage(
// could cause problems since the pool of VideoFrames has a fixed size.
if (video_frame->HasTextures()) {
DCHECK(raster_context_provider);
- DCHECK(raster_context_provider->GrContext());
+ bool supports_oop_raster =
+ raster_context_provider->ContextCapabilities().supports_oop_raster;
+ DCHECK(supports_oop_raster || raster_context_provider->GrContext());
auto* ri = raster_context_provider->RasterInterface();
DCHECK(ri);
+ bool wraps_video_frame_texture = false;
+ gpu::Mailbox mailbox;
if (allow_wrap_texture && video_frame->NumTextures() == 1) {
cache_.emplace(video_frame->unique_id());
const gpu::MailboxHolder& holder =
GetVideoFrameMailboxHolder(video_frame.get());
- cache_->source_mailbox = holder.mailbox;
+ mailbox = holder.mailbox;
ri->WaitSyncTokenCHROMIUM(holder.sync_token.GetConstData());
- cache_->wraps_video_frame_texture = true;
+ wraps_video_frame_texture = true;
} else {
- if (cache_ &&
- cache_->raster_context_provider == raster_context_provider &&
+ if (cache_ && cache_->texture_backing &&
+ cache_->texture_backing->raster_context_provider() ==
+ raster_context_provider &&
cache_->coded_size == video_frame->coded_size() &&
cache_->Recycle()) {
// We can reuse the shared image from the previous cache.
cache_->frame_id = video_frame->unique_id();
+ mailbox = cache_->texture_backing->GetMailbox();
} else {
cache_.emplace(video_frame->unique_id());
auto* sii = raster_context_provider->SharedImageInterface();
-
// TODO(nazabris): Sort out what to do when GLES2 is needed but the
// cached shared image is created without it.
uint32_t flags =
gpu::SHARED_IMAGE_USAGE_GLES2 | gpu::SHARED_IMAGE_USAGE_RASTER;
- if (raster_context_provider->ContextCapabilities()
- .supports_oop_raster) {
+ if (supports_oop_raster) {
flags |= gpu::SHARED_IMAGE_USAGE_OOP_RASTERIZATION;
}
- cache_->source_mailbox = sii->CreateSharedImage(
+ mailbox = sii->CreateSharedImage(
viz::ResourceFormat::RGBA_8888, video_frame->coded_size(),
gfx::ColorSpace(), kTopLeft_GrSurfaceOrigin, kPremul_SkAlphaType,
flags, gpu::kNullSurfaceHandle);
ri->WaitSyncTokenCHROMIUM(
sii->GenUnverifiedSyncToken().GetConstData());
}
-
- DCHECK(!cache_->texture_ownership_in_skia);
if (video_frame->NumTextures() == 1) {
auto frame_mailbox =
SynchronizeVideoFrameSingleMailbox(ri, video_frame.get());
- ri->CopySubTexture(
- frame_mailbox, cache_->source_mailbox, GL_TEXTURE_2D, 0, 0, 0, 0,
- video_frame->coded_size().width(),
- video_frame->coded_size().height(), GL_FALSE, GL_FALSE);
+ ri->CopySubTexture(frame_mailbox, mailbox, GL_TEXTURE_2D, 0, 0, 0, 0,
+ video_frame->coded_size().width(),
+ video_frame->coded_size().height(), GL_FALSE,
+ GL_FALSE);
} else {
- gpu::MailboxHolder dest_holder{cache_->source_mailbox,
- gpu::SyncToken(), GL_TEXTURE_2D};
+ gpu::MailboxHolder dest_holder{mailbox, gpu::SyncToken(),
+ GL_TEXTURE_2D};
VideoFrameYUVConverter::ConvertYUVVideoFrameNoCaching(
video_frame.get(), raster_context_provider, dest_holder);
}
- raster_context_provider->GrContext()->flushAndSubmit();
+ if (!supports_oop_raster)
+ raster_context_provider->GrContext()->flushAndSubmit();
}
- // TODO(jochin): Don't always generate SkImage here.
- DCHECK(cache_->source_texture == 0);
- cache_->source_texture =
- ri->CreateAndConsumeForGpuRaster(cache_->source_mailbox);
-
- // TODO(nazabris): Handle scoped access correctly. This follows the
- // current pattern but is most likely bugged. Access should last for the
- // lifetime of the SkImage.
- ScopedSharedImageAccess(ri, cache_->source_texture,
- cache_->source_mailbox);
- auto source_image =
- WrapGLTexture(cache_->wraps_video_frame_texture
- ? video_frame->mailbox_holder(0).texture_target
- : GL_TEXTURE_2D,
- cache_->source_texture, video_frame->coded_size(),
- video_frame->ColorSpace(), raster_context_provider);
- if (!source_image) {
- // Couldn't create the SkImage.
- cache_.reset();
- return false;
- }
- cache_->raster_context_provider = raster_context_provider;
cache_->coded_size = video_frame->coded_size();
cache_->visible_rect = video_frame->visible_rect();
- GrDirectContext* direct =
- GrAsDirectContext(raster_context_provider->GrContext());
- sk_sp<SkImage> source_subset = source_image->makeSubset(
- gfx::RectToSkIRect(cache_->visible_rect), direct);
- if (source_subset) {
- // We use the flushPendingGrContextIO = true so we can flush any pending
- // GPU work on the GrContext to ensure that skia exectues the work for
- // generating the subset and it can be safely destroyed.
- GrBackendTexture image_backend =
- source_image->getBackendTexture(/*flushPendingGrContextIO*/ true);
- GrBackendTexture subset_backend =
- source_subset->getBackendTexture(/*flushPendingGrContextIO*/ true);
-#if DCHECK_IS_ON()
- GrGLTextureInfo backend_info;
- if (image_backend.getGLTextureInfo(&backend_info))
- DCHECK_EQ(backend_info.fID, cache_->source_texture);
-#endif
- if (subset_backend.isValid() &&
- subset_backend.isSameTexture(image_backend)) {
- cache_->texture_ownership_in_skia = true;
- source_subset = SkImage::MakeFromAdoptedTexture(
- cache_->raster_context_provider->GrContext(), image_backend,
- kTopLeft_GrSurfaceOrigin, kRGBA_8888_SkColorType,
- kPremul_SkAlphaType, source_image->imageInfo().refColorSpace());
+ if (!cache_->texture_backing) {
+ if (supports_oop_raster) {
+ SkImageInfo sk_image_info =
+ SkImageInfo::Make(gfx::SizeToSkISize(cache_->coded_size),
+ kRGBA_8888_SkColorType, kPremul_SkAlphaType);
+ cache_->texture_backing = sk_make_sp<VideoTextureBacking>(
+ mailbox, sk_image_info, wraps_video_frame_texture,
+ raster_context_provider);
+ } else {
+ cache_->source_texture = ri->CreateAndConsumeForGpuRaster(mailbox);
+
+ // TODO(nazabris): Handle scoped access correctly. This follows the
+ // current pattern but is most likely bugged. Access should last for
+ // the lifetime of the SkImage.
+ ScopedSharedImageAccess(ri, cache_->source_texture, mailbox);
+ auto source_image =
+ WrapGLTexture(wraps_video_frame_texture
+ ? video_frame->mailbox_holder(0).texture_target
+ : GL_TEXTURE_2D,
+ cache_->source_texture, video_frame->coded_size(),
+ raster_context_provider);
+ if (!source_image) {
+ // Couldn't create the SkImage.
+ cache_.reset();
+ return false;
+ }
+ cache_->texture_backing = sk_make_sp<VideoTextureBacking>(
+ std::move(source_image), mailbox, wraps_video_frame_texture,
+ raster_context_provider);
}
}
paint_image_builder.set_texture_backing(
- sk_sp<VideoTextureBacking>(new VideoTextureBacking(
- std::move(source_subset), raster_context_provider)),
- cc::PaintImage::GetNextContentId());
+ cache_->texture_backing, cc::PaintImage::GetNextContentId());
} else {
cache_.emplace(video_frame->unique_id());
paint_image_builder.set_paint_image_generator(
@@ -1843,7 +1914,7 @@ bool PaintCanvasVideoRenderer::UpdateLastImage(
}
cache_->paint_image = paint_image_builder.TakePaintImage();
if (!cache_->paint_image) {
- // Couldn't create the SkImage.
+ // Couldn't create the PaintImage.
cache_.reset();
return false;
}
@@ -1876,7 +1947,9 @@ bool PaintCanvasVideoRenderer::PrepareVideoFrame(
}
PaintCanvasVideoRenderer::YUVTextureCache::YUVTextureCache() = default;
-PaintCanvasVideoRenderer::YUVTextureCache::~YUVTextureCache() = default;
+PaintCanvasVideoRenderer::YUVTextureCache::~YUVTextureCache() {
+ Reset();
+}
void PaintCanvasVideoRenderer::YUVTextureCache::Reset() {
if (mailbox.IsZero())
@@ -1906,4 +1979,9 @@ gfx::Size PaintCanvasVideoRenderer::LastImageDimensionsForTesting() {
return gfx::Size(cache_->paint_image.width(), cache_->paint_image.height());
}
+bool PaintCanvasVideoRenderer::CacheBackingWrapsTexture() const {
+ return cache_ && cache_->texture_backing &&
+ cache_->texture_backing->wraps_video_frame_texture();
+}
+
} // namespace media
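For context on the hunk above: the new VideoTextureBacking centralizes the ownership logic that previously lived in PaintCanvasVideoRenderer::Cache::~Cache. The following is a minimal sketch of the release rule it has to enforce, reconstructed from the deleted destructor; the class name and constructor are illustrative, only the calls shown in the patch are assumed.

// Sketch (assumption): destroy the shared image only if the backing
// allocated it; a mailbox that wraps the VideoFrame's own texture is
// owned by the frame, not by the cache.
class TextureBackingSketch : public SkRefCnt {
 public:
  TextureBackingSketch(const gpu::Mailbox& mailbox,
                       bool wraps_video_frame_texture,
                       scoped_refptr<viz::RasterContextProvider> provider)
      : mailbox_(mailbox),
        wraps_video_frame_texture_(wraps_video_frame_texture),
        raster_context_provider_(std::move(provider)) {}

  ~TextureBackingSketch() override {
    if (!raster_context_provider_ || wraps_video_frame_texture_)
      return;
    // Fence the release so the service does not reuse the shared image
    // while raster work that samples it is still in flight.
    auto* ri = raster_context_provider_->RasterInterface();
    gpu::SyncToken sync_token;
    ri->GenUnverifiedSyncTokenCHROMIUM(sync_token.GetData());
    raster_context_provider_->SharedImageInterface()->DestroySharedImage(
        sync_token, mailbox_);
  }

 private:
  const gpu::Mailbox mailbox_;
  const bool wraps_video_frame_texture_;
  scoped_refptr<viz::RasterContextProvider> raster_context_provider_;
};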
diff --git a/chromium/media/renderers/paint_canvas_video_renderer.h b/chromium/media/renderers/paint_canvas_video_renderer.h
index 943cc378ac5..0c6b4062d88 100644
--- a/chromium/media/renderers/paint_canvas_video_renderer.h
+++ b/chromium/media/renderers/paint_canvas_video_renderer.h
@@ -41,6 +41,7 @@ class RasterContextProvider;
}
namespace media {
+class VideoTextureBacking;
// Handles rendering of VideoFrames to PaintCanvases.
class MEDIA_EXPORT PaintCanvasVideoRenderer {
@@ -211,22 +212,15 @@ class MEDIA_EXPORT PaintCanvasVideoRenderer {
// to the visible size of the VideoFrame. Its contents are generated lazily.
cc::PaintImage paint_image;
- // The context provider used to generate |source_mailbox| and
- // |source_texture|. This is only set if the VideoFrame was texture-backed.
- scoped_refptr<viz::RasterContextProvider> raster_context_provider;
-
- // The mailbox for the source texture. This can be either the source
- // VideoFrame's texture (if |wraps_video_frame_texture| is true) or a newly
- // allocated shared image (if |wraps_video_frame_texture| is false) if a
- // copy or conversion was necessary.
- // This is only set if the VideoFrame was texture-backed.
- gpu::Mailbox source_mailbox;
+ // The backing for the source texture. This is also responsible for managing
+ // the lifetime of the texture.
+ sk_sp<VideoTextureBacking> texture_backing;
- // The texture ID created when importing |source_mailbox|.
+ // The GL texture ID used in the non-OOP-raster code path.
// This is only set if the VideoFrame was texture-backed.
uint32_t source_texture = 0;
- // The allocated size of |source_mailbox|.
+ // The allocated size of the VideoFrame texture.
// This is only set if the VideoFrame was texture-backed.
gfx::Size coded_size;
@@ -235,13 +229,6 @@ class MEDIA_EXPORT PaintCanvasVideoRenderer {
// This is only set if the VideoFrame was texture-backed.
gfx::Rect visible_rect;
- // Whether |source_mailbox| directly points to a texture of the VideoFrame
- // (if true), or to an allocated shared image (if false).
- bool wraps_video_frame_texture = false;
-
- // Whether the texture pointed by |paint_image| is owned by skia or not.
- bool texture_ownership_in_skia = false;
-
// Used to allow recycling of the previous shared image. This requires that
// no external users have access to this resource via SkImage. Returns true
// if the existing resource can be recycled.
@@ -269,6 +256,8 @@ class MEDIA_EXPORT PaintCanvasVideoRenderer {
unsigned int type,
bool flip_y);
+ bool CacheBackingWrapsTexture() const;
+
base::Optional<Cache> cache_;
// If |cache_| is not used for a while, it's deleted to save memory.
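The Recycle() rewrite in the .cc hunk above gates reuse on sk_sp reference counting instead of the old texture_ownership_in_skia flag. The order matters: the cache must drop its own PaintImage reference first, then reuse the backing only if nothing else still holds it. A condensed sketch of that contract, assuming only the sk_sp ownership shown in the patch:

// Reuse the cached backing only when the cache holds the last reference.
bool CanRecycleBacking(cc::PaintImage& paint_image,
                       const sk_sp<VideoTextureBacking>& backing) {
  paint_image = cc::PaintImage();  // Release the cache's image reference.
  if (!backing->unique())          // An SkImage/PaintImage elsewhere still
    return false;                  // samples the texture; cannot reuse.
  backing->FlushPendingSkiaOps();  // Drain pending GPU work before reuse.
  return true;
}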
diff --git a/chromium/media/renderers/paint_canvas_video_renderer_unittest.cc b/chromium/media/renderers/paint_canvas_video_renderer_unittest.cc
index 108617d53b9..b8657b9269c 100644
--- a/chromium/media/renderers/paint_canvas_video_renderer_unittest.cc
+++ b/chromium/media/renderers/paint_canvas_video_renderer_unittest.cc
@@ -11,6 +11,7 @@
#include "base/memory/aligned_memory.h"
#include "base/sys_byteorder.h"
#include "base/test/task_environment.h"
+#include "build/build_config.h"
#include "cc/paint/paint_flags.h"
#include "cc/paint/skia_paint_canvas.h"
#include "components/viz/common/gpu/context_provider.h"
@@ -26,6 +27,7 @@
#include "media/base/video_frame.h"
#include "media/base/video_util.h"
#include "media/renderers/paint_canvas_video_renderer.h"
+#include "media/renderers/shared_image_video_frame_test_utils.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/scale.h"
@@ -70,252 +72,6 @@ scoped_refptr<VideoFrame> CreateTestY16Frame(const gfx::Size& coded_size,
static_cast<uint8_t*>(external_memory), byte_size, timestamp);
}
-// Destroys a list of shared images after a sync token is passed. Also runs
-// |callback|.
-static void DestroySharedImages(
- scoped_refptr<viz::ContextProvider> context_provider,
- std::vector<gpu::Mailbox> mailboxes,
- base::OnceClosure callback,
- const gpu::SyncToken& sync_token) {
- auto* sii = context_provider->SharedImageInterface();
- for (const auto& mailbox : mailboxes)
- sii->DestroySharedImage(sync_token, mailbox);
- std::move(callback).Run();
-}
-
-// Creates a video frame from a set of shared images with a common texture
-// target and sync token.
-static scoped_refptr<VideoFrame> CreateSharedImageFrame(
- scoped_refptr<viz::ContextProvider> context_provider,
- VideoPixelFormat format,
- std::vector<gpu::Mailbox> mailboxes,
- const gpu::SyncToken& sync_token,
- GLenum texture_target,
- const gfx::Size& coded_size,
- const gfx::Rect& visible_rect,
- const gfx::Size& natural_size,
- base::TimeDelta timestamp,
- base::OnceClosure destroyed_callback) {
- gpu::MailboxHolder mailboxes_for_frame[VideoFrame::kMaxPlanes] = {};
- size_t i = 0;
- for (const auto& mailbox : mailboxes) {
- mailboxes_for_frame[i++] =
- gpu::MailboxHolder(mailbox, sync_token, texture_target);
- }
- auto callback =
- base::BindOnce(&DestroySharedImages, std::move(context_provider),
- std::move(mailboxes), std::move(destroyed_callback));
- return VideoFrame::WrapNativeTextures(format, mailboxes_for_frame,
- std::move(callback), coded_size,
- visible_rect, natural_size, timestamp);
-}
-
-// Upload pixels to a shared image using GL.
-static void UploadPixels(gpu::gles2::GLES2Interface* gl,
- const gpu::Mailbox& mailbox,
- const gfx::Size& size,
- GLenum format,
- GLenum type,
- const uint8_t* data) {
- GLuint texture = gl->CreateAndTexStorage2DSharedImageCHROMIUM(mailbox.name);
- gl->BeginSharedImageAccessDirectCHROMIUM(
- texture, GL_SHARED_IMAGE_ACCESS_MODE_READWRITE_CHROMIUM);
- gl->BindTexture(GL_TEXTURE_2D, texture);
- gl->TexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, size.width(), size.height(), format,
- type, data);
- gl->EndSharedImageAccessDirectCHROMIUM(texture);
- gl->DeleteTextures(1, &texture);
-}
-
-// Creates a shared image backed frame in RGBA format, with colors on the shared
-// image mapped as follow.
-// Bk | R | G | Y
-// ---+---+---+---
-// Bl | M | C | W
-static scoped_refptr<VideoFrame> CreateSharedImageRGBAFrame(
- scoped_refptr<viz::ContextProvider> context_provider,
- const gfx::Size& coded_size,
- const gfx::Rect& visible_rect,
- base::OnceClosure destroyed_callback) {
- DCHECK_EQ(coded_size.width() % 4, 0);
- DCHECK_EQ(coded_size.height() % 2, 0);
- size_t pixels_size = coded_size.GetArea() * 4;
- auto pixels = std::make_unique<uint8_t[]>(pixels_size);
- size_t i = 0;
- for (size_t block_y = 0; block_y < 2u; ++block_y) {
- for (int y = 0; y < coded_size.height() / 2; ++y) {
- for (size_t block_x = 0; block_x < 4u; ++block_x) {
- for (int x = 0; x < coded_size.width() / 4; ++x) {
- pixels[i++] = 0xffu * (block_x % 2); // R
- pixels[i++] = 0xffu * (block_x / 2); // G
- pixels[i++] = 0xffu * block_y; // B
- pixels[i++] = 0xffu; // A
- }
- }
- }
- }
- DCHECK_EQ(i, pixels_size);
-
- auto* sii = context_provider->SharedImageInterface();
- gpu::Mailbox mailbox = sii->CreateSharedImage(
- viz::ResourceFormat::RGBA_8888, coded_size, gfx::ColorSpace(),
- kTopLeft_GrSurfaceOrigin, kPremul_SkAlphaType,
- gpu::SHARED_IMAGE_USAGE_GLES2, gpu::kNullSurfaceHandle);
- auto* gl = context_provider->ContextGL();
- gl->WaitSyncTokenCHROMIUM(sii->GenUnverifiedSyncToken().GetConstData());
- UploadPixels(gl, mailbox, coded_size, GL_RGBA, GL_UNSIGNED_BYTE,
- pixels.get());
- gpu::SyncToken sync_token;
- gl->GenUnverifiedSyncTokenCHROMIUM(sync_token.GetData());
-
- return CreateSharedImageFrame(
- std::move(context_provider), VideoPixelFormat::PIXEL_FORMAT_ABGR,
- {mailbox}, sync_token, GL_TEXTURE_2D, coded_size, visible_rect,
- visible_rect.size(), base::TimeDelta::FromSeconds(1),
- std::move(destroyed_callback));
-}
-
-static constexpr const uint8_t kYuvColors[8][3] = {
- {0x00, 0x80, 0x80}, // Black
- {0x4c, 0x54, 0xff}, // Red
- {0x95, 0x2b, 0x15}, // Green
- {0xe1, 0x00, 0x94}, // Yellow
- {0x1d, 0xff, 0x6b}, // Blue
- {0x69, 0xd3, 0xec}, // Magenta
- {0xb3, 0xaa, 0x00}, // Cyan
- {0xff, 0x80, 0x80}, // White
-};
-
-// Creates a shared image backed frame in I420 format, with colors mapped
-// exactly like CreateSharedImageRGBAFrame above, noting that subsamples may get
-// interpolated leading to inconsistent colors around the "seams".
-static scoped_refptr<VideoFrame> CreateSharedImageI420Frame(
- scoped_refptr<viz::ContextProvider> context_provider,
- const gfx::Size& coded_size,
- const gfx::Rect& visible_rect,
- base::OnceClosure destroyed_callback) {
- DCHECK_EQ(coded_size.width() % 8, 0);
- DCHECK_EQ(coded_size.height() % 4, 0);
- gfx::Size uv_size(coded_size.width() / 2, coded_size.height() / 2);
- size_t y_pixels_size = coded_size.GetArea();
- size_t uv_pixels_size = uv_size.GetArea();
- auto y_pixels = std::make_unique<uint8_t[]>(y_pixels_size);
- auto u_pixels = std::make_unique<uint8_t[]>(uv_pixels_size);
- auto v_pixels = std::make_unique<uint8_t[]>(uv_pixels_size);
- size_t y_i = 0;
- size_t uv_i = 0;
- for (size_t block_y = 0; block_y < 2u; ++block_y) {
- for (int y = 0; y < coded_size.height() / 2; ++y) {
- for (size_t block_x = 0; block_x < 4u; ++block_x) {
- size_t color_index = block_x + block_y * 4;
- const uint8_t* yuv = kYuvColors[color_index];
- for (int x = 0; x < coded_size.width() / 4; ++x) {
- y_pixels[y_i++] = yuv[0];
- if ((x % 2) && (y % 2)) {
- u_pixels[uv_i] = yuv[1];
- v_pixels[uv_i++] = yuv[2];
- }
- }
- }
- }
- }
- DCHECK_EQ(y_i, y_pixels_size);
- DCHECK_EQ(uv_i, uv_pixels_size);
-
- auto* sii = context_provider->SharedImageInterface();
- gpu::Mailbox y_mailbox = sii->CreateSharedImage(
- viz::ResourceFormat::LUMINANCE_8, coded_size, gfx::ColorSpace(),
- kTopLeft_GrSurfaceOrigin, kPremul_SkAlphaType,
- gpu::SHARED_IMAGE_USAGE_GLES2, gpu::kNullSurfaceHandle);
- gpu::Mailbox u_mailbox = sii->CreateSharedImage(
- viz::ResourceFormat::LUMINANCE_8, uv_size, gfx::ColorSpace(),
- kTopLeft_GrSurfaceOrigin, kPremul_SkAlphaType,
- gpu::SHARED_IMAGE_USAGE_GLES2, gpu::kNullSurfaceHandle);
- gpu::Mailbox v_mailbox = sii->CreateSharedImage(
- viz::ResourceFormat::LUMINANCE_8, uv_size, gfx::ColorSpace(),
- kTopLeft_GrSurfaceOrigin, kPremul_SkAlphaType,
- gpu::SHARED_IMAGE_USAGE_GLES2, gpu::kNullSurfaceHandle);
- auto* gl = context_provider->ContextGL();
- gl->WaitSyncTokenCHROMIUM(sii->GenUnverifiedSyncToken().GetConstData());
- UploadPixels(gl, y_mailbox, coded_size, GL_LUMINANCE, GL_UNSIGNED_BYTE,
- y_pixels.get());
- UploadPixels(gl, u_mailbox, uv_size, GL_LUMINANCE, GL_UNSIGNED_BYTE,
- u_pixels.get());
- UploadPixels(gl, v_mailbox, uv_size, GL_LUMINANCE, GL_UNSIGNED_BYTE,
- v_pixels.get());
- gpu::SyncToken sync_token;
- gl->GenUnverifiedSyncTokenCHROMIUM(sync_token.GetData());
-
- return CreateSharedImageFrame(
- std::move(context_provider), VideoPixelFormat::PIXEL_FORMAT_I420,
- {y_mailbox, u_mailbox, v_mailbox}, sync_token, GL_TEXTURE_2D, coded_size,
- visible_rect, visible_rect.size(), base::TimeDelta::FromSeconds(1),
- std::move(destroyed_callback));
-}
-
-// Creates a shared image backed frame in NV12 format, with colors mapped
-// exactly like CreateSharedImageRGBAFrame above.
-// This will return nullptr if the necessary extension is not available for NV12
-// support.
-static scoped_refptr<VideoFrame> CreateSharedImageNV12Frame(
- scoped_refptr<viz::ContextProvider> context_provider,
- const gfx::Size& coded_size,
- const gfx::Rect& visible_rect,
- base::OnceClosure destroyed_callback) {
- DCHECK_EQ(coded_size.width() % 8, 0);
- DCHECK_EQ(coded_size.height() % 4, 0);
- if (!context_provider->ContextCapabilities().texture_rg)
- return {};
- gfx::Size uv_size(coded_size.width() / 2, coded_size.height() / 2);
- size_t y_pixels_size = coded_size.GetArea();
- size_t uv_pixels_size = uv_size.GetArea() * 2;
- auto y_pixels = std::make_unique<uint8_t[]>(y_pixels_size);
- auto uv_pixels = std::make_unique<uint8_t[]>(uv_pixels_size);
- size_t y_i = 0;
- size_t uv_i = 0;
- for (size_t block_y = 0; block_y < 2u; ++block_y) {
- for (int y = 0; y < coded_size.height() / 2; ++y) {
- for (size_t block_x = 0; block_x < 4u; ++block_x) {
- size_t color_index = block_x + block_y * 4;
- const uint8_t* yuv = kYuvColors[color_index];
- for (int x = 0; x < coded_size.width() / 4; ++x) {
- y_pixels[y_i++] = yuv[0];
- if ((x % 2) && (y % 2)) {
- uv_pixels[uv_i++] = yuv[1];
- uv_pixels[uv_i++] = yuv[2];
- }
- }
- }
- }
- }
- DCHECK_EQ(y_i, y_pixels_size);
- DCHECK_EQ(uv_i, uv_pixels_size);
-
- auto* sii = context_provider->SharedImageInterface();
- gpu::Mailbox y_mailbox = sii->CreateSharedImage(
- viz::ResourceFormat::LUMINANCE_8, coded_size, gfx::ColorSpace(),
- kTopLeft_GrSurfaceOrigin, kPremul_SkAlphaType,
- gpu::SHARED_IMAGE_USAGE_GLES2, gpu::kNullSurfaceHandle);
- gpu::Mailbox uv_mailbox = sii->CreateSharedImage(
- viz::ResourceFormat::RG_88, uv_size, gfx::ColorSpace(),
- kTopLeft_GrSurfaceOrigin, kPremul_SkAlphaType,
- gpu::SHARED_IMAGE_USAGE_GLES2, gpu::kNullSurfaceHandle);
- auto* gl = context_provider->ContextGL();
- gl->WaitSyncTokenCHROMIUM(sii->GenUnverifiedSyncToken().GetConstData());
- UploadPixels(gl, y_mailbox, coded_size, GL_LUMINANCE, GL_UNSIGNED_BYTE,
- y_pixels.get());
- UploadPixels(gl, uv_mailbox, uv_size, GL_RG, GL_UNSIGNED_BYTE,
- uv_pixels.get());
- gpu::SyncToken sync_token;
- gl->GenUnverifiedSyncTokenCHROMIUM(sync_token.GetData());
-
- return CreateSharedImageFrame(
- std::move(context_provider), VideoPixelFormat::PIXEL_FORMAT_NV12,
- {y_mailbox, uv_mailbox}, sync_token, GL_TEXTURE_2D, coded_size,
- visible_rect, visible_rect.size(), base::TimeDelta::FromSeconds(1),
- std::move(destroyed_callback));
-}
-
// Readback the contents of a RGBA texture into an array of RGBA values.
static std::unique_ptr<uint8_t[]> ReadbackTexture(
gpu::gles2::GLES2Interface* gl,
@@ -405,29 +161,15 @@ static SkBitmap AllocBitmap(int width, int height) {
return bitmap;
}
-PaintCanvasVideoRendererTest::PaintCanvasVideoRendererTest()
- : natural_frame_(VideoFrame::CreateBlackFrame(gfx::Size(kWidth, kHeight))),
- larger_frame_(
- VideoFrame::CreateBlackFrame(gfx::Size(kWidth * 2, kHeight * 2))),
- smaller_frame_(
- VideoFrame::CreateBlackFrame(gfx::Size(kWidth / 2, kHeight / 2))),
- cropped_frame_(
- VideoFrame::CreateFrame(PIXEL_FORMAT_I420,
- gfx::Size(16, 16),
- gfx::Rect(6, 6, 8, 6),
- gfx::Size(8, 6),
- base::TimeDelta::FromMilliseconds(4))),
- bitmap_(AllocBitmap(kWidth, kHeight)),
- target_canvas_(bitmap_) {
- // Give each frame a unique timestamp.
- natural_frame_->set_timestamp(base::TimeDelta::FromMilliseconds(1));
- larger_frame_->set_timestamp(base::TimeDelta::FromMilliseconds(2));
- smaller_frame_->set_timestamp(base::TimeDelta::FromMilliseconds(3));
-
+static scoped_refptr<VideoFrame> CreateCroppedFrame() {
+ scoped_refptr<VideoFrame> cropped_frame;
+ cropped_frame = VideoFrame::CreateFrame(
+ PIXEL_FORMAT_I420, gfx::Size(16, 16), gfx::Rect(6, 6, 8, 6),
+ gfx::Size(8, 6), base::TimeDelta::FromMilliseconds(4));
// Make sure the cropped video frame's aspect ratio matches the output device.
// Update cropped_frame_'s crop dimensions if this is not the case.
- EXPECT_EQ(cropped_frame()->visible_rect().width() * kHeight,
- cropped_frame()->visible_rect().height() * kWidth);
+ EXPECT_EQ(cropped_frame->visible_rect().width() * kHeight,
+ cropped_frame->visible_rect().height() * kWidth);
// Fill in the cropped frame's entire data with colors:
//
@@ -495,12 +237,29 @@ PaintCanvasVideoRendererTest::PaintCanvasVideoRendererTest()
};
libyuv::I420Copy(cropped_y_plane, 16, cropped_u_plane, 8, cropped_v_plane, 8,
- cropped_frame()->data(VideoFrame::kYPlane),
- cropped_frame()->stride(VideoFrame::kYPlane),
- cropped_frame()->data(VideoFrame::kUPlane),
- cropped_frame()->stride(VideoFrame::kUPlane),
- cropped_frame()->data(VideoFrame::kVPlane),
- cropped_frame()->stride(VideoFrame::kVPlane), 16, 16);
+ cropped_frame->data(VideoFrame::kYPlane),
+ cropped_frame->stride(VideoFrame::kYPlane),
+ cropped_frame->data(VideoFrame::kUPlane),
+ cropped_frame->stride(VideoFrame::kUPlane),
+ cropped_frame->data(VideoFrame::kVPlane),
+ cropped_frame->stride(VideoFrame::kVPlane), 16, 16);
+
+ return cropped_frame;
+}
+
+PaintCanvasVideoRendererTest::PaintCanvasVideoRendererTest()
+ : natural_frame_(VideoFrame::CreateBlackFrame(gfx::Size(kWidth, kHeight))),
+ larger_frame_(
+ VideoFrame::CreateBlackFrame(gfx::Size(kWidth * 2, kHeight * 2))),
+ smaller_frame_(
+ VideoFrame::CreateBlackFrame(gfx::Size(kWidth / 2, kHeight / 2))),
+ cropped_frame_(CreateCroppedFrame()),
+ bitmap_(AllocBitmap(kWidth, kHeight)),
+ target_canvas_(bitmap_) {
+ // Give each frame a unique timestamp.
+ natural_frame_->set_timestamp(base::TimeDelta::FromMilliseconds(1));
+ larger_frame_->set_timestamp(base::TimeDelta::FromMilliseconds(2));
+ smaller_frame_->set_timestamp(base::TimeDelta::FromMilliseconds(3));
}
PaintCanvasVideoRendererTest::~PaintCanvasVideoRendererTest() = default;
@@ -1127,8 +886,9 @@ TEST_F(PaintCanvasVideoRendererTest, TexSubImage2D_Y16_R32F) {
2 /*xoffset*/, 1 /*yoffset*/, false /*flip_y*/, true);
}
-// Fixture for tests that require a GL context.
-class PaintCanvasVideoRendererWithGLTest : public PaintCanvasVideoRendererTest {
+// Fixture for tests that require a GL context. The input parameter indicates
+// whether OOP rasterization is enabled.
+class PaintCanvasVideoRendererWithGLTest : public testing::TestWithParam<bool> {
public:
using GetColorCallback = base::RepeatingCallback<SkColor(int, int)>;
@@ -1137,21 +897,29 @@ class PaintCanvasVideoRendererWithGLTest : public PaintCanvasVideoRendererTest {
enable_pixels_.emplace();
media_context_ = base::MakeRefCounted<viz::TestInProcessContextProvider>(
/*enable_gpu_rasterization=*/false,
- /*enable_oop_rasterization=*/false, /*support_locking=*/false);
+ /*enable_oop_rasterization=*/GetParam(), /*support_locking=*/false);
gpu::ContextResult result = media_context_->BindToCurrentThread();
ASSERT_EQ(result, gpu::ContextResult::kSuccess);
+ gles2_context_ = base::MakeRefCounted<viz::TestInProcessContextProvider>(
+ /*enable_gpu_rasterization=*/false,
+ /*enable_oop_rasterization=*/false, /*support_locking=*/false);
+ result = gles2_context_->BindToCurrentThread();
+ ASSERT_EQ(result, gpu::ContextResult::kSuccess);
+
destination_context_ =
base::MakeRefCounted<viz::TestInProcessContextProvider>(
/*enable_gpu_rasterization=*/false,
/*enable_oop_rasterization=*/false, /*support_locking=*/false);
result = destination_context_->BindToCurrentThread();
ASSERT_EQ(result, gpu::ContextResult::kSuccess);
+ cropped_frame_ = CreateCroppedFrame();
}
void TearDown() override {
renderer_.ResetCache();
destination_context_.reset();
+ gles2_context_.reset();
media_context_.reset();
enable_pixels_.reset();
viz::TestGpuServiceHolder::ResetInstance();
@@ -1211,7 +979,7 @@ class PaintCanvasVideoRendererWithGLTest : public PaintCanvasVideoRendererTest {
// Creates a cropped RGBA VideoFrame. |closure| is run once the shared images
// backing the VideoFrame have been destroyed.
scoped_refptr<VideoFrame> CreateTestRGBAFrame(base::OnceClosure closure) {
- return CreateSharedImageRGBAFrame(media_context_, gfx::Size(16, 8),
+ return CreateSharedImageRGBAFrame(gles2_context_, gfx::Size(16, 8),
gfx::Rect(3, 3, 12, 4),
std::move(closure));
}
@@ -1243,7 +1011,7 @@ class PaintCanvasVideoRendererWithGLTest : public PaintCanvasVideoRendererTest {
// Creates a cropped I420 VideoFrame. |closure| is run once the shared images
// backing the VideoFrame have been destroyed.
scoped_refptr<VideoFrame> CreateTestI420Frame(base::OnceClosure closure) {
- return CreateSharedImageI420Frame(media_context_, gfx::Size(16, 8),
+ return CreateSharedImageI420Frame(gles2_context_, gfx::Size(16, 8),
gfx::Rect(2, 2, 12, 4),
std::move(closure));
}
@@ -1251,7 +1019,7 @@ class PaintCanvasVideoRendererWithGLTest : public PaintCanvasVideoRendererTest {
// backing the VideoFrame have been destroyed.
scoped_refptr<VideoFrame> CreateTestI420FrameNotSubset(
base::OnceClosure closure) {
- return CreateSharedImageI420Frame(media_context_, gfx::Size(16, 8),
+ return CreateSharedImageI420Frame(gles2_context_, gfx::Size(16, 8),
gfx::Rect(0, 0, 16, 8),
std::move(closure));
}
@@ -1294,7 +1062,7 @@ class PaintCanvasVideoRendererWithGLTest : public PaintCanvasVideoRendererTest {
// not available. |closure| is run once the shared images backing the
// VideoFrame have been destroyed.
scoped_refptr<VideoFrame> CreateTestNV12Frame(base::OnceClosure closure) {
- return CreateSharedImageNV12Frame(media_context_, gfx::Size(16, 8),
+ return CreateSharedImageNV12Frame(gles2_context_, gfx::Size(16, 8),
gfx::Rect(2, 2, 12, 4),
std::move(closure));
}
@@ -1307,13 +1075,24 @@ class PaintCanvasVideoRendererWithGLTest : public PaintCanvasVideoRendererTest {
CheckI420FramePixels(std::move(get_color));
}
+ scoped_refptr<VideoFrame> cropped_frame() { return cropped_frame_; }
+
protected:
base::Optional<gl::DisableNullDrawGLBindings> enable_pixels_;
scoped_refptr<viz::TestInProcessContextProvider> media_context_;
+ scoped_refptr<viz::TestInProcessContextProvider> gles2_context_;
scoped_refptr<viz::TestInProcessContextProvider> destination_context_;
+
+ PaintCanvasVideoRenderer renderer_;
+ scoped_refptr<VideoFrame> cropped_frame_;
+ base::test::TaskEnvironment task_environment_;
};
-TEST_F(PaintCanvasVideoRendererWithGLTest, CopyVideoFrameYUVDataToGLTexture) {
+INSTANTIATE_TEST_SUITE_P(OopRasterMode,
+ PaintCanvasVideoRendererWithGLTest,
+ testing::Bool());
+
+TEST_P(PaintCanvasVideoRendererWithGLTest, CopyVideoFrameYUVDataToGLTexture) {
auto* destination_gl = destination_context_->ContextGL();
DCHECK(destination_gl);
GLenum target = GL_TEXTURE_2D;
@@ -1344,7 +1123,7 @@ TEST_F(PaintCanvasVideoRendererWithGLTest, CopyVideoFrameYUVDataToGLTexture) {
destination_gl->DeleteTextures(1, &texture);
}
-TEST_F(PaintCanvasVideoRendererWithGLTest,
+TEST_P(PaintCanvasVideoRendererWithGLTest,
CopyVideoFrameYUVDataToGLTexture_FlipY) {
auto* destination_gl = destination_context_->ContextGL();
DCHECK(destination_gl);
@@ -1378,7 +1157,7 @@ TEST_F(PaintCanvasVideoRendererWithGLTest,
// Checks that we correctly copy a RGBA shared image VideoFrame when using
// CopyVideoFrameYUVDataToGLTexture, including correct cropping.
-TEST_F(PaintCanvasVideoRendererWithGLTest,
+TEST_P(PaintCanvasVideoRendererWithGLTest,
CopyVideoFrameTexturesToGLTextureRGBA) {
base::RunLoop run_loop;
scoped_refptr<VideoFrame> frame = CreateTestRGBAFrame(run_loop.QuitClosure());
@@ -1392,11 +1171,11 @@ TEST_F(PaintCanvasVideoRendererWithGLTest,
// Checks that we correctly copy a RGBA shared image VideoFrame that needs read
// lock fences, when using CopyVideoFrameYUVDataToGLTexture, including correct
// cropping.
-TEST_F(PaintCanvasVideoRendererWithGLTest,
+TEST_P(PaintCanvasVideoRendererWithGLTest,
CopyVideoFrameTexturesToGLTextureRGBA_ReadLockFence) {
base::RunLoop run_loop;
scoped_refptr<VideoFrame> frame = CreateTestRGBAFrame(run_loop.QuitClosure());
- frame->metadata()->read_lock_fences_enabled = true;
+ frame->metadata().read_lock_fences_enabled = true;
CopyVideoFrameTexturesAndCheckPixels(frame, &CheckRGBAFramePixels);
@@ -1406,7 +1185,7 @@ TEST_F(PaintCanvasVideoRendererWithGLTest,
// Checks that we correctly paint a RGBA shared image VideoFrame, including
// correct cropping.
-TEST_F(PaintCanvasVideoRendererWithGLTest, PaintRGBA) {
+TEST_P(PaintCanvasVideoRendererWithGLTest, PaintRGBA) {
base::RunLoop run_loop;
scoped_refptr<VideoFrame> frame = CreateTestRGBAFrame(run_loop.QuitClosure());
@@ -1418,7 +1197,7 @@ TEST_F(PaintCanvasVideoRendererWithGLTest, PaintRGBA) {
// Checks that we correctly copy an I420 shared image VideoFrame when using
// CopyVideoFrameYUVDataToGLTexture, including correct cropping.
-TEST_F(PaintCanvasVideoRendererWithGLTest,
+TEST_P(PaintCanvasVideoRendererWithGLTest,
CopyVideoFrameTexturesToGLTextureI420) {
base::RunLoop run_loop;
scoped_refptr<VideoFrame> frame = CreateTestI420Frame(run_loop.QuitClosure());
@@ -1431,7 +1210,7 @@ TEST_F(PaintCanvasVideoRendererWithGLTest,
// Checks that we correctly paint a I420 shared image VideoFrame, including
// correct cropping.
-TEST_F(PaintCanvasVideoRendererWithGLTest, PaintI420) {
+TEST_P(PaintCanvasVideoRendererWithGLTest, PaintI420) {
base::RunLoop run_loop;
scoped_refptr<VideoFrame> frame = CreateTestI420Frame(run_loop.QuitClosure());
@@ -1443,7 +1222,7 @@ TEST_F(PaintCanvasVideoRendererWithGLTest, PaintI420) {
// Checks that we correctly paint a I420 shared image VideoFrame, including
// correct cropping.
-TEST_F(PaintCanvasVideoRendererWithGLTest, PaintI420NotSubset) {
+TEST_P(PaintCanvasVideoRendererWithGLTest, PaintI420NotSubset) {
base::RunLoop run_loop;
scoped_refptr<VideoFrame> frame =
CreateTestI420FrameNotSubset(run_loop.QuitClosure());
@@ -1456,7 +1235,7 @@ TEST_F(PaintCanvasVideoRendererWithGLTest, PaintI420NotSubset) {
// Checks that we correctly copy a NV12 shared image VideoFrame when using
// CopyVideoFrameYUVDataToGLTexture, including correct cropping.
-TEST_F(PaintCanvasVideoRendererWithGLTest,
+TEST_P(PaintCanvasVideoRendererWithGLTest,
CopyVideoFrameTexturesToGLTextureNV12) {
base::RunLoop run_loop;
scoped_refptr<VideoFrame> frame = CreateTestNV12Frame(run_loop.QuitClosure());
@@ -1473,7 +1252,7 @@ TEST_F(PaintCanvasVideoRendererWithGLTest,
// Checks that we correctly paint a NV12 shared image VideoFrame, including
// correct cropping.
-TEST_F(PaintCanvasVideoRendererWithGLTest, PaintNV12) {
+TEST_P(PaintCanvasVideoRendererWithGLTest, PaintNV12) {
base::RunLoop run_loop;
scoped_refptr<VideoFrame> frame = CreateTestNV12Frame(run_loop.QuitClosure());
if (!frame) {
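The fixture change above converts these GL tests to value-parameterized tests: each TEST_P body now runs twice, once per OOP-raster mode. A self-contained illustration of the gtest pattern being used (the test here is hypothetical, not part of the patch):

#include "testing/gtest/include/gtest/gtest.h"

// Each TEST_P body runs once per value supplied by the instantiation below.
class OopRasterModeExampleTest : public testing::TestWithParam<bool> {
 protected:
  bool oop_raster_enabled() const { return GetParam(); }
};

TEST_P(OopRasterModeExampleTest, RunsInBothModes) {
  // First run: GetParam() == false; second run: GetParam() == true.
  SUCCEED() << "oop_raster=" << oop_raster_enabled();
}

// testing::Bool() expands the suite over {false, true}.
INSTANTIATE_TEST_SUITE_P(OopRasterMode,
                         OopRasterModeExampleTest,
                         testing::Bool());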
diff --git a/chromium/media/renderers/renderer_impl.cc b/chromium/media/renderers/renderer_impl.cc
index d69595d7718..0ab6a9ca798 100644
--- a/chromium/media/renderers/renderer_impl.cc
+++ b/chromium/media/renderers/renderer_impl.cc
@@ -197,6 +197,14 @@ void RendererImpl::SetPreservesPitch(bool preserves_pitch) {
audio_renderer_->SetPreservesPitch(preserves_pitch);
}
+void RendererImpl::SetAutoplayInitiated(bool autoplay_initiated) {
+ DVLOG(1) << __func__;
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ if (audio_renderer_)
+ audio_renderer_->SetAutoplayInitiated(autoplay_initiated);
+}
+
void RendererImpl::Flush(base::OnceClosure flush_cb) {
DVLOG(1) << __func__;
DCHECK(task_runner_->BelongsToCurrentThread());
@@ -518,7 +526,7 @@ void RendererImpl::OnAudioRendererFlushDone() {
// If we had a deferred video renderer underflow prior to the flush, it should
// have been cleared by the audio renderer changing to BUFFERING_HAVE_NOTHING.
- DCHECK(deferred_video_underflow_cb_.IsCancelled());
+ DCHECK(!has_deferred_buffering_state_change_);
DCHECK_EQ(audio_buffering_state_, BUFFERING_HAVE_NOTHING);
audio_ended_ = false;
audio_playing_ = false;
@@ -719,11 +727,12 @@ void RendererImpl::OnBufferingStateChange(DemuxerStream::Type type,
if (video_buffering_state_ == BUFFERING_HAVE_ENOUGH &&
audio_buffering_state_ == BUFFERING_HAVE_ENOUGH &&
new_buffering_state == BUFFERING_HAVE_NOTHING &&
- deferred_video_underflow_cb_.IsCancelled()) {
+ !has_deferred_buffering_state_change_) {
DVLOG(4) << __func__ << " Deferring HAVE_NOTHING for video stream.";
deferred_video_underflow_cb_.Reset(
- base::BindRepeating(&RendererImpl::OnBufferingStateChange, weak_this_,
- type, new_buffering_state, reason));
+ base::BindOnce(&RendererImpl::OnBufferingStateChange, weak_this_,
+ type, new_buffering_state, reason));
+ has_deferred_buffering_state_change_ = true;
task_runner_->PostDelayedTask(FROM_HERE,
deferred_video_underflow_cb_.callback(),
video_underflow_threshold_.value());
@@ -732,12 +741,14 @@ void RendererImpl::OnBufferingStateChange(DemuxerStream::Type type,
DVLOG(4) << "deferred_video_underflow_cb_.Cancel()";
deferred_video_underflow_cb_.Cancel();
- } else if (!deferred_video_underflow_cb_.IsCancelled() &&
+ has_deferred_buffering_state_change_ = false;
+ } else if (has_deferred_buffering_state_change_ &&
type == DemuxerStream::AUDIO &&
new_buffering_state == BUFFERING_HAVE_NOTHING) {
// If audio underflows while we have a deferred video underflow in progress
// we want to mark video as underflowed immediately and cancel the deferral.
deferred_video_underflow_cb_.Cancel();
+ has_deferred_buffering_state_change_ = false;
video_buffering_state_ = BUFFERING_HAVE_NOTHING;
}
@@ -943,7 +954,7 @@ void RendererImpl::OnVideoFrameRateChange(base::Optional<int> fps) {
client_->OnVideoFrameRateChange(fps);
}
-void RendererImpl::CleanUpTrackChange(base::RepeatingClosure on_finished,
+void RendererImpl::CleanUpTrackChange(base::OnceClosure on_finished,
bool* ended,
bool* playing) {
*playing = false;
@@ -970,15 +981,15 @@ void RendererImpl::OnSelectedVideoTracksChanged(
// 'fixing' the stream -> restarting if its the same stream,
// reinitializing if it is different.
- base::RepeatingClosure fix_stream_cb;
+ base::OnceClosure fix_stream_cb;
if (stream && stream != current_video_stream_) {
- fix_stream_cb = base::BindRepeating(
- &RendererImpl::ReinitializeVideoRenderer, weak_this_, stream,
- GetMediaTime(), base::Passed(&change_completed_cb));
+ fix_stream_cb =
+ base::BindOnce(&RendererImpl::ReinitializeVideoRenderer, weak_this_,
+ stream, GetMediaTime(), std::move(change_completed_cb));
} else {
- fix_stream_cb = base::BindRepeating(
+ fix_stream_cb = base::BindOnce(
&RendererImpl::RestartVideoRenderer, weak_this_, current_video_stream_,
- GetMediaTime(), base::Passed(&change_completed_cb));
+ GetMediaTime(), std::move(change_completed_cb));
}
pending_video_track_change_ = true;
@@ -1003,16 +1014,16 @@ void RendererImpl::OnEnabledAudioTracksChanged(
// 'fixing' the stream -> restarting if its the same stream,
// reinitializing if it is different.
- base::RepeatingClosure fix_stream_cb;
+ base::OnceClosure fix_stream_cb;
if (stream && stream != current_audio_stream_) {
- fix_stream_cb = base::BindRepeating(
- &RendererImpl::ReinitializeAudioRenderer, weak_this_, stream,
- GetMediaTime(), base::Passed(&change_completed_cb));
+ fix_stream_cb =
+ base::BindOnce(&RendererImpl::ReinitializeAudioRenderer, weak_this_,
+ stream, GetMediaTime(), std::move(change_completed_cb));
} else {
- fix_stream_cb = base::BindRepeating(
+ fix_stream_cb = base::BindOnce(
&RendererImpl::RestartAudioRenderer, weak_this_, current_audio_stream_,
- GetMediaTime(), base::Passed(&change_completed_cb));
+ GetMediaTime(), std::move(change_completed_cb));
}
{
diff --git a/chromium/media/renderers/renderer_impl.h b/chromium/media/renderers/renderer_impl.h
index c33d290bfc9..5c0d688aa6e 100644
--- a/chromium/media/renderers/renderer_impl.h
+++ b/chromium/media/renderers/renderer_impl.h
@@ -60,6 +60,7 @@ class MEDIA_EXPORT RendererImpl final : public Renderer {
void SetCdm(CdmContext* cdm_context, CdmAttachedCB cdm_attached_cb) final;
void SetLatencyHint(base::Optional<base::TimeDelta> latency_hint) final;
void SetPreservesPitch(bool preserves_pitch) final;
+ void SetAutoplayInitiated(bool autoplay_initiated) final;
void Flush(base::OnceClosure flush_cb) final;
void StartPlayingFrom(base::TimeDelta time) final;
void SetPlaybackRate(double playback_rate) final;
@@ -151,7 +152,7 @@ class MEDIA_EXPORT RendererImpl final : public Renderer {
base::OnceClosure restart_completed_cb);
// Fix state booleans after the stream switching is finished.
- void CleanUpTrackChange(base::RepeatingClosure on_finished,
+ void CleanUpTrackChange(base::OnceClosure on_finished,
bool* ended,
bool* playing);
@@ -246,10 +247,11 @@ class MEDIA_EXPORT RendererImpl final : public Renderer {
bool clockless_video_playback_enabled_for_testing_;
// Used to defer underflow for video when audio is present.
- base::CancelableClosure deferred_video_underflow_cb_;
+ base::CancelableOnceClosure deferred_video_underflow_cb_;
- // Used to defer underflow for audio when restarting audio playback.
- base::CancelableClosure deferred_audio_restart_underflow_cb_;
+ // We cannot use `!deferred_video_underflow_cb_.IsCancelled()` as that changes
+ // when the callback is run, even if not explicitly cancelled.
+ bool has_deferred_buffering_state_change_ = false;
// The amount of time to wait before declaring underflow if the video renderer
// runs out of data but the audio renderer still has enough.
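The switch from CancelableClosure to CancelableOnceClosure is what forces the separate boolean above: a once-callback is consumed when it runs, and IsCancelled() flips at that point even without an explicit Cancel(), so it cannot distinguish "ran" from "cancelled". A simplified sketch of the bookkeeping pattern, outside the renderer (class name is illustrative; only the base:: calls shown in the patch are assumed):

#include "base/callback.h"
#include "base/cancelable_callback.h"

// The boolean is authoritative for "is a deferral pending"; the
// CancelableOnceClosure only carries the deferred work itself.
class DeferredUnderflowExample {
 public:
  void Defer(base::OnceClosure cb) {
    deferred_cb_.Reset(std::move(cb));
    has_deferred_ = true;  // IsCancelled() cannot express this reliably.
  }

  void CancelDeferral() {
    deferred_cb_.Cancel();
    has_deferred_ = false;
  }

  bool has_deferred() const { return has_deferred_; }
  base::OnceClosure TakeCallback() { return deferred_cb_.callback(); }

 private:
  base::CancelableOnceClosure deferred_cb_;
  bool has_deferred_ = false;
};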
diff --git a/chromium/media/renderers/shared_image_video_frame_test_utils.cc b/chromium/media/renderers/shared_image_video_frame_test_utils.cc
new file mode 100644
index 00000000000..9b36170d6c1
--- /dev/null
+++ b/chromium/media/renderers/shared_image_video_frame_test_utils.cc
@@ -0,0 +1,254 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/renderers/shared_image_video_frame_test_utils.h"
+
+#include "base/logging.h"
+#include "components/viz/common/gpu/context_provider.h"
+#include "gpu/GLES2/gl2extchromium.h"
+#include "gpu/command_buffer/client/gles2_interface_stub.h"
+#include "gpu/command_buffer/client/shared_image_interface.h"
+#include "gpu/command_buffer/common/capabilities.h"
+#include "gpu/command_buffer/common/shared_image_usage.h"
+
+namespace media {
+
+namespace {
+
+static constexpr const uint8_t kYuvColors[8][3] = {
+ {0x00, 0x80, 0x80}, // Black
+ {0x4c, 0x54, 0xff}, // Red
+ {0x95, 0x2b, 0x15}, // Green
+ {0xe1, 0x00, 0x94}, // Yellow
+ {0x1d, 0xff, 0x6b}, // Blue
+ {0x69, 0xd3, 0xec}, // Magenta
+ {0xb3, 0xaa, 0x00}, // Cyan
+ {0xff, 0x80, 0x80}, // White
+};
+
+// Destroys a list of shared images after a sync token is passed. Also runs
+// |callback|.
+void DestroySharedImages(scoped_refptr<viz::ContextProvider> context_provider,
+ std::vector<gpu::Mailbox> mailboxes,
+ base::OnceClosure callback,
+ const gpu::SyncToken& sync_token) {
+ auto* sii = context_provider->SharedImageInterface();
+ for (const auto& mailbox : mailboxes)
+ sii->DestroySharedImage(sync_token, mailbox);
+ std::move(callback).Run();
+}
+
+// Upload pixels to a shared image using GL.
+void UploadPixels(gpu::gles2::GLES2Interface* gl,
+ const gpu::Mailbox& mailbox,
+ const gfx::Size& size,
+ GLenum format,
+ GLenum type,
+ const uint8_t* data) {
+ GLuint texture = gl->CreateAndTexStorage2DSharedImageCHROMIUM(mailbox.name);
+ gl->BeginSharedImageAccessDirectCHROMIUM(
+ texture, GL_SHARED_IMAGE_ACCESS_MODE_READWRITE_CHROMIUM);
+ gl->BindTexture(GL_TEXTURE_2D, texture);
+ gl->TexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, size.width(), size.height(), format,
+ type, data);
+ gl->EndSharedImageAccessDirectCHROMIUM(texture);
+ gl->DeleteTextures(1, &texture);
+}
+
+} // namespace
+
+scoped_refptr<VideoFrame> CreateSharedImageFrame(
+ scoped_refptr<viz::ContextProvider> context_provider,
+ VideoPixelFormat format,
+ std::vector<gpu::Mailbox> mailboxes,
+ const gpu::SyncToken& sync_token,
+ GLenum texture_target,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ base::TimeDelta timestamp,
+ base::OnceClosure destroyed_callback) {
+ gpu::MailboxHolder mailboxes_for_frame[VideoFrame::kMaxPlanes] = {};
+ size_t i = 0;
+ for (const auto& mailbox : mailboxes) {
+ mailboxes_for_frame[i++] =
+ gpu::MailboxHolder(mailbox, sync_token, texture_target);
+ }
+ auto callback =
+ base::BindOnce(&DestroySharedImages, std::move(context_provider),
+ std::move(mailboxes), std::move(destroyed_callback));
+ return VideoFrame::WrapNativeTextures(format, mailboxes_for_frame,
+ std::move(callback), coded_size,
+ visible_rect, natural_size, timestamp);
+}
+
+scoped_refptr<VideoFrame> CreateSharedImageRGBAFrame(
+ scoped_refptr<viz::ContextProvider> context_provider,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ base::OnceClosure destroyed_callback) {
+ DCHECK_EQ(coded_size.width() % 4, 0);
+ DCHECK_EQ(coded_size.height() % 2, 0);
+ size_t pixels_size = coded_size.GetArea() * 4;
+ auto pixels = std::make_unique<uint8_t[]>(pixels_size);
+ size_t i = 0;
+ for (size_t block_y = 0; block_y < 2u; ++block_y) {
+ for (int y = 0; y < coded_size.height() / 2; ++y) {
+ for (size_t block_x = 0; block_x < 4u; ++block_x) {
+ for (int x = 0; x < coded_size.width() / 4; ++x) {
+ pixels[i++] = 0xffu * (block_x % 2); // R
+ pixels[i++] = 0xffu * (block_x / 2); // G
+ pixels[i++] = 0xffu * block_y; // B
+ pixels[i++] = 0xffu; // A
+ }
+ }
+ }
+ }
+ DCHECK_EQ(i, pixels_size);
+
+ auto* sii = context_provider->SharedImageInterface();
+ gpu::Mailbox mailbox = sii->CreateSharedImage(
+ viz::ResourceFormat::RGBA_8888, coded_size, gfx::ColorSpace(),
+ kTopLeft_GrSurfaceOrigin, kPremul_SkAlphaType,
+ gpu::SHARED_IMAGE_USAGE_GLES2, gpu::kNullSurfaceHandle);
+ auto* gl = context_provider->ContextGL();
+ gl->WaitSyncTokenCHROMIUM(sii->GenUnverifiedSyncToken().GetConstData());
+ UploadPixels(gl, mailbox, coded_size, GL_RGBA, GL_UNSIGNED_BYTE,
+ pixels.get());
+ gpu::SyncToken sync_token;
+ gl->GenUnverifiedSyncTokenCHROMIUM(sync_token.GetData());
+
+ return CreateSharedImageFrame(
+ std::move(context_provider), VideoPixelFormat::PIXEL_FORMAT_ABGR,
+ {mailbox}, sync_token, GL_TEXTURE_2D, coded_size, visible_rect,
+ visible_rect.size(), base::TimeDelta::FromSeconds(1),
+ std::move(destroyed_callback));
+}
+
+scoped_refptr<VideoFrame> CreateSharedImageI420Frame(
+ scoped_refptr<viz::ContextProvider> context_provider,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ base::OnceClosure destroyed_callback) {
+ DCHECK_EQ(coded_size.width() % 8, 0);
+ DCHECK_EQ(coded_size.height() % 4, 0);
+ gfx::Size uv_size(coded_size.width() / 2, coded_size.height() / 2);
+ size_t y_pixels_size = coded_size.GetArea();
+ size_t uv_pixels_size = uv_size.GetArea();
+ auto y_pixels = std::make_unique<uint8_t[]>(y_pixels_size);
+ auto u_pixels = std::make_unique<uint8_t[]>(uv_pixels_size);
+ auto v_pixels = std::make_unique<uint8_t[]>(uv_pixels_size);
+ size_t y_i = 0;
+ size_t uv_i = 0;
+ for (size_t block_y = 0; block_y < 2u; ++block_y) {
+ for (int y = 0; y < coded_size.height() / 2; ++y) {
+ for (size_t block_x = 0; block_x < 4u; ++block_x) {
+ size_t color_index = block_x + block_y * 4;
+ const uint8_t* yuv = kYuvColors[color_index];
+ for (int x = 0; x < coded_size.width() / 4; ++x) {
+ y_pixels[y_i++] = yuv[0];
+ if ((x % 2) && (y % 2)) {
+ u_pixels[uv_i] = yuv[1];
+ v_pixels[uv_i++] = yuv[2];
+ }
+ }
+ }
+ }
+ }
+ DCHECK_EQ(y_i, y_pixels_size);
+ DCHECK_EQ(uv_i, uv_pixels_size);
+
+ auto* sii = context_provider->SharedImageInterface();
+ gpu::Mailbox y_mailbox = sii->CreateSharedImage(
+ viz::ResourceFormat::LUMINANCE_8, coded_size, gfx::ColorSpace(),
+ kTopLeft_GrSurfaceOrigin, kPremul_SkAlphaType,
+ gpu::SHARED_IMAGE_USAGE_GLES2, gpu::kNullSurfaceHandle);
+ gpu::Mailbox u_mailbox = sii->CreateSharedImage(
+ viz::ResourceFormat::LUMINANCE_8, uv_size, gfx::ColorSpace(),
+ kTopLeft_GrSurfaceOrigin, kPremul_SkAlphaType,
+ gpu::SHARED_IMAGE_USAGE_GLES2, gpu::kNullSurfaceHandle);
+ gpu::Mailbox v_mailbox = sii->CreateSharedImage(
+ viz::ResourceFormat::LUMINANCE_8, uv_size, gfx::ColorSpace(),
+ kTopLeft_GrSurfaceOrigin, kPremul_SkAlphaType,
+ gpu::SHARED_IMAGE_USAGE_GLES2, gpu::kNullSurfaceHandle);
+ auto* gl = context_provider->ContextGL();
+ gl->WaitSyncTokenCHROMIUM(sii->GenUnverifiedSyncToken().GetConstData());
+ UploadPixels(gl, y_mailbox, coded_size, GL_LUMINANCE, GL_UNSIGNED_BYTE,
+ y_pixels.get());
+ UploadPixels(gl, u_mailbox, uv_size, GL_LUMINANCE, GL_UNSIGNED_BYTE,
+ u_pixels.get());
+ UploadPixels(gl, v_mailbox, uv_size, GL_LUMINANCE, GL_UNSIGNED_BYTE,
+ v_pixels.get());
+ gpu::SyncToken sync_token;
+ gl->GenUnverifiedSyncTokenCHROMIUM(sync_token.GetData());
+
+ return CreateSharedImageFrame(
+ std::move(context_provider), VideoPixelFormat::PIXEL_FORMAT_I420,
+ {y_mailbox, u_mailbox, v_mailbox}, sync_token, GL_TEXTURE_2D, coded_size,
+ visible_rect, visible_rect.size(), base::TimeDelta::FromSeconds(1),
+ std::move(destroyed_callback));
+}
+
+scoped_refptr<VideoFrame> CreateSharedImageNV12Frame(
+ scoped_refptr<viz::ContextProvider> context_provider,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ base::OnceClosure destroyed_callback) {
+ DCHECK_EQ(coded_size.width() % 8, 0);
+ DCHECK_EQ(coded_size.height() % 4, 0);
+ if (!context_provider->ContextCapabilities().texture_rg) {
+ LOG(ERROR) << "GL_EXT_texture_rg not supported";
+ return {};
+ }
+ gfx::Size uv_size(coded_size.width() / 2, coded_size.height() / 2);
+ size_t y_pixels_size = coded_size.GetArea();
+ size_t uv_pixels_size = uv_size.GetArea() * 2;
+ auto y_pixels = std::make_unique<uint8_t[]>(y_pixels_size);
+ auto uv_pixels = std::make_unique<uint8_t[]>(uv_pixels_size);
+ size_t y_i = 0;
+ size_t uv_i = 0;
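+ // Same 4x2 color-block pattern as the I420 helper, but UV samples are
+ // interleaved into a single plane.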
+ for (size_t block_y = 0; block_y < 2u; ++block_y) {
+ for (int y = 0; y < coded_size.height() / 2; ++y) {
+ for (size_t block_x = 0; block_x < 4u; ++block_x) {
+ size_t color_index = block_x + block_y * 4;
+ const uint8_t* yuv = kYuvColors[color_index];
+ for (int x = 0; x < coded_size.width() / 4; ++x) {
+ y_pixels[y_i++] = yuv[0];
+ if ((x % 2) && (y % 2)) {
+ uv_pixels[uv_i++] = yuv[1];
+ uv_pixels[uv_i++] = yuv[2];
+ }
+ }
+ }
+ }
+ }
+ DCHECK_EQ(y_i, y_pixels_size);
+ DCHECK_EQ(uv_i, uv_pixels_size);
+
+ auto* sii = context_provider->SharedImageInterface();
+ gpu::Mailbox y_mailbox = sii->CreateSharedImage(
+ viz::ResourceFormat::LUMINANCE_8, coded_size, gfx::ColorSpace(),
+ kTopLeft_GrSurfaceOrigin, kPremul_SkAlphaType,
+ gpu::SHARED_IMAGE_USAGE_GLES2, gpu::kNullSurfaceHandle);
+ gpu::Mailbox uv_mailbox = sii->CreateSharedImage(
+ viz::ResourceFormat::RG_88, uv_size, gfx::ColorSpace(),
+ kTopLeft_GrSurfaceOrigin, kPremul_SkAlphaType,
+ gpu::SHARED_IMAGE_USAGE_GLES2, gpu::kNullSurfaceHandle);
+ auto* gl = context_provider->ContextGL();
+ gl->WaitSyncTokenCHROMIUM(sii->GenUnverifiedSyncToken().GetConstData());
+ UploadPixels(gl, y_mailbox, coded_size, GL_LUMINANCE, GL_UNSIGNED_BYTE,
+ y_pixels.get());
+ UploadPixels(gl, uv_mailbox, uv_size, GL_RG, GL_UNSIGNED_BYTE,
+ uv_pixels.get());
+ gpu::SyncToken sync_token;
+ gl->GenUnverifiedSyncTokenCHROMIUM(sync_token.GetData());
+
+ return CreateSharedImageFrame(
+ std::move(context_provider), VideoPixelFormat::PIXEL_FORMAT_NV12,
+ {y_mailbox, uv_mailbox}, sync_token, GL_TEXTURE_2D, coded_size,
+ visible_rect, visible_rect.size(), base::TimeDelta::FromSeconds(1),
+ std::move(destroyed_callback));
+}
+
+} // namespace media
diff --git a/chromium/media/renderers/shared_image_video_frame_test_utils.h b/chromium/media/renderers/shared_image_video_frame_test_utils.h
new file mode 100644
index 00000000000..b8fc8816850
--- /dev/null
+++ b/chromium/media/renderers/shared_image_video_frame_test_utils.h
@@ -0,0 +1,63 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_RENDERERS_SHARED_IMAGE_VIDEO_FRAME_TEST_UTILS_H_
+#define MEDIA_RENDERERS_SHARED_IMAGE_VIDEO_FRAME_TEST_UTILS_H_
+
+#include <GLES3/gl3.h>
+#include <stdint.h>
+
+#include "base/bind.h"
+#include "components/viz/common/gpu/context_provider.h"
+#include "media/base/video_frame.h"
+
+namespace media {
+
+// Creates a video frame from a set of shared images with a common texture
+// target and sync token.
+scoped_refptr<VideoFrame> CreateSharedImageFrame(
+ scoped_refptr<viz::ContextProvider> context_provider,
+ VideoPixelFormat format,
+ std::vector<gpu::Mailbox> mailboxes,
+ const gpu::SyncToken& sync_token,
+ GLenum texture_target,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ base::TimeDelta timestamp,
+ base::OnceClosure destroyed_callback);
+
+// Creates a shared image backed frame in RGBA format, with colors on the
+// shared image mapped as follows.
+// Bk | R | G | Y
+// ---+---+---+---
+// Bl | M | C | W
+// (Bk = black, R = red, G = green, Y = yellow; Bl = blue, M = magenta,
+// C = cyan, W = white.)
+scoped_refptr<VideoFrame> CreateSharedImageRGBAFrame(
+ scoped_refptr<viz::ContextProvider> context_provider,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ base::OnceClosure destroyed_callback);
+
+// Creates a shared image backed frame in I420 format, with colors mapped
+// exactly like CreateSharedImageRGBAFrame above. Note that chroma subsamples
+// may get interpolated, leading to inconsistent colors around the "seams".
+scoped_refptr<VideoFrame> CreateSharedImageI420Frame(
+ scoped_refptr<viz::ContextProvider> context_provider,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ base::OnceClosure destroyed_callback);
+
+// Creates a shared image backed frame in NV12 format, with colors mapped
+// exactly like CreateSharedImageRGBAFrame above.
+// Returns nullptr if the GL_EXT_texture_rg extension required for NV12
+// support is unavailable.
+scoped_refptr<VideoFrame> CreateSharedImageNV12Frame(
+ scoped_refptr<viz::ContextProvider> context_provider,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ base::OnceClosure destroyed_callback);
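+
+// Example test usage (illustrative only; |context_provider| is assumed to be
+// a live viz::ContextProvider owned by the test fixture):
+//
+//   auto rgba_frame = CreateSharedImageRGBAFrame(
+//       context_provider, gfx::Size(16, 8), gfx::Rect(16, 8),
+//       base::DoNothing());
+//   auto nv12_frame = CreateSharedImageNV12Frame(
+//       context_provider, gfx::Size(16, 8), gfx::Rect(16, 8),
+//       base::DoNothing());
+//   // |nv12_frame| is null when GL_EXT_texture_rg is unavailable.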
+
+} // namespace media
+
+#endif // MEDIA_RENDERERS_SHARED_IMAGE_VIDEO_FRAME_TEST_UTILS_H_
diff --git a/chromium/media/renderers/video_frame_yuv_converter.cc b/chromium/media/renderers/video_frame_yuv_converter.cc
index f9197b6ed73..1aef7ffc4a7 100644
--- a/chromium/media/renderers/video_frame_yuv_converter.cc
+++ b/chromium/media/renderers/video_frame_yuv_converter.cc
@@ -7,7 +7,7 @@
#include <GLES3/gl3.h>
#include "components/viz/common/gpu/raster_context_provider.h"
-#include "gpu/GLES2/gl2extchromium.h"
+#include "components/viz/common/resources/resource_format_utils.h"
#include "gpu/command_buffer/client/raster_interface.h"
#include "gpu/command_buffer/client/shared_image_interface.h"
#include "gpu/command_buffer/common/shared_image_usage.h"
@@ -17,6 +17,7 @@
#include "third_party/skia/include/core/SkRefCnt.h"
#include "third_party/skia/include/core/SkSurface.h"
#include "third_party/skia/include/core/SkYUVAInfo.h"
+#include "third_party/skia/include/core/SkYUVAPixmaps.h"
#include "third_party/skia/include/gpu/GrDirectContext.h"
#include "third_party/skia/include/gpu/GrYUVABackendTextures.h"
#include "third_party/skia/include/gpu/gl/GrGLTypes.h"
@@ -27,48 +28,46 @@ namespace {
SkYUVColorSpace ColorSpaceToSkYUVColorSpace(
const gfx::ColorSpace& color_space) {
- // TODO(hubbe): This should really default to rec709.
- // https://crbug.com/828599
+ // TODO(crbug.com/828599): This should really default to rec709.
SkYUVColorSpace sk_color_space = kRec601_SkYUVColorSpace;
color_space.ToSkYUVColorSpace(&sk_color_space);
return sk_color_space;
}
-sk_sp<SkImage> YUVGrBackendTexturesToSkImage(
- GrDirectContext* gr_context,
- gfx::ColorSpace video_color_space,
- VideoPixelFormat video_format,
- GrBackendTexture* yuv_textures,
- const GrBackendTexture& result_texture) {
- SkYUVAInfo::PlanarConfig planar_config;
- switch (video_format) {
- case PIXEL_FORMAT_NV12:
- planar_config = SkYUVAInfo::PlanarConfig::kY_UV_420;
- break;
- case PIXEL_FORMAT_I420:
- planar_config = SkYUVAInfo::PlanarConfig::kY_U_V_420;
- break;
- default:
- NOTREACHED();
- return nullptr;
+viz::ResourceFormat PlaneResourceFormat(int num_channels) {
+ switch (num_channels) {
+ case 1:
+ return viz::LUMINANCE_8;
+ case 2:
+ return viz::RG_88;
+ case 3:
+ return viz::RGBX_8888;
+ case 4:
+ return viz::RGBA_8888;
}
- SkYUVColorSpace color_space = ColorSpaceToSkYUVColorSpace(video_color_space);
- SkYUVAInfo yuva_info(result_texture.dimensions(), planar_config, color_space);
- GrYUVABackendTextures yuva_backend_textures(yuva_info, yuv_textures,
- kTopLeft_GrSurfaceOrigin);
- return SkImage::MakeFromYUVATexturesCopyToExternal(
- gr_context, yuva_backend_textures, result_texture,
- kRGBA_8888_SkColorType);
+ NOTREACHED();
+ return viz::RGBA_8888;
}
-gfx::Size GetVideoYSize(const VideoFrame* video_frame) {
- DCHECK(video_frame);
- return video_frame->coded_size();
+GLenum PlaneGLFormat(int num_channels) {
+ return viz::TextureStorageFormat(PlaneResourceFormat(num_channels));
}
-gfx::Size GetVideoUVSize(const VideoFrame* video_frame) {
- gfx::Size y_size = GetVideoYSize(video_frame);
- return gfx::Size((y_size.width() + 1) / 2, (y_size.height() + 1) / 2);
+std::tuple<SkYUVAInfo::PlaneConfig, SkYUVAInfo::Subsampling>
+VideoPixelFormatToSkiaValues(VideoPixelFormat video_format) {
+ // To expand support for additional VideoPixelFormats, expand this switch.
+ // Note that we assume 8-bit formats; with that exception, anything else
+ // should work.
+ switch (video_format) {
+ case PIXEL_FORMAT_NV12:
+ return {SkYUVAInfo::PlaneConfig::kY_UV, SkYUVAInfo::Subsampling::k420};
+ case PIXEL_FORMAT_I420:
+ return {SkYUVAInfo::PlaneConfig::kY_U_V, SkYUVAInfo::Subsampling::k420};
+ case PIXEL_FORMAT_I420A:
+ return {SkYUVAInfo::PlaneConfig::kY_U_V_A, SkYUVAInfo::Subsampling::k420};
+ default:
+ return {SkYUVAInfo::PlaneConfig::kUnknown,
+ SkYUVAInfo::Subsampling::kUnknown};
+ }
}
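+
+// For example, PIXEL_FORMAT_NV12 maps to {kY_UV, k420}. Callers such as
+// IsVideoFrameFormatSupported() below treat a kUnknown plane config as an
+// unsupported format.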
SkColorType GetCompatibleSurfaceColorType(GrGLenum format) {
@@ -109,31 +108,14 @@ GrGLenum GetSurfaceColorFormat(GrGLenum format, GrGLenum type) {
return format;
}
-bool YUVGrBackendTexturesToSkSurface(GrDirectContext* gr_context,
- const VideoFrame* video_frame,
- GrBackendTexture* yuv_textures,
- sk_sp<SkSurface> surface,
- bool flip_y,
- bool use_visible_rect) {
- SkYUVAInfo::PlanarConfig planar_config;
- switch (video_frame->format()) {
- case PIXEL_FORMAT_NV12:
- planar_config = SkYUVAInfo::PlanarConfig::kY_UV_420;
- break;
- case PIXEL_FORMAT_I420:
- planar_config = SkYUVAInfo::PlanarConfig::kY_U_V_420;
- break;
- default:
- NOTREACHED();
- return false;
- }
- SkYUVAInfo yuva_info(
- {video_frame->coded_size().width(), video_frame->coded_size().height()},
- planar_config, ColorSpaceToSkYUVColorSpace(video_frame->ColorSpace()));
- auto image = SkImage::MakeFromYUVATextures(
- gr_context,
- GrYUVABackendTextures(yuva_info, yuv_textures, kTopLeft_GrSurfaceOrigin),
- SkColorSpace::MakeSRGB());
+bool YUVGrBackendTexturesToSkSurface(
+ GrDirectContext* gr_context,
+ const VideoFrame* video_frame,
+ const GrYUVABackendTextures& yuva_backend_textures,
+ sk_sp<SkSurface> surface,
+ bool use_visible_rect) {
+ auto image = SkImage::MakeFromYUVATextures(gr_context, yuva_backend_textures,
+ SkColorSpace::MakeSRGB());
if (!image) {
return false;
@@ -150,39 +132,19 @@ bool YUVGrBackendTexturesToSkSurface(GrDirectContext* gr_context,
visible_rect.width(), visible_rect.height());
const SkRect dst_rect =
SkRect::MakeWH(visible_rect.width(), visible_rect.height());
- surface->getCanvas()->drawImageRect(image, src_rect, dst_rect, nullptr);
+ surface->getCanvas()->drawImageRect(image, src_rect, dst_rect,
+ SkSamplingOptions(), nullptr,
+ SkCanvas::kStrict_SrcRectConstraint);
}
surface->flushAndSubmit();
return true;
}
-void FinishRasterTextureAccess(
- const gpu::MailboxHolder& dest_mailbox_holder,
- viz::RasterContextProvider* raster_context_provider,
- GLuint tex_id) {
- DCHECK(raster_context_provider);
-
- auto* ri = raster_context_provider->RasterInterface();
- DCHECK(ri);
-
- if (dest_mailbox_holder.mailbox.IsSharedImage())
- ri->EndSharedImageAccessDirectCHROMIUM(tex_id);
- ri->DeleteGpuRasterTexture(tex_id);
-}
-
} // namespace
class VideoFrameYUVConverter::VideoFrameYUVMailboxesHolder {
public:
- enum YUVIndex : size_t {
- kYIndex = 0,
- kUIndex = 1,
- kVIndex = 2,
- };
- static constexpr size_t kNumNV12Planes = kUIndex + 1;
- static constexpr size_t kNumYUVPlanes = kVIndex + 1;
-
VideoFrameYUVMailboxesHolder() = default;
~VideoFrameYUVMailboxesHolder() { ReleaseCachedData(); }
@@ -191,47 +153,50 @@ class VideoFrameYUVConverter::VideoFrameYUVMailboxesHolder {
// Extracts shared image information if |video_frame| is texture backed or
// creates new shared images and uploads YUV data to GPU if |video_frame| is
- // mappable. If |import_textures| is true also obtains GL texture IDs for each
- // plane. This function can be called repeatedly to re-use shared images in
- // the case of CPU backed VideoFrames.
- void SetVideoFrame(const VideoFrame* video_frame,
- viz::RasterContextProvider* raster_context_provider,
- bool import_textures);
-
- bool is_nv12() { return is_nv12_; }
-
- const gpu::Mailbox& mailbox(size_t plane) {
- DCHECK_LT(plane, holders_.size());
- return holders_[plane].mailbox;
- }
+ // mappable. This function can be called repeatedly to re-use shared images in
+ // the case of CPU backed VideoFrames. The planes are returned in |mailboxes|.
+ void VideoFrameToMailboxes(
+ const VideoFrame* video_frame,
+ viz::RasterContextProvider* raster_context_provider,
+ gpu::Mailbox mailboxes[]);
- const GrGLTextureInfo& texture(size_t plane) {
- DCHECK_LT(plane, holders_.size());
- DCHECK(imported_textures_);
- return textures_[plane].texture;
- }
+ // Like VideoFrameToMailboxes but imports the textures from the mailboxes and
+ // returns the planes as a set of YUVA GrBackendTextures.
+ GrYUVABackendTextures VideoFrameToSkiaTextures(
+ const VideoFrame* video_frame,
+ viz::RasterContextProvider* raster_context_provider);
+
+ SkYUVAInfo::PlaneConfig plane_config() const { return plane_config_; }
+
+ SkYUVAInfo::Subsampling subsampling() const { return subsampling_; }
private:
+ static constexpr size_t kMaxPlanes =
+ static_cast<size_t>(SkYUVAInfo::kMaxPlanes);
+
void ImportTextures();
- size_t NumPlanes() { return is_nv12_ ? kNumNV12Planes : kNumYUVPlanes; }
+ size_t NumPlanes() {
+ return static_cast<size_t>(SkYUVAInfo::NumPlanes(plane_config_));
+ }
scoped_refptr<viz::RasterContextProvider> provider_;
bool imported_textures_ = false;
- bool is_nv12_ = false;
+ SkYUVAInfo::PlaneConfig plane_config_ = SkYUVAInfo::PlaneConfig::kUnknown;
+ SkYUVAInfo::Subsampling subsampling_ = SkYUVAInfo::Subsampling::kUnknown;
bool created_shared_images_ = false;
gfx::Size cached_video_size_;
gfx::ColorSpace cached_video_color_space_;
- std::array<gpu::MailboxHolder, kNumYUVPlanes> holders_;
+ std::array<gpu::MailboxHolder, kMaxPlanes> holders_;
struct YUVPlaneTextureInfo {
GrGLTextureInfo texture = {0, 0};
bool is_shared_image = false;
};
- std::array<YUVPlaneTextureInfo, kNumYUVPlanes> textures_;
+ std::array<YUVPlaneTextureInfo, kMaxPlanes> textures_;
};
void VideoFrameYUVConverter::VideoFrameYUVMailboxesHolder::ReleaseCachedData() {
- if (holders_[kYIndex].mailbox.IsZero())
+ if (holders_[0].mailbox.IsZero())
return;
ReleaseTextures();
@@ -256,11 +221,13 @@ void VideoFrameYUVConverter::VideoFrameYUVMailboxesHolder::ReleaseCachedData() {
created_shared_images_ = false;
}
-void VideoFrameYUVConverter::VideoFrameYUVMailboxesHolder::SetVideoFrame(
- const VideoFrame* video_frame,
- viz::RasterContextProvider* raster_context_provider,
- bool import_textures) {
- is_nv12_ = video_frame->format() == PIXEL_FORMAT_NV12;
+void VideoFrameYUVConverter::VideoFrameYUVMailboxesHolder::
+ VideoFrameToMailboxes(const VideoFrame* video_frame,
+ viz::RasterContextProvider* raster_context_provider,
+ gpu::Mailbox mailboxes[]) {
+ std::tie(plane_config_, subsampling_) =
+ VideoPixelFormatToSkiaValues(video_frame->format());
+ DCHECK_NE(plane_config_, SkYUVAInfo::PlaneConfig::kUnknown);
// If we have cached shared images but the provider or video has changed we
// need to release shared images created on the old context and recreate them.
@@ -274,7 +241,14 @@ void VideoFrameYUVConverter::VideoFrameYUVMailboxesHolder::SetVideoFrame(
auto* ri = provider_->RasterInterface();
DCHECK(ri);
+ gfx::Size video_size = video_frame->coded_size();
+ SkISize plane_sizes[SkYUVAInfo::kMaxPlanes];
+ size_t num_planes = SkYUVAInfo::PlaneDimensions(
+ {video_size.width(), video_size.height()}, plane_config_, subsampling_,
+ kTopLeft_SkEncodedOrigin, plane_sizes);
+
if (video_frame->HasTextures()) {
+ DCHECK_EQ(num_planes, video_frame->NumTextures());
for (size_t plane = 0; plane < video_frame->NumTextures(); ++plane) {
holders_[plane] = video_frame->mailbox_holder(plane);
DCHECK(holders_[plane].texture_target == GL_TEXTURE_2D ||
@@ -283,12 +257,9 @@ void VideoFrameYUVConverter::VideoFrameYUVMailboxesHolder::SetVideoFrame(
<< "Unsupported texture target " << std::hex << std::showbase
<< holders_[plane].texture_target;
ri->WaitSyncTokenCHROMIUM(holders_[plane].sync_token.GetConstData());
+ mailboxes[plane] = holders_[plane].mailbox;
}
} else {
- DCHECK(!is_nv12_) << "NV12 CPU backed VideoFrames aren't supported.";
- gfx::Size y_size = GetVideoYSize(video_frame);
- gfx::Size uv_size = GetVideoUVSize(video_frame);
-
if (!created_shared_images_) {
auto* sii = provider_->SharedImageInterface();
DCHECK(sii);
@@ -299,12 +270,15 @@ void VideoFrameYUVConverter::VideoFrameYUVMailboxesHolder::SetVideoFrame(
} else {
mailbox_usage = gpu::SHARED_IMAGE_USAGE_GLES2;
}
- for (size_t plane = 0; plane < kNumYUVPlanes; ++plane) {
- gfx::Size tex_size = plane == kYIndex ? y_size : uv_size;
+ for (size_t plane = 0; plane < num_planes; ++plane) {
+ gfx::Size tex_size = {plane_sizes[plane].width(),
+ plane_sizes[plane].height()};
+ int num_channels = SkYUVAInfo::NumChannelsInPlane(plane_config_, plane);
+ viz::ResourceFormat format = PlaneResourceFormat(num_channels);
holders_[plane].mailbox = sii->CreateSharedImage(
- viz::ResourceFormat::LUMINANCE_8, tex_size,
- video_frame->ColorSpace(), kTopLeft_GrSurfaceOrigin,
- kPremul_SkAlphaType, mailbox_usage, gpu::kNullSurfaceHandle);
+ format, tex_size, video_frame->ColorSpace(),
+ kTopLeft_GrSurfaceOrigin, kPremul_SkAlphaType, mailbox_usage,
+ gpu::kNullSurfaceHandle);
holders_[plane].texture_target = GL_TEXTURE_2D;
}
@@ -321,29 +295,48 @@ void VideoFrameYUVConverter::VideoFrameYUVMailboxesHolder::SetVideoFrame(
// prevent writing to a shared image for which we're holding read access.
ReleaseTextures();
- for (size_t plane = 0; plane < kNumYUVPlanes; ++plane) {
- gfx::Size tex_size = plane == kYIndex ? y_size : uv_size;
- SkImageInfo info =
- SkImageInfo::Make(tex_size.width(), tex_size.height(),
- kGray_8_SkColorType, kUnknown_SkAlphaType);
+ for (size_t plane = 0; plane < num_planes; ++plane) {
+ int num_channels = SkYUVAInfo::NumChannelsInPlane(plane_config_, plane);
+ SkColorType color_type = SkYUVAPixmapInfo::DefaultColorTypeForDataType(
+ SkYUVAPixmaps::DataType::kUnorm8, num_channels);
+ SkImageInfo info = SkImageInfo::Make(plane_sizes[plane], color_type,
+ kUnknown_SkAlphaType);
ri->WritePixels(holders_[plane].mailbox, 0, 0, GL_TEXTURE_2D,
video_frame->stride(plane), info,
video_frame->data(plane));
+ mailboxes[plane] = holders_[plane].mailbox;
}
}
+}
- if (import_textures)
- ImportTextures();
+GrYUVABackendTextures
+VideoFrameYUVConverter::VideoFrameYUVMailboxesHolder::VideoFrameToSkiaTextures(
+ const VideoFrame* video_frame,
+ viz::RasterContextProvider* raster_context_provider) {
+ gpu::Mailbox mailboxes[kMaxPlanes];
+ VideoFrameToMailboxes(video_frame, raster_context_provider, mailboxes);
+ ImportTextures();
+ SkISize video_size{video_frame->coded_size().width(),
+ video_frame->coded_size().height()};
+ SkYUVAInfo yuva_info(video_size, plane_config_, subsampling_,
+ ColorSpaceToSkYUVColorSpace(video_frame->ColorSpace()));
+ GrBackendTexture backend_textures[SkYUVAInfo::kMaxPlanes];
+ SkISize plane_sizes[SkYUVAInfo::kMaxPlanes];
+ int num_planes = yuva_info.planeDimensions(plane_sizes);
+ for (int i = 0; i < num_planes; ++i) {
+ backend_textures[i] = {plane_sizes[i].width(), plane_sizes[i].height(),
+ GrMipmapped::kNo, textures_[i].texture};
+ }
+ return GrYUVABackendTextures(yuva_info, backend_textures,
+ kTopLeft_GrSurfaceOrigin);
}
void VideoFrameYUVConverter::VideoFrameYUVMailboxesHolder::ImportTextures() {
DCHECK(!imported_textures_)
<< "Textures should always be released after converting video frame. "
- "Call ReleaseTextures() for each call to SetVideoFrame() with "
- "import_textures=true";
+ "Call ReleaseTextures() for each call to VideoFrameToSkiaTextures()";
auto* ri = provider_->RasterInterface();
- GrGLenum skia_texture_format = is_nv12_ ? GL_RGB8 : GL_LUMINANCE8_EXT;
for (size_t plane = 0; plane < NumPlanes(); ++plane) {
textures_[plane].texture.fID =
ri->CreateAndConsumeForGpuRaster(holders_[plane].mailbox);
@@ -354,8 +347,9 @@ void VideoFrameYUVConverter::VideoFrameYUVMailboxesHolder::ImportTextures() {
GL_SHARED_IMAGE_ACCESS_MODE_READ_CHROMIUM);
}
+ int num_channels = SkYUVAInfo::NumChannelsInPlane(plane_config_, plane);
textures_[plane].texture.fTarget = holders_[plane].texture_target;
- textures_[plane].texture.fFormat = skia_texture_format;
+ textures_[plane].texture.fFormat = PlaneGLFormat(num_channels);
}
imported_textures_ = true;
@@ -384,27 +378,32 @@ void VideoFrameYUVConverter::VideoFrameYUVMailboxesHolder::ReleaseTextures() {
VideoFrameYUVConverter::VideoFrameYUVConverter() = default;
VideoFrameYUVConverter::~VideoFrameYUVConverter() = default;
-void VideoFrameYUVConverter::ConvertYUVVideoFrameNoCaching(
+bool VideoFrameYUVConverter::IsVideoFrameFormatSupported(
+ const VideoFrame& video_frame) {
+ return std::get<0>(VideoPixelFormatToSkiaValues(video_frame.format())) !=
+ SkYUVAInfo::PlaneConfig::kUnknown;
+}
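+
+// A minimal call-site guard might look like (illustrative only; |frame|,
+// |provider| and |dest_holder| are supplied by the caller):
+//   if (VideoFrameYUVConverter::IsVideoFrameFormatSupported(*frame)) {
+//     VideoFrameYUVConverter::ConvertYUVVideoFrameNoCaching(frame.get(),
+//                                                           provider,
+//                                                           dest_holder);
+//   }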
+
+bool VideoFrameYUVConverter::ConvertYUVVideoFrameNoCaching(
const VideoFrame* video_frame,
viz::RasterContextProvider* raster_context_provider,
const gpu::MailboxHolder& dest_mailbox_holder) {
VideoFrameYUVConverter converter;
- converter.ConvertYUVVideoFrame(video_frame, raster_context_provider,
- dest_mailbox_holder);
+ return converter.ConvertYUVVideoFrame(video_frame, raster_context_provider,
+ dest_mailbox_holder);
}
-void VideoFrameYUVConverter::ConvertYUVVideoFrame(
+bool VideoFrameYUVConverter::ConvertYUVVideoFrame(
const VideoFrame* video_frame,
viz::RasterContextProvider* raster_context_provider,
- const gpu::MailboxHolder& dest_mailbox_holder) {
+ const gpu::MailboxHolder& dest_mailbox_holder,
+ unsigned int internal_format,
+ unsigned int type,
+ bool flip_y,
+ bool use_visible_rect) {
DCHECK(video_frame);
- DCHECK(video_frame->format() == PIXEL_FORMAT_I420 ||
- video_frame->format() == PIXEL_FORMAT_NV12)
+ DCHECK(IsVideoFrameFormatSupported(*video_frame))
<< "VideoFrame has an unsupported YUV format " << video_frame->format();
- DCHECK(
- video_frame->HasTextures() ||
- (video_frame->IsMappable() && video_frame->format() == PIXEL_FORMAT_I420))
- << "CPU backed VideoFrames must have PIXEL_FORMAT_I420";
DCHECK(!video_frame->coded_size().IsEmpty())
<< "|video_frame| must have an area > 0";
DCHECK(raster_context_provider);
@@ -413,9 +412,9 @@ void VideoFrameYUVConverter::ConvertYUVVideoFrame(
holder_ = std::make_unique<VideoFrameYUVMailboxesHolder>();
if (raster_context_provider->GrContext()) {
- ConvertFromVideoFrameYUVWithGrContext(video_frame, raster_context_provider,
- dest_mailbox_holder);
- return;
+ return ConvertFromVideoFrameYUVWithGrContext(
+ video_frame, raster_context_provider, dest_mailbox_holder,
+ internal_format, type, flip_y, use_visible_rect);
}
auto* ri = raster_context_provider->RasterInterface();
@@ -424,25 +423,16 @@ void VideoFrameYUVConverter::ConvertYUVVideoFrame(
SkYUVColorSpace color_space =
ColorSpaceToSkYUVColorSpace(video_frame->ColorSpace());
- holder_->SetVideoFrame(video_frame, raster_context_provider, false);
-
- if (holder_->is_nv12()) {
- ri->ConvertNV12MailboxesToRGB(
- dest_mailbox_holder.mailbox, color_space,
- holder_->mailbox(VideoFrameYUVMailboxesHolder::kYIndex),
- holder_->mailbox(VideoFrameYUVMailboxesHolder::kUIndex));
- } else {
- DCHECK_EQ(video_frame->NumTextures(),
- VideoFrameYUVMailboxesHolder::kNumYUVPlanes);
- ri->ConvertYUVMailboxesToRGB(
- dest_mailbox_holder.mailbox, color_space,
- holder_->mailbox(VideoFrameYUVMailboxesHolder::kYIndex),
- holder_->mailbox(VideoFrameYUVMailboxesHolder::kUIndex),
- holder_->mailbox(VideoFrameYUVMailboxesHolder::kVIndex));
- }
+ gpu::Mailbox mailboxes[SkYUVAInfo::kMaxPlanes]{};
+ holder_->VideoFrameToMailboxes(video_frame, raster_context_provider,
+ mailboxes);
+ ri->ConvertYUVAMailboxesToRGB(dest_mailbox_holder.mailbox, color_space,
+ holder_->plane_config(), holder_->subsampling(),
+ mailboxes);
+ return true;
}
-bool VideoFrameYUVConverter::ConvertYUVVideoFrameWithSkSurface(
+bool VideoFrameYUVConverter::ConvertYUVVideoFrameToDstTextureNoCaching(
const VideoFrame* video_frame,
viz::RasterContextProvider* raster_context_provider,
const gpu::MailboxHolder& dest_mailbox_holder,
@@ -450,95 +440,17 @@ bool VideoFrameYUVConverter::ConvertYUVVideoFrameWithSkSurface(
unsigned int type,
bool flip_y,
bool use_visible_rect) {
- DCHECK(video_frame);
- DCHECK(video_frame->format() == PIXEL_FORMAT_I420 ||
- video_frame->format() == PIXEL_FORMAT_NV12)
- << "VideoFrame has an unsupported YUV format " << video_frame->format();
- DCHECK(video_frame->HasTextures())
- << "CPU backed VideoFrames must have PIXEL_FORMAT_I420";
- DCHECK(!video_frame->coded_size().IsEmpty())
- << "|video_frame| must have an area > 0";
- DCHECK(raster_context_provider);
- DCHECK(raster_context_provider->GrContext());
-
- if (!holder_)
- holder_ = std::make_unique<VideoFrameYUVMailboxesHolder>();
-
- gpu::raster::RasterInterface* ri = raster_context_provider->RasterInterface();
- DCHECK(ri);
- ri->WaitSyncTokenCHROMIUM(dest_mailbox_holder.sync_token.GetConstData());
-
- // Consume mailbox to get dst texture.
- GLuint dest_tex_id =
- ri->CreateAndConsumeForGpuRaster(dest_mailbox_holder.mailbox);
-
- if (dest_mailbox_holder.mailbox.IsSharedImage()) {
- ri->BeginSharedImageAccessDirectCHROMIUM(
- dest_tex_id, GL_SHARED_IMAGE_ACCESS_MODE_READWRITE_CHROMIUM);
- }
-
- // Rendering YUV textures to SkSurface by dst texture
- GrDirectContext* gr_context = raster_context_provider->GrContext();
-
- gfx::Size ya_tex_size = video_frame->coded_size();
- gfx::Size uv_tex_size((ya_tex_size.width() + 1) / 2,
- (ya_tex_size.height() + 1) / 2);
-
- GrGLTextureInfo backend_texture{};
-
- holder_->SetVideoFrame(video_frame, raster_context_provider, true);
-
- GrBackendTexture yuv_textures[3] = {
- GrBackendTexture(ya_tex_size.width(), ya_tex_size.height(),
- GrMipMapped::kNo,
- holder_->texture(VideoFrameYUVMailboxesHolder::kYIndex)),
- GrBackendTexture(uv_tex_size.width(), uv_tex_size.height(),
- GrMipMapped::kNo,
- holder_->texture(VideoFrameYUVMailboxesHolder::kUIndex)),
- GrBackendTexture(uv_tex_size.width(), uv_tex_size.height(),
- GrMipMapped::kNo,
- holder_->texture(VideoFrameYUVMailboxesHolder::kVIndex)),
- };
-
- backend_texture.fID = dest_tex_id;
- backend_texture.fTarget = dest_mailbox_holder.texture_target;
- backend_texture.fFormat = GetSurfaceColorFormat(internal_format, type);
-
- int backend_texture_width = use_visible_rect
- ? video_frame->visible_rect().width()
- : video_frame->coded_size().width();
- int backend_texture_height = use_visible_rect
- ? video_frame->visible_rect().height()
- : video_frame->coded_size().height();
-
- GrBackendTexture result_texture(backend_texture_width, backend_texture_height,
- GrMipMapped::kNo, backend_texture);
-
- // Use dst texture as SkSurface back resource.
- auto surface = SkSurface::MakeFromBackendTexture(
- gr_context, result_texture,
- flip_y ? kBottomLeft_GrSurfaceOrigin : kTopLeft_GrSurfaceOrigin, 1,
- GetCompatibleSurfaceColorType(backend_texture.fFormat),
- SkColorSpace::MakeSRGB(), nullptr);
-
- // Terminate if surface cannot be created.
- if (!surface) {
- FinishRasterTextureAccess(dest_mailbox_holder, raster_context_provider,
- dest_tex_id);
- return false;
- }
-
- bool result = YUVGrBackendTexturesToSkSurface(
- gr_context, video_frame, yuv_textures, surface, flip_y, use_visible_rect);
-
- // Finish access of dest_tex_id
- FinishRasterTextureAccess(dest_mailbox_holder, raster_context_provider,
- dest_tex_id);
+ VideoFrameYUVConverter converter;
+ return converter.ConvertYUVVideoFrame(video_frame, raster_context_provider,
+ dest_mailbox_holder, internal_format,
+ type, flip_y, use_visible_rect);
+}
- return result;
+void VideoFrameYUVConverter::ReleaseCachedData() {
+ holder_.reset();
}
-bool VideoFrameYUVConverter::ConvertYUVVideoFrameWithSkSurfaceNoCaching(
+bool VideoFrameYUVConverter::ConvertFromVideoFrameYUVWithGrContext(
const VideoFrame* video_frame,
viz::RasterContextProvider* raster_context_provider,
const gpu::MailboxHolder& dest_mailbox_holder,
@@ -546,20 +458,6 @@ bool VideoFrameYUVConverter::ConvertYUVVideoFrameWithSkSurfaceNoCaching(
unsigned int type,
bool flip_y,
bool use_visible_rect) {
- VideoFrameYUVConverter converter;
- return converter.ConvertYUVVideoFrameWithSkSurface(
- video_frame, raster_context_provider, dest_mailbox_holder,
- internal_format, type, flip_y, use_visible_rect);
-}
-
-void VideoFrameYUVConverter::ReleaseCachedData() {
- holder_.reset();
-}
-
-void VideoFrameYUVConverter::ConvertFromVideoFrameYUVWithGrContext(
- const VideoFrame* video_frame,
- viz::RasterContextProvider* raster_context_provider,
- const gpu::MailboxHolder& dest_mailbox_holder) {
gpu::raster::RasterInterface* ri = raster_context_provider->RasterInterface();
DCHECK(ri);
ri->WaitSyncTokenCHROMIUM(dest_mailbox_holder.sync_token.GetConstData());
@@ -570,61 +468,71 @@ void VideoFrameYUVConverter::ConvertFromVideoFrameYUVWithGrContext(
dest_tex_id, GL_SHARED_IMAGE_ACCESS_MODE_READWRITE_CHROMIUM);
}
- ConvertFromVideoFrameYUVSkia(video_frame, raster_context_provider,
- dest_mailbox_holder.texture_target, dest_tex_id);
+ bool result = ConvertFromVideoFrameYUVSkia(
+ video_frame, raster_context_provider, dest_mailbox_holder.texture_target,
+ dest_tex_id, internal_format, type, flip_y, use_visible_rect);
if (dest_mailbox_holder.mailbox.IsSharedImage())
ri->EndSharedImageAccessDirectCHROMIUM(dest_tex_id);
ri->DeleteGpuRasterTexture(dest_tex_id);
+
+ return result;
}
-void VideoFrameYUVConverter::ConvertFromVideoFrameYUVSkia(
+bool VideoFrameYUVConverter::ConvertFromVideoFrameYUVSkia(
const VideoFrame* video_frame,
viz::RasterContextProvider* raster_context_provider,
unsigned int texture_target,
- unsigned int texture_id) {
+ unsigned int texture_id,
+ unsigned int internal_format,
+ unsigned int type,
+ bool flip_y,
+ bool use_visible_rect) {
+ // Render the YUV textures to a SkSurface backed by the dst texture.
GrDirectContext* gr_context = raster_context_provider->GrContext();
DCHECK(gr_context);
- // TODO: We should compare the DCHECK vs when UpdateLastImage calls this
- // function. (https://crbug.com/674185)
- DCHECK(video_frame->format() == PIXEL_FORMAT_I420 ||
- video_frame->format() == PIXEL_FORMAT_NV12);
-
- gfx::Size ya_tex_size = GetVideoYSize(video_frame);
- gfx::Size uv_tex_size = GetVideoUVSize(video_frame);
-
- GrGLTextureInfo backend_texture{};
-
- holder_->SetVideoFrame(video_frame, raster_context_provider, true);
-
- GrBackendTexture yuv_textures[3] = {
- GrBackendTexture(ya_tex_size.width(), ya_tex_size.height(),
- GrMipMapped::kNo,
- holder_->texture(VideoFrameYUVMailboxesHolder::kYIndex)),
- GrBackendTexture(uv_tex_size.width(), uv_tex_size.height(),
- GrMipMapped::kNo,
- holder_->texture(VideoFrameYUVMailboxesHolder::kUIndex)),
- GrBackendTexture(uv_tex_size.width(), uv_tex_size.height(),
- GrMipMapped::kNo,
- holder_->texture(VideoFrameYUVMailboxesHolder::kVIndex)),
- };
- backend_texture.fID = texture_id;
- backend_texture.fTarget = texture_target;
- backend_texture.fFormat = GL_RGBA8;
- GrBackendTexture result_texture(video_frame->coded_size().width(),
- video_frame->coded_size().height(),
- GrMipMapped::kNo, backend_texture);
-
- // Creating the SkImage triggers conversion into the dest texture. Let the
- // image fall out of scope and track the result using |dest_mailbox_holder|
- YUVGrBackendTexturesToSkImage(gr_context, video_frame->ColorSpace(),
- video_frame->format(), yuv_textures,
- result_texture);
- gr_context->flushAndSubmit();
+ // TODO(crbug.com/674185): We should compare the DCHECK vs when
+ // UpdateLastImage calls this function.
+ DCHECK(IsVideoFrameFormatSupported(*video_frame));
+
+ GrYUVABackendTextures yuva_backend_textures =
+ holder_->VideoFrameToSkiaTextures(video_frame, raster_context_provider);
+ DCHECK(yuva_backend_textures.isValid());
+
+ GrGLTextureInfo result_gl_texture_info{};
+ result_gl_texture_info.fID = texture_id;
+ result_gl_texture_info.fTarget = texture_target;
+ result_gl_texture_info.fFormat = GetSurfaceColorFormat(internal_format, type);
+
+ int result_width = use_visible_rect ? video_frame->visible_rect().width()
+ : video_frame->coded_size().width();
+ int result_height = use_visible_rect ? video_frame->visible_rect().height()
+ : video_frame->coded_size().height();
+
+ GrBackendTexture result_texture(result_width, result_height, GrMipMapped::kNo,
+ result_gl_texture_info);
+
+ // Use the dst texture as the SkSurface backing resource.
+ auto surface = SkSurface::MakeFromBackendTexture(
+ gr_context, result_texture,
+ flip_y ? kBottomLeft_GrSurfaceOrigin : kTopLeft_GrSurfaceOrigin, 1,
+ GetCompatibleSurfaceColorType(result_gl_texture_info.fFormat),
+ SkColorSpace::MakeSRGB(), nullptr);
+
+ // Terminate if surface cannot be created.
+ if (!surface) {
+ return false;
+ }
+
+ bool result = YUVGrBackendTexturesToSkSurface(gr_context, video_frame,
+ yuva_backend_textures, surface,
+ use_visible_rect);
// Release textures to guarantee |holder_| doesn't hold read access on
// textures it doesn't own.
holder_->ReleaseTextures();
+
+ return result;
}
} // namespace media
diff --git a/chromium/media/renderers/video_frame_yuv_converter.h b/chromium/media/renderers/video_frame_yuv_converter.h
index 899936e01aa..12566671662 100644
--- a/chromium/media/renderers/video_frame_yuv_converter.h
+++ b/chromium/media/renderers/video_frame_yuv_converter.h
@@ -7,6 +7,7 @@
#include <array>
+#include "gpu/GLES2/gl2extchromium.h"
#include "gpu/command_buffer/common/mailbox_holder.h"
#include "media/base/media_export.h"
#include "ui/gfx/color_space.h"
@@ -32,7 +33,9 @@ class VideoFrame;
// images.
class MEDIA_EXPORT VideoFrameYUVConverter {
public:
- static void ConvertYUVVideoFrameNoCaching(
+ static bool IsVideoFrameFormatSupported(const VideoFrame& video_frame);
+
+ static bool ConvertYUVVideoFrameNoCaching(
const VideoFrame* video_frame,
viz::RasterContextProvider* raster_context_provider,
const gpu::MailboxHolder& dest_mailbox_holder);
@@ -40,7 +43,7 @@ class MEDIA_EXPORT VideoFrameYUVConverter {
// TODO(crbug.com/1108154): Will merge this uploading path
// with ConvertYUVVideoFrameYUVWithGrContext after solving
// issue 1120911, 1120912
- static bool ConvertYUVVideoFrameWithSkSurfaceNoCaching(
+ static bool ConvertYUVVideoFrameToDstTextureNoCaching(
const VideoFrame* video_frame,
viz::RasterContextProvider* raster_context_provider,
const gpu::MailboxHolder& dest_mailbox_holder,
@@ -52,25 +55,29 @@ class MEDIA_EXPORT VideoFrameYUVConverter {
VideoFrameYUVConverter();
~VideoFrameYUVConverter();
- void ConvertYUVVideoFrame(const VideoFrame* video_frame,
+ bool ConvertYUVVideoFrame(const VideoFrame* video_frame,
viz::RasterContextProvider* raster_context_provider,
- const gpu::MailboxHolder& dest_mailbox_holder);
+ const gpu::MailboxHolder& dest_mailbox_holder,
+ unsigned int internal_format = GL_RGBA,
+ unsigned int type = GL_UNSIGNED_BYTE,
+ bool flip_y = false,
+ bool use_visible_rect = false);
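+
+ // Example use of ConvertYUVVideoFrame() (illustrative only; |frame| and
+ // |provider| are supplied by the caller); the defaults convert into an
+ // RGBA destination without flipping:
+ //   VideoFrameYUVConverter converter;
+ //   converter.ConvertYUVVideoFrame(frame.get(), provider,
+ //                                  dest_mailbox_holder);
+ //   converter.ReleaseCachedData();
+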
void ReleaseCachedData();
private:
- void ConvertFromVideoFrameYUVWithGrContext(
+ bool ConvertFromVideoFrameYUVWithGrContext(
const VideoFrame* video_frame,
viz::RasterContextProvider* raster_context_provider,
- const gpu::MailboxHolder& dest_mailbox_holder);
- void ConvertFromVideoFrameYUVSkia(
+ const gpu::MailboxHolder& dest_mailbox_holder,
+ unsigned int internal_format,
+ unsigned int type,
+ bool flip_y,
+ bool use_visible_rect);
+ bool ConvertFromVideoFrameYUVSkia(
const VideoFrame* video_frame,
viz::RasterContextProvider* raster_context_provider,
unsigned int texture_target,
- unsigned int texture_id);
- bool ConvertYUVVideoFrameWithSkSurface(
- const VideoFrame* video_frame,
- viz::RasterContextProvider* raster_context_provider,
- const gpu::MailboxHolder& dest_mailbox_holder,
+ unsigned int texture_id,
unsigned int internal_format,
unsigned int type,
bool flip_y,
diff --git a/chromium/media/renderers/video_renderer_impl.cc b/chromium/media/renderers/video_renderer_impl.cc
index c25efa80169..10568c1df88 100644
--- a/chromium/media/renderers/video_renderer_impl.cc
+++ b/chromium/media/renderers/video_renderer_impl.cc
@@ -548,33 +548,40 @@ void VideoRendererImpl::UpdateLatencyHintBufferingCaps_Locked(
}
}
-void VideoRendererImpl::FrameReady(VideoDecoderStream::ReadStatus status,
- scoped_refptr<VideoFrame> frame) {
+void VideoRendererImpl::FrameReady(VideoDecoderStream::ReadResult result) {
DCHECK(task_runner_->BelongsToCurrentThread());
base::AutoLock auto_lock(lock_);
DCHECK_EQ(state_, kPlaying);
CHECK(pending_read_);
pending_read_ = false;
- if (status == VideoDecoderStream::DECODE_ERROR) {
- DCHECK(!frame);
- task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&VideoRendererImpl::OnPlaybackError,
- weak_factory_.GetWeakPtr(), PIPELINE_ERROR_DECODE));
- return;
- }
-
// Can happen when demuxers are preparing for a new Seek().
- if (!frame) {
- DCHECK_EQ(status, VideoDecoderStream::DEMUXER_READ_ABORTED);
- return;
+ switch (result.code()) {
+ case StatusCode::kOk:
+ break;
+ case StatusCode::kAborted:
+ // TODO(liberato): This used to check specifically for the value
+ // DEMUXER_READ_ABORTED, which was more specific than |kAborted|.
+ // However, since it's a dcheck, this seems okay.
+ return;
+ default:
+ DCHECK(result.has_error());
+ // Anything other than `kOk` or `kAborted` is treated as an error.
+ task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&VideoRendererImpl::OnPlaybackError,
+ weak_factory_.GetWeakPtr(), PIPELINE_ERROR_DECODE));
+ return;
}
+ DCHECK(result.has_value());
+ scoped_refptr<VideoFrame> frame = std::move(result).value();
+ DCHECK(frame);
+
last_frame_ready_time_ = tick_clock_->NowTicks();
last_decoder_stream_avg_duration_ = video_decoder_stream_->AverageDuration();
- const bool is_eos = frame->metadata()->end_of_stream;
+ const bool is_eos = frame->metadata().end_of_stream;
const bool is_before_start_time = !is_eos && IsBeforeStartTime(*frame);
const bool cant_read = !video_decoder_stream_->CanReadWithoutStalling();
@@ -606,8 +613,8 @@ void VideoRendererImpl::FrameReady(VideoDecoderStream::ReadStatus status,
// RemoveFramesForUnderflowOrBackgroundRendering() below to actually expire
// this frame if it's too far behind the current media time. Without this,
// we may resume too soon after a track change in the low delay case.
- if (!frame->metadata()->frame_duration.has_value())
- frame->metadata()->frame_duration = last_decoder_stream_avg_duration_;
+ if (!frame->metadata().frame_duration.has_value())
+ frame->metadata().frame_duration = last_decoder_stream_avg_duration_;
AddReadyFrame_Locked(std::move(frame));
}
@@ -736,11 +743,11 @@ void VideoRendererImpl::TransitionToHaveNothing_Locked() {
void VideoRendererImpl::AddReadyFrame_Locked(scoped_refptr<VideoFrame> frame) {
DCHECK(task_runner_->BelongsToCurrentThread());
lock_.AssertAcquired();
- DCHECK(!frame->metadata()->end_of_stream);
+ DCHECK(!frame->metadata().end_of_stream);
++stats_.video_frames_decoded;
- if (frame->metadata()->power_efficient)
+ if (frame->metadata().power_efficient)
++stats_.video_frames_decoded_power_efficient;
algorithm_->EnqueueFrame(std::move(frame));
@@ -930,7 +937,7 @@ base::TimeTicks VideoRendererImpl::GetCurrentMediaTimeAsWallClockTime() {
bool VideoRendererImpl::IsBeforeStartTime(const VideoFrame& frame) {
// Prefer the actual frame duration over the average if available.
- return frame.timestamp() + frame.metadata()->frame_duration.value_or(
+ return frame.timestamp() + frame.metadata().frame_duration.value_or(
last_decoder_stream_avg_duration_) <
start_timestamp_;
}
diff --git a/chromium/media/renderers/video_renderer_impl.h b/chromium/media/renderers/video_renderer_impl.h
index bd449072b72..27f61ec2c72 100644
--- a/chromium/media/renderers/video_renderer_impl.h
+++ b/chromium/media/renderers/video_renderer_impl.h
@@ -112,8 +112,7 @@ class MEDIA_EXPORT VideoRendererImpl
// Callback for |video_decoder_stream_| to deliver decoded video frames and
// report video decoding status.
- void FrameReady(VideoDecoderStream::ReadStatus status,
- scoped_refptr<VideoFrame> frame);
+ void FrameReady(VideoDecoderStream::ReadResult result);
// Helper method for enqueueing a frame to |algorithm_|.
void AddReadyFrame_Locked(scoped_refptr<VideoFrame> frame);
diff --git a/chromium/media/renderers/video_resource_updater.cc b/chromium/media/renderers/video_resource_updater.cc
index 2197db9298f..6169cc98e13 100644
--- a/chromium/media/renderers/video_resource_updater.cc
+++ b/chromium/media/renderers/video_resource_updater.cc
@@ -133,6 +133,11 @@ VideoFrameResourceType ExternalResourceTypeForHardwarePlanes(
buffer_formats[1] = gfx::BufferFormat::RG_88;
return VideoFrameResourceType::YUV;
+ case PIXEL_FORMAT_RGBAF16:
+ DCHECK_EQ(num_textures, 1);
+ buffer_formats[0] = gfx::BufferFormat::RGBA_F16;
+ return VideoFrameResourceType::RGBA;
+
case PIXEL_FORMAT_UYVY:
NOTREACHED();
FALLTHROUGH;
@@ -495,9 +500,9 @@ VideoResourceUpdater::~VideoResourceUpdater() {
void VideoResourceUpdater::ObtainFrameResources(
scoped_refptr<VideoFrame> video_frame) {
- if (video_frame->metadata()->overlay_plane_id.has_value()) {
+ if (video_frame->metadata().overlay_plane_id.has_value()) {
// This is a hole punching VideoFrame, there is nothing to display.
- overlay_plane_id_ = *video_frame->metadata()->overlay_plane_id;
+ overlay_plane_id_ = *video_frame->metadata().overlay_plane_id;
frame_resource_type_ = VideoFrameResourceType::VIDEO_HOLE;
return;
}
@@ -519,8 +524,8 @@ void VideoResourceUpdater::ObtainFrameResources(
external_resources.resources[i],
viz::SingleReleaseCallback::Create(
std::move(external_resources.release_callbacks[i])));
- frame_resources_.push_back(
- {resource_id, external_resources.resources[i].size});
+ frame_resources_.emplace_back(resource_id,
+ external_resources.resources[i].size);
}
TRACE_EVENT_INSTANT1("media", "VideoResourceUpdater::ObtainFrameResources",
TRACE_EVENT_SCOPE_THREAD, "Timestamp",
@@ -613,13 +618,14 @@ void VideoResourceUpdater::AppendQuads(
frame_resources_[0].id, frame_resources_[1].id,
frame_resources_.size() > 2 ? frame_resources_[2].id
: frame_resources_[1].id,
- frame_resources_.size() > 3 ? frame_resources_[3].id : 0,
+ frame_resources_.size() > 3 ? frame_resources_[3].id
+ : viz::kInvalidResourceId,
frame->ColorSpace(), frame_resource_offset_,
frame_resource_multiplier_, frame_bits_per_channel_);
if (frame->hdr_metadata().has_value())
yuv_video_quad->hdr_metadata = frame->hdr_metadata().value();
- if (frame->metadata()->protected_video) {
- if (frame->metadata()->hw_protected) {
+ if (frame->metadata().protected_video) {
+ if (frame->metadata().hw_protected) {
yuv_video_quad->protected_video_type =
gfx::ProtectedVideoType::kHardwareProtected;
} else {
@@ -647,8 +653,8 @@ void VideoResourceUpdater::AppendQuads(
bool nearest_neighbor = false;
gfx::ProtectedVideoType protected_video_type =
gfx::ProtectedVideoType::kClear;
- if (frame->metadata()->protected_video) {
- if (frame->metadata()->hw_protected)
+ if (frame->metadata().protected_video) {
+ if (frame->metadata().hw_protected)
protected_video_type = gfx::ProtectedVideoType::kHardwareProtected;
else
protected_video_type = gfx::ProtectedVideoType::kSoftwareProtected;
@@ -663,6 +669,8 @@ void VideoResourceUpdater::AppendQuads(
nearest_neighbor, false, protected_video_type);
texture_quad->set_resource_size_in_pixels(coded_size);
texture_quad->is_video_frame = true;
+ texture_quad->hw_protected_validation_id =
+ frame->metadata().hw_protected_validation_id;
for (viz::ResourceId resource_id : texture_quad->resources) {
resource_provider_->ValidateResource(resource_id);
}
@@ -848,7 +856,7 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForHardwarePlanes(
VideoFrameExternalResources external_resources;
gfx::ColorSpace resource_color_space = video_frame->ColorSpace();
- const auto& copy_mode = video_frame->metadata()->copy_mode;
+ const auto& copy_mode = video_frame->metadata().copy_mode;
GLuint target = video_frame->mailbox_holder(0).texture_target;
// If texture copy is required, then we will copy into a GL_TEXTURE_2D target.
if (copy_mode == VideoFrameMetadata::CopyMode::kCopyToNewTexture)
@@ -902,18 +910,18 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForHardwarePlanes(
const gfx::Size plane_size(width, height);
auto transfer_resource = viz::TransferableResource::MakeGL(
mailbox, GL_LINEAR, mailbox_holder.texture_target, sync_token,
- plane_size, video_frame->metadata()->allow_overlay);
+ plane_size, video_frame->metadata().allow_overlay);
transfer_resource.color_space = resource_color_space;
transfer_resource.read_lock_fences_enabled =
- video_frame->metadata()->read_lock_fences_enabled;
+ video_frame->metadata().read_lock_fences_enabled;
transfer_resource.format = viz::GetResourceFormat(buffer_formats[i]);
transfer_resource.ycbcr_info = video_frame->ycbcr_info();
#if defined(OS_ANDROID)
transfer_resource.is_backed_by_surface_texture =
- video_frame->metadata()->texture_owner;
+ video_frame->metadata().texture_owner;
transfer_resource.wants_promotion_hint =
- video_frame->metadata()->wants_promotion_hint;
+ video_frame->metadata().wants_promotion_hint;
#endif
external_resources.resources.push_back(std::move(transfer_resource));
if (copy_mode == VideoFrameMetadata::CopyMode::kCopyMailboxesOnly) {
@@ -943,13 +951,25 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
size_t bits_per_channel = video_frame->BitDepth();
- // Only YUV and Y16 software video frames are supported.
+ const bool is_rgb = input_frame_format == PIXEL_FORMAT_XBGR ||
+ input_frame_format == PIXEL_FORMAT_XRGB ||
+ input_frame_format == PIXEL_FORMAT_ABGR ||
+ input_frame_format == PIXEL_FORMAT_ARGB;
+
DCHECK(IsYuvPlanar(input_frame_format) ||
- input_frame_format == PIXEL_FORMAT_Y16);
+ input_frame_format == PIXEL_FORMAT_Y16 || is_rgb);
viz::ResourceFormat output_resource_format;
gfx::ColorSpace output_color_space = video_frame->ColorSpace();
- if (input_frame_format == PIXEL_FORMAT_Y16) {
+ if (input_frame_format == PIXEL_FORMAT_XBGR) {
+ output_resource_format = viz::RGBX_8888;
+ } else if (input_frame_format == PIXEL_FORMAT_XRGB) {
+ output_resource_format = viz::BGRX_8888;
+ } else if (input_frame_format == PIXEL_FORMAT_ABGR) {
+ output_resource_format = viz::RGBA_8888;
+ } else if (input_frame_format == PIXEL_FORMAT_ARGB) {
+ output_resource_format = viz::BGRA_8888;
+ } else if (input_frame_format == PIXEL_FORMAT_Y16) {
// Unable to display directly as yuv planes so convert it to RGBA for
// compositing.
output_resource_format = viz::RGBA_8888;
@@ -963,9 +983,10 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
// returned by the resource provider is viz::RGBA_8888, then a GPU driver
// bug workaround requires that YUV frames must be converted to RGB
// before texture upload.
- bool texture_needs_rgb_conversion =
- !software_compositor() &&
- output_resource_format == viz::ResourceFormat::RGBA_8888;
+ const bool texture_needs_rgb_conversion =
+ input_frame_format == PIXEL_FORMAT_Y16 ||
+ (!software_compositor() && IsYuvPlanar(input_frame_format) &&
+ output_resource_format == viz::ResourceFormat::RGBA_8888);
size_t output_plane_count = VideoFrame::NumPlanes(input_frame_format);
@@ -1026,14 +1047,15 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
external_resources.bits_per_channel = bits_per_channel;
- if (software_compositor() || texture_needs_rgb_conversion) {
+ if (software_compositor() || texture_needs_rgb_conversion || is_rgb) {
DCHECK_EQ(plane_resources.size(), 1u);
PlaneResource* plane_resource = plane_resources[0];
- DCHECK_EQ(plane_resource->resource_format(), viz::RGBA_8888);
if (!plane_resource->Matches(video_frame->unique_id(), 0)) {
// We need to transfer data from |video_frame| to the plane resource.
if (software_compositor()) {
+ DCHECK_EQ(plane_resource->resource_format(), viz::RGBA_8888);
+
if (!video_renderer_)
video_renderer_ = std::make_unique<PaintCanvasVideoRenderer>();
@@ -1062,7 +1084,7 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
} else {
HardwarePlaneResource* hardware_resource = plane_resource->AsHardware();
size_t bytes_per_row = viz::ResourceSizes::CheckedWidthInBytes<size_t>(
- video_frame->coded_size().width(), viz::ResourceFormat::RGBA_8888);
+ video_frame->coded_size().width(), output_resource_format);
size_t needed_size = bytes_per_row * video_frame->coded_size().height();
if (upload_pixels_size_ < needed_size) {
// Free the existing data first so that the memory can be reused,
@@ -1084,8 +1106,8 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
scope.texture_id());
gl->TexSubImage2D(
hardware_resource->texture_target(), 0, 0, 0, plane_size.width(),
- plane_size.height(), GLDataFormat(viz::ResourceFormat::RGBA_8888),
- GLDataType(viz::ResourceFormat::RGBA_8888), upload_pixels_.get());
+ plane_size.height(), GLDataFormat(output_resource_format),
+ GLDataType(output_resource_format), upload_pixels_.get());
}
}
plane_resource->SetUniqueId(video_frame->unique_id(), 0);
@@ -1113,7 +1135,7 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
}
transferable_resource.color_space = output_color_space;
- transferable_resource.format = viz::ResourceFormat::RGBA_8888;
+ transferable_resource.format = output_resource_format;
external_resources.resources.push_back(std::move(transferable_resource));
external_resources.release_callbacks.push_back(base::BindOnce(
&VideoResourceUpdater::RecycleResource, weak_ptr_factory_.GetWeakPtr(),
@@ -1391,4 +1413,10 @@ gpu::SharedImageInterface* VideoResourceUpdater::SharedImageInterface() const {
return sii;
}
+VideoResourceUpdater::FrameResource::FrameResource() = default;
+
+VideoResourceUpdater::FrameResource::FrameResource(viz::ResourceId id,
+ const gfx::Size& size)
+ : id(id), size_in_pixels(size) {}
+
} // namespace media
diff --git a/chromium/media/renderers/video_resource_updater.h b/chromium/media/renderers/video_resource_updater.h
index ab49f3c2fa5..e23887c3fe2 100644
--- a/chromium/media/renderers/video_resource_updater.h
+++ b/chromium/media/renderers/video_resource_updater.h
@@ -144,6 +144,9 @@ class MEDIA_EXPORT VideoResourceUpdater
// A resource that will be embedded in a DrawQuad in the next CompositorFrame.
// Each video plane will correspond to one FrameResource.
struct FrameResource {
+ FrameResource();
+ FrameResource(viz::ResourceId id, const gfx::Size& size);
+
viz::ResourceId id;
gfx::Size size_in_pixels;
};
diff --git a/chromium/media/renderers/video_resource_updater_unittest.cc b/chromium/media/renderers/video_resource_updater_unittest.cc
index 055a5de03d7..65814ed93cf 100644
--- a/chromium/media/renderers/video_resource_updater_unittest.cc
+++ b/chromium/media/renderers/video_resource_updater_unittest.cc
@@ -105,7 +105,7 @@ class VideoResourceUpdaterTest : public testing::Test {
// Note that the number of pixels needed for |size| must be less than or equal
// to the number of pixels needed for a size of 100x100.
- scoped_refptr<media::VideoFrame> CreateTestYUVVideoFrame(
+ scoped_refptr<VideoFrame> CreateTestYUVVideoFrame(
const gfx::Size& size = gfx::Size(10, 10)) {
constexpr int kMaxDimension = 100;
static uint8_t y_data[kMaxDimension * kMaxDimension] = {0};
@@ -114,24 +114,23 @@ class VideoResourceUpdaterTest : public testing::Test {
CHECK_LE(size.width() * size.height(), kMaxDimension * kMaxDimension);
- scoped_refptr<media::VideoFrame> video_frame =
- media::VideoFrame::WrapExternalYuvData(
- media::PIXEL_FORMAT_I422, // format
- size, // coded_size
- gfx::Rect(size), // visible_rect
- size, // natural_size
- size.width(), // y_stride
- size.width() / 2, // u_stride
- size.width() / 2, // v_stride
- y_data, // y_data
- u_data, // u_data
- v_data, // v_data
- base::TimeDelta()); // timestamp
+ scoped_refptr<VideoFrame> video_frame =
+ VideoFrame::WrapExternalYuvData(PIXEL_FORMAT_I422, // format
+ size, // coded_size
+ gfx::Rect(size), // visible_rect
+ size, // natural_size
+ size.width(), // y_stride
+ size.width() / 2, // u_stride
+ size.width() / 2, // v_stride
+ y_data, // y_data
+ u_data, // u_data
+ v_data, // v_data
+ base::TimeDelta()); // timestamp
EXPECT_TRUE(video_frame);
return video_frame;
}
- scoped_refptr<media::VideoFrame> CreateWonkyTestYUVVideoFrame() {
+ scoped_refptr<VideoFrame> CreateWonkyTestYUVVideoFrame() {
const int kDimension = 10;
const int kYWidth = kDimension + 5;
const int kUWidth = (kYWidth + 1) / 2 + 200;
@@ -140,30 +139,45 @@ class VideoResourceUpdaterTest : public testing::Test {
static uint8_t u_data[kUWidth * kDimension] = {0};
static uint8_t v_data[kVWidth * kDimension] = {0};
- scoped_refptr<media::VideoFrame> video_frame =
- media::VideoFrame::WrapExternalYuvData(
- media::PIXEL_FORMAT_I422, // format
- gfx::Size(kYWidth, kDimension), // coded_size
- gfx::Rect(2, 0, kDimension, kDimension), // visible_rect
- gfx::Size(kDimension, kDimension), // natural_size
- -kYWidth, // y_stride (negative)
- kUWidth, // u_stride
- kVWidth, // v_stride
- y_data + kYWidth * (kDimension - 1), // y_data
- u_data, // u_data
- v_data, // v_data
- base::TimeDelta()); // timestamp
+ scoped_refptr<VideoFrame> video_frame = VideoFrame::WrapExternalYuvData(
+ PIXEL_FORMAT_I422, // format
+ gfx::Size(kYWidth, kDimension), // coded_size
+ gfx::Rect(2, 0, kDimension, kDimension), // visible_rect
+ gfx::Size(kDimension, kDimension), // natural_size
+ -kYWidth, // y_stride (negative)
+ kUWidth, // u_stride
+ kVWidth, // v_stride
+ y_data + kYWidth * (kDimension - 1), // y_data
+ u_data, // u_data
+ v_data, // v_data
+ base::TimeDelta()); // timestamp
EXPECT_TRUE(video_frame);
return video_frame;
}
- scoped_refptr<media::VideoFrame> CreateTestHighBitFrame() {
+ scoped_refptr<VideoFrame> CreateTestRGBVideoFrame(VideoPixelFormat format) {
+ constexpr int kMaxDimension = 10;
+ constexpr gfx::Size kSize = gfx::Size(kMaxDimension, kMaxDimension);
+ static uint32_t rgb_data[kMaxDimension * kMaxDimension] = {0};
+ scoped_refptr<VideoFrame> video_frame = VideoFrame::WrapExternalData(
+ format, // format
+ kSize, // coded_size
+ gfx::Rect(kSize), // visible_rect
+ kSize, // natural_size
+ reinterpret_cast<uint8_t*>(rgb_data), // data
+ sizeof(rgb_data), // data_size
+ base::TimeDelta()); // timestamp
+ EXPECT_TRUE(video_frame);
+ return video_frame;
+ }
+
+ scoped_refptr<VideoFrame> CreateTestHighBitFrame() {
const int kDimension = 10;
gfx::Size size(kDimension, kDimension);
- scoped_refptr<media::VideoFrame> video_frame(media::VideoFrame::CreateFrame(
- media::PIXEL_FORMAT_YUV420P10, size, gfx::Rect(size), size,
- base::TimeDelta()));
+ scoped_refptr<VideoFrame> video_frame(
+ VideoFrame::CreateFrame(PIXEL_FORMAT_YUV420P10, size, gfx::Rect(size),
+ size, base::TimeDelta()));
EXPECT_TRUE(video_frame);
return video_frame;
}
@@ -172,65 +186,62 @@ class VideoResourceUpdaterTest : public testing::Test {
release_sync_token_ = sync_token;
}
- scoped_refptr<media::VideoFrame> CreateTestHardwareVideoFrame(
- media::VideoPixelFormat format,
+ scoped_refptr<VideoFrame> CreateTestHardwareVideoFrame(
+ VideoPixelFormat format,
unsigned target) {
const int kDimension = 10;
gfx::Size size(kDimension, kDimension);
auto mailbox = gpu::Mailbox::GenerateForSharedImage();
- gpu::MailboxHolder mailbox_holders[media::VideoFrame::kMaxPlanes] = {
+ gpu::MailboxHolder mailbox_holders[VideoFrame::kMaxPlanes] = {
gpu::MailboxHolder(mailbox, kMailboxSyncToken, target)};
- scoped_refptr<media::VideoFrame> video_frame =
- media::VideoFrame::WrapNativeTextures(
- format, mailbox_holders,
- base::BindOnce(&VideoResourceUpdaterTest::SetReleaseSyncToken,
- base::Unretained(this)),
- size, // coded_size
- gfx::Rect(size), // visible_rect
- size, // natural_size
- base::TimeDelta()); // timestamp
+ scoped_refptr<VideoFrame> video_frame = VideoFrame::WrapNativeTextures(
+ format, mailbox_holders,
+ base::BindOnce(&VideoResourceUpdaterTest::SetReleaseSyncToken,
+ base::Unretained(this)),
+ size, // coded_size
+ gfx::Rect(size), // visible_rect
+ size, // natural_size
+ base::TimeDelta()); // timestamp
EXPECT_TRUE(video_frame);
return video_frame;
}
- scoped_refptr<media::VideoFrame> CreateTestRGBAHardwareVideoFrame() {
- return CreateTestHardwareVideoFrame(media::PIXEL_FORMAT_ARGB,
- GL_TEXTURE_2D);
+ scoped_refptr<VideoFrame> CreateTestRGBAHardwareVideoFrame() {
+ return CreateTestHardwareVideoFrame(PIXEL_FORMAT_ARGB, GL_TEXTURE_2D);
}
- scoped_refptr<media::VideoFrame> CreateTestStreamTextureHardwareVideoFrame(
- base::Optional<media::VideoFrameMetadata::CopyMode> copy_mode) {
- scoped_refptr<media::VideoFrame> video_frame = CreateTestHardwareVideoFrame(
- media::PIXEL_FORMAT_ARGB, GL_TEXTURE_EXTERNAL_OES);
- video_frame->metadata()->copy_mode = std::move(copy_mode);
+ scoped_refptr<VideoFrame> CreateTestStreamTextureHardwareVideoFrame(
+ base::Optional<VideoFrameMetadata::CopyMode> copy_mode) {
+ scoped_refptr<VideoFrame> video_frame = CreateTestHardwareVideoFrame(
+ PIXEL_FORMAT_ARGB, GL_TEXTURE_EXTERNAL_OES);
+ video_frame->metadata().copy_mode = std::move(copy_mode);
return video_frame;
}
- scoped_refptr<media::VideoFrame> CreateTestYuvHardwareVideoFrame(
- media::VideoPixelFormat format,
+ scoped_refptr<VideoFrame> CreateTestYuvHardwareVideoFrame(
+ VideoPixelFormat format,
size_t num_textures,
unsigned target) {
const int kDimension = 10;
gfx::Size size(kDimension, kDimension);
- gpu::MailboxHolder mailbox_holders[media::VideoFrame::kMaxPlanes];
+ gpu::MailboxHolder mailbox_holders[VideoFrame::kMaxPlanes];
for (size_t i = 0; i < num_textures; ++i) {
gpu::Mailbox mailbox;
mailbox.name[0] = 50 + i;
mailbox_holders[i] =
gpu::MailboxHolder(mailbox, kMailboxSyncToken, target);
}
- scoped_refptr<media::VideoFrame> video_frame =
- media::VideoFrame::WrapNativeTextures(
- format, mailbox_holders,
- base::BindOnce(&VideoResourceUpdaterTest::SetReleaseSyncToken,
- base::Unretained(this)),
- size, // coded_size
- gfx::Rect(size), // visible_rect
- size, // natural_size
- base::TimeDelta()); // timestamp
+ scoped_refptr<VideoFrame> video_frame = VideoFrame::WrapNativeTextures(
+ format, mailbox_holders,
+ base::BindOnce(&VideoResourceUpdaterTest::SetReleaseSyncToken,
+ base::Unretained(this)),
+ size, // coded_size
+ gfx::Rect(size), // visible_rect
+ size, // natural_size
+ base::TimeDelta()); // timestamp
EXPECT_TRUE(video_frame);
return video_frame;
}
@@ -259,16 +270,27 @@ const gpu::SyncToken VideoResourceUpdaterTest::kMailboxSyncToken =
TEST_F(VideoResourceUpdaterTest, SoftwareFrame) {
std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
- scoped_refptr<media::VideoFrame> video_frame = CreateTestYUVVideoFrame();
+ scoped_refptr<VideoFrame> video_frame = CreateTestYUVVideoFrame();
VideoFrameExternalResources resources =
updater->CreateExternalResourcesFromVideoFrame(video_frame);
EXPECT_EQ(VideoFrameResourceType::YUV, resources.type);
}
+TEST_F(VideoResourceUpdaterTest, SoftwareFrameRGB) {
+ std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
+ for (const auto& fmt : {PIXEL_FORMAT_XBGR, PIXEL_FORMAT_XRGB,
+ PIXEL_FORMAT_ABGR, PIXEL_FORMAT_ARGB}) {
+ scoped_refptr<VideoFrame> video_frame = CreateTestRGBVideoFrame(fmt);
+ VideoFrameExternalResources resources =
+ updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::RGBA, resources.type);
+ }
+}
+
TEST_F(VideoResourceUpdaterTest, HighBitFrameNoF16) {
std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
- scoped_refptr<media::VideoFrame> video_frame = CreateTestHighBitFrame();
+ scoped_refptr<VideoFrame> video_frame = CreateTestHighBitFrame();
VideoFrameExternalResources resources =
updater->CreateExternalResourcesFromVideoFrame(video_frame);
@@ -284,7 +306,7 @@ class VideoResourceUpdaterTestWithF16 : public VideoResourceUpdaterTest {
TEST_F(VideoResourceUpdaterTestWithF16, HighBitFrame) {
std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
- scoped_refptr<media::VideoFrame> video_frame = CreateTestHighBitFrame();
+ scoped_refptr<VideoFrame> video_frame = CreateTestHighBitFrame();
VideoFrameExternalResources resources =
updater->CreateExternalResourcesFromVideoFrame(video_frame);
@@ -311,7 +333,7 @@ class VideoResourceUpdaterTestWithR16 : public VideoResourceUpdaterTest {
TEST_F(VideoResourceUpdaterTestWithR16, HighBitFrame) {
std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
- scoped_refptr<media::VideoFrame> video_frame = CreateTestHighBitFrame();
+ scoped_refptr<VideoFrame> video_frame = CreateTestHighBitFrame();
VideoFrameExternalResources resources =
updater->CreateExternalResourcesFromVideoFrame(video_frame);
@@ -333,7 +355,7 @@ TEST_F(VideoResourceUpdaterTestWithR16, HighBitFrame) {
TEST_F(VideoResourceUpdaterTest, HighBitFrameSoftwareCompositor) {
std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForSoftware();
- scoped_refptr<media::VideoFrame> video_frame = CreateTestHighBitFrame();
+ scoped_refptr<VideoFrame> video_frame = CreateTestHighBitFrame();
VideoFrameExternalResources resources =
updater->CreateExternalResourcesFromVideoFrame(video_frame);
@@ -342,7 +364,7 @@ TEST_F(VideoResourceUpdaterTest, HighBitFrameSoftwareCompositor) {
TEST_F(VideoResourceUpdaterTest, WonkySoftwareFrame) {
std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
- scoped_refptr<media::VideoFrame> video_frame = CreateWonkyTestYUVVideoFrame();
+ scoped_refptr<VideoFrame> video_frame = CreateWonkyTestYUVVideoFrame();
VideoFrameExternalResources resources =
updater->CreateExternalResourcesFromVideoFrame(video_frame);
@@ -351,7 +373,7 @@ TEST_F(VideoResourceUpdaterTest, WonkySoftwareFrame) {
TEST_F(VideoResourceUpdaterTest, WonkySoftwareFrameSoftwareCompositor) {
std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForSoftware();
- scoped_refptr<media::VideoFrame> video_frame = CreateWonkyTestYUVVideoFrame();
+ scoped_refptr<VideoFrame> video_frame = CreateWonkyTestYUVVideoFrame();
VideoFrameExternalResources resources =
updater->CreateExternalResourcesFromVideoFrame(video_frame);
@@ -360,7 +382,7 @@ TEST_F(VideoResourceUpdaterTest, WonkySoftwareFrameSoftwareCompositor) {
TEST_F(VideoResourceUpdaterTest, ReuseResource) {
std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
- scoped_refptr<media::VideoFrame> video_frame = CreateTestYUVVideoFrame();
+ scoped_refptr<VideoFrame> video_frame = CreateTestYUVVideoFrame();
video_frame->set_timestamp(base::TimeDelta::FromSeconds(1234));
// Allocate the resources for a YUV video frame.
@@ -390,7 +412,7 @@ TEST_F(VideoResourceUpdaterTest, ReuseResource) {
TEST_F(VideoResourceUpdaterTest, ReuseResourceNoDelete) {
std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
- scoped_refptr<media::VideoFrame> video_frame = CreateTestYUVVideoFrame();
+ scoped_refptr<VideoFrame> video_frame = CreateTestYUVVideoFrame();
video_frame->set_timestamp(base::TimeDelta::FromSeconds(1234));
// Allocate the resources for a YUV video frame.
@@ -415,16 +437,27 @@ TEST_F(VideoResourceUpdaterTest, ReuseResourceNoDelete) {
TEST_F(VideoResourceUpdaterTest, SoftwareFrameSoftwareCompositor) {
std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForSoftware();
- scoped_refptr<media::VideoFrame> video_frame = CreateTestYUVVideoFrame();
+ scoped_refptr<VideoFrame> video_frame = CreateTestYUVVideoFrame();
VideoFrameExternalResources resources =
updater->CreateExternalResourcesFromVideoFrame(video_frame);
EXPECT_EQ(VideoFrameResourceType::RGBA_PREMULTIPLIED, resources.type);
}
+TEST_F(VideoResourceUpdaterTest, SoftwareFrameRGBSoftwareCompositor) {
+ std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForSoftware();
+ for (const auto& fmt : {PIXEL_FORMAT_XBGR, PIXEL_FORMAT_XRGB,
+ PIXEL_FORMAT_ABGR, PIXEL_FORMAT_ARGB}) {
+ scoped_refptr<VideoFrame> video_frame = CreateTestRGBVideoFrame(fmt);
+ VideoFrameExternalResources resources =
+ updater->CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameResourceType::RGBA_PREMULTIPLIED, resources.type);
+ }
+}
+
TEST_F(VideoResourceUpdaterTest, ReuseResourceSoftwareCompositor) {
std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForSoftware();
- scoped_refptr<media::VideoFrame> video_frame = CreateTestYUVVideoFrame();
+ scoped_refptr<VideoFrame> video_frame = CreateTestYUVVideoFrame();
video_frame->set_timestamp(base::TimeDelta::FromSeconds(1234));
// Allocate the resources for a software video frame.
@@ -453,7 +486,7 @@ TEST_F(VideoResourceUpdaterTest, ReuseResourceSoftwareCompositor) {
TEST_F(VideoResourceUpdaterTest, ReuseResourceNoDeleteSoftwareCompositor) {
std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForSoftware();
- scoped_refptr<media::VideoFrame> video_frame = CreateTestYUVVideoFrame();
+ scoped_refptr<VideoFrame> video_frame = CreateTestYUVVideoFrame();
video_frame->set_timestamp(base::TimeDelta::FromSeconds(1234));
// Allocate the resources for a software video frame.
@@ -508,8 +541,7 @@ TEST_F(VideoResourceUpdaterTest, ChangeResourceSizeSoftwareCompositor) {
TEST_F(VideoResourceUpdaterTest, CreateForHardwarePlanes) {
std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
- scoped_refptr<media::VideoFrame> video_frame =
- CreateTestRGBAHardwareVideoFrame();
+ scoped_refptr<VideoFrame> video_frame = CreateTestRGBAHardwareVideoFrame();
VideoFrameExternalResources resources =
updater->CreateExternalResourcesFromVideoFrame(video_frame);
@@ -517,7 +549,7 @@ TEST_F(VideoResourceUpdaterTest, CreateForHardwarePlanes) {
EXPECT_EQ(1u, resources.resources.size());
EXPECT_EQ(1u, resources.release_callbacks.size());
- video_frame = CreateTestYuvHardwareVideoFrame(media::PIXEL_FORMAT_I420, 3,
+ video_frame = CreateTestYuvHardwareVideoFrame(PIXEL_FORMAT_I420, 3,
GL_TEXTURE_RECTANGLE_ARB);
resources = updater->CreateExternalResourcesFromVideoFrame(video_frame);
@@ -528,9 +560,9 @@ TEST_F(VideoResourceUpdaterTest, CreateForHardwarePlanes) {
EXPECT_FALSE(resources.resources[1].read_lock_fences_enabled);
EXPECT_FALSE(resources.resources[2].read_lock_fences_enabled);
- video_frame = CreateTestYuvHardwareVideoFrame(media::PIXEL_FORMAT_I420, 3,
+ video_frame = CreateTestYuvHardwareVideoFrame(PIXEL_FORMAT_I420, 3,
GL_TEXTURE_RECTANGLE_ARB);
- video_frame->metadata()->read_lock_fences_enabled = true;
+ video_frame->metadata().read_lock_fences_enabled = true;
resources = updater->CreateExternalResourcesFromVideoFrame(video_frame);
EXPECT_TRUE(resources.resources[0].read_lock_fences_enabled);
@@ -544,7 +576,7 @@ TEST_F(VideoResourceUpdaterTest,
std::unique_ptr<VideoResourceUpdater> updater =
CreateUpdaterForHardware(true);
EXPECT_EQ(0u, GetSharedImageCount());
- scoped_refptr<media::VideoFrame> video_frame =
+ scoped_refptr<VideoFrame> video_frame =
CreateTestStreamTextureHardwareVideoFrame(base::nullopt);
VideoFrameExternalResources resources =
@@ -559,7 +591,7 @@ TEST_F(VideoResourceUpdaterTest,
// A copied stream texture should return an RGBA resource in a new
// GL_TEXTURE_2D texture.
video_frame = CreateTestStreamTextureHardwareVideoFrame(
- media::VideoFrameMetadata::CopyMode::kCopyToNewTexture);
+ VideoFrameMetadata::CopyMode::kCopyToNewTexture);
resources = updater->CreateExternalResourcesFromVideoFrame(video_frame);
EXPECT_EQ(VideoFrameResourceType::RGBA_PREMULTIPLIED, resources.type);
EXPECT_EQ(1u, resources.resources.size());
@@ -575,7 +607,7 @@ TEST_F(VideoResourceUpdaterTest,
std::unique_ptr<VideoResourceUpdater> updater =
CreateUpdaterForHardware(true);
EXPECT_EQ(0u, GetSharedImageCount());
- scoped_refptr<media::VideoFrame> video_frame =
+ scoped_refptr<VideoFrame> video_frame =
CreateTestStreamTextureHardwareVideoFrame(base::nullopt);
VideoFrameExternalResources resources =
updater->CreateExternalResourcesFromVideoFrame(video_frame);
@@ -589,7 +621,7 @@ TEST_F(VideoResourceUpdaterTest,
// If mailbox is copied, the texture target should still be
// GL_TEXTURE_EXTERNAL_OES and resource type should be STREAM_TEXTURE.
video_frame = CreateTestStreamTextureHardwareVideoFrame(
- media::VideoFrameMetadata::CopyMode::kCopyMailboxesOnly);
+ VideoFrameMetadata::CopyMode::kCopyMailboxesOnly);
resources = updater->CreateExternalResourcesFromVideoFrame(video_frame);
EXPECT_EQ(VideoFrameResourceType::STREAM_TEXTURE, resources.type);
EXPECT_EQ(1u, resources.resources.size());
@@ -604,7 +636,7 @@ TEST_F(VideoResourceUpdaterTest,
TEST_F(VideoResourceUpdaterTest, CreateForHardwarePlanes_TextureQuad) {
std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
EXPECT_EQ(0u, GetSharedImageCount());
- scoped_refptr<media::VideoFrame> video_frame =
+ scoped_refptr<VideoFrame> video_frame =
CreateTestStreamTextureHardwareVideoFrame(base::nullopt);
VideoFrameExternalResources resources =
@@ -627,8 +659,7 @@ TEST_F(VideoResourceUpdaterTest, PassReleaseSyncToken) {
123);
{
- scoped_refptr<media::VideoFrame> video_frame =
- CreateTestRGBAHardwareVideoFrame();
+ scoped_refptr<VideoFrame> video_frame = CreateTestRGBAHardwareVideoFrame();
VideoFrameExternalResources resources =
updater->CreateExternalResourcesFromVideoFrame(video_frame);
@@ -653,8 +684,7 @@ TEST_F(VideoResourceUpdaterTest, GenerateReleaseSyncToken) {
234);
{
- scoped_refptr<media::VideoFrame> video_frame =
- CreateTestRGBAHardwareVideoFrame();
+ scoped_refptr<VideoFrame> video_frame = CreateTestRGBAHardwareVideoFrame();
VideoFrameExternalResources resources1 =
updater->CreateExternalResourcesFromVideoFrame(video_frame);
@@ -677,8 +707,7 @@ TEST_F(VideoResourceUpdaterTest, GenerateReleaseSyncToken) {
TEST_F(VideoResourceUpdaterTest, PassMailboxSyncToken) {
std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
- scoped_refptr<media::VideoFrame> video_frame =
- CreateTestRGBAHardwareVideoFrame();
+ scoped_refptr<VideoFrame> video_frame = CreateTestRGBAHardwareVideoFrame();
VideoFrameExternalResources resources =
updater->CreateExternalResourcesFromVideoFrame(video_frame);
@@ -693,9 +722,9 @@ TEST_F(VideoResourceUpdaterTest, PassMailboxSyncToken) {
TEST_F(VideoResourceUpdaterTest, GenerateSyncTokenOnTextureCopy) {
std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
- scoped_refptr<media::VideoFrame> video_frame =
+ scoped_refptr<VideoFrame> video_frame =
CreateTestStreamTextureHardwareVideoFrame(
- media::VideoFrameMetadata::CopyMode::kCopyToNewTexture);
+ VideoFrameMetadata::CopyMode::kCopyToNewTexture);
VideoFrameExternalResources resources =
updater->CreateExternalResourcesFromVideoFrame(video_frame);
@@ -712,8 +741,8 @@ TEST_F(VideoResourceUpdaterTest, GenerateSyncTokenOnTextureCopy) {
TEST_F(VideoResourceUpdaterTest, CreateForHardwarePlanes_SingleNV12) {
std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
EXPECT_EQ(0u, GetSharedImageCount());
- scoped_refptr<media::VideoFrame> video_frame = CreateTestHardwareVideoFrame(
- media::PIXEL_FORMAT_NV12, GL_TEXTURE_EXTERNAL_OES);
+ scoped_refptr<VideoFrame> video_frame =
+ CreateTestHardwareVideoFrame(PIXEL_FORMAT_NV12, GL_TEXTURE_EXTERNAL_OES);
VideoFrameExternalResources resources =
updater->CreateExternalResourcesFromVideoFrame(video_frame);
@@ -723,7 +752,7 @@ TEST_F(VideoResourceUpdaterTest, CreateForHardwarePlanes_SingleNV12) {
resources.resources[0].mailbox_holder.texture_target);
EXPECT_EQ(viz::YUV_420_BIPLANAR, resources.resources[0].format);
- video_frame = CreateTestYuvHardwareVideoFrame(media::PIXEL_FORMAT_NV12, 1,
+ video_frame = CreateTestYuvHardwareVideoFrame(PIXEL_FORMAT_NV12, 1,
GL_TEXTURE_RECTANGLE_ARB);
resources = updater->CreateExternalResourcesFromVideoFrame(video_frame);
EXPECT_EQ(VideoFrameResourceType::RGB, resources.type);
@@ -738,9 +767,8 @@ TEST_F(VideoResourceUpdaterTest, CreateForHardwarePlanes_SingleNV12) {
TEST_F(VideoResourceUpdaterTest, CreateForHardwarePlanes_DualNV12) {
std::unique_ptr<VideoResourceUpdater> updater = CreateUpdaterForHardware();
EXPECT_EQ(0u, GetSharedImageCount());
- scoped_refptr<media::VideoFrame> video_frame =
- CreateTestYuvHardwareVideoFrame(media::PIXEL_FORMAT_NV12, 2,
- GL_TEXTURE_EXTERNAL_OES);
+ scoped_refptr<VideoFrame> video_frame = CreateTestYuvHardwareVideoFrame(
+ PIXEL_FORMAT_NV12, 2, GL_TEXTURE_EXTERNAL_OES);
VideoFrameExternalResources resources =
updater->CreateExternalResourcesFromVideoFrame(video_frame);
@@ -753,7 +781,7 @@ TEST_F(VideoResourceUpdaterTest, CreateForHardwarePlanes_DualNV12) {
EXPECT_EQ(viz::RED_8, resources.resources[0].format);
EXPECT_EQ(viz::RG_88, resources.resources[1].format);
- video_frame = CreateTestYuvHardwareVideoFrame(media::PIXEL_FORMAT_NV12, 2,
+ video_frame = CreateTestYuvHardwareVideoFrame(PIXEL_FORMAT_NV12, 2,
GL_TEXTURE_RECTANGLE_ARB);
resources = updater->CreateExternalResourcesFromVideoFrame(video_frame);
EXPECT_EQ(VideoFrameResourceType::YUV, resources.type);
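The NV12 tests above pin down how texture count maps to viz resource
formats: one shared image carries both planes as YUV_420_BIPLANAR, while
two shared images split into RED_8 for Y and RG_88 for the interleaved UV
plane. A compact sketch of that mapping (an illustrative helper only, not
part of the VideoResourceUpdater API):

  // Hypothetical helper mirroring the expectations in the tests above.
  viz::ResourceFormat ExpectedNV12Format(size_t num_textures, size_t plane) {
    if (num_textures == 1)
      return viz::YUV_420_BIPLANAR;  // Both planes behind a single image.
    return plane == 0 ? viz::RED_8   // Y plane: one 8-bit channel.
                      : viz::RG_88;  // UV plane: two interleaved channels.
  }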
diff --git a/chromium/media/renderers/win/media_foundation_renderer.cc b/chromium/media/renderers/win/media_foundation_renderer.cc
index 97d50caf4dc..7c47302e438 100644
--- a/chromium/media/renderers/win/media_foundation_renderer.cc
+++ b/chromium/media/renderers/win/media_foundation_renderer.cc
@@ -73,7 +73,7 @@ MediaFoundationRenderer::MediaFoundationRenderer(
scoped_refptr<base::SequencedTaskRunner> task_runner,
bool force_dcomp_mode_for_testing)
: muted_(muted),
- task_runner_(task_runner),
+ task_runner_(std::move(task_runner)),
force_dcomp_mode_for_testing_(force_dcomp_mode_for_testing) {
DVLOG_FUNC(1);
}
@@ -241,7 +241,7 @@ HRESULT MediaFoundationRenderer::SetSourceOnMediaEngine() {
DVLOG(2) << "Set MFRendererSrc scheme as the source for MFMediaEngine.";
base::win::ScopedBstr mf_renderer_source_scheme(
- base::ASCIIToUTF16("MFRendererSrc"));
+ base::ASCIIToWide("MFRendererSrc"));
// We need to set our source scheme first in order for the MFMediaEngine to
// load our custom MFMediaSource.
RETURN_IF_FAILED(
diff --git a/chromium/media/renderers/win/media_foundation_stream_wrapper.cc b/chromium/media/renderers/win/media_foundation_stream_wrapper.cc
index 7d5dc771116..ce71d39898d 100644
--- a/chromium/media/renderers/win/media_foundation_stream_wrapper.cc
+++ b/chromium/media/renderers/win/media_foundation_stream_wrapper.cc
@@ -416,6 +416,15 @@ void MediaFoundationStreamWrapper::OnDemuxerStreamRead(
<< ": QueueFormatChangedEvent failed: " << PrintHr(hr);
return;
}
+ } else {
+ // GetMediaType() calls {audio,video}_decoder_config(), which
+ // DemuxerStream requires to be called whenever kConfigChanged occurs.
+ ComPtr<IMFMediaType> media_type;
+ hr = GetMediaType(&media_type);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << __func__ << ": GetMediaType failed: " << PrintHr(hr);
+ return;
+ }
}
} else if (status == DemuxerStream::Status::kError) {
DVLOG_FUNC(2) << "Stream read error";
diff --git a/chromium/media/renderers/win/media_foundation_video_stream.cc b/chromium/media/renderers/win/media_foundation_video_stream.cc
index 740c199590e..500fadac723 100644
--- a/chromium/media/renderers/win/media_foundation_video_stream.cc
+++ b/chromium/media/renderers/win/media_foundation_video_stream.cc
@@ -185,17 +185,24 @@ HRESULT MediaFoundationH264VideoStream::GetMediaType(
}
bool MediaFoundationH264VideoStream::AreFormatChangesEnabled() {
- // Disable format changes for H264 streams as it causes the H264 decoder MFT
- // to clear previously received SPS / PPS data, causing a stall until another
- // set of SPS and PPS NALUs are received.
+ // Disable the explicit format change event for H264 so we can switch to
+ // the new stream without a full re-create, which is much faster. The MFT
+ // decoder can also absorb many format changes without an explicit event.
+ // For format changes it cannot handle (e.g. a codec change), playback
+ // fails later with MF_E_INVALIDMEDIATYPE (0xC00D36B4).
return false;
}
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
bool MediaFoundationHEVCVideoStream::AreFormatChangesEnabled() {
- // Disable format changes for HEVC streams to work around HEVC decoder MFT
- // limitation.
+ // Disable the explicit format change event for HEVC so we can switch to
+ // the new stream without a full re-create, which is much faster. The MFT
+ // decoder can also absorb many format changes without an explicit event.
+ // For format changes it cannot handle (e.g. a codec change), playback
+ // fails later with MF_E_INVALIDMEDIATYPE (0xC00D36B4).
return false;
}
#endif // BUILDFLAG(ENABLE_PLATFORM_HEVC)
diff --git a/chromium/media/test/BUILD.gn b/chromium/media/test/BUILD.gn
index 8ac0f132230..caf4b085245 100644
--- a/chromium/media/test/BUILD.gn
+++ b/chromium/media/test/BUILD.gn
@@ -4,7 +4,6 @@
import("//media/media_options.gni")
import("//testing/libfuzzer/fuzzer_test.gni")
-import("//third_party/libaom/options.gni")
source_set("run_all_unittests") {
testonly = true
@@ -44,7 +43,6 @@ source_set("pipeline_integration_test_base") {
"//media:test_support",
"//testing/gmock",
"//testing/gtest",
- "//third_party/libaom:libaom_buildflags",
"//url",
]
}
diff --git a/chromium/media/video/BUILD.gn b/chromium/media/video/BUILD.gn
index 744cc45c4c3..77850ed6e0d 100644
--- a/chromium/media/video/BUILD.gn
+++ b/chromium/media/video/BUILD.gn
@@ -18,6 +18,7 @@ source_set("video") {
sources = [
"gpu_memory_buffer_video_frame_pool.cc",
"gpu_memory_buffer_video_frame_pool.h",
+ "gpu_video_accelerator_factories.cc",
"gpu_video_accelerator_factories.h",
"h264_bit_reader.cc",
"h264_bit_reader.h",
@@ -31,8 +32,6 @@ source_set("video") {
"half_float_maker.h",
"picture.cc",
"picture.h",
- "supported_video_decoder_config.cc",
- "supported_video_decoder_config.h",
"trace_util.cc",
"trace_util.h",
"video_decode_accelerator.cc",
@@ -55,6 +54,7 @@ source_set("video") {
public_deps = [ "//gpu/command_buffer/client:gles2_interface" ]
deps = [
+ "//build:chromeos_buildflags",
"//gpu/command_buffer/client",
"//gpu/command_buffer/common",
"//media/base",
@@ -125,7 +125,7 @@ source_set("unit_tests") {
"h264_parser_unittest.cc",
"h264_poc_unittest.cc",
"half_float_maker_unittest.cc",
- "supported_video_decoder_config_unittest.cc",
+ "video_encode_accelerator_adapter_test.cc",
]
if (enable_platform_hevc) {
sources += [ "h265_parser_unittest.cc" ]
@@ -142,6 +142,7 @@ source_set("unit_tests") {
"//media:test_support",
"//testing/gmock",
"//testing/gtest",
+ "//third_party/libyuv:libyuv",
"//ui/gfx",
]
}
diff --git a/chromium/media/video/fake_video_encode_accelerator.cc b/chromium/media/video/fake_video_encode_accelerator.cc
index 8400e49040f..1f12fd2cc0b 100644
--- a/chromium/media/video/fake_video_encode_accelerator.cc
+++ b/chromium/media/video/fake_video_encode_accelerator.cc
@@ -7,14 +7,20 @@
#include "base/bind.h"
#include "base/check.h"
#include "base/location.h"
-#include "base/single_thread_task_runner.h"
+#include "base/logging.h"
+#include "base/sequenced_task_runner.h"
namespace media {
static const unsigned int kMinimumInputCount = 1;
+FakeVideoEncodeAccelerator::FrameToEncode::FrameToEncode() = default;
+FakeVideoEncodeAccelerator::FrameToEncode::FrameToEncode(
+ const FakeVideoEncodeAccelerator::FrameToEncode&) = default;
+FakeVideoEncodeAccelerator::FrameToEncode::~FrameToEncode() = default;
+
FakeVideoEncodeAccelerator::FakeVideoEncodeAccelerator(
- const scoped_refptr<base::SingleThreadTaskRunner>& task_runner)
+ const scoped_refptr<base::SequencedTaskRunner>& task_runner)
: task_runner_(task_runner),
will_initialization_succeed_(true),
client_(nullptr),
@@ -60,7 +66,10 @@ bool FakeVideoEncodeAccelerator::Initialize(const Config& config,
void FakeVideoEncodeAccelerator::Encode(scoped_refptr<VideoFrame> frame,
bool force_keyframe) {
DCHECK(client_);
- queued_frames_.push(force_keyframe);
+ FrameToEncode encode;
+ encode.frame = frame;
+ encode.force_keyframe = force_keyframe;
+ queued_frames_.push(encode);
EncodeTask();
}
@@ -82,13 +91,8 @@ void FakeVideoEncodeAccelerator::RequestEncodingParametersChange(
stored_bitrate_allocations_.push_back(bitrate);
}
-void FakeVideoEncodeAccelerator::Destroy() { delete this; }
-
-void FakeVideoEncodeAccelerator::SendDummyFrameForTesting(bool key_frame) {
- task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&FakeVideoEncodeAccelerator::DoBitstreamBufferReady,
- weak_this_factory_.GetWeakPtr(), 0, 23, key_frame));
+void FakeVideoEncodeAccelerator::Destroy() {
+ delete this;
}
void FakeVideoEncodeAccelerator::SetWillInitializationSucceed(
@@ -100,34 +104,46 @@ void FakeVideoEncodeAccelerator::DoRequireBitstreamBuffers(
unsigned int input_count,
const gfx::Size& input_coded_size,
size_t output_buffer_size) const {
- client_->RequireBitstreamBuffers(
- input_count, input_coded_size, output_buffer_size);
+ client_->RequireBitstreamBuffers(input_count, input_coded_size,
+ output_buffer_size);
}
void FakeVideoEncodeAccelerator::EncodeTask() {
while (!queued_frames_.empty() && !available_buffers_.empty()) {
- bool force_key_frame = queued_frames_.front();
- queued_frames_.pop();
- int32_t bitstream_buffer_id = available_buffers_.front().id();
+ FrameToEncode frame_to_encode = queued_frames_.front();
+ BitstreamBuffer buffer = std::move(available_buffers_.front());
available_buffers_.pop_front();
- bool key_frame = next_frame_is_first_frame_ || force_key_frame;
- next_frame_is_first_frame_ = false;
+ queued_frames_.pop();
+
+ if (next_frame_is_first_frame_) {
+ frame_to_encode.force_keyframe = true;
+ next_frame_is_first_frame_ = false;
+ }
+
task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&FakeVideoEncodeAccelerator::DoBitstreamBufferReady,
- weak_this_factory_.GetWeakPtr(), bitstream_buffer_id,
- kMinimumOutputBufferSize, key_frame));
+ weak_this_factory_.GetWeakPtr(), std::move(buffer),
+ frame_to_encode));
}
}
void FakeVideoEncodeAccelerator::DoBitstreamBufferReady(
- int32_t bitstream_buffer_id,
- size_t payload_size,
- bool key_frame) const {
- client_->BitstreamBufferReady(
- bitstream_buffer_id,
- BitstreamBufferMetadata(payload_size, key_frame,
- base::Time::Now().since_origin()));
+ BitstreamBuffer buffer,
+ FrameToEncode frame_to_encode) const {
+ BitstreamBufferMetadata metadata(kMinimumOutputBufferSize,
+ frame_to_encode.force_keyframe,
+ frame_to_encode.frame->timestamp());
+
+ if (!encoding_callback_.is_null())
+ metadata = encoding_callback_.Run(buffer, frame_to_encode.force_keyframe,
+ frame_to_encode.frame);
+
+ client_->BitstreamBufferReady(buffer.id(), metadata);
+}
+
+bool FakeVideoEncodeAccelerator::IsGpuFrameResizeSupported() {
+ return resize_supported_;
}
} // namespace media
diff --git a/chromium/media/video/fake_video_encode_accelerator.h b/chromium/media/video/fake_video_encode_accelerator.h
index 5efad84517a..c9d7888ccc0 100644
--- a/chromium/media/video/fake_video_encode_accelerator.h
+++ b/chromium/media/video/fake_video_encode_accelerator.h
@@ -19,7 +19,7 @@
namespace base {
-class SingleThreadTaskRunner;
+class SequencedTaskRunner;
} // namespace base
@@ -30,7 +30,7 @@ static const size_t kMinimumOutputBufferSize = 123456;
class FakeVideoEncodeAccelerator : public VideoEncodeAccelerator {
public:
explicit FakeVideoEncodeAccelerator(
- const scoped_refptr<base::SingleThreadTaskRunner>& task_runner);
+ const scoped_refptr<base::SequencedTaskRunner>& task_runner);
~FakeVideoEncodeAccelerator() override;
VideoEncodeAccelerator::SupportedProfiles GetSupportedProfiles() override;
@@ -41,6 +41,7 @@ class FakeVideoEncodeAccelerator : public VideoEncodeAccelerator {
uint32_t framerate) override;
void RequestEncodingParametersChange(const VideoBitrateAllocation& bitrate,
uint32_t framerate) override;
+ bool IsGpuFrameResizeSupported() override;
void Destroy() override;
const std::vector<uint32_t>& stored_bitrates() const {
@@ -50,25 +51,43 @@ class FakeVideoEncodeAccelerator : public VideoEncodeAccelerator {
const {
return stored_bitrate_allocations_;
}
- void SendDummyFrameForTesting(bool key_frame);
void SetWillInitializationSucceed(bool will_initialization_succeed);
size_t minimum_output_buffer_size() const { return kMinimumOutputBufferSize; }
+ struct FrameToEncode {
+ FrameToEncode();
+ FrameToEncode(const FrameToEncode&);
+ ~FrameToEncode();
+ scoped_refptr<VideoFrame> frame;
+ bool force_keyframe;
+ };
+
+ using EncodingCallback = base::RepeatingCallback<BitstreamBufferMetadata(
+ BitstreamBuffer&,
+ bool keyframe,
+ scoped_refptr<VideoFrame> frame)>;
+
+ void SetEncodingCallback(EncodingCallback callback) {
+ encoding_callback_ = std::move(callback);
+ }
+
+ void SupportResize() { resize_supported_ = true; }
+
private:
void DoRequireBitstreamBuffers(unsigned int input_count,
const gfx::Size& input_coded_size,
size_t output_buffer_size) const;
void EncodeTask();
- void DoBitstreamBufferReady(int32_t bitstream_buffer_id,
- size_t payload_size,
- bool key_frame) const;
+ void DoBitstreamBufferReady(BitstreamBuffer buffer,
+ FrameToEncode frame_to_encode) const;
// The task runner from the constructor, used for all tasks.
- const scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
+ const scoped_refptr<base::SequencedTaskRunner> task_runner_;
std::vector<uint32_t> stored_bitrates_;
std::vector<VideoBitrateAllocation> stored_bitrate_allocations_;
bool will_initialization_succeed_;
+ bool resize_supported_ = false;
VideoEncodeAccelerator::Client* client_;
@@ -76,13 +95,15 @@ class FakeVideoEncodeAccelerator : public VideoEncodeAccelerator {
// is used to force a fake key frame for the first encoded frame.
bool next_frame_is_first_frame_;
- // A queue containing the necessary data for incoming frames. The boolean
- // represent whether the queued frame should force a key frame.
- base::queue<bool> queued_frames_;
+ // A queue containing the necessary data for incoming frames.
+ base::queue<FrameToEncode> queued_frames_;
// A list of buffers available for putting fake encoded frames in.
std::list<BitstreamBuffer> available_buffers_;
+ // Callback that, if set, performs the actual frame-to-buffer conversion.
+ EncodingCallback encoding_callback_;
+
base::WeakPtrFactory<FakeVideoEncodeAccelerator> weak_this_factory_{this};
DISALLOW_COPY_AND_ASSIGN(FakeVideoEncodeAccelerator);
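The EncodingCallback hook added above lets a test supply the bitstream
metadata itself instead of taking the fake's canned values. A minimal usage
sketch, assuming |fake_vea| is a FakeVideoEncodeAccelerator that the test
fixture has already initialized and supplied with bitstream buffers:

  // Route every fake encode through a custom callback. The 100-byte payload
  // size is made up; the keyframe flag and timestamp mirror what
  // DoBitstreamBufferReady() produces by default.
  fake_vea->SetEncodingCallback(base::BindRepeating(
      [](BitstreamBuffer& buffer, bool keyframe,
         scoped_refptr<VideoFrame> frame) {
        return BitstreamBufferMetadata(100, keyframe, frame->timestamp());
      }));
  fake_vea->SupportResize();  // Report IsGpuFrameResizeSupported() == true.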
diff --git a/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc b/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc
index bf5fe2a0d43..6dccffde372 100644
--- a/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc
+++ b/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc
@@ -456,7 +456,23 @@ void CopyRowsToNV12Buffer(int first_row,
DCHECK_LE(bytes_per_row, std::abs(dest_stride_uv));
DCHECK_EQ(0, first_row % 2);
DCHECK(source_frame->format() == PIXEL_FORMAT_I420 ||
- source_frame->format() == PIXEL_FORMAT_YV12);
+ source_frame->format() == PIXEL_FORMAT_YV12 ||
+ source_frame->format() == PIXEL_FORMAT_NV12);
+ if (source_frame->format() == PIXEL_FORMAT_NV12) {
+ libyuv::CopyPlane(source_frame->visible_data(VideoFrame::kYPlane) +
+ first_row * source_frame->stride(VideoFrame::kYPlane),
+ source_frame->stride(VideoFrame::kYPlane),
+ dest_y + first_row * dest_stride_y, dest_stride_y,
+ bytes_per_row, rows);
+ libyuv::CopyPlane(
+ source_frame->visible_data(VideoFrame::kUVPlane) +
+ first_row / 2 * source_frame->stride(VideoFrame::kUVPlane),
+ source_frame->stride(VideoFrame::kUVPlane),
+ dest_uv + first_row / 2 * dest_stride_uv, dest_stride_uv, bytes_per_row,
+ rows / 2);
+
+ return;
+ }
libyuv::I420ToNV12(
source_frame->visible_data(VideoFrame::kYPlane) +
first_row * source_frame->stride(VideoFrame::kYPlane),
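The NV12 fast path above follows directly from 4:2:0 sizing: the Y plane is
width x height bytes and the interleaved UV plane is width x height / 2
bytes, so a copy of |rows| luma rows needs only rows / 2 chroma rows. A
small sketch of that arithmetic (illustrative only, not part of the patch):

  // NV12 plane sizes for a width x height frame (4:2:0, interleaved UV).
  size_t NV12PlaneBytes(int width, int height, int plane) {
    // Plane 0 (Y): one byte per pixel. Plane 1 (UV): one U and one V byte
    // per 2x2 block, i.e. height / 2 rows of |width| bytes.
    return plane == 0 ? static_cast<size_t>(width) * height
                      : static_cast<size_t>(width) * (height / 2);
  }
  // e.g. 10x10: Y = 100 bytes, UV = 50 bytes -- which matches the
  // kDimension * kDimension / 2 buffer in the NV12 unit test further below.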
@@ -642,6 +658,12 @@ void GpuMemoryBufferVideoFramePool::PoolImpl::CreateHardwareFrame(
if (!IOSurfaceCanSetColorSpace(video_frame->ColorSpace()))
passthrough = true;
#endif
+
+ if (!video_frame->IsMappable()) {
+ // Already a hardware frame.
+ passthrough = true;
+ }
+
if (output_format_ == GpuVideoAcceleratorFactories::OutputFormat::UNDEFINED)
passthrough = true;
switch (pixel_format) {
@@ -650,11 +672,11 @@ void GpuMemoryBufferVideoFramePool::PoolImpl::CreateHardwareFrame(
case PIXEL_FORMAT_I420:
case PIXEL_FORMAT_YUV420P10:
case PIXEL_FORMAT_I420A:
+ case PIXEL_FORMAT_NV12:
break;
// Unsupported cases.
case PIXEL_FORMAT_I422:
case PIXEL_FORMAT_I444:
- case PIXEL_FORMAT_NV12:
case PIXEL_FORMAT_NV21:
case PIXEL_FORMAT_UYVY:
case PIXEL_FORMAT_YUY2:
@@ -677,6 +699,7 @@ void GpuMemoryBufferVideoFramePool::PoolImpl::CreateHardwareFrame(
case PIXEL_FORMAT_P016LE:
case PIXEL_FORMAT_XR30:
case PIXEL_FORMAT_XB30:
+ case PIXEL_FORMAT_RGBAF16:
case PIXEL_FORMAT_UNKNOWN:
if (is_software_backed_video_frame) {
UMA_HISTOGRAM_ENUMERATION(
@@ -753,6 +776,10 @@ void GpuMemoryBufferVideoFramePool::PoolImpl::OnCopiesDone(
plane_resource.gpu_memory_buffer->Unmap();
plane_resource.gpu_memory_buffer->SetColorSpace(
video_frame->ColorSpace());
+ if (video_frame->hdr_metadata()) {
+ plane_resource.gpu_memory_buffer->SetHDRMetadata(
+ video_frame->hdr_metadata().value());
+ }
}
}
@@ -1006,7 +1033,7 @@ void GpuMemoryBufferVideoFramePool::PoolImpl::
#else
switch (output_format_) {
case GpuVideoAcceleratorFactories::OutputFormat::I420:
- allow_overlay = video_frame->metadata()->allow_overlay;
+ allow_overlay = video_frame->metadata().allow_overlay;
break;
case GpuVideoAcceleratorFactories::OutputFormat::P010:
case GpuVideoAcceleratorFactories::OutputFormat::NV12_SINGLE_GMB:
@@ -1036,9 +1063,9 @@ void GpuMemoryBufferVideoFramePool::PoolImpl::
break;
}
#endif // OS_WIN
- frame->metadata()->MergeMetadataFrom(video_frame->metadata());
- frame->metadata()->allow_overlay = allow_overlay;
- frame->metadata()->read_lock_fences_enabled = true;
+ frame->metadata().MergeMetadataFrom(video_frame->metadata());
+ frame->metadata().allow_overlay = allow_overlay;
+ frame->metadata().read_lock_fences_enabled = true;
CompleteCopyRequestAndMaybeStartNextCopy(std::move(frame));
}
diff --git a/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc b/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc
index e9821951132..be9e6fca56b 100644
--- a/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc
+++ b/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc
@@ -116,6 +116,30 @@ class GpuMemoryBufferVideoFramePoolTest : public ::testing::Test {
return video_frame;
}
+ static scoped_refptr<VideoFrame> CreateTestNV12VideoFrame(int dimension) {
+ const int kDimension = 10;
+ static uint8_t y_data[kDimension * kDimension] = {0};
+ // Subsampled by 2x2, two components.
+ static uint8_t uv_data[kDimension * kDimension / 2] = {0};
+
+ const VideoPixelFormat format = PIXEL_FORMAT_NV12;
+ DCHECK_LE(dimension, kDimension);
+ const gfx::Size size(dimension, dimension);
+
+ scoped_refptr<VideoFrame> video_frame =
+ VideoFrame::WrapExternalYuvData(format, // format
+ size, // coded_size
+ gfx::Rect(size), // visible_rect
+ size, // natural_size
+ size.width(), // y_stride
+ size.width(), // uv_stride
+ y_data, // y_data
+ uv_data, // uv_data
+ base::TimeDelta()); // timestamp
+ EXPECT_TRUE(video_frame);
+ return video_frame;
+ }
+
// Note, the X portion is set to 1 since it may use ARGB instead of
// XRGB on some platforms.
uint32_t as_xr30(uint32_t r, uint32_t g, uint32_t b) {
@@ -284,7 +308,7 @@ TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareNV12Frame) {
EXPECT_EQ(PIXEL_FORMAT_NV12, frame->format());
EXPECT_EQ(1u, frame->NumTextures());
EXPECT_EQ(1u, sii_->shared_image_count());
- EXPECT_TRUE(frame->metadata()->read_lock_fences_enabled);
+ EXPECT_TRUE(frame->metadata().read_lock_fences_enabled);
}
TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareNV12Frame2) {
@@ -301,7 +325,23 @@ TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareNV12Frame2) {
EXPECT_EQ(PIXEL_FORMAT_NV12, frame->format());
EXPECT_EQ(2u, frame->NumTextures());
EXPECT_EQ(2u, sii_->shared_image_count());
- EXPECT_TRUE(frame->metadata()->read_lock_fences_enabled);
+ EXPECT_TRUE(frame->metadata().read_lock_fences_enabled);
+}
+
+TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareFrameForNV12Input) {
+ scoped_refptr<VideoFrame> software_frame = CreateTestNV12VideoFrame(10);
+ scoped_refptr<VideoFrame> frame;
+ mock_gpu_factories_->SetVideoFrameOutputFormat(
+ media::GpuVideoAcceleratorFactories::OutputFormat::NV12_DUAL_GMB);
+ gpu_memory_buffer_pool_->MaybeCreateHardwareFrame(
+ software_frame, base::BindOnce(MaybeCreateHardwareFrameCallback, &frame));
+
+ RunUntilIdle();
+
+ EXPECT_NE(software_frame.get(), frame.get());
+ EXPECT_EQ(PIXEL_FORMAT_NV12, frame->format());
+ EXPECT_EQ(2u, frame->NumTextures());
+ EXPECT_EQ(2u, sii_->shared_image_count());
}
TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareXR30Frame) {
@@ -318,7 +358,7 @@ TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareXR30Frame) {
EXPECT_EQ(PIXEL_FORMAT_XR30, frame->format());
EXPECT_EQ(1u, frame->NumTextures());
EXPECT_EQ(1u, sii_->shared_image_count());
- EXPECT_TRUE(frame->metadata()->read_lock_fences_enabled);
+ EXPECT_TRUE(frame->metadata().read_lock_fences_enabled);
EXPECT_EQ(1u, mock_gpu_factories_->created_memory_buffers().size());
mock_gpu_factories_->created_memory_buffers()[0]->Map();
@@ -341,7 +381,7 @@ TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareP010Frame) {
EXPECT_EQ(PIXEL_FORMAT_P016LE, frame->format());
EXPECT_EQ(1u, frame->NumTextures());
EXPECT_EQ(1u, sii_->shared_image_count());
- EXPECT_TRUE(frame->metadata()->read_lock_fences_enabled);
+ EXPECT_TRUE(frame->metadata().read_lock_fences_enabled);
EXPECT_EQ(1u, mock_gpu_factories_->created_memory_buffers().size());
mock_gpu_factories_->created_memory_buffers()[0]->Map();
@@ -373,7 +413,7 @@ TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareXR30FrameBT709) {
EXPECT_EQ(PIXEL_FORMAT_XR30, frame->format());
EXPECT_EQ(1u, frame->NumTextures());
EXPECT_EQ(1u, sii_->shared_image_count());
- EXPECT_TRUE(frame->metadata()->read_lock_fences_enabled);
+ EXPECT_TRUE(frame->metadata().read_lock_fences_enabled);
EXPECT_EQ(1u, mock_gpu_factories_->created_memory_buffers().size());
mock_gpu_factories_->created_memory_buffers()[0]->Map();
@@ -397,7 +437,7 @@ TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareXR30FrameBT601) {
EXPECT_EQ(PIXEL_FORMAT_XR30, frame->format());
EXPECT_EQ(1u, frame->NumTextures());
EXPECT_EQ(1u, sii_->shared_image_count());
- EXPECT_TRUE(frame->metadata()->read_lock_fences_enabled);
+ EXPECT_TRUE(frame->metadata().read_lock_fences_enabled);
EXPECT_EQ(1u, mock_gpu_factories_->created_memory_buffers().size());
mock_gpu_factories_->created_memory_buffers()[0]->Map();
@@ -420,7 +460,7 @@ TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareXB30Frame) {
EXPECT_EQ(PIXEL_FORMAT_XB30, frame->format());
EXPECT_EQ(1u, frame->NumTextures());
EXPECT_EQ(1u, sii_->shared_image_count());
- EXPECT_TRUE(frame->metadata()->read_lock_fences_enabled);
+ EXPECT_TRUE(frame->metadata().read_lock_fences_enabled);
}
TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareRGBAFrame) {
@@ -437,15 +477,15 @@ TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareRGBAFrame) {
EXPECT_EQ(PIXEL_FORMAT_ABGR, frame->format());
EXPECT_EQ(1u, frame->NumTextures());
EXPECT_EQ(1u, sii_->shared_image_count());
- EXPECT_TRUE(frame->metadata()->read_lock_fences_enabled);
+ EXPECT_TRUE(frame->metadata().read_lock_fences_enabled);
}
TEST_F(GpuMemoryBufferVideoFramePoolTest, PreservesMetadata) {
scoped_refptr<VideoFrame> software_frame = CreateTestYUVVideoFrame(10);
- software_frame->metadata()->end_of_stream = true;
+ software_frame->metadata().end_of_stream = true;
base::TimeTicks kTestReferenceTime =
base::TimeDelta::FromMilliseconds(12345) + base::TimeTicks();
- software_frame->metadata()->reference_time = kTestReferenceTime;
+ software_frame->metadata().reference_time = kTestReferenceTime;
scoped_refptr<VideoFrame> frame;
gpu_memory_buffer_pool_->MaybeCreateHardwareFrame(
software_frame, base::BindOnce(MaybeCreateHardwareFrameCallback, &frame));
@@ -453,8 +493,8 @@ TEST_F(GpuMemoryBufferVideoFramePoolTest, PreservesMetadata) {
RunUntilIdle();
EXPECT_NE(software_frame.get(), frame.get());
- EXPECT_TRUE(frame->metadata()->end_of_stream);
- EXPECT_EQ(kTestReferenceTime, *frame->metadata()->reference_time);
+ EXPECT_TRUE(frame->metadata().end_of_stream);
+ EXPECT_EQ(kTestReferenceTime, *frame->metadata().reference_time);
}
// CreateGpuMemoryBuffer can return null (e.g: when the GPU process is down).
@@ -655,7 +695,7 @@ TEST_F(GpuMemoryBufferVideoFramePoolTest, VideoFrameChangesPixelFormat) {
EXPECT_EQ(PIXEL_FORMAT_ABGR, frame_1->format());
EXPECT_EQ(1u, frame_1->NumTextures());
EXPECT_EQ(1u, sii_->shared_image_count());
- EXPECT_TRUE(frame_1->metadata()->read_lock_fences_enabled);
+ EXPECT_TRUE(frame_1->metadata().read_lock_fences_enabled);
scoped_refptr<VideoFrame> software_frame_2 = CreateTestYUVVideoFrame(10);
mock_gpu_factories_->SetVideoFrameOutputFormat(
diff --git a/chromium/media/video/gpu_video_accelerator_factories.cc b/chromium/media/video/gpu_video_accelerator_factories.cc
new file mode 100644
index 00000000000..b97e96b138d
--- /dev/null
+++ b/chromium/media/video/gpu_video_accelerator_factories.cc
@@ -0,0 +1,32 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/video/gpu_video_accelerator_factories.h"
+
+namespace media {
+
+GpuVideoAcceleratorFactories::Supported
+GpuVideoAcceleratorFactories::IsDecoderConfigSupported(
+ const VideoDecoderConfig& config) {
+ if (!IsDecoderSupportKnown())
+ return Supported::kUnknown;
+
+ static_assert(media::VideoDecoderImplementation::kAlternate ==
+ media::VideoDecoderImplementation::kMaxValue,
+ "Keep the array below in sync.");
+ VideoDecoderImplementation decoder_impls[] = {
+ VideoDecoderImplementation::kDefault,
+ VideoDecoderImplementation::kAlternate};
+ Supported supported = Supported::kUnknown;
+ for (const auto& impl : decoder_impls) {
+ supported = IsDecoderConfigSupported(impl, config);
+ DCHECK_NE(supported, Supported::kUnknown);
+ if (supported == Supported::kTrue)
+ break;
+ }
+
+ return supported;
+}
+
+} // namespace media
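A sketch of how a caller might consume the merged helper added above; per
the header comment below, decoder support must already be known or the
query short-circuits to kUnknown (|factories| and |config| are assumed to
exist in the caller):

  // Merged query across all VideoDecoderImplementations.
  if (factories->IsDecoderSupportKnown()) {
    auto supported = factories->IsDecoderConfigSupported(config);
    if (supported == GpuVideoAcceleratorFactories::Supported::kTrue) {
      // At least one implementation accepts |config|.
    }
  }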
diff --git a/chromium/media/video/gpu_video_accelerator_factories.h b/chromium/media/video/gpu_video_accelerator_factories.h
index d8aa533af24..331233b05df 100644
--- a/chromium/media/video/gpu_video_accelerator_factories.h
+++ b/chromium/media/video/gpu_video_accelerator_factories.h
@@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef MEDIA_RENDERERS_GPU_VIDEO_ACCELERATOR_FACTORIES_H_
-#define MEDIA_RENDERERS_GPU_VIDEO_ACCELERATOR_FACTORIES_H_
+#ifndef MEDIA_VIDEO_GPU_VIDEO_ACCELERATOR_FACTORIES_H_
+#define MEDIA_VIDEO_GPU_VIDEO_ACCELERATOR_FACTORIES_H_
#include <stddef.h>
#include <stdint.h>
@@ -20,9 +20,9 @@
#include "gpu/command_buffer/common/mailbox.h"
#include "media/base/media_export.h"
#include "media/base/overlay_info.h"
+#include "media/base/supported_video_decoder_config.h"
#include "media/base/video_decoder.h"
#include "media/base/video_types.h"
-#include "media/video/supported_video_decoder_config.h"
#include "media/video/video_encode_accelerator.h"
#include "ui/gfx/gpu_memory_buffer.h"
@@ -98,6 +98,16 @@ class MEDIA_EXPORT GpuVideoAcceleratorFactories {
VideoDecoderImplementation implementation,
const VideoDecoderConfig& config) = 0;
+ // Helper function that merges IsDecoderConfigSupported() results across all
+ // VideoDecoderImplementations. Returns kTrue if any of the implementations
+ // support the config.
+ //
+ // Callers must verify IsDecoderSupportKnown() prior to using this, or they
+ // will immediately receive a kUnknown.
+ //
+ // May be called on any thread.
+ Supported IsDecoderConfigSupported(const VideoDecoderConfig& config);
+
// Returns true if IsDecoderConfigSupported() is ready to answer queries.
// Once decoder support is known, it remains known for the lifetime of |this|.
//
@@ -189,12 +199,13 @@ class MEDIA_EXPORT GpuVideoAcceleratorFactories {
virtual viz::RasterContextProvider* GetMediaContextProvider() = 0;
- // Sets the current pipeline rendering color space.
+ // Sets or gets the current pipeline rendering color space.
virtual void SetRenderingColorSpace(const gfx::ColorSpace& color_space) = 0;
+ virtual const gfx::ColorSpace& GetRenderingColorSpace() const = 0;
virtual ~GpuVideoAcceleratorFactories() = default;
};
} // namespace media
-#endif // MEDIA_RENDERERS_GPU_VIDEO_ACCELERATOR_FACTORIES_H_
+#endif // MEDIA_VIDEO_GPU_VIDEO_ACCELERATOR_FACTORIES_H_
diff --git a/chromium/media/video/h264_parser.cc b/chromium/media/video/h264_parser.cc
index 8dbe250864e..f4a10a12899 100644
--- a/chromium/media/video/h264_parser.cc
+++ b/chromium/media/video/h264_parser.cc
@@ -306,7 +306,7 @@ H264Parser::H264Parser() {
H264Parser::~H264Parser() = default;
void H264Parser::Reset() {
- stream_ = NULL;
+ stream_ = nullptr;
bytes_left_ = 0;
encrypted_ranges_.clear();
previous_nalu_range_.clear();
diff --git a/chromium/media/video/h264_parser.h b/chromium/media/video/h264_parser.h
index f7439fe3bb6..652cbe9f70f 100644
--- a/chromium/media/video/h264_parser.h
+++ b/chromium/media/video/h264_parser.h
@@ -359,6 +359,14 @@ struct MEDIA_EXPORT H264SliceHeader {
// Size in bits of dec_ref_pic_marking() syntax element.
size_t dec_ref_pic_marking_bit_size;
size_t pic_order_cnt_bit_size;
+
+ // Set when full sample encryption is in use; in that case only the fields
+ // needed for DPB management are filled in, since the accelerator already
+ // knows the rest and does not need it specified.
+ bool full_sample_encryption;
+ // This is used by some accelerators to handle decoding after slice header
+ // parsing.
+ uint32_t full_sample_index;
};
struct H264SEIRecoveryPoint {
diff --git a/chromium/media/video/h265_parser.cc b/chromium/media/video/h265_parser.cc
index 7d46b7029fb..61fcbbc3f8c 100644
--- a/chromium/media/video/h265_parser.cc
+++ b/chromium/media/video/h265_parser.cc
@@ -548,8 +548,8 @@ H265Parser::Result H265Parser::ParseSPS(int* sps_id) {
TRUE_OR_RETURN(width_crop.ValueOrDefault(0) <
sps->pic_width_in_luma_samples);
base::CheckedNumeric<int> height_crop = sps->conf_win_top_offset;
- width_crop += sps->conf_win_bottom_offset;
- width_crop *= sps->sub_height_c;
+ height_crop += sps->conf_win_bottom_offset;
+ height_crop *= sps->sub_height_c;
if (!height_crop.IsValid())
return kInvalidStream;
TRUE_OR_RETURN(height_crop.ValueOrDefault(0) <
@@ -583,10 +583,8 @@ H265Parser::Result H265Parser::ParseSPS(int* sps_id) {
TRUE_OR_RETURN(sps->sps_max_num_reorder_pics[i] >=
sps->sps_max_num_reorder_pics[i - 1]);
}
- READ_UE_OR_RETURN(&sps->sps_max_latency_increase_plus1[i]);
- sps->sps_max_latency_pictures[i] = sps->sps_max_num_reorder_pics[i] +
- sps->sps_max_latency_increase_plus1[i] -
- 1;
+ int sps_max_latency_increase_plus1;
+ READ_UE_OR_RETURN(&sps_max_latency_increase_plus1);
}
if (!sps_sub_layer_ordering_info_present_flag) {
// Fill in the default values for the other sublayers.
@@ -597,36 +595,43 @@ H265Parser::Result H265Parser::ParseSPS(int* sps_id) {
sps->sps_max_num_reorder_pics[sps->sps_max_sub_layers_minus1];
sps->sps_max_latency_increase_plus1[i] =
sps->sps_max_latency_increase_plus1[sps->sps_max_sub_layers_minus1];
- sps->sps_max_latency_pictures[i] =
- sps->sps_max_num_reorder_pics[i] +
- sps->sps_max_latency_increase_plus1[i] - 1;
}
}
READ_UE_OR_RETURN(&sps->log2_min_luma_coding_block_size_minus3);
+ // This enforces that min_cb_log2_size_y below will be <= 30 and prevents
+ // the integer math there from overflowing.
+ TRUE_OR_RETURN(sps->log2_min_luma_coding_block_size_minus3 <= 27);
READ_UE_OR_RETURN(&sps->log2_diff_max_min_luma_coding_block_size);
int min_cb_log2_size_y = sps->log2_min_luma_coding_block_size_minus3 + 3;
- sps->ctb_log2_size_y =
- min_cb_log2_size_y + sps->log2_diff_max_min_luma_coding_block_size;
- TRUE_OR_RETURN(min_cb_log2_size_y <= 31 && sps->ctb_log2_size_y <= 31);
+ base::CheckedNumeric<int> ctb_log2_size_y = min_cb_log2_size_y;
+ ctb_log2_size_y += sps->log2_diff_max_min_luma_coding_block_size;
+ if (!ctb_log2_size_y.IsValid())
+ return kInvalidStream;
+
+ sps->ctb_log2_size_y = ctb_log2_size_y.ValueOrDefault(0);
+ TRUE_OR_RETURN(sps->ctb_log2_size_y <= 30);
int min_cb_size_y = 1 << min_cb_log2_size_y;
int ctb_size_y = 1 << sps->ctb_log2_size_y;
sps->pic_width_in_ctbs_y = base::ClampCeil(
static_cast<float>(sps->pic_width_in_luma_samples) / ctb_size_y);
sps->pic_height_in_ctbs_y = base::ClampCeil(
static_cast<float>(sps->pic_height_in_luma_samples) / ctb_size_y);
- sps->pic_size_in_ctbs_y =
- sps->pic_width_in_ctbs_y * sps->pic_height_in_ctbs_y;
+ base::CheckedNumeric<int> pic_size_in_ctbs_y = sps->pic_width_in_ctbs_y;
+ pic_size_in_ctbs_y *= sps->pic_height_in_ctbs_y;
+ if (!pic_size_in_ctbs_y.IsValid())
+ return kInvalidStream;
+ sps->pic_size_in_ctbs_y = pic_size_in_ctbs_y.ValueOrDefault(0);
TRUE_OR_RETURN(sps->pic_width_in_luma_samples % min_cb_size_y == 0);
TRUE_OR_RETURN(sps->pic_height_in_luma_samples % min_cb_size_y == 0);
READ_UE_OR_RETURN(&sps->log2_min_luma_transform_block_size_minus2);
+ TRUE_OR_RETURN(sps->log2_min_luma_transform_block_size_minus2 <
+ min_cb_log2_size_y - 2);
int min_tb_log2_size_y = sps->log2_min_luma_transform_block_size_minus2 + 2;
- TRUE_OR_RETURN(min_tb_log2_size_y < min_cb_log2_size_y);
READ_UE_OR_RETURN(&sps->log2_diff_max_min_luma_transform_block_size);
- sps->max_tb_log2_size_y =
- min_tb_log2_size_y + sps->log2_diff_max_min_luma_transform_block_size;
- TRUE_OR_RETURN(sps->max_tb_log2_size_y <= std::min(sps->ctb_log2_size_y, 5));
+ TRUE_OR_RETURN(sps->log2_diff_max_min_luma_transform_block_size <=
+ std::min(sps->ctb_log2_size_y, 5) - min_tb_log2_size_y);
READ_UE_OR_RETURN(&sps->max_transform_hierarchy_depth_inter);
IN_RANGE_OR_RETURN(sps->max_transform_hierarchy_depth_inter, 0,
sps->ctb_log2_size_y - min_tb_log2_size_y);
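The hardened SPS math above applies one idiom throughout: accumulate in a
base::CheckedNumeric, reject the stream on overflow, then unwrap. A
condensed sketch of the pattern (variable names are illustrative):

  // Overflow-checked accumulate-then-unwrap, as used for ctb_log2_size_y
  // and pic_size_in_ctbs_y above. Any overflow poisons the value instead of
  // wrapping; ValueOrDefault(0) would yield 0 in that case, but the parser
  // bails out first via IsValid().
  base::CheckedNumeric<int> total = parsed_a;
  total += parsed_b;
  if (!total.IsValid())
    return kInvalidStream;
  int value = total.ValueOrDefault(0);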
@@ -660,16 +665,14 @@ H265Parser::Result H265Parser::ParseSPS(int* sps_id) {
TRUE_OR_RETURN(sps->pcm_sample_bit_depth_chroma_minus1 + 1 <=
sps->bit_depth_c);
READ_UE_OR_RETURN(&sps->log2_min_pcm_luma_coding_block_size_minus3);
+ IN_RANGE_OR_RETURN(sps->log2_min_pcm_luma_coding_block_size_minus3, 0, 2);
int log2_min_ipcm_cb_size_y =
sps->log2_min_pcm_luma_coding_block_size_minus3 + 3;
IN_RANGE_OR_RETURN(log2_min_ipcm_cb_size_y, std::min(min_cb_log2_size_y, 5),
std::min(sps->ctb_log2_size_y, 5));
READ_UE_OR_RETURN(&sps->log2_diff_max_min_pcm_luma_coding_block_size);
- int log2_max_ipcm_cb_size_y =
- log2_min_ipcm_cb_size_y +
- sps->log2_diff_max_min_pcm_luma_coding_block_size;
- TRUE_OR_RETURN(log2_max_ipcm_cb_size_y <=
- std::min(sps->ctb_log2_size_y, 5));
+ TRUE_OR_RETURN(sps->log2_diff_max_min_pcm_luma_coding_block_size <=
+ std::min(sps->ctb_log2_size_y, 5) - log2_min_ipcm_cb_size_y);
READ_BOOL_OR_RETURN(&sps->pcm_loop_filter_disabled_flag);
}
READ_UE_OR_RETURN(&sps->num_short_term_ref_pic_sets);
@@ -836,6 +839,9 @@ H265Parser::Result H265Parser::ParsePPS(const H265NALU& nalu, int* pps_id) {
sps->pic_width_in_ctbs_y - 1;
for (int i = 0; i < pps->num_tile_columns_minus1; ++i) {
READ_UE_OR_RETURN(&pps->column_width_minus1[i]);
+ IN_RANGE_OR_RETURN(
+ pps->column_width_minus1[i], 0,
+ pps->column_width_minus1[pps->num_tile_columns_minus1] - 1);
pps->column_width_minus1[pps->num_tile_columns_minus1] -=
pps->column_width_minus1[i] + 1;
}
@@ -843,6 +849,9 @@ H265Parser::Result H265Parser::ParsePPS(const H265NALU& nalu, int* pps_id) {
sps->pic_height_in_ctbs_y - 1;
for (int i = 0; i < pps->num_tile_rows_minus1; ++i) {
READ_UE_OR_RETURN(&pps->row_height_minus1[i]);
+ IN_RANGE_OR_RETURN(
+ pps->row_height_minus1[i], 0,
+ pps->row_height_minus1[pps->num_tile_rows_minus1] - 1);
pps->row_height_minus1[pps->num_tile_rows_minus1] -=
pps->row_height_minus1[i] + 1;
}
@@ -932,7 +941,8 @@ const H265PPS* H265Parser::GetPPS(int pps_id) const {
}
H265Parser::Result H265Parser::ParseSliceHeader(const H265NALU& nalu,
- H265SliceHeader* shdr) {
+ H265SliceHeader* shdr,
+ H265SliceHeader* prior_shdr) {
// 7.4.7 Slice segment header
DVLOG(4) << "Parsing slice header";
Result res = kOk;
@@ -959,20 +969,6 @@ H265Parser::Result H265Parser::ParseSliceHeader(const H265NALU& nalu,
sps = GetSPS(pps->pps_seq_parameter_set_id);
DCHECK(sps); // We already validated this when we parsed the PPS.
- // Set these defaults if they are not present here.
- shdr->pic_output_flag = 1;
- shdr->num_ref_idx_l0_active_minus1 =
- pps->num_ref_idx_l0_default_active_minus1;
- shdr->num_ref_idx_l1_active_minus1 =
- pps->num_ref_idx_l1_default_active_minus1;
- shdr->collocated_from_l0_flag = 1;
- shdr->slice_deblocking_filter_disabled_flag =
- pps->pps_deblocking_filter_disabled_flag;
- shdr->slice_beta_offset_div2 = pps->pps_beta_offset_div2;
- shdr->slice_tc_offset_div2 = pps->pps_tc_offset_div2;
- shdr->slice_loop_filter_across_slices_enabled_flag =
- pps->pps_loop_filter_across_slices_enabled_flag;
-
if (!shdr->first_slice_segment_in_pic_flag) {
if (pps->dependent_slice_segments_enabled_flag)
READ_BOOL_OR_RETURN(&shdr->dependent_slice_segment_flag);
@@ -981,8 +977,33 @@ H265Parser::Result H265Parser::ParseSliceHeader(const H265NALU& nalu,
IN_RANGE_OR_RETURN(shdr->slice_segment_address, 0,
sps->pic_size_in_ctbs_y - 1);
}
- shdr->curr_rps_idx = sps->num_short_term_ref_pic_sets;
- if (!shdr->dependent_slice_segment_flag) {
+ if (shdr->dependent_slice_segment_flag) {
+ if (!prior_shdr) {
+ DVLOG(1) << "Cannot parse dependent slice w/out prior slice data";
+ return kInvalidStream;
+ }
+ // Copy everything in the structure from |slice_type| onward. This inherits
+ // the dependent-slice fields that are not re-parsed below.
+ size_t skip_amount = offsetof(H265SliceHeader, slice_type);
+ memcpy(reinterpret_cast<uint8_t*>(shdr) + skip_amount,
+ reinterpret_cast<uint8_t*>(prior_shdr) + skip_amount,
+ sizeof(H265SliceHeader) - skip_amount);
+ } else {
+ // Set these defaults if they are not present here.
+ shdr->pic_output_flag = 1;
+ shdr->num_ref_idx_l0_active_minus1 =
+ pps->num_ref_idx_l0_default_active_minus1;
+ shdr->num_ref_idx_l1_active_minus1 =
+ pps->num_ref_idx_l1_default_active_minus1;
+ shdr->collocated_from_l0_flag = 1;
+ shdr->slice_deblocking_filter_disabled_flag =
+ pps->pps_deblocking_filter_disabled_flag;
+ shdr->slice_beta_offset_div2 = pps->pps_beta_offset_div2;
+ shdr->slice_tc_offset_div2 = pps->pps_tc_offset_div2;
+ shdr->slice_loop_filter_across_slices_enabled_flag =
+ pps->pps_loop_filter_across_slices_enabled_flag;
+ shdr->curr_rps_idx = sps->num_short_term_ref_pic_sets;
+
// slice_reserved_flag
SKIP_BITS_OR_RETURN(pps->num_extra_slice_header_bits);
READ_UE_OR_RETURN(&shdr->slice_type);
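The dependent-slice branch above inherits every field from |slice_type|
onward with one memcpy, relying on H265SliceHeader being trivially copyable
and on offsetof() locating the first inherited field. A toy illustration of
the technique (the struct and field names are made up):

  #include <cstddef>  // offsetof
  #include <cstring>  // memcpy

  struct Header { int a; int b; int c; };

  // Copies everything at or after |b| in the layout from |src| to |dst|;
  // fields before it (|a|) are left untouched.
  void CopyFromBOnward(Header* dst, const Header* src) {
    const size_t skip = offsetof(Header, b);
    memcpy(reinterpret_cast<char*>(dst) + skip,
           reinterpret_cast<const char*>(src) + skip,
           sizeof(Header) - skip);
  }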
@@ -1040,8 +1061,8 @@ H265Parser::Result H265Parser::ParseSliceHeader(const H265NALU& nalu,
shdr->GetStRefPicSet(sps).num_positive_pics -
shdr->num_long_term_sps));
}
- IN_RANGE_OR_RETURN(shdr->num_long_term_sps + shdr->num_long_term_pics,
- 0, kMaxLongTermRefPicSets);
+ IN_RANGE_OR_RETURN(shdr->num_long_term_pics, 0,
+ kMaxLongTermRefPicSets - shdr->num_long_term_sps);
for (int i = 0; i < shdr->num_long_term_sps + shdr->num_long_term_pics;
++i) {
if (i < shdr->num_long_term_sps) {
@@ -1113,6 +1134,7 @@ H265Parser::Result H265Parser::ParseSliceHeader(const H265NALU& nalu,
shdr->num_pic_total_curr++;
}
+ TRUE_OR_RETURN(shdr->num_pic_total_curr);
if (pps->lists_modification_present_flag &&
shdr->num_pic_total_curr > 1) {
res = ParseRefPicListsModifications(*shdr,
@@ -1450,6 +1472,7 @@ H265Parser::Result H265Parser::ParseStRefPicSet(
int abs_delta_rps_minus1;
READ_BOOL_OR_RETURN(&delta_rps_sign);
READ_UE_OR_RETURN(&abs_delta_rps_minus1);
+ IN_RANGE_OR_RETURN(abs_delta_rps_minus1, 0, 0x7FFF);
int delta_rps = (1 - 2 * delta_rps_sign) * (abs_delta_rps_minus1 + 1);
const H265StRefPicSet& ref_set = sps.st_ref_pic_set[ref_rps_idx];
bool used_by_curr_pic_flag[kMaxShortTermRefPicSets];
@@ -1511,6 +1534,13 @@ H265Parser::Result H265Parser::ParseStRefPicSet(
}
}
st_ref_pic_set->num_positive_pics = i;
+ IN_RANGE_OR_RETURN(
+ st_ref_pic_set->num_negative_pics, 0,
+ sps.sps_max_dec_pic_buffering_minus1[sps.sps_max_sub_layers_minus1]);
+ IN_RANGE_OR_RETURN(
+ st_ref_pic_set->num_positive_pics, 0,
+ sps.sps_max_dec_pic_buffering_minus1[sps.sps_max_sub_layers_minus1] -
+ st_ref_pic_set->num_negative_pics);
} else {
READ_UE_OR_RETURN(&st_ref_pic_set->num_negative_pics);
READ_UE_OR_RETURN(&st_ref_pic_set->num_positive_pics);
@@ -1524,6 +1554,7 @@ H265Parser::Result H265Parser::ParseStRefPicSet(
for (int i = 0; i < st_ref_pic_set->num_negative_pics; ++i) {
int delta_poc_s0_minus1;
READ_UE_OR_RETURN(&delta_poc_s0_minus1);
+ IN_RANGE_OR_RETURN(delta_poc_s0_minus1, 0, 0x7FFF);
if (i == 0) {
st_ref_pic_set->delta_poc_s0[i] = -(delta_poc_s0_minus1 + 1);
} else {
@@ -1535,6 +1566,7 @@ H265Parser::Result H265Parser::ParseStRefPicSet(
for (int i = 0; i < st_ref_pic_set->num_positive_pics; ++i) {
int delta_poc_s1_minus1;
READ_UE_OR_RETURN(&delta_poc_s1_minus1);
+ IN_RANGE_OR_RETURN(delta_poc_s1_minus1, 0, 0x7FFF);
if (i == 0) {
st_ref_pic_set->delta_poc_s1[i] = delta_poc_s1_minus1 + 1;
} else {
@@ -1683,6 +1715,7 @@ H265Parser::Result H265Parser::ParseAndIgnoreHrdParameters(
int cpb_cnt = 1;
if (!low_delay_hrd_flag) {
READ_UE_OR_RETURN(&cpb_cnt);
+ IN_RANGE_OR_RETURN(cpb_cnt, 0, 31);
cpb_cnt += 1; // parsed as minus1
}
if (nal_hrd_parameters_present_flag) {
diff --git a/chromium/media/video/h265_parser.h b/chromium/media/video/h265_parser.h
index e922e82f432..e322aa81358 100644
--- a/chromium/media/video/h265_parser.h
+++ b/chromium/media/video/h265_parser.h
@@ -250,12 +250,10 @@ struct MEDIA_EXPORT H265SPS {
int bit_depth_y;
int bit_depth_c;
int max_pic_order_cnt_lsb;
- int sps_max_latency_pictures[kMaxSubLayers];
int ctb_log2_size_y;
int pic_width_in_ctbs_y;
int pic_height_in_ctbs_y;
int pic_size_in_ctbs_y;
- int max_tb_log2_size_y;
int wp_offset_half_range_y;
int wp_offset_half_range_c;
@@ -479,7 +477,12 @@ class MEDIA_EXPORT H265Parser {
// Parse a slice header, returning it in |*shdr|. |*nalu| must be set to
// the NALU returned from AdvanceToNextNALU() and corresponding to |*shdr|.
- Result ParseSliceHeader(const H265NALU& nalu, H265SliceHeader* shdr);
+ // |prior_shdr| should be the last parsed header in decoding order for
+ // handling dependent slice segments. If |prior_shdr| is null and this is a
+ // dependent slice segment, an error will be returned.
+ Result ParseSliceHeader(const H265NALU& nalu,
+ H265SliceHeader* shdr,
+ H265SliceHeader* prior_shdr);
static VideoCodecProfile ProfileIDCToVideoCodecProfile(int profile_idc);
diff --git a/chromium/media/video/h265_parser_fuzzertest.cc b/chromium/media/video/h265_parser_fuzzertest.cc
index 8d0186f6acd..9ae4e5b7114 100644
--- a/chromium/media/video/h265_parser_fuzzertest.cc
+++ b/chromium/media/video/h265_parser_fuzzertest.cc
@@ -24,6 +24,7 @@ extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
break;
media::H265SliceHeader shdr;
+ media::H265SliceHeader prior_shdr;
switch (nalu.nal_unit_type) {
case media::H265NALU::SPS_NUT:
int sps_id;
@@ -49,7 +50,8 @@ extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
case media::H265NALU::IDR_W_RADL:
case media::H265NALU::IDR_N_LP:
case media::H265NALU::CRA_NUT: // fallthrough
- res = parser.ParseSliceHeader(nalu, &shdr);
+ res = parser.ParseSliceHeader(nalu, &shdr, &prior_shdr);
+ prior_shdr = shdr;
break;
default:
// Skip any other NALU.
diff --git a/chromium/media/video/h265_parser_unittest.cc b/chromium/media/video/h265_parser_unittest.cc
index 7acece54eb0..b232e3678a2 100644
--- a/chromium/media/video/h265_parser_unittest.cc
+++ b/chromium/media/video/h265_parser_unittest.cc
@@ -79,6 +79,7 @@ TEST_F(H265ParserTest, RawHevcStreamFileParsing) {
DVLOG(4) << "Found NALU " << nalu.nal_unit_type;
H265SliceHeader shdr;
+ H265SliceHeader prior_shdr;
switch (nalu.nal_unit_type) {
case H265NALU::SPS_NUT:
int sps_id;
@@ -106,7 +107,8 @@ TEST_F(H265ParserTest, RawHevcStreamFileParsing) {
case H265NALU::IDR_W_RADL:
case H265NALU::IDR_N_LP:
case H265NALU::CRA_NUT: // fallthrough
- res = parser_.ParseSliceHeader(nalu, &shdr);
+ res = parser_.ParseSliceHeader(nalu, &shdr, &prior_shdr);
+ prior_shdr = shdr;
break;
default:
break;
@@ -236,7 +238,8 @@ TEST_F(H265ParserTest, SliceHeaderParsing) {
// Do an IDR slice header first.
EXPECT_TRUE(ParseNalusUntilNut(&target_nalu, H265NALU::IDR_W_RADL));
H265SliceHeader shdr;
- EXPECT_EQ(H265Parser::kOk, parser_.ParseSliceHeader(target_nalu, &shdr));
+ EXPECT_EQ(H265Parser::kOk,
+ parser_.ParseSliceHeader(target_nalu, &shdr, nullptr));
EXPECT_TRUE(shdr.first_slice_segment_in_pic_flag);
EXPECT_FALSE(shdr.no_output_of_prior_pics_flag);
EXPECT_EQ(shdr.slice_pic_parameter_set_id, 0);
@@ -249,7 +252,8 @@ TEST_F(H265ParserTest, SliceHeaderParsing) {
// Then do a non-IDR slice header.
EXPECT_TRUE(ParseNalusUntilNut(&target_nalu, H265NALU::TRAIL_R));
- EXPECT_EQ(H265Parser::kOk, parser_.ParseSliceHeader(target_nalu, &shdr));
+ EXPECT_EQ(H265Parser::kOk,
+ parser_.ParseSliceHeader(target_nalu, &shdr, nullptr));
EXPECT_TRUE(shdr.first_slice_segment_in_pic_flag);
EXPECT_EQ(shdr.slice_pic_parameter_set_id, 0);
EXPECT_FALSE(shdr.dependent_slice_segment_flag);
diff --git a/chromium/media/video/mock_gpu_video_accelerator_factories.cc b/chromium/media/video/mock_gpu_video_accelerator_factories.cc
index 6b7c4722bcd..32a5ce80caf 100644
--- a/chromium/media/video/mock_gpu_video_accelerator_factories.cc
+++ b/chromium/media/video/mock_gpu_video_accelerator_factories.cc
@@ -6,6 +6,7 @@
#include <memory>
+#include "base/atomic_sequence_num.h"
#include "base/memory/ptr_util.h"
#include "base/memory/unsafe_shared_memory_region.h"
#include "ui/gfx/buffer_format_util.h"
@@ -15,7 +16,7 @@ namespace media {
namespace {
-int g_next_gpu_memory_buffer_id = 1;
+base::AtomicSequenceNumber g_gpu_memory_buffer_id_generator;
class GpuMemoryBufferImpl : public gfx::GpuMemoryBuffer {
public:
@@ -24,7 +25,7 @@ class GpuMemoryBufferImpl : public gfx::GpuMemoryBuffer {
format_(format),
size_(size),
num_planes_(gfx::NumberOfPlanesForLinearBufferFormat(format)),
- id_(g_next_gpu_memory_buffer_id++) {
+ id_(g_gpu_memory_buffer_id_generator.GetNext() + 1) {
DCHECK(gfx::BufferFormat::R_8 == format_ ||
gfx::BufferFormat::RG_88 == format_ ||
gfx::BufferFormat::YUV_420_BIPLANAR == format_ ||
@@ -65,7 +66,7 @@ class GpuMemoryBufferImpl : public gfx::GpuMemoryBuffer {
}
gfx::GpuMemoryBufferId GetId() const override { return id_; }
gfx::GpuMemoryBufferType GetType() const override {
- return gfx::NATIVE_PIXMAP;
+ return gfx::SHARED_MEMORY_BUFFER;
}
gfx::GpuMemoryBufferHandle CloneHandle() const override {
NOTREACHED();
@@ -108,6 +109,7 @@ MockGpuVideoAcceleratorFactories::CreateGpuMemoryBuffer(
const gfx::Size& size,
gfx::BufferFormat format,
gfx::BufferUsage /* usage */) {
+ base::AutoLock guard(lock_);
if (fail_to_allocate_gpu_memory_buffer_)
return nullptr;
std::unique_ptr<gfx::GpuMemoryBuffer> ret(
diff --git a/chromium/media/video/mock_gpu_video_accelerator_factories.h b/chromium/media/video/mock_gpu_video_accelerator_factories.h
index 6fe68856aa2..a1aadde4566 100644
--- a/chromium/media/video/mock_gpu_video_accelerator_factories.h
+++ b/chromium/media/video/mock_gpu_video_accelerator_factories.h
@@ -50,6 +50,7 @@ class MockGpuVideoAcceleratorFactories : public GpuVideoAcceleratorFactories {
MOCK_METHOD0(GetTaskRunner, scoped_refptr<base::SequencedTaskRunner>());
MOCK_METHOD0(GetMediaContextProvider, viz::RasterContextProvider*());
MOCK_METHOD1(SetRenderingColorSpace, void(const gfx::ColorSpace&));
+ MOCK_CONST_METHOD0(GetRenderingColorSpace, const gfx::ColorSpace&());
std::unique_ptr<gfx::GpuMemoryBuffer> CreateGpuMemoryBuffer(
const gfx::Size& size,
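The swap from a bare int counter to base::AtomicSequenceNumber above matters because CreateGpuMemoryBuffer() can now be reached from multiple threads (note the new base::AutoLock). Here is a self-contained sketch of the same idea using only the standard library; the class and function names are illustrative, not Chromium's.

#include <atomic>

// Stand-in for base::AtomicSequenceNumber: hands out 0, 1, 2, ... exactly
// once each, even when called concurrently from multiple threads.
class AtomicSequenceNumber {
 public:
  int GetNext() { return seq_.fetch_add(1, std::memory_order_relaxed); }

 private:
  std::atomic<int> seq_{0};
};

AtomicSequenceNumber g_buffer_id_generator;

int NextGpuMemoryBufferId() {
  // "+ 1" preserves the old behavior of starting IDs at 1 rather than 0.
  return g_buffer_id_generator.GetNext() + 1;
}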
diff --git a/chromium/media/video/openh264_video_encoder.cc b/chromium/media/video/openh264_video_encoder.cc
index 05abf97b4f4..bc75f01f0c9 100644
--- a/chromium/media/video/openh264_video_encoder.cc
+++ b/chromium/media/video/openh264_video_encoder.cc
@@ -13,6 +13,7 @@
#include "base/time/time.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/video_frame.h"
+#include "media/base/video_util.h"
namespace media {
@@ -131,10 +132,13 @@ void OpenH264VideoEncoder::Initialize(VideoCodecProfile profile,
return;
}
+ if (!options.avc.produce_annexb)
+ h264_converter_ = std::make_unique<H264AnnexBToAvcBitstreamConverter>();
+
options_ = options;
output_cb_ = BindToCurrentLoop(std::move(output_cb));
codec_ = std::move(codec);
- std::move(done_cb).Run(Status());
+ std::move(done_cb).Run(OkStatus());
}
void OpenH264VideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
@@ -152,7 +156,14 @@ void OpenH264VideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
"No frame provided for encoding."));
return;
}
- if (!frame->IsMappable() || frame->format() != PIXEL_FORMAT_I420) {
+ const bool supported_format = frame->format() == PIXEL_FORMAT_NV12 ||
+ frame->format() == PIXEL_FORMAT_I420 ||
+ frame->format() == PIXEL_FORMAT_XBGR ||
+ frame->format() == PIXEL_FORMAT_XRGB ||
+ frame->format() == PIXEL_FORMAT_ABGR ||
+ frame->format() == PIXEL_FORMAT_ARGB;
+ if ((!frame->IsMappable() && !frame->HasGpuMemoryBuffer()) ||
+ !supported_format) {
status =
Status(StatusCode::kEncoderFailedEncode, "Unexpected frame format.")
.WithData("IsMappable", frame->IsMappable())
@@ -161,6 +172,35 @@ void OpenH264VideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
return;
}
+ if (frame->format() == PIXEL_FORMAT_NV12 && frame->HasGpuMemoryBuffer()) {
+ frame = ConvertToMemoryMappedFrame(frame);
+ if (!frame) {
+ std::move(done_cb).Run(
+ Status(StatusCode::kEncoderFailedEncode,
+ "Convert GMB frame to MemoryMappedFrame failed."));
+ return;
+ }
+ }
+
+ if (frame->format() != PIXEL_FORMAT_I420) {
+ // OpenH264 can resize frames automatically, but since we're converting the
+ // pixel format anyway, we can do the resize as well.
+ auto i420_frame = frame_pool_.CreateFrame(
+ PIXEL_FORMAT_I420, options_.frame_size, gfx::Rect(options_.frame_size),
+ options_.frame_size, frame->timestamp());
+ if (i420_frame) {
+ status = ConvertAndScaleFrame(*frame, *i420_frame, conversion_buffer_);
+ } else {
+ status = Status(StatusCode::kEncoderFailedEncode,
+ "Can't allocate an I420 frame.");
+ }
+ if (!status.is_ok()) {
+ std::move(done_cb).Run(std::move(status));
+ return;
+ }
+ frame = std::move(i420_frame);
+ }
+
SSourcePicture picture = {};
picture.iPicWidth = frame->visible_rect().width();
picture.iPicHeight = frame->visible_rect().height();
@@ -196,7 +236,16 @@ void OpenH264VideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
DCHECK_GT(frame_info.iFrameSizeInBytes, 0);
size_t total_chunk_size = frame_info.iFrameSizeInBytes;
- conversion_buffer_.resize(total_chunk_size);
+
+ result.data.reset(new uint8_t[total_chunk_size]);
+
+ auto* gather_buffer = result.data.get();
+
+ if (h264_converter_) {
+ // Copy data to a temporary buffer instead.
+ conversion_buffer_.resize(total_chunk_size);
+ gather_buffer = conversion_buffer_.data();
+ }
size_t written_size = 0;
for (int layer_idx = 0; layer_idx < frame_info.iLayerNum; ++layer_idx) {
@@ -210,16 +259,22 @@ void OpenH264VideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
return;
}
- memcpy(conversion_buffer_.data() + written_size, layer_info.pBsBuf,
- layer_len);
+ memcpy(gather_buffer + written_size, layer_info.pBsBuf, layer_len);
written_size += layer_len;
}
DCHECK_EQ(written_size, total_chunk_size);
+ if (!h264_converter_) {
+ result.size = total_chunk_size;
+
+ output_cb_.Run(std::move(result), base::Optional<CodecDescription>());
+ std::move(done_cb).Run(OkStatus());
+ return;
+ }
+
size_t converted_output_size = 0;
bool config_changed = false;
- result.data.reset(new uint8_t[total_chunk_size]);
- status = h264_converter_.ConvertChunk(
+ status = h264_converter_->ConvertChunk(
conversion_buffer_,
base::span<uint8_t>(result.data.get(), total_chunk_size), &config_changed,
&converted_output_size);
@@ -228,11 +283,12 @@ void OpenH264VideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
std::move(done_cb).Run(std::move(status).AddHere(FROM_HERE));
return;
}
+
result.size = converted_output_size;
base::Optional<CodecDescription> desc;
if (config_changed) {
- const auto& config = h264_converter_.GetCurrentConfig();
+ const auto& config = h264_converter_->GetCurrentConfig();
desc = CodecDescription();
if (!config.Serialize(desc.value())) {
std::move(done_cb).Run(Status(StatusCode::kEncoderFailedEncode,
@@ -242,7 +298,7 @@ void OpenH264VideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
}
output_cb_.Run(std::move(result), std::move(desc));
- std::move(done_cb).Run(Status());
+ std::move(done_cb).Run(OkStatus());
}
void OpenH264VideoEncoder::ChangeOptions(const Options& options,
@@ -279,9 +335,15 @@ void OpenH264VideoEncoder::ChangeOptions(const Options& options,
return;
}
+ if (options.avc.produce_annexb) {
+ h264_converter_.reset();
+ } else if (!h264_converter_) {
+ h264_converter_ = std::make_unique<H264AnnexBToAvcBitstreamConverter>();
+ }
+
if (!output_cb.is_null())
output_cb_ = BindToCurrentLoop(std::move(output_cb));
- std::move(done_cb).Run(Status());
+ std::move(done_cb).Run(OkStatus());
}
void OpenH264VideoEncoder::Flush(StatusCB done_cb) {
@@ -292,7 +354,7 @@ void OpenH264VideoEncoder::Flush(StatusCB done_cb) {
}
// Nothing to do really.
- std::move(done_cb).Run(Status());
+ std::move(done_cb).Run(OkStatus());
}
} // namespace media
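The encoder above now treats a null |h264_converter_| as "emit Annex B as-is" and a non-null one as "convert to AVC (length-prefixed) framing". The sketch below shows the core of such a conversion for 4-byte start codes only; it is a toy, and the real H264AnnexBToAvcBitstreamConverter also extracts SPS/PPS into the avcC configuration record and handles 3-byte start codes.

#include <cstdint>
#include <vector>

// Toy Annex B -> AVC converter: replaces each 00 00 00 01 start code with a
// 4-byte big-endian NALU length. Assumes 4-byte start codes only.
std::vector<uint8_t> AnnexBToAvc(const std::vector<uint8_t>& in) {
  std::vector<size_t> starts;
  for (size_t i = 0; i + 4 <= in.size(); ++i) {
    if (in[i] == 0 && in[i + 1] == 0 && in[i + 2] == 0 && in[i + 3] == 1)
      starts.push_back(i);
  }
  std::vector<uint8_t> out;
  for (size_t s = 0; s < starts.size(); ++s) {
    size_t begin = starts[s] + 4;
    size_t end = (s + 1 < starts.size()) ? starts[s + 1] : in.size();
    uint32_t len = static_cast<uint32_t>(end - begin);
    out.push_back(static_cast<uint8_t>(len >> 24));
    out.push_back(static_cast<uint8_t>(len >> 16));
    out.push_back(static_cast<uint8_t>(len >> 8));
    out.push_back(static_cast<uint8_t>(len));
    out.insert(out.end(), in.begin() + begin, in.begin() + end);
  }
  return out;
}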
diff --git a/chromium/media/video/openh264_video_encoder.h b/chromium/media/video/openh264_video_encoder.h
index f7bf3b0216f..d737b3da223 100644
--- a/chromium/media/video/openh264_video_encoder.h
+++ b/chromium/media/video/openh264_video_encoder.h
@@ -11,6 +11,7 @@
#include "base/callback_forward.h"
#include "media/base/media_export.h"
#include "media/base/video_encoder.h"
+#include "media/base/video_frame_pool.h"
#include "media/formats/mp4/h264_annex_b_to_avc_bitstream_converter.h"
#include "third_party/openh264/src/codec/api/svc/codec_api.h"
#include "ui/gfx/geometry/size.h"
@@ -57,8 +58,12 @@ class MEDIA_EXPORT OpenH264VideoEncoder : public VideoEncoder {
Options options_;
OutputCB output_cb_;
std::vector<uint8_t> conversion_buffer_;
- H264AnnexBToAvcBitstreamConverter h264_converter_;
+ VideoFramePool frame_pool_;
+
+ // If |h264_converter_| is null, we output in annexb format. Otherwise, we
+ // output in avc format.
+ std::unique_ptr<H264AnnexBToAvcBitstreamConverter> h264_converter_;
};
} // namespace media
-#endif // MEDIA_VIDEO_OPENH264_VIDEO_ENCODER_H_
\ No newline at end of file
+#endif // MEDIA_VIDEO_OPENH264_VIDEO_ENCODER_H_
diff --git a/chromium/media/video/picture.cc b/chromium/media/video/picture.cc
index 5763f3b4129..8e191581d5d 100644
--- a/chromium/media/video/picture.cc
+++ b/chromium/media/video/picture.cc
@@ -53,17 +53,37 @@ PictureBuffer::PictureBuffer(int32_t id,
DCHECK_EQ(client_texture_ids.size(), texture_mailboxes.size());
}
+PictureBuffer::PictureBuffer(int32_t id,
+ const gfx::Size& size,
+ const TextureSizes& texture_sizes,
+ const TextureIds& client_texture_ids,
+ const TextureIds& service_texture_ids,
+ uint32_t texture_target,
+ VideoPixelFormat pixel_format)
+ : id_(id),
+ size_(size),
+ texture_sizes_(texture_sizes),
+ client_texture_ids_(client_texture_ids),
+ service_texture_ids_(service_texture_ids),
+ texture_target_(texture_target),
+ pixel_format_(pixel_format) {
+ // We either have no client texture ids at all, or, if we do, their number
+ // must be the same as the number of service texture ids.
+ DCHECK(client_texture_ids_.empty() ||
+ client_texture_ids_.size() == service_texture_ids_.size());
+}
+
PictureBuffer::PictureBuffer(const PictureBuffer& other) = default;
PictureBuffer::~PictureBuffer() = default;
-gpu::Mailbox PictureBuffer::texture_mailbox(size_t plane) const {
- if (plane >= texture_mailboxes_.size()) {
- LOG(ERROR) << "No mailbox for plane " << plane;
- return gpu::Mailbox();
+gfx::Size PictureBuffer::texture_size(size_t plane) const {
+ if (plane >= texture_sizes_.size()) {
+ LOG(ERROR) << "Missing texture size for plane " << plane;
+ return gfx::Size();
}
- return texture_mailboxes_[plane];
+ return texture_sizes_[plane];
}
Picture::Picture(int32_t picture_buffer_id,
diff --git a/chromium/media/video/picture.h b/chromium/media/video/picture.h
index 3ec0a1f8107..0b2173d5d0b 100644
--- a/chromium/media/video/picture.h
+++ b/chromium/media/video/picture.h
@@ -13,6 +13,7 @@
#include "base/memory/ref_counted.h"
#include "gpu/command_buffer/common/mailbox_holder.h"
#include "media/base/media_export.h"
+#include "media/base/video_frame.h"
#include "media/base/video_types.h"
#include "ui/gfx/color_space.h"
#include "ui/gfx/geometry/rect.h"
@@ -25,6 +26,7 @@ namespace media {
class MEDIA_EXPORT PictureBuffer {
public:
using TextureIds = std::vector<uint32_t>;
+ using TextureSizes = std::vector<gfx::Size>;
PictureBuffer(int32_t id, const gfx::Size& size);
PictureBuffer(int32_t id,
@@ -42,6 +44,13 @@ class MEDIA_EXPORT PictureBuffer {
const std::vector<gpu::Mailbox>& texture_mailboxes,
uint32_t texture_target,
VideoPixelFormat pixel_format);
+ PictureBuffer(int32_t id,
+ const gfx::Size& size,
+ const TextureSizes& texture_sizes,
+ const TextureIds& client_texture_ids,
+ const TextureIds& service_texture_ids,
+ uint32_t texture_target,
+ VideoPixelFormat pixel_format);
PictureBuffer(const PictureBuffer& other);
~PictureBuffer();
@@ -64,11 +73,12 @@ class MEDIA_EXPORT PictureBuffer {
VideoPixelFormat pixel_format() const { return pixel_format_; }
- gpu::Mailbox texture_mailbox(size_t plane) const;
+ gfx::Size texture_size(size_t plane) const;
private:
int32_t id_;
gfx::Size size_;
+ TextureSizes texture_sizes_;
TextureIds client_texture_ids_;
TextureIds service_texture_ids_;
std::vector<gpu::Mailbox> texture_mailboxes_;
@@ -151,11 +161,16 @@ class MEDIA_EXPORT Picture {
}
void set_scoped_shared_image(
- scoped_refptr<ScopedSharedImage> scoped_shared_image) {
- scoped_shared_image_ = scoped_shared_image;
+ scoped_refptr<ScopedSharedImage> scoped_shared_image,
+ uint32_t plane = 0) {
+ DCHECK(plane < scoped_shared_images_.size());
+ scoped_shared_images_[plane] = scoped_shared_image;
}
- scoped_refptr<ScopedSharedImage> scoped_shared_image() const {
- return scoped_shared_image_;
+
+ scoped_refptr<ScopedSharedImage> scoped_shared_image(
+ uint32_t plane = 0) const {
+ DCHECK(plane < scoped_shared_images_.size());
+ return scoped_shared_images_[plane];
}
private:
@@ -168,7 +183,8 @@ class MEDIA_EXPORT Picture {
bool size_changed_;
bool texture_owner_;
bool wants_promotion_hint_;
- scoped_refptr<ScopedSharedImage> scoped_shared_image_;
+ std::array<scoped_refptr<ScopedSharedImage>, VideoFrame::kMaxPlanes>
+ scoped_shared_images_;
};
} // namespace media
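The Picture change above generalizes a single shared image to one per plane, guarded by a DCHECK on the plane index. A distilled, standalone version of that accessor pattern follows, with invented stand-ins for ScopedSharedImage and VideoFrame::kMaxPlanes.

#include <array>
#include <cassert>
#include <cstddef>
#include <memory>

constexpr size_t kMaxPlanes = 4;  // stand-in for VideoFrame::kMaxPlanes
struct ScopedSharedImage {};      // stand-in for the real type

class PerPlaneImages {
 public:
  // Defaulting |plane| to 0 keeps single-plane callers source-compatible.
  void set(std::shared_ptr<ScopedSharedImage> image, size_t plane = 0) {
    assert(plane < images_.size());
    images_[plane] = std::move(image);
  }

  std::shared_ptr<ScopedSharedImage> get(size_t plane = 0) const {
    assert(plane < images_.size());
    return images_[plane];
  }

 private:
  std::array<std::shared_ptr<ScopedSharedImage>, kMaxPlanes> images_;
};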
diff --git a/chromium/media/video/supported_video_decoder_config_unittest.cc b/chromium/media/video/supported_video_decoder_config_unittest.cc
index c2e3593698b..dd3dabdbe3d 100644
--- a/chromium/media/video/supported_video_decoder_config_unittest.cc
+++ b/chromium/media/video/supported_video_decoder_config_unittest.cc
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/video/supported_video_decoder_config.h"
+#include "media/base/supported_video_decoder_config.h"
#include "media/base/test_helpers.h"
#include "media/base/video_codecs.h"
#include "testing/gtest/include/gtest/gtest.h"
diff --git a/chromium/media/video/video_decode_accelerator.cc b/chromium/media/video/video_decode_accelerator.cc
index b15a8aa2adb..78171dcfbb8 100644
--- a/chromium/media/video/video_decode_accelerator.cc
+++ b/chromium/media/video/video_decode_accelerator.cc
@@ -71,6 +71,11 @@ gpu::SharedImageStub* VideoDecodeAccelerator::Client::GetSharedImageStub()
return nullptr;
}
+CommandBufferHelper* VideoDecodeAccelerator::Client::GetCommandBufferHelper()
+ const {
+ return nullptr;
+}
+
VideoDecodeAccelerator::~VideoDecodeAccelerator() = default;
void VideoDecodeAccelerator::Decode(scoped_refptr<DecoderBuffer> buffer,
@@ -105,6 +110,11 @@ bool VideoDecodeAccelerator::SupportsSharedImagePictureBuffers() const {
return false;
}
+VideoDecodeAccelerator::TextureAllocationMode
+VideoDecodeAccelerator::GetSharedImageTextureAllocationMode() const {
+ return VideoDecodeAccelerator::TextureAllocationMode::kAllocateGLTextures;
+}
+
VideoDecodeAccelerator::SupportedProfile::SupportedProfile()
: profile(media::VIDEO_CODEC_PROFILE_UNKNOWN), encrypted_only(false) {}
diff --git a/chromium/media/video/video_decode_accelerator.h b/chromium/media/video/video_decode_accelerator.h
index 8f712d8b0a1..5821cc9ece5 100644
--- a/chromium/media/video/video_decode_accelerator.h
+++ b/chromium/media/video/video_decode_accelerator.h
@@ -38,6 +38,8 @@ class SharedImageStub;
namespace media {
+class CommandBufferHelper;
+
// Video decoder interface.
// This interface is extended by the various components that ultimately
// implement the backend of PPB_VideoDecoder_Dev.
@@ -262,6 +264,10 @@ class MEDIA_EXPORT VideoDecodeAccelerator {
// Default implementation returns nullptr.
virtual gpu::SharedImageStub* GetSharedImageStub() const;
+ // Return the CommandBufferHelper through which GL passthrough textures may
+ // be created. Default implementation returns nullptr.
+ virtual CommandBufferHelper* GetCommandBufferHelper() const;
+
protected:
virtual ~Client() {}
};
@@ -418,6 +424,15 @@ class MEDIA_EXPORT VideoDecodeAccelerator {
// May be called on any thread at any time.
virtual bool SupportsSharedImagePictureBuffers() const;
+ enum class TextureAllocationMode {
+ kDoNotAllocateGLTextures,
+ kAllocateGLTextures
+ };
+
+ // Returns the texture allocation mode used for shared images.
+ // May be called on any thread at any time.
+ virtual TextureAllocationMode GetSharedImageTextureAllocationMode() const;
+
protected:
// Do not delete directly; use Destroy() or own it with a scoped_ptr, which
// will Destroy() it properly by default.
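GetSharedImageTextureAllocationMode() follows the same convention as the other capability hooks in this interface: the base class returns the conservative legacy answer, so only decoders that can skip GL texture allocation need to override. A minimal sketch of that pattern with an invented toy hierarchy (only the override pattern mirrors the real API):

class Accelerator {
 public:
  enum class TextureAllocationMode {
    kDoNotAllocateGLTextures,
    kAllocateGLTextures,
  };

  virtual ~Accelerator() = default;

  // Default preserves existing behavior for every subclass.
  virtual TextureAllocationMode GetSharedImageTextureAllocationMode() const {
    return TextureAllocationMode::kAllocateGLTextures;
  }
};

class SharedImageOnlyDecoder : public Accelerator {
 public:
  TextureAllocationMode GetSharedImageTextureAllocationMode() const override {
    return TextureAllocationMode::kDoNotAllocateGLTextures;
  }
};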
diff --git a/chromium/media/video/video_encode_accelerator.cc b/chromium/media/video/video_encode_accelerator.cc
index 35e00c786bd..e0e0fe45b9a 100644
--- a/chromium/media/video/video_encode_accelerator.cc
+++ b/chromium/media/video/video_encode_accelerator.cc
@@ -8,6 +8,8 @@
#include "base/callback.h"
#include "base/strings/stringprintf.h"
+#include "build/build_config.h"
+#include "build/chromeos_buildflags.h"
namespace media {
@@ -98,7 +100,7 @@ std::string VideoEncodeAccelerator::Config::AsHumanReadableString() const {
for (size_t i = 0; i < spatial_layers.size(); ++i) {
const auto& sl = spatial_layers[i];
str += base::StringPrintf(
- ", {SatialLayer#%zu: width=%" PRId32 ", height=%" PRId32
+ ", {SpatialLayer#%zu: width=%" PRId32 ", height=%" PRId32
", bitrate_bps=%" PRIu32 ", framerate=%" PRId32
", max_qp=%u, num_of_temporal_layers=%u}",
i, sl.width, sl.height, sl.bitrate_bps, sl.framerate, sl.max_qp,
@@ -127,8 +129,7 @@ VideoEncodeAccelerator::~VideoEncodeAccelerator() = default;
VideoEncodeAccelerator::SupportedProfile::SupportedProfile()
: profile(media::VIDEO_CODEC_PROFILE_UNKNOWN),
max_framerate_numerator(0),
- max_framerate_denominator(0) {
-}
+ max_framerate_denominator(0) {}
VideoEncodeAccelerator::SupportedProfile::SupportedProfile(
VideoCodecProfile profile,
@@ -152,6 +153,16 @@ bool VideoEncodeAccelerator::IsFlushSupported() {
return false;
}
+bool VideoEncodeAccelerator::IsGpuFrameResizeSupported() {
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ // TODO(crbug.com/1166889) Add proper method overrides in
+ // MojoVideoEncodeAccelerator and other subclasses that might return true.
+ return true;
+#else
+ return false;
+#endif
+}
+
void VideoEncodeAccelerator::RequestEncodingParametersChange(
const VideoBitrateAllocation& bitrate_allocation,
uint32_t framerate) {
diff --git a/chromium/media/video/video_encode_accelerator.h b/chromium/media/video/video_encode_accelerator.h
index e7331f8846e..0bea721e347 100644
--- a/chromium/media/video/video_encode_accelerator.h
+++ b/chromium/media/video/video_encode_accelerator.h
@@ -133,9 +133,9 @@ class MEDIA_EXPORT VideoEncodeAccelerator {
// or as generated (e.g. screen capture).
enum class ContentType { kCamera, kDisplay };
// Indicates the storage type of a video frame provided on Encode().
- // kShmem if a video frame is mapped in user space.
- // kDmabuf if a video frame is referred by dmabuf.
- enum class StorageType { kShmem, kDmabuf };
+ // kShmem if a video frame is backed by shared memory.
+ // kGpuMemoryBuffer if a video frame is backed by a GpuMemoryBuffer.
+ enum class StorageType { kShmem, kGpuMemoryBuffer };
struct MEDIA_EXPORT SpatialLayer {
// The encoder dimension of the spatial layer.
@@ -343,6 +343,11 @@ class MEDIA_EXPORT VideoEncodeAccelerator {
// VEA has been initialized.
virtual bool IsFlushSupported();
+ // Returns true if the encoder supports automatic resizing of GPU-backed
+ // frames to the size provided during encoder configuration.
+ // This method must be called after the VEA has been initialized.
+ virtual bool IsGpuFrameResizeSupported();
+
protected:
// Do not delete directly; use Destroy() or own it with a scoped_ptr, which
// will Destroy() it properly by default.
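IsGpuFrameResizeSupported() is consumed further down in this patch by VideoEncodeAcceleratorAdapter when deciding whether a GPU-backed frame can be submitted without a CPU-side rescale. A small illustrative helper, assuming an already-initialized encoder; the function and parameter names here are invented:

#include "media/video/video_encode_accelerator.h"

// Returns true when a GPU-backed frame can go straight to the accelerator.
// Per the header comment above, the VEA must already be initialized.
bool CanSubmitGpuFrameDirectly(media::VideoEncodeAccelerator* encoder,
                               bool frame_needs_resizing) {
  return !frame_needs_resizing || encoder->IsGpuFrameResizeSupported();
}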
diff --git a/chromium/media/video/video_encode_accelerator_adapter.cc b/chromium/media/video/video_encode_accelerator_adapter.cc
index bdfa80bd994..033970eee70 100644
--- a/chromium/media/video/video_encode_accelerator_adapter.cc
+++ b/chromium/media/video/video_encode_accelerator_adapter.cc
@@ -7,6 +7,8 @@
#include <limits>
#include <vector>
+#include "base/bind_post_task.h"
+#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/memory/ref_counted.h"
#include "base/sequenced_task_runner.h"
@@ -16,11 +18,11 @@
#include "build/build_config.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/video_frame.h"
+#include "media/base/video_util.h"
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
#include "media/formats/mp4/h264_annex_b_to_avc_bitstream_converter.h"
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
#include "media/video/gpu_video_accelerator_factories.h"
-#include "third_party/libyuv/include/libyuv.h"
namespace media {
@@ -35,15 +37,32 @@ VideoEncodeAccelerator::Config SetUpVeaConfig(
const VideoEncoder::Options& opts,
VideoPixelFormat format,
VideoFrame::StorageType storage_type) {
+ base::Optional<uint32_t> initial_framerate;
+ if (opts.framerate.has_value())
+ initial_framerate = static_cast<uint32_t>(opts.framerate.value());
+
auto config = VideoEncodeAccelerator::Config(
format, opts.frame_size, profile,
opts.bitrate.value_or(opts.frame_size.width() * opts.frame_size.height() *
- kVEADefaultBitratePerPixel));
+ kVEADefaultBitratePerPixel),
+ initial_framerate);
+
+ const bool is_rgb =
+ format == PIXEL_FORMAT_XBGR || format == PIXEL_FORMAT_XRGB ||
+ format == PIXEL_FORMAT_ABGR || format == PIXEL_FORMAT_ARGB;
+
+ // Override the provided format if incoming frames are RGB -- they'll be
+ // converted to I420 or NV12 depending on the VEA configuration.
+ if (is_rgb)
+ config.input_format = PIXEL_FORMAT_I420;
#if defined(OS_LINUX) || defined(OS_CHROMEOS)
if (storage_type == VideoFrame::STORAGE_DMABUFS ||
storage_type == VideoFrame::STORAGE_GPU_MEMORY_BUFFER) {
- config.storage_type = VideoEncodeAccelerator::Config::StorageType::kDmabuf;
+ if (is_rgb)
+ config.input_format = PIXEL_FORMAT_NV12;
+ config.storage_type =
+ VideoEncodeAccelerator::Config::StorageType::kGpuMemoryBuffer;
}
#endif
@@ -52,72 +71,6 @@ VideoEncodeAccelerator::Config SetUpVeaConfig(
} // namespace
-class VideoEncodeAcceleratorAdapter::SharedMemoryPool
- : public base::RefCountedThreadSafe<
- VideoEncodeAcceleratorAdapter::SharedMemoryPool> {
- public:
- SharedMemoryPool(GpuVideoAcceleratorFactories* gpu_factories,
- size_t region_size) {
- DCHECK(gpu_factories);
- gpu_factories_ = gpu_factories;
- region_size_ = region_size;
- }
-
- bool MaybeAllocateBuffer(int32_t* id) {
- if (!free_buffer_ids_.empty()) {
- *id = free_buffer_ids_.back();
- free_buffer_ids_.pop_back();
- return true;
- }
-
- if (!gpu_factories_)
- return false;
-
- base::UnsafeSharedMemoryRegion region =
- gpu_factories_->CreateSharedMemoryRegion(region_size_);
- if (!region.IsValid())
- return false;
-
- base::WritableSharedMemoryMapping mapping = region.Map();
- if (!mapping.IsValid())
- return false;
-
- regions_.push_back(std::move(region));
- mappings_.push_back(std::move(mapping));
- if (regions_.size() >= std::numeric_limits<int32_t>::max() / 2) {
- // Suspiciously many buffers have been allocated already.
- return false;
- }
- *id = int32_t{regions_.size()} - 1;
- return true;
- }
-
- void ReleaseBuffer(int32_t id) { free_buffer_ids_.push_back(id); }
-
- base::WritableSharedMemoryMapping* GetMapping(int32_t buffer_id) {
- if (size_t{buffer_id} >= mappings_.size())
- return nullptr;
- return &mappings_[buffer_id];
- }
-
- base::UnsafeSharedMemoryRegion* GetRegion(int32_t buffer_id) {
- if (size_t{buffer_id} >= regions_.size())
- return nullptr;
- return &regions_[buffer_id];
- }
-
- private:
- friend class base::RefCountedThreadSafe<
- VideoEncodeAcceleratorAdapter::SharedMemoryPool>;
- ~SharedMemoryPool() = default;
-
- size_t region_size_;
- GpuVideoAcceleratorFactories* gpu_factories_;
- std::vector<base::UnsafeSharedMemoryRegion> regions_;
- std::vector<base::WritableSharedMemoryMapping> mappings_;
- std::vector<int32_t> free_buffer_ids_;
-};
-
VideoEncodeAcceleratorAdapter::PendingOp::PendingOp() = default;
VideoEncodeAcceleratorAdapter::PendingOp::~PendingOp() = default;
VideoEncodeAcceleratorAdapter::PendingEncode::PendingEncode() = default;
@@ -126,7 +79,9 @@ VideoEncodeAcceleratorAdapter::PendingEncode::~PendingEncode() = default;
VideoEncodeAcceleratorAdapter::VideoEncodeAcceleratorAdapter(
GpuVideoAcceleratorFactories* gpu_factories,
scoped_refptr<base::SequencedTaskRunner> callback_task_runner)
- : gpu_factories_(gpu_factories),
+ : output_pool_(base::MakeRefCounted<SharedMemoryPool>()),
+ input_pool_(base::MakeRefCounted<SharedMemoryPool>()),
+ gpu_factories_(gpu_factories),
accelerator_task_runner_(gpu_factories_->GetTaskRunner()),
callback_task_runner_(std::move(callback_task_runner)) {
DETACH_FROM_SEQUENCE(accelerator_sequence_checker_);
@@ -134,6 +89,8 @@ VideoEncodeAcceleratorAdapter::VideoEncodeAcceleratorAdapter(
VideoEncodeAcceleratorAdapter::~VideoEncodeAcceleratorAdapter() {
DCHECK_CALLED_ON_VALID_SEQUENCE(accelerator_sequence_checker_);
+ input_pool_->Shutdown();
+ output_pool_->Shutdown();
}
void VideoEncodeAcceleratorAdapter::DestroyAsync(
@@ -145,6 +102,11 @@ void VideoEncodeAcceleratorAdapter::DestroyAsync(
runner->DeleteSoon(FROM_HERE, std::move(self));
}
+void VideoEncodeAcceleratorAdapter::SetInputBufferPreferenceForTesting(
+ InputBufferKind pref) {
+ input_buffer_preference_ = pref;
+}
+
void VideoEncodeAcceleratorAdapter::Initialize(VideoCodecProfile profile,
const Options& options,
OutputCB output_cb,
@@ -200,8 +162,10 @@ void VideoEncodeAcceleratorAdapter::InitializeOnAcceleratorThread(
state_ = State::kWaitingForFirstFrame;
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- if (profile_ >= H264PROFILE_MIN && profile_ <= H264PROFILE_MAX)
+ if (profile_ >= H264PROFILE_MIN && profile_ <= H264PROFILE_MAX &&
+ !options_.avc.produce_annexb) {
h264_converter_ = std::make_unique<H264AnnexBToAvcBitstreamConverter>();
+ }
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
std::move(done_cb).Run(Status());
@@ -217,8 +181,37 @@ void VideoEncodeAcceleratorAdapter::InitializeInternalOnAcceleratorThread() {
// We use the first frame to setup the VEA config so that we can ensure that
// zero copy hardware encoding from the camera can be used.
const auto& first_frame = pending_encodes_.front()->frame;
- auto vea_config = SetUpVeaConfig(profile_, options_, first_frame->format(),
- first_frame->storage_type());
+ const auto format = first_frame->format();
+ const bool is_rgb =
+ format == PIXEL_FORMAT_XBGR || format == PIXEL_FORMAT_XRGB ||
+ format == PIXEL_FORMAT_ABGR || format == PIXEL_FORMAT_ARGB;
+ const bool supported_format =
+ format == PIXEL_FORMAT_NV12 || format == PIXEL_FORMAT_I420 || is_rgb;
+ if (!supported_format) {
+ auto status =
+ Status(StatusCode::kEncoderFailedEncode, "Unexpected frame format.")
+ .WithData("frame", first_frame->AsHumanReadableString());
+ InitCompleted(std::move(status));
+ return;
+ }
+
+ auto vea_config =
+ SetUpVeaConfig(profile_, options_, format, first_frame->storage_type());
+
+#if defined(OS_LINUX) || defined(OS_CHROMEOS)
+ // Linux/ChromeOS require a special configuration to use dmabuf storage.
+ // We need to keep sending frames the same way the first frame was sent.
+ // Other platforms will happily mix GpuMemoryBuffer storage with regular
+ // storage, so we don't care about mismatches on other platforms.
+ if (input_buffer_preference_ == InputBufferKind::Any) {
+ if (vea_config.storage_type ==
+ VideoEncodeAccelerator::Config::StorageType::kGpuMemoryBuffer) {
+ input_buffer_preference_ = InputBufferKind::GpuMemBuf;
+ } else {
+ input_buffer_preference_ = InputBufferKind::CpuMemBuf;
+ }
+ }
+#endif
if (!accelerator_->Initialize(vea_config, this)) {
auto status = Status(StatusCode::kEncoderInitializationError,
@@ -228,9 +221,7 @@ void VideoEncodeAcceleratorAdapter::InitializeInternalOnAcceleratorThread() {
}
state_ = State::kInitializing;
- format_ = first_frame->format();
- storage_type_ = first_frame->storage_type();
- using_native_input_ = first_frame->HasGpuMemoryBuffer();
+ format_ = vea_config.input_format;
}
void VideoEncodeAcceleratorAdapter::Encode(scoped_refptr<VideoFrame> frame,
@@ -269,82 +260,36 @@ void VideoEncodeAcceleratorAdapter::EncodeOnAcceleratorThread(
return;
}
-#if defined(OS_LINUX) || defined(OS_CHROMEOS)
- // Linux/ChromeOS require a special configuration to use dmabuf storage.
- const bool is_same_storage_type = storage_type_ == frame->storage_type();
-#else
- // Other platforms will happily mix GpuMemoryBuffer storage with regular
- // storage, so we don't care about mismatches on other platforms.
- const bool is_same_storage_type = true;
-#endif
-
- if (format_ != frame->format() || !is_same_storage_type) {
- auto status = Status(StatusCode::kEncoderFailedEncode,
- "Unexpected frame format change.")
- .WithData("current_format", format_)
- .WithData("current_storage_type", storage_type_)
- .WithData("new_frame", frame->AsHumanReadableString());
- std::move(done_cb).Run(status);
- return;
- }
-
- if (!frame->HasGpuMemoryBuffer() && !frame->IsMappable() &&
- frame->format() != PIXEL_FORMAT_I420) {
- auto status =
- Status(StatusCode::kEncoderFailedEncode, "Unexpected frame format.")
- .WithData("frame", frame->AsHumanReadableString());
- std::move(done_cb).Run(std::move(status));
+ const bool frame_needs_resizing =
+ frame->visible_rect().size() != options_.frame_size;
+
+ // Try using a frame with a GPU buffer if both are true:
+ // 1. the frame already has a GPU buffer, and
+ // 2. the frame doesn't need resizing or can be resized by the GPU encoder.
+ bool use_gpu_buffer = frame->HasGpuMemoryBuffer() &&
+ (!frame_needs_resizing || gpu_resize_supported_);
+
+ // The currently configured encoder preference takes precedence over the
+ // heuristic above.
+ if (input_buffer_preference_ == InputBufferKind::GpuMemBuf)
+ use_gpu_buffer = true;
+ if (input_buffer_preference_ == InputBufferKind::CpuMemBuf)
+ use_gpu_buffer = false;
+
+ StatusOr<scoped_refptr<VideoFrame>> result(nullptr);
+ if (use_gpu_buffer)
+ result = PrepareGpuFrame(options_.frame_size, frame);
+ else
+ result = PrepareCpuFrame(options_.frame_size, frame);
+
+ if (result.has_error()) {
+ auto status = std::move(result).error();
+ status.WithData("frame", frame->AsHumanReadableString());
+ std::move(done_cb).Run(std::move(status).AddHere());
return;
}
- if (!frame->HasGpuMemoryBuffer()) {
- DCHECK_EQ(format_, PIXEL_FORMAT_I420);
-
- int32_t buffer_id;
- if (!input_pool_->MaybeAllocateBuffer(&buffer_id)) {
- auto status = Status(StatusCode::kEncoderFailedEncode,
- "Can't allocate a shared input buffer");
- std::move(done_cb).Run(std::move(status));
- return;
- }
-
- base::UnsafeSharedMemoryRegion* region = input_pool_->GetRegion(buffer_id);
- base::WritableSharedMemoryMapping* mapping =
- input_pool_->GetMapping(buffer_id);
-
- auto shared_frame = VideoFrame::WrapExternalData(
- format_, frame->coded_size(), frame->visible_rect(),
- frame->natural_size(), mapping->GetMemoryAsSpan<uint8_t>().data(),
- mapping->size(), frame->timestamp());
-
- if (!shared_frame) {
- auto status = Status(StatusCode::kEncoderFailedEncode,
- "Can't allocate a shared frame");
- std::move(done_cb).Run(std::move(status));
- return;
- }
-
- shared_frame->BackWithSharedMemory(region);
- shared_frame->AddDestructionObserver(BindToCurrentLoop(base::BindOnce(
- &SharedMemoryPool::ReleaseBuffer, input_pool_, buffer_id)));
- libyuv::I420Copy(frame->visible_data(VideoFrame::kYPlane),
- frame->stride(VideoFrame::kYPlane),
- frame->visible_data(VideoFrame::kUPlane),
- frame->stride(VideoFrame::kUPlane),
- frame->visible_data(VideoFrame::kVPlane),
- frame->stride(VideoFrame::kVPlane),
- shared_frame->visible_data(VideoFrame::kYPlane),
- shared_frame->stride(VideoFrame::kYPlane),
- shared_frame->visible_data(VideoFrame::kUPlane),
- shared_frame->stride(VideoFrame::kUPlane),
- shared_frame->visible_data(VideoFrame::kVPlane),
- shared_frame->stride(VideoFrame::kVPlane),
- frame->visible_rect().width(),
- frame->visible_rect().height());
- frame = std::move(shared_frame);
- } else {
- DCHECK_EQ(format_, PIXEL_FORMAT_NV12);
- }
+ frame = std::move(result).value();
auto active_encode = std::make_unique<PendingOp>();
active_encode->done_callback = std::move(done_cb);
@@ -391,9 +336,20 @@ void VideoEncodeAcceleratorAdapter::ChangeOptionsOnAcceleratorThread(
options.framerate.value_or(VideoEncodeAccelerator::kDefaultFramerate))};
accelerator_->RequestEncodingParametersChange(bitrate, framerate);
+
+#if BUILDFLAG(USE_PROPRIETARY_CODECS)
+ if (profile_ >= H264PROFILE_MIN && profile_ <= H264PROFILE_MAX) {
+ if (options.avc.produce_annexb) {
+ h264_converter_.reset();
+ } else if (!h264_converter_) {
+ h264_converter_ = std::make_unique<H264AnnexBToAvcBitstreamConverter>();
+ }
+ }
+#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
+
options_ = options;
if (!output_cb.is_null())
- output_cb_ = BindToCurrentLoop(std::move(output_cb));
+ output_cb_ = std::move(output_cb);
std::move(done_cb).Run(Status());
}
@@ -437,7 +393,7 @@ void VideoEncodeAcceleratorAdapter::FlushOnAcceleratorThread(StatusCB done_cb) {
// If flush is not supported FlushCompleted() will be called by
// BitstreamBufferReady() when |active_encodes_| is empty.
- if (flush_support_ && state_ == State::kFlushing) {
+ if (state_ == State::kFlushing && flush_support_.value()) {
accelerator_->Flush(
base::BindOnce(&VideoEncodeAcceleratorAdapter::FlushCompleted,
base::Unretained(this)));
@@ -449,26 +405,22 @@ void VideoEncodeAcceleratorAdapter::RequireBitstreamBuffers(
const gfx::Size& input_coded_size,
size_t output_buffer_size) {
DCHECK_CALLED_ON_VALID_SEQUENCE(accelerator_sequence_checker_);
- output_pool_ = base::MakeRefCounted<SharedMemoryPool>(gpu_factories_,
- output_buffer_size);
- if (!using_native_input_) {
- size_t input_buffer_size =
- VideoFrame::AllocationSize(PIXEL_FORMAT_I420, input_coded_size);
- input_pool_ = base::MakeRefCounted<SharedMemoryPool>(gpu_factories_,
- input_buffer_size);
- }
- int32_t buffer_id;
- if (!output_pool_->MaybeAllocateBuffer(&buffer_id)) {
+ input_buffer_size_ =
+ VideoFrame::AllocationSize(PIXEL_FORMAT_I420, input_coded_size);
+
+ output_handle_holder_ = output_pool_->MaybeAllocateBuffer(output_buffer_size);
+
+ if (!output_handle_holder_) {
InitCompleted(Status(StatusCode::kEncoderInitializationError));
return;
}
- base::UnsafeSharedMemoryRegion* region = output_pool_->GetRegion(buffer_id);
+ base::UnsafeSharedMemoryRegion* region = output_handle_holder_->GetRegion();
+ // There is always one output buffer.
accelerator_->UseOutputBitstreamBuffer(
- BitstreamBuffer(buffer_id, region->Duplicate(), region->GetSize()));
+ BitstreamBuffer(0, region->Duplicate(), region->GetSize()));
InitCompleted(Status());
- flush_support_ = accelerator_->IsFlushSupported();
}
void VideoEncodeAcceleratorAdapter::BitstreamBufferReady(
@@ -480,8 +432,10 @@ void VideoEncodeAcceleratorAdapter::BitstreamBufferReady(
result.timestamp = metadata.timestamp;
result.size = metadata.payload_size_bytes;
+ DCHECK_EQ(buffer_id, 0);
+ // There is always one output buffer.
base::WritableSharedMemoryMapping* mapping =
- output_pool_->GetMapping(buffer_id);
+ output_handle_holder_->GetMapping();
DCHECK_LE(result.size, mapping->size());
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
@@ -534,7 +488,7 @@ void VideoEncodeAcceleratorAdapter::BitstreamBufferReady(
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
// Give the buffer back to |accelerator_|
- base::UnsafeSharedMemoryRegion* region = output_pool_->GetRegion(buffer_id);
+ base::UnsafeSharedMemoryRegion* region = output_handle_holder_->GetRegion();
accelerator_->UseOutputBitstreamBuffer(
BitstreamBuffer(buffer_id, region->Duplicate(), region->GetSize()));
@@ -546,7 +500,7 @@ void VideoEncodeAcceleratorAdapter::BitstreamBufferReady(
}
}
output_cb_.Run(std::move(result), std::move(desc));
- if (active_encodes_.empty() && !flush_support_) {
+ if (active_encodes_.empty() && !flush_support_.value()) {
// Manually call FlushCompleted(), since |accelerator_| won't do it for us.
FlushCompleted(true);
}
@@ -585,11 +539,8 @@ void VideoEncodeAcceleratorAdapter::InitCompleted(Status status) {
if (!status.is_ok()) {
// Report the error to all encoding-done callbacks
- for (auto& encode : pending_encodes_) {
- auto status = Status(StatusCode::kEncoderFailedEncode,
- "VideoEncodeAccelerator encountered an error");
- std::move(encode->done_callback).Run(Status());
- }
+ for (auto& encode : pending_encodes_)
+ std::move(encode->done_callback).Run(status);
if (pending_flush_)
FlushCompleted(false);
@@ -601,6 +552,8 @@ void VideoEncodeAcceleratorAdapter::InitCompleted(Status status) {
}
state_ = State::kReadyToEncode;
+ flush_support_ = accelerator_->IsFlushSupported();
+ gpu_resize_supported_ = accelerator_->IsGpuFrameResizeSupported();
// Send off the encodes that came in while we were waiting for initialization.
for (auto& encode : pending_encodes_) {
@@ -613,7 +566,7 @@ void VideoEncodeAcceleratorAdapter::InitCompleted(Status status) {
// all the pending encodes have been sent.
if (pending_flush_) {
state_ = State::kFlushing;
- if (flush_support_) {
+ if (flush_support_.value()) {
accelerator_->Flush(
base::BindOnce(&VideoEncodeAcceleratorAdapter::FlushCompleted,
base::Unretained(this)));
@@ -637,7 +590,96 @@ T VideoEncodeAcceleratorAdapter::WrapCallback(T cb) {
DCHECK(callback_task_runner_);
if (cb.is_null())
return cb;
- return BindToLoop(callback_task_runner_.get(), std::move(cb));
+ return base::BindPostTask(callback_task_runner_, std::move(cb));
+}
+
+// Copy a frame into a shared memory buffer and resize it at the same time.
+// Input frames can be I420, NV12, or RGB -- they'll be converted to I420 if
+// needed.
+StatusOr<scoped_refptr<VideoFrame>>
+VideoEncodeAcceleratorAdapter::PrepareCpuFrame(
+ const gfx::Size& size,
+ scoped_refptr<VideoFrame> src_frame) {
+ auto handle = input_pool_->MaybeAllocateBuffer(input_buffer_size_);
+ if (!handle)
+ return Status(StatusCode::kEncoderFailedEncode);
+
+ base::UnsafeSharedMemoryRegion* region = handle->GetRegion();
+ base::WritableSharedMemoryMapping* mapping = handle->GetMapping();
+
+ auto mapped_src_frame = src_frame->HasGpuMemoryBuffer()
+ ? ConvertToMemoryMappedFrame(src_frame)
+ : src_frame;
+ auto shared_frame = VideoFrame::WrapExternalData(
+ PIXEL_FORMAT_I420, options_.frame_size, gfx::Rect(size), size,
+ mapping->GetMemoryAsSpan<uint8_t>().data(), mapping->size(),
+ src_frame->timestamp());
+
+ if (!shared_frame || !mapped_src_frame)
+ return Status(StatusCode::kEncoderFailedEncode);
+
+ shared_frame->BackWithSharedMemory(region);
+ // Keep the SharedMemoryHandle until the frame is destroyed so that the
+ // memory is not freed prematurely.
+ shared_frame->AddDestructionObserver(BindToCurrentLoop(base::BindOnce(
+ base::DoNothing::Once<
+ std::unique_ptr<SharedMemoryPool::SharedMemoryHandle>>(),
+ std::move(handle))));
+ auto status =
+ ConvertAndScaleFrame(*mapped_src_frame, *shared_frame, resize_buf_);
+ if (!status.is_ok())
+ return std::move(status).AddHere();
+
+ return shared_frame;
+}
+
+// Copy a frame into a GPU buffer and resize it at the same time. Input
+// frames can be I420, NV12, or RGB -- they'll be converted to NV12 if needed.
+StatusOr<scoped_refptr<VideoFrame>>
+VideoEncodeAcceleratorAdapter::PrepareGpuFrame(
+ const gfx::Size& size,
+ scoped_refptr<VideoFrame> src_frame) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(accelerator_sequence_checker_);
+ DCHECK(src_frame);
+ if (src_frame->HasGpuMemoryBuffer() &&
+ src_frame->format() == PIXEL_FORMAT_NV12 &&
+ (gpu_resize_supported_ || src_frame->visible_rect().size() == size)) {
+ // Nothing to do here, the input frame is already what we need
+ return src_frame;
+ }
+
+ auto gmb = gpu_factories_->CreateGpuMemoryBuffer(
+ size, gfx::BufferFormat::YUV_420_BIPLANAR,
+ gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE);
+
+ if (!gmb)
+ return Status(StatusCode::kEncoderFailedEncode);
+ gmb->SetColorSpace(src_frame->ColorSpace());
+
+ gpu::MailboxHolder empty_mailboxes[media::VideoFrame::kMaxPlanes];
+ auto gpu_frame = VideoFrame::WrapExternalGpuMemoryBuffer(
+ gfx::Rect(size), size, std::move(gmb), empty_mailboxes,
+ base::NullCallback(), src_frame->timestamp());
+ gpu_frame->set_color_space(src_frame->ColorSpace());
+ gpu_frame->metadata().MergeMetadataFrom(src_frame->metadata());
+
+ // Don't be scared: ConvertToMemoryMappedFrame() doesn't copy pixel data;
+ // it just maps the GPU buffer owned by |gpu_frame| and presents it as a
+ // mapped view in CPU memory. That lets us use ConvertAndScaleFrame()
+ // without having to tinker with libyuv and GpuMemoryBuffer memory views.
+ // |mapped_gpu_frame| doesn't own anything, but unmaps the buffer when freed.
+ auto mapped_gpu_frame = ConvertToMemoryMappedFrame(gpu_frame);
+ auto mapped_src_frame = src_frame->HasGpuMemoryBuffer()
+ ? ConvertToMemoryMappedFrame(src_frame)
+ : src_frame;
+ if (!mapped_gpu_frame || !mapped_src_frame)
+ return Status(StatusCode::kEncoderFailedEncode);
+
+ auto status =
+ ConvertAndScaleFrame(*mapped_src_frame, *mapped_gpu_frame, resize_buf_);
+ if (!status.is_ok())
+ return std::move(status).AddHere();
+
+ return gpu_frame;
}
} // namespace media
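Stripped of the Status plumbing, the routing logic that EncodeOnAcceleratorThread() now implements reduces to a small pure function. The sketch below restates it with simplified stand-in types (the real code operates on VideoFrame and StatusOr) so the precedence rules are easy to see.

enum class Path { kCpu, kGpu };
enum class Preference { kAny, kGpuMemBuf, kCpuMemBuf };

struct FrameInfo {
  bool has_gpu_memory_buffer = false;
  bool needs_resizing = false;
};

Path ChoosePath(const FrameInfo& frame, bool gpu_resize_supported,
                Preference pref) {
  // Heuristic from EncodeOnAcceleratorThread(): use the GPU path only when
  // the frame is already GPU-backed and either needs no resize or the
  // accelerator can resize it.
  bool use_gpu = frame.has_gpu_memory_buffer &&
                 (!frame.needs_resizing || gpu_resize_supported);
  // The configured preference (pinned on Linux/ChromeOS from the first
  // frame) overrides the heuristic.
  if (pref == Preference::kGpuMemBuf)
    use_gpu = true;
  if (pref == Preference::kCpuMemBuf)
    use_gpu = false;
  return use_gpu ? Path::kGpu : Path::kCpu;
}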
diff --git a/chromium/media/video/video_encode_accelerator_adapter.h b/chromium/media/video/video_encode_accelerator_adapter.h
index bd43cf5ad38..d1a6307c9dd 100644
--- a/chromium/media/video/video_encode_accelerator_adapter.h
+++ b/chromium/media/video/video_encode_accelerator_adapter.h
@@ -9,10 +9,13 @@
#include "base/callback_forward.h"
#include "base/containers/circular_deque.h"
+#include "base/containers/flat_map.h"
#include "base/containers/queue.h"
#include "base/memory/scoped_refptr.h"
+#include "base/optional.h"
#include "base/synchronization/lock.h"
#include "media/base/media_export.h"
+#include "media/base/shared_memory_pool.h"
#include "media/base/video_encoder.h"
#include "media/video/video_encode_accelerator.h"
#include "ui/gfx/geometry/size.h"
@@ -27,7 +30,7 @@ class H264AnnexBToAvcBitstreamConverter;
// This class is a somewhat complex adapter from VideoEncodeAccelerator
// to VideoEncoder, it takes cares of such things as
-// - managing and copying GPU-shared memory buffers
+// - managing and copying GPU/shared memory buffers
// - managing hops between task runners, for VEA and callbacks
// - keeping track of the state machine, forbidding encodes during flush, etc.
class MEDIA_EXPORT VideoEncodeAcceleratorAdapter
@@ -39,6 +42,10 @@ class MEDIA_EXPORT VideoEncodeAcceleratorAdapter
scoped_refptr<base::SequencedTaskRunner> callback_task_runner);
~VideoEncodeAcceleratorAdapter() override;
+ enum class InputBufferKind { Any, GpuMemBuf, CpuMemBuf };
+ // A way to force a certain way of submitting frames to VEA.
+ void SetInputBufferPreferenceForTesting(InputBufferKind type);
+
// VideoEncoder implementation.
void Initialize(VideoCodecProfile profile,
const Options& options,
@@ -68,7 +75,6 @@ class MEDIA_EXPORT VideoEncodeAcceleratorAdapter
static void DestroyAsync(std::unique_ptr<VideoEncodeAcceleratorAdapter> self);
private:
- class SharedMemoryPool;
enum class State {
kNotInitialized,
kWaitingForFirstFrame,
@@ -101,13 +107,24 @@ class MEDIA_EXPORT VideoEncodeAcceleratorAdapter
template <class T>
T WrapCallback(T cb);
+ StatusOr<scoped_refptr<VideoFrame>> PrepareGpuFrame(
+ const gfx::Size& size,
+ scoped_refptr<VideoFrame> src_frame);
+ StatusOr<scoped_refptr<VideoFrame>> PrepareCpuFrame(
+ const gfx::Size& size,
+ scoped_refptr<VideoFrame> src_frame);
scoped_refptr<SharedMemoryPool> output_pool_;
scoped_refptr<SharedMemoryPool> input_pool_;
+ std::unique_ptr<SharedMemoryPool::SharedMemoryHandle> output_handle_holder_;
+ size_t input_buffer_size_;
+
std::unique_ptr<VideoEncodeAccelerator> accelerator_;
GpuVideoAcceleratorFactories* gpu_factories_;
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
+ // If |h264_converter_| is null, we output in annexb format. Otherwise, we
+ // output in avc format.
std::unique_ptr<H264AnnexBToAvcBitstreamConverter> h264_converter_;
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
@@ -125,7 +142,11 @@ class MEDIA_EXPORT VideoEncodeAcceleratorAdapter
scoped_refptr<base::SequencedTaskRunner> callback_task_runner_;
State state_ = State::kNotInitialized;
- bool flush_support_ = false;
+ base::Optional<bool> flush_support_;
+
+ // True if the underlying VEA instance can handle GPU-backed frames with a
+ // size different from what the VEA was configured for.
+ bool gpu_resize_supported_ = false;
struct PendingEncode {
PendingEncode();
@@ -138,14 +159,14 @@ class MEDIA_EXPORT VideoEncodeAcceleratorAdapter
// These are encodes that have not been sent to the accelerator.
std::vector<std::unique_ptr<PendingEncode>> pending_encodes_;
- bool using_native_input_;
VideoPixelFormat format_;
- VideoFrame::StorageType storage_type_;
+ InputBufferKind input_buffer_preference_ = InputBufferKind::Any;
+ std::vector<uint8_t> resize_buf_;
- VideoCodecProfile profile_;
+ VideoCodecProfile profile_ = VIDEO_CODEC_PROFILE_UNKNOWN;
Options options_;
OutputCB output_cb_;
};
} // namespace media
-#endif // MEDIA_VIDEO_VIDEO_ENCODE_ACCELERATOR_ADAPTER_H_
\ No newline at end of file
+#endif // MEDIA_VIDEO_VIDEO_ENCODE_ACCELERATOR_ADAPTER_H_
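Turning flush_support_ from a plain bool into base::Optional<bool> encodes "not yet known": the flag is only populated in InitCompleted(), and calling .value() before then fails loudly instead of silently reading false. The same tri-state idea, sketched with std::optional and invented names:

#include <optional>

std::optional<bool> flush_support;  // unset until initialization completes

void OnInitialized(bool accelerator_supports_flush) {
  flush_support = accelerator_supports_flush;
}

bool CanFlushNow() {
  // value() fails loudly if initialization hasn't finished, rather than
  // quietly behaving as if flush were unsupported.
  return flush_support.value();
}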
diff --git a/chromium/media/video/video_encode_accelerator_adapter_test.cc b/chromium/media/video/video_encode_accelerator_adapter_test.cc
new file mode 100644
index 00000000000..cab03861d03
--- /dev/null
+++ b/chromium/media/video/video_encode_accelerator_adapter_test.cc
@@ -0,0 +1,403 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/video/video_encode_accelerator_adapter.h"
+
+#include <memory>
+#include <string>
+
+#include "base/logging.h"
+#include "base/memory/scoped_refptr.h"
+#include "base/sequenced_task_runner.h"
+#include "base/strings/stringprintf.h"
+#include "base/test/bind.h"
+#include "base/test/gmock_callback_support.h"
+#include "base/test/task_environment.h"
+#include "base/threading/sequenced_task_runner_handle.h"
+#include "base/threading/thread.h"
+#include "base/time/time.h"
+#include "build/build_config.h"
+#include "media/base/video_frame.h"
+#include "media/base/video_util.h"
+#include "media/video/fake_video_encode_accelerator.h"
+#include "media/video/gpu_video_accelerator_factories.h"
+#include "media/video/mock_gpu_video_accelerator_factories.h"
+#include "media/video/mock_video_encode_accelerator.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "third_party/libyuv/include/libyuv.h"
+
+using ::testing::_;
+using ::testing::AtLeast;
+using ::testing::Field;
+using ::testing::Invoke;
+using ::testing::Return;
+using ::testing::SaveArg;
+using ::testing::Values;
+using ::testing::WithArgs;
+
+namespace media {
+
+class VideoEncodeAcceleratorAdapterTest
+ : public ::testing::TestWithParam<VideoPixelFormat> {
+ public:
+ VideoEncodeAcceleratorAdapterTest() = default;
+
+ void SetUp() override {
+ vea_runner_ = base::ThreadPool::CreateSequencedTaskRunner({});
+
+ vea_ = new FakeVideoEncodeAccelerator(vea_runner_);
+ gpu_factories_ =
+ std::make_unique<MockGpuVideoAcceleratorFactories>(nullptr);
+ EXPECT_CALL(*gpu_factories_.get(), DoCreateVideoEncodeAccelerator())
+ .WillRepeatedly(Return(vea_));
+ EXPECT_CALL(*gpu_factories_.get(), GetTaskRunner())
+ .WillRepeatedly(Return(vea_runner_));
+
+ callback_runner_ = base::SequencedTaskRunnerHandle::Get();
+ vae_adapter_ = std::make_unique<VideoEncodeAcceleratorAdapter>(
+ gpu_factories_.get(), callback_runner_);
+ }
+
+ void TearDown() override {
+ vea_runner_->DeleteSoon(FROM_HERE, std::move(vae_adapter_));
+ RunUntilIdle();
+ }
+
+ void RunUntilIdle() { task_environment_.RunUntilIdle(); }
+ VideoEncodeAcceleratorAdapter* adapter() { return vae_adapter_.get(); }
+ FakeVideoEncodeAccelerator* vea() { return vea_; }
+
+ scoped_refptr<VideoFrame> CreateGreenGpuFrame(gfx::Size size,
+ base::TimeDelta timestamp) {
+ auto gmb = gpu_factories_->CreateGpuMemoryBuffer(
+ size, gfx::BufferFormat::YUV_420_BIPLANAR,
+ gfx::BufferUsage::VEA_READ_CAMERA_AND_CPU_READ_WRITE);
+
+ if (!gmb || !gmb->Map())
+ return nullptr;
+
+ // Greenish NV12 frame (Y:0x96, U:0x28, V:0x28)
+ const auto gmb_size = gmb->GetSize();
+ memset(static_cast<uint8_t*>(gmb->memory(0)), 0x96,
+ gmb->stride(0) * gmb_size.height());
+ memset(static_cast<uint8_t*>(gmb->memory(1)), 0x28,
+ gmb->stride(1) * gmb_size.height() / 2);
+ gmb->Unmap();
+
+ gpu::MailboxHolder empty_mailboxes[media::VideoFrame::kMaxPlanes];
+ return VideoFrame::WrapExternalGpuMemoryBuffer(
+ gfx::Rect(gmb_size), size, std::move(gmb), empty_mailboxes,
+ base::NullCallback(), timestamp);
+ }
+
+ scoped_refptr<VideoFrame> CreateGreenCpuFrame(gfx::Size size,
+ base::TimeDelta timestamp) {
+ auto frame = VideoFrame::CreateFrame(PIXEL_FORMAT_I420, size,
+ gfx::Rect(size), size, timestamp);
+
+ // Green I420 frame (Y:0x96, U:0x40, V:0x40)
+ libyuv::I420Rect(
+ frame->data(VideoFrame::kYPlane), frame->stride(VideoFrame::kYPlane),
+ frame->data(VideoFrame::kUPlane), frame->stride(VideoFrame::kUPlane),
+ frame->data(VideoFrame::kVPlane), frame->stride(VideoFrame::kVPlane),
+ 0, // left
+ 0, // top
+ frame->visible_rect().width(), // right
+ frame->visible_rect().height(), // bottom
+ 0x96, // Y color
+ 0x40, // U color
+ 0x40); // V color
+
+ return frame;
+ }
+
+ scoped_refptr<VideoFrame> CreateGreenCpuFrameARGB(gfx::Size size,
+ base::TimeDelta timestamp) {
+ auto frame = VideoFrame::CreateFrame(PIXEL_FORMAT_XRGB, size,
+ gfx::Rect(size), size, timestamp);
+
+ // Green XRGB frame (R:0x3B, G:0xD9, B:0x24)
+ libyuv::ARGBRect(frame->data(VideoFrame::kARGBPlane),
+ frame->stride(VideoFrame::kARGBPlane),
+ 0, // left
+ 0, // top
+ frame->visible_rect().width(), // right
+ frame->visible_rect().height(), // bottom
+ 0x24D93B00); // ARGB color
+
+ return frame;
+ }
+
+ scoped_refptr<VideoFrame> CreateGreenFrame(gfx::Size size,
+ VideoPixelFormat format,
+ base::TimeDelta timestamp) {
+ switch (format) {
+ case PIXEL_FORMAT_I420:
+ return CreateGreenCpuFrame(size, timestamp);
+ case PIXEL_FORMAT_NV12:
+ return CreateGreenGpuFrame(size, timestamp);
+ case PIXEL_FORMAT_XRGB:
+ return CreateGreenCpuFrameARGB(size, timestamp);
+ default:
+ EXPECT_TRUE(false) << "not supported pixel format";
+ return nullptr;
+ }
+ }
+
+ VideoEncoder::StatusCB ValidatingStatusCB(base::Location loc = FROM_HERE) {
+ struct CallEnforcer {
+ bool called = false;
+ std::string location;
+ ~CallEnforcer() {
+ EXPECT_TRUE(called) << "Callback created: " << location;
+ }
+ };
+ auto enforcer = std::make_unique<CallEnforcer>();
+ enforcer->location = loc.ToString();
+ return base::BindLambdaForTesting(
+ [this, enforcer{std::move(enforcer)}](Status s) {
+ EXPECT_TRUE(callback_runner_->RunsTasksInCurrentSequence());
+ EXPECT_TRUE(s.is_ok()) << " Callback created: " << enforcer->location
+ << " Error: " << s.message();
+ enforcer->called = true;
+ });
+ }
+
+ protected:
+ VideoCodecProfile profile_ = VP8PROFILE_ANY;
+ base::test::TaskEnvironment task_environment_;
+ FakeVideoEncodeAccelerator* vea_; // owned by |vae_adapter_|
+ std::unique_ptr<MockGpuVideoAcceleratorFactories> gpu_factories_;
+ std::unique_ptr<VideoEncodeAcceleratorAdapter> vae_adapter_;
+ scoped_refptr<base::SequencedTaskRunner> vea_runner_;
+ scoped_refptr<base::SequencedTaskRunner> callback_runner_;
+};
+
+TEST_F(VideoEncodeAcceleratorAdapterTest, PreInitialize) {
+ VideoEncoder::Options options;
+ options.frame_size = gfx::Size(640, 480);
+ VideoEncoder::OutputCB output_cb = base::BindLambdaForTesting(
+ [&](VideoEncoderOutput, base::Optional<VideoEncoder::CodecDescription>) {
+ });
+
+ adapter()->Initialize(profile_, options, std::move(output_cb),
+ ValidatingStatusCB());
+ RunUntilIdle();
+}
+
+TEST_F(VideoEncodeAcceleratorAdapterTest, InitializeAfterFirstFrame) {
+ VideoEncoder::Options options;
+ options.frame_size = gfx::Size(640, 480);
+ int outputs_count = 0;
+ auto pixel_format = PIXEL_FORMAT_I420;
+ VideoEncoder::OutputCB output_cb = base::BindLambdaForTesting(
+ [&](VideoEncoderOutput, base::Optional<VideoEncoder::CodecDescription>) {
+ outputs_count++;
+ });
+
+ vea()->SetEncodingCallback(base::BindLambdaForTesting(
+ [&](BitstreamBuffer&, bool keyframe, scoped_refptr<VideoFrame> frame) {
+ EXPECT_EQ(keyframe, true);
+ EXPECT_EQ(frame->format(), pixel_format);
+ EXPECT_EQ(frame->coded_size(), options.frame_size);
+ return BitstreamBufferMetadata(1, keyframe, frame->timestamp());
+ }));
+ adapter()->Initialize(profile_, options, std::move(output_cb),
+ ValidatingStatusCB());
+
+ auto frame = CreateGreenFrame(options.frame_size, pixel_format,
+ base::TimeDelta::FromMilliseconds(1));
+ adapter()->Encode(frame, true, ValidatingStatusCB());
+ RunUntilIdle();
+ EXPECT_EQ(outputs_count, 1);
+}
+
+TEST_F(VideoEncodeAcceleratorAdapterTest, FlushDuringInitialize) {
+ VideoEncoder::Options options;
+ options.frame_size = gfx::Size(640, 480);
+ int outputs_count = 0;
+ auto pixel_format = PIXEL_FORMAT_I420;
+ VideoEncoder::OutputCB output_cb = base::BindLambdaForTesting(
+ [&](VideoEncoderOutput, base::Optional<VideoEncoder::CodecDescription>) {
+ outputs_count++;
+ });
+
+ vea()->SetEncodingCallback(base::BindLambdaForTesting(
+ [&](BitstreamBuffer&, bool keyframe, scoped_refptr<VideoFrame> frame) {
+ EXPECT_EQ(keyframe, true);
+ EXPECT_EQ(frame->format(), pixel_format);
+ EXPECT_EQ(frame->coded_size(), options.frame_size);
+ return BitstreamBufferMetadata(1, keyframe, frame->timestamp());
+ }));
+ adapter()->Initialize(profile_, options, std::move(output_cb),
+ ValidatingStatusCB());
+
+ auto frame = CreateGreenFrame(options.frame_size, pixel_format,
+ base::TimeDelta::FromMilliseconds(1));
+ adapter()->Encode(frame, true, ValidatingStatusCB());
+ adapter()->Flush(base::BindLambdaForTesting([&](Status s) {
+ EXPECT_TRUE(s.is_ok());
+ EXPECT_EQ(outputs_count, 1);
+ }));
+ RunUntilIdle();
+}
+
+TEST_F(VideoEncodeAcceleratorAdapterTest, InitializationError) {
+ VideoEncoder::Options options;
+ options.frame_size = gfx::Size(640, 480);
+ int outputs_count = 0;
+ auto pixel_format = PIXEL_FORMAT_I420;
+ VideoEncoder::OutputCB output_cb = base::BindLambdaForTesting(
+ [&](VideoEncoderOutput, base::Optional<VideoEncoder::CodecDescription>) {
+ outputs_count++;
+ });
+
+ VideoEncoder::StatusCB expect_error_done_cb =
+ base::BindLambdaForTesting([&](Status s) { EXPECT_FALSE(s.is_ok()); });
+
+ vea()->SetEncodingCallback(base::BindLambdaForTesting(
+ [&](BitstreamBuffer&, bool keyframe, scoped_refptr<VideoFrame> frame) {
+        ADD_FAILURE() << "The encoding callback should never be reached.";
+ return BitstreamBufferMetadata(1, keyframe, frame->timestamp());
+ }));
+ adapter()->Initialize(VIDEO_CODEC_PROFILE_UNKNOWN, options,
+ std::move(output_cb), ValidatingStatusCB());
+
+ auto frame = CreateGreenFrame(options.frame_size, pixel_format,
+ base::TimeDelta::FromMilliseconds(1));
+ adapter()->Encode(frame, true, std::move(expect_error_done_cb));
+ RunUntilIdle();
+ EXPECT_EQ(outputs_count, 0);
+}
+
+TEST_P(VideoEncodeAcceleratorAdapterTest, TwoFramesResize) {
+ VideoEncoder::Options options;
+ options.frame_size = gfx::Size(640, 480);
+ int outputs_count = 0;
+ gfx::Size small_size(480, 320);
+ gfx::Size large_size(800, 600);
+ auto pixel_format = GetParam();
+ VideoEncoder::OutputCB output_cb = base::BindLambdaForTesting(
+ [&](VideoEncoderOutput, base::Optional<VideoEncoder::CodecDescription>) {
+ outputs_count++;
+ });
+
+ vea()->SetEncodingCallback(base::BindLambdaForTesting(
+ [&](BitstreamBuffer&, bool keyframe, scoped_refptr<VideoFrame> frame) {
+#if defined(OS_LINUX) || defined(OS_CHROMEOS)
+ EXPECT_EQ(frame->format(),
+ IsYuvPlanar(pixel_format) ? pixel_format : PIXEL_FORMAT_I420);
+#else
+        // Everywhere except Linux and Chrome OS, resizing switches the frame
+        // into CPU-backed (I420) memory.
+ EXPECT_EQ(frame->format(), PIXEL_FORMAT_I420);
+#endif
+ EXPECT_EQ(frame->coded_size(), options.frame_size);
+ return BitstreamBufferMetadata(1, keyframe, frame->timestamp());
+ }));
+ adapter()->Initialize(profile_, options, std::move(output_cb),
+ ValidatingStatusCB());
+
+ auto small_frame = CreateGreenFrame(small_size, pixel_format,
+ base::TimeDelta::FromMilliseconds(1));
+ auto large_frame = CreateGreenFrame(large_size, pixel_format,
+ base::TimeDelta::FromMilliseconds(2));
+ adapter()->Encode(small_frame, true, ValidatingStatusCB());
+ adapter()->Encode(large_frame, false, ValidatingStatusCB());
+ RunUntilIdle();
+ EXPECT_EQ(outputs_count, 2);
+}
+
+TEST_F(VideoEncodeAcceleratorAdapterTest, AutomaticResizeSupport) {
+ VideoEncoder::Options options;
+ options.frame_size = gfx::Size(640, 480);
+ int outputs_count = 0;
+ gfx::Size small_size(480, 320);
+ auto pixel_format = PIXEL_FORMAT_NV12;
+ VideoEncoder::OutputCB output_cb = base::BindLambdaForTesting(
+ [&](VideoEncoderOutput, base::Optional<VideoEncoder::CodecDescription>) {
+ outputs_count++;
+ });
+
+ vea()->SetEncodingCallback(base::BindLambdaForTesting(
+ [&](BitstreamBuffer&, bool keyframe, scoped_refptr<VideoFrame> frame) {
+ EXPECT_EQ(frame->coded_size(), small_size);
+ return BitstreamBufferMetadata(1, keyframe, frame->timestamp());
+ }));
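+  // Marking the fake accelerator as resize-capable lets frames smaller than
+  // |options.frame_size| reach it without being scaled first, which is what
+  // the coded_size expectation above checks.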
+ vea()->SupportResize();
+ adapter()->Initialize(profile_, options, std::move(output_cb),
+ ValidatingStatusCB());
+
+ auto frame1 = CreateGreenFrame(small_size, pixel_format,
+ base::TimeDelta::FromMilliseconds(1));
+ auto frame2 = CreateGreenFrame(small_size, pixel_format,
+ base::TimeDelta::FromMilliseconds(2));
+ adapter()->Encode(frame1, true, ValidatingStatusCB());
+ adapter()->Encode(frame2, false, ValidatingStatusCB());
+ RunUntilIdle();
+ EXPECT_EQ(outputs_count, 2);
+}
+
+TEST_P(VideoEncodeAcceleratorAdapterTest, RunWithAllPossibleInputConversions) {
+ VideoEncoder::Options options;
+ options.frame_size = gfx::Size(640, 480);
+ int outputs_count = 0;
+ gfx::Size small_size(480, 320);
+ gfx::Size same_size = options.frame_size;
+ gfx::Size large_size(800, 600);
+ int frames_to_encode = 33;
+ auto pixel_format = GetParam();
+ auto input_kind =
+ (pixel_format == PIXEL_FORMAT_NV12)
+ ? VideoEncodeAcceleratorAdapter::InputBufferKind::GpuMemBuf
+ : VideoEncodeAcceleratorAdapter::InputBufferKind::CpuMemBuf;
+ adapter()->SetInputBufferPreferenceForTesting(input_kind);
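+  // NV12 is the only format in this suite that can be backed by a GPU memory
+  // buffer, so it exercises the GpuMemBuf input path; the others stay
+  // CPU-backed.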
+
+ VideoEncoder::OutputCB output_cb = base::BindLambdaForTesting(
+ [&](VideoEncoderOutput, base::Optional<VideoEncoder::CodecDescription>) {
+ outputs_count++;
+ });
+
+ vea()->SetEncodingCallback(base::BindLambdaForTesting(
+ [&](BitstreamBuffer&, bool keyframe, scoped_refptr<VideoFrame> frame) {
+ EXPECT_EQ(frame->format(),
+ IsYuvPlanar(pixel_format) ? pixel_format : PIXEL_FORMAT_I420);
+ EXPECT_EQ(frame->coded_size(), options.frame_size);
+ return BitstreamBufferMetadata(1, keyframe, frame->timestamp());
+ }));
+ adapter()->Initialize(profile_, options, std::move(output_cb),
+ ValidatingStatusCB());
+
+ for (int frame_index = 0; frame_index < frames_to_encode; frame_index++) {
+ gfx::Size size;
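+    // Cycle large -> small -> same size so that downscaling, upscaling and
+    // the no-resize path are all exercised.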
+ if (frame_index % 4 == 0)
+ size = large_size;
+ else if (frame_index % 4 == 1)
+ size = small_size;
+ else
+ size = same_size;
+
+ // Every 4 frames switch between the 3 supported formats.
+ const int rem = frame_index % 12;
+ auto format = PIXEL_FORMAT_XRGB;
+ if (rem < 4)
+ format = PIXEL_FORMAT_I420;
+ else if (rem < 8)
+ format = PIXEL_FORMAT_NV12;
+ bool key = frame_index % 9 == 0;
+ auto frame = CreateGreenFrame(
+ size, format, base::TimeDelta::FromMilliseconds(frame_index));
+ adapter()->Encode(frame, key, ValidatingStatusCB());
+ }
+
+ RunUntilIdle();
+ EXPECT_EQ(outputs_count, frames_to_encode);
+}
+
+INSTANTIATE_TEST_SUITE_P(VideoEncodeAcceleratorAdapterTest,
+ VideoEncodeAcceleratorAdapterTest,
+ ::testing::Values(PIXEL_FORMAT_I420,
+ PIXEL_FORMAT_NV12,
+ PIXEL_FORMAT_XRGB));
+
+} // namespace media
diff --git a/chromium/media/video/video_encoder_info.cc b/chromium/media/video/video_encoder_info.cc
index 485d1e4d1da..1817f894c58 100644
--- a/chromium/media/video/video_encoder_info.cc
+++ b/chromium/media/video/video_encoder_info.cc
@@ -54,6 +54,5 @@ bool operator==(const VideoEncoderInfo& l, const VideoEncoderInfo& r) {
l.supports_simulcast == r.supports_simulcast &&
l.scaling_settings == r.scaling_settings &&
l.resolution_bitrate_limits == r.resolution_bitrate_limits;
- return true;
}
} // namespace media
diff --git a/chromium/media/video/vpx_video_encoder.cc b/chromium/media/video/vpx_video_encoder.cc
index 41a37eba393..1d3be745af4 100644
--- a/chromium/media/video/vpx_video_encoder.cc
+++ b/chromium/media/video/vpx_video_encoder.cc
@@ -11,6 +11,7 @@
#include "base/time/time.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/video_frame.h"
+#include "media/base/video_util.h"
#include "third_party/libvpx/source/libvpx/vpx/vp8cx.h"
#include "third_party/libyuv/include/libyuv/convert.h"
@@ -81,6 +82,23 @@ Status SetUpVpxConfig(const VideoEncoder::Options& opts,
return Status();
}
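+// Frees and re-allocates |vpx_image| only when the requested format or
+// dimensions differ from its current ones; otherwise leaves it untouched.
+// Returns a non-ok Status when vpx_img_alloc() rejects the combination.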
+Status ReallocateVpxImageIfNeeded(vpx_image_t* vpx_image,
+ const vpx_img_fmt fmt,
+ int width,
+ int height) {
+  if (vpx_image->fmt != fmt || static_cast<int>(vpx_image->w) != width ||
+      static_cast<int>(vpx_image->h) != height) {
+ vpx_img_free(vpx_image);
+ if (vpx_image != vpx_img_alloc(vpx_image, fmt, width, height, 1)) {
+ return Status(StatusCode::kEncoderFailedEncode,
+ "Invalid format or frame size.");
+ }
+ vpx_image->bit_depth = (fmt & VPX_IMG_FMT_HIGHBITDEPTH) ? 16 : 8;
+ }
+  // Otherwise this is a no-op: the image doesn't need to change format or
+  // size.
+ return Status();
+}
+
void FreeCodecCtx(vpx_codec_ctx_t* codec_ctx) {
if (codec_ctx->name) {
// Codec has been initialized, we need to destroy it.
@@ -104,15 +122,15 @@ void VpxVideoEncoder::Initialize(VideoCodecProfile profile,
std::move(done_cb).Run(StatusCode::kEncoderInitializeTwice);
return;
}
-
profile_ = profile;
+ bool is_vp9 = false;
vpx_codec_iface_t* iface = nullptr;
if (profile == VP8PROFILE_ANY) {
iface = vpx_codec_vp8_cx();
} else if (profile == VP9PROFILE_PROFILE0 || profile == VP9PROFILE_PROFILE2) {
// TODO(https://crbug.com/1116617): Consider support for profiles 1 and 3.
- is_vp9_ = true;
+ is_vp9 = true;
iface = vpx_codec_vp9_cx();
} else {
auto status = Status(StatusCode::kEncoderUnsupportedProfile)
@@ -189,9 +207,9 @@ void VpxVideoEncoder::Initialize(VideoCodecProfile profile,
return;
}
- if (&vpx_image_ != vpx_img_wrap(&vpx_image_, img_fmt,
- options.frame_size.width(),
- options.frame_size.height(), 1, nullptr)) {
+ if (&vpx_image_ != vpx_img_alloc(&vpx_image_, img_fmt,
+ options.frame_size.width(),
+ options.frame_size.height(), 1)) {
status = Status(StatusCode::kEncoderInitializationError,
"Invalid format or frame size.");
std::move(done_cb).Run(status);
@@ -199,7 +217,7 @@ void VpxVideoEncoder::Initialize(VideoCodecProfile profile,
}
vpx_image_.bit_depth = bits_for_storage;
- if (is_vp9_) {
+ if (is_vp9) {
// Set the number of column tiles in encoding an input frame, with number of
// tile columns (in Log2 unit) as the parameter.
// The minimum width of a tile column is 256 pixels, the maximum is 4096.
@@ -233,7 +251,14 @@ void VpxVideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
"No frame provided for encoding."));
return;
}
- if (!frame->IsMappable() || frame->format() != PIXEL_FORMAT_I420) {
+ bool supported_format = frame->format() == PIXEL_FORMAT_NV12 ||
+ frame->format() == PIXEL_FORMAT_I420 ||
+ frame->format() == PIXEL_FORMAT_XBGR ||
+ frame->format() == PIXEL_FORMAT_XRGB ||
+ frame->format() == PIXEL_FORMAT_ABGR ||
+ frame->format() == PIXEL_FORMAT_ARGB;
+ if ((!frame->IsMappable() && !frame->HasGpuMemoryBuffer()) ||
+ !supported_format) {
status =
Status(StatusCode::kEncoderFailedEncode, "Unexpected frame format.")
.WithData("IsMappable", frame->IsMappable())
@@ -242,11 +267,38 @@ void VpxVideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
return;
}
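+  // GPU-backed NV12 frames must be mapped into CPU memory before libvpx can
+  // read their planes.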
+ if (frame->format() == PIXEL_FORMAT_NV12 && frame->HasGpuMemoryBuffer()) {
+ frame = ConvertToMemoryMappedFrame(frame);
+ if (!frame) {
+ std::move(done_cb).Run(
+ Status(StatusCode::kEncoderFailedEncode,
+ "Convert GMB frame to MemoryMappedFrame failed."));
+ return;
+ }
+ }
+
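+  // Any frame that doesn't match the configured size, or isn't YUV-planar,
+  // is first converted and scaled into a pooled frame of the target size.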
+ const bool is_yuv = IsYuvPlanar(frame->format());
+ if (frame->visible_rect().size() != options_.frame_size || !is_yuv) {
+ auto resized_frame = frame_pool_.CreateFrame(
+ is_yuv ? frame->format() : PIXEL_FORMAT_I420, options_.frame_size,
+ gfx::Rect(options_.frame_size), options_.frame_size,
+ frame->timestamp());
+ if (resized_frame) {
+ status = ConvertAndScaleFrame(*frame, *resized_frame, resize_buf_);
+ } else {
+ status = Status(StatusCode::kEncoderFailedEncode,
+ "Can't allocate a resized frame.");
+ }
+ if (!status.is_ok()) {
+ std::move(done_cb).Run(std::move(status));
+ return;
+ }
+ frame = std::move(resized_frame);
+ }
+
switch (profile_) {
- case VP9PROFILE_PROFILE1:
- NOTREACHED();
- break;
case VP9PROFILE_PROFILE2:
+      // Profile 2 uses 10-bit color.
libyuv::I420ToI010(
frame->visible_data(VideoFrame::kYPlane),
frame->stride(VideoFrame::kYPlane),
@@ -259,22 +311,46 @@ void VpxVideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
reinterpret_cast<uint16_t*>(vpx_image_.planes[VPX_PLANE_U]),
vpx_image_.stride[VPX_PLANE_U] / 2,
reinterpret_cast<uint16_t*>(vpx_image_.planes[VPX_PLANE_V]),
- vpx_image_.stride[VPX_PLANE_V] / 2, frame->coded_size().width(),
- frame->coded_size().height());
+ vpx_image_.stride[VPX_PLANE_V] / 2, frame->visible_rect().width(),
+ frame->visible_rect().height());
break;
+ case VP9PROFILE_PROFILE1:
case VP9PROFILE_PROFILE3:
NOTREACHED();
break;
default:
- vpx_image_.planes[VPX_PLANE_Y] =
- const_cast<uint8_t*>(frame->visible_data(VideoFrame::kYPlane));
- vpx_image_.planes[VPX_PLANE_U] =
- const_cast<uint8_t*>(frame->visible_data(VideoFrame::kUPlane));
- vpx_image_.planes[VPX_PLANE_V] =
- const_cast<uint8_t*>(frame->visible_data(VideoFrame::kVPlane));
- vpx_image_.stride[VPX_PLANE_Y] = frame->stride(VideoFrame::kYPlane);
- vpx_image_.stride[VPX_PLANE_U] = frame->stride(VideoFrame::kUPlane);
- vpx_image_.stride[VPX_PLANE_V] = frame->stride(VideoFrame::kVPlane);
+ vpx_img_fmt_t fmt = frame->format() == PIXEL_FORMAT_NV12
+ ? VPX_IMG_FMT_NV12
+ : VPX_IMG_FMT_I420;
+ Status status = ReallocateVpxImageIfNeeded(
+ &vpx_image_, fmt, codec_config_.g_w, codec_config_.g_h);
+ if (!status.is_ok()) {
+ std::move(done_cb).Run(status);
+ return;
+ }
+ if (fmt == VPX_IMG_FMT_NV12) {
+ vpx_image_.planes[VPX_PLANE_Y] =
+ const_cast<uint8_t*>(frame->visible_data(VideoFrame::kYPlane));
+ vpx_image_.planes[VPX_PLANE_U] =
+ const_cast<uint8_t*>(frame->visible_data(VideoFrame::kUVPlane));
+        // In NV12 the U and V samples are interleaved in a single plane
+        // (bytes go UVUVUV), but libvpx treats them as two planes with the
+        // same stride, offset from each other by one byte.
+ vpx_image_.planes[VPX_PLANE_V] = vpx_image_.planes[VPX_PLANE_U] + 1;
+ vpx_image_.stride[VPX_PLANE_Y] = frame->stride(VideoFrame::kYPlane);
+ vpx_image_.stride[VPX_PLANE_U] = frame->stride(VideoFrame::kUVPlane);
+ vpx_image_.stride[VPX_PLANE_V] = frame->stride(VideoFrame::kUVPlane);
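+        // E.g. with a UV plane laid out as U0 V0 U1 V1 ..., VPX_PLANE_U
+        // points at U0 and VPX_PLANE_V at V0; advancing either pointer by
+        // the shared stride lands on the next row's first U or V sample.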
+ } else {
+ vpx_image_.planes[VPX_PLANE_Y] =
+ const_cast<uint8_t*>(frame->visible_data(VideoFrame::kYPlane));
+ vpx_image_.planes[VPX_PLANE_U] =
+ const_cast<uint8_t*>(frame->visible_data(VideoFrame::kUPlane));
+ vpx_image_.planes[VPX_PLANE_V] =
+ const_cast<uint8_t*>(frame->visible_data(VideoFrame::kVPlane));
+ vpx_image_.stride[VPX_PLANE_Y] = frame->stride(VideoFrame::kYPlane);
+ vpx_image_.stride[VPX_PLANE_U] = frame->stride(VideoFrame::kUPlane);
+ vpx_image_.stride[VPX_PLANE_V] = frame->stride(VideoFrame::kVPlane);
+ }
break;
}
@@ -354,20 +430,12 @@ void VpxVideoEncoder::ChangeOptions(const Options& options,
return;
}
- if (options_.frame_size != options.frame_size) {
- // Need to re-allocate |vpx_image_| because the size has changed.
- auto img_fmt = vpx_image_.fmt;
- auto bit_depth = vpx_image_.bit_depth;
- vpx_img_free(&vpx_image_);
- if (&vpx_image_ != vpx_img_wrap(&vpx_image_, img_fmt,
- options.frame_size.width(),
- options.frame_size.height(), 1, nullptr)) {
- status = Status(StatusCode::kEncoderInitializationError,
- "Invalid format or frame size.");
- std::move(done_cb).Run(status);
- return;
- }
- vpx_image_.bit_depth = bit_depth;
+ status = ReallocateVpxImageIfNeeded(&vpx_image_, vpx_image_.fmt,
+ options.frame_size.width(),
+ options.frame_size.height());
+ if (!status.is_ok()) {
+ std::move(done_cb).Run(status);
+ return;
}
auto vpx_error = vpx_codec_enc_config_set(codec_.get(), &new_config);
@@ -387,8 +455,8 @@ void VpxVideoEncoder::ChangeOptions(const Options& options,
base::TimeDelta VpxVideoEncoder::GetFrameDuration(const VideoFrame& frame) {
// Frame has duration in metadata, use it.
- if (frame.metadata()->frame_duration.has_value())
- return frame.metadata()->frame_duration.value();
+ if (frame.metadata().frame_duration.has_value())
+ return frame.metadata().frame_duration.value();
// Options have framerate specified, use it.
if (options_.framerate.has_value())
diff --git a/chromium/media/video/vpx_video_encoder.h b/chromium/media/video/vpx_video_encoder.h
index cd5a17bbcfb..9a613537fe5 100644
--- a/chromium/media/video/vpx_video_encoder.h
+++ b/chromium/media/video/vpx_video_encoder.h
@@ -6,10 +6,12 @@
#define MEDIA_VIDEO_VPX_VIDEO_ENCODER_H_
#include <memory>
+#include <vector>
#include "base/callback_forward.h"
#include "media/base/media_export.h"
#include "media/base/video_encoder.h"
+#include "media/base/video_frame_pool.h"
#include "third_party/libvpx/source/libvpx/vpx/vpx_encoder.h"
#include "ui/gfx/geometry/size.h"
@@ -41,12 +43,13 @@ class MEDIA_EXPORT VpxVideoEncoder : public VideoEncoder {
std::unique_ptr<vpx_codec_ctx_t, void (*)(vpx_codec_ctx_t*)>;
vpx_codec_unique_ptr codec_;
- bool is_vp9_ = false;
vpx_codec_enc_cfg_t codec_config_ = {};
vpx_image_t vpx_image_ = {};
gfx::Size originally_configured_size_;
base::TimeDelta last_frame_timestamp_;
VideoCodecProfile profile_ = VIDEO_CODEC_PROFILE_UNKNOWN;
+ VideoFramePool frame_pool_;
+ std::vector<uint8_t> resize_buf_;
Options options_;
OutputCB output_cb_;
};
diff --git a/chromium/media/webrtc/OWNERS b/chromium/media/webrtc/OWNERS
index 094bddf3394..217dfcd2975 100644
--- a/chromium/media/webrtc/OWNERS
+++ b/chromium/media/webrtc/OWNERS
@@ -1,4 +1,3 @@
olka@chromium.org
dalecurtis@chromium.org
-miu@chromium.org
ossu@chromium.org