author     Allan Sandfeld Jensen <allan.jensen@qt.io>  2020-01-20 13:40:20 +0100
committer  Allan Sandfeld Jensen <allan.jensen@qt.io>  2020-01-22 12:41:23 +0000
commit     7961cea6d1041e3e454dae6a1da660b453efd238 (patch)
tree       c0eeb4a9ff9ba32986289c1653d9608e53ccb444 /chromium/media
parent     b7034d0803538058e5c9d904ef03cf5eab34f6ef (diff)
download   qtwebengine-chromium-7961cea6d1041e3e454dae6a1da660b453efd238.tar.gz
BASELINE: Update Chromium to 78.0.3904.130
Change-Id: If185e0c0061b3437531c97c9c8c78f239352a68b
Reviewed-by: Allan Sandfeld Jensen <allan.jensen@qt.io>
Diffstat (limited to 'chromium/media')
-rw-r--r--chromium/media/BUILD.gn6
-rw-r--r--chromium/media/DEPS8
-rw-r--r--chromium/media/audio/BUILD.gn2
-rw-r--r--chromium/media/audio/OWNERS1
-rw-r--r--chromium/media/audio/alive_checker_unittest.cc4
-rw-r--r--chromium/media/audio/android/audio_manager_android.cc7
-rw-r--r--chromium/media/audio/android/audio_manager_android.h7
-rw-r--r--chromium/media/audio/android/opensles_output.cc26
-rw-r--r--chromium/media/audio/android/opensles_output.h11
-rw-r--r--chromium/media/audio/audio_debug_file_writer.h5
-rw-r--r--chromium/media/audio/audio_debug_file_writer_unittest.cc22
-rw-r--r--chromium/media/audio/audio_debug_recording_helper_unittest.cc6
-rw-r--r--chromium/media/audio/audio_debug_recording_manager_unittest.cc6
-rw-r--r--chromium/media/audio/audio_debug_recording_session_impl.cc4
-rw-r--r--chromium/media/audio/audio_debug_recording_session_impl_unittest.cc4
-rw-r--r--chromium/media/audio/audio_debug_recording_test.h4
-rw-r--r--chromium/media/audio/audio_device_description.cc4
-rw-r--r--chromium/media/audio/audio_device_description.h6
-rw-r--r--chromium/media/audio/audio_features.cc8
-rw-r--r--chromium/media/audio/audio_input_controller_unittest.cc4
-rw-r--r--chromium/media/audio/audio_input_device_unittest.cc8
-rw-r--r--chromium/media/audio/audio_input_ipc.h2
-rw-r--r--chromium/media/audio/audio_input_sync_writer_unittest.cc4
-rw-r--r--chromium/media/audio/audio_input_unittest.cc4
-rw-r--r--chromium/media/audio/audio_io.h3
-rw-r--r--chromium/media/audio/audio_low_latency_input_output_unittest.cc6
-rw-r--r--chromium/media/audio/audio_output_device.h2
-rw-r--r--chromium/media/audio/audio_output_device_unittest.cc53
-rw-r--r--chromium/media/audio/audio_output_ipc.h9
-rw-r--r--chromium/media/audio/audio_output_proxy_unittest.cc8
-rw-r--r--chromium/media/audio/audio_output_unittest.cc5
-rw-r--r--chromium/media/audio/audio_sink_parameters.cc5
-rw-r--r--chromium/media/audio/audio_sink_parameters.h5
-rw-r--r--chromium/media/audio/audio_source_parameters.cc3
-rw-r--r--chromium/media/audio/audio_source_parameters.h4
-rw-r--r--chromium/media/audio/audio_sync_reader_unittest.cc4
-rw-r--r--chromium/media/audio/audio_system_impl_unittest.cc5
-rw-r--r--chromium/media/audio/audio_thread_hang_monitor.cc2
-rw-r--r--chromium/media/audio/audio_thread_hang_monitor_unittest.cc8
-rw-r--r--chromium/media/audio/audio_thread_impl.cc3
-rw-r--r--chromium/media/audio/cras/cras_input.cc3
-rw-r--r--chromium/media/audio/cras/cras_unified.cc3
-rw-r--r--chromium/media/audio/fake_audio_input_stream.cc79
-rw-r--r--chromium/media/audio/fake_audio_input_stream.h13
-rw-r--r--chromium/media/audio/fuchsia/OWNERS3
-rw-r--r--chromium/media/audio/mac/audio_auhal_mac_unittest.cc4
-rw-r--r--chromium/media/audio/mac/audio_device_listener_mac_unittest.cc8
-rw-r--r--chromium/media/audio/mac/audio_low_latency_input_mac_unittest.cc12
-rw-r--r--chromium/media/audio/mac/coreaudio_dispatch_override.cc13
-rw-r--r--chromium/media/audio/power_observer_helper_unittest.cc4
-rw-r--r--chromium/media/audio/pulse/pulse_util.cc3
-rw-r--r--chromium/media/audio/win/audio_low_latency_input_win.cc31
-rw-r--r--chromium/media/audio/win/audio_low_latency_input_win_unittest.cc12
-rw-r--r--chromium/media/audio/win/audio_output_win_unittest.cc4
-rw-r--r--chromium/media/audio/win/core_audio_util_win.cc2
-rw-r--r--chromium/media/base/BUILD.gn10
-rw-r--r--chromium/media/base/android/android_cdm_factory.cc3
-rw-r--r--chromium/media/base/android/android_cdm_factory.h2
-rw-r--r--chromium/media/base/android/android_overlay.cc2
-rw-r--r--chromium/media/base/android/android_overlay.h2
-rw-r--r--chromium/media/base/android/media_codec_loop.cc3
-rw-r--r--chromium/media/base/android/media_codec_loop.h2
-rw-r--r--chromium/media/base/android/media_drm_bridge.cc3
-rw-r--r--chromium/media/base/android/media_drm_bridge.h2
-rw-r--r--chromium/media/base/android/media_drm_bridge_factory.cc3
-rw-r--r--chromium/media/base/android/media_drm_bridge_factory.h2
-rw-r--r--chromium/media/base/android/media_drm_bridge_unittest.cc4
-rw-r--r--chromium/media/base/android/media_drm_storage_bridge.cc2
-rw-r--r--chromium/media/base/android/media_drm_storage_bridge.h2
-rw-r--r--chromium/media/base/android/media_player_bridge.cc49
-rw-r--r--chromium/media/base/android/media_player_bridge.h19
-rw-r--r--chromium/media/base/android/media_player_bridge_unittest.cc14
-rw-r--r--chromium/media/base/android/media_resource_getter.h5
-rw-r--r--chromium/media/base/android/media_service_throttler_unittest.cc4
-rw-r--r--chromium/media/base/android/mock_android_overlay.cc2
-rw-r--r--chromium/media/base/android/mock_android_overlay.h2
-rw-r--r--chromium/media/base/android/test_destruction_observable.cc2
-rw-r--r--chromium/media/base/android/test_destruction_observable.h2
-rw-r--r--chromium/media/base/audio_parameters.h21
-rw-r--r--chromium/media/base/audio_renderer_mixer_input_unittest.cc16
-rw-r--r--chromium/media/base/audio_renderer_mixer_unittest.cc4
-rw-r--r--chromium/media/base/bind_to_current_loop_unittest.cc4
-rw-r--r--chromium/media/base/bitstream_buffer.cc12
-rw-r--r--chromium/media/base/bitstream_buffer.h8
-rw-r--r--chromium/media/base/callback_registry_unittest.cc30
-rw-r--r--chromium/media/base/fake_audio_worker_unittest.cc117
-rw-r--r--chromium/media/base/fake_demuxer_stream_unittest.cc4
-rw-r--r--chromium/media/base/fallback_video_decoder_unittest.cc4
-rw-r--r--chromium/media/base/media_resource.cc8
-rw-r--r--chromium/media/base/media_resource.h2
-rw-r--r--chromium/media/base/media_switches.cc38
-rw-r--r--chromium/media/base/media_switches.h11
-rw-r--r--chromium/media/base/media_url_demuxer.cc6
-rw-r--r--chromium/media/base/media_url_demuxer.h3
-rw-r--r--chromium/media/base/media_url_demuxer_unittest.cc14
-rw-r--r--chromium/media/base/media_url_params.cc23
-rw-r--r--chromium/media/base/media_url_params.h14
-rw-r--r--chromium/media/base/mock_filters.h3
-rw-r--r--chromium/media/base/null_video_sink_unittest.cc6
-rw-r--r--chromium/media/base/pipeline_impl_unittest.cc22
-rw-r--r--chromium/media/base/renderer_client.cc13
-rw-r--r--chromium/media/base/renderer_client.h8
-rw-r--r--chromium/media/base/renderer_factory_selector.cc11
-rw-r--r--chromium/media/base/renderer_factory_selector.h19
-rw-r--r--chromium/media/base/serial_runner_unittest.cc8
-rw-r--r--chromium/media/base/simple_watch_timer.cc66
-rw-r--r--chromium/media/base/simple_watch_timer.h56
-rw-r--r--chromium/media/base/text_renderer_unittest.cc8
-rw-r--r--chromium/media/base/user_input_monitor_unittest.cc18
-rw-r--r--chromium/media/base/video_codecs.h2
-rw-r--r--chromium/media/base/video_frame.cc246
-rw-r--r--chromium/media/base/video_frame.h136
-rw-r--r--chromium/media/base/video_frame_layout.cc1
-rw-r--r--chromium/media/base/video_frame_unittest.cc58
-rw-r--r--chromium/media/base/video_thumbnail_decoder_unittest.cc4
-rw-r--r--chromium/media/base/video_types.cc5
-rw-r--r--chromium/media/base/video_types.h10
-rw-r--r--chromium/media/blink/BUILD.gn4
-rw-r--r--chromium/media/blink/cdm_result_promise.h35
-rw-r--r--chromium/media/blink/multibuffer_data_source.cc7
-rw-r--r--chromium/media/blink/multibuffer_data_source_unittest.cc22
-rw-r--r--chromium/media/blink/new_session_cdm_result_promise.cc2
-rw-r--r--chromium/media/blink/run_all_unittests.cc6
-rw-r--r--chromium/media/blink/video_decode_stats_reporter.cc2
-rw-r--r--chromium/media/blink/video_decode_stats_reporter.h2
-rw-r--r--chromium/media/blink/video_decode_stats_reporter_unittest.cc4
-rw-r--r--chromium/media/blink/watch_time_component.h2
-rw-r--r--chromium/media/blink/watch_time_reporter.h4
-rw-r--r--chromium/media/blink/watch_time_reporter_unittest.cc8
-rw-r--r--chromium/media/blink/webcontentdecryptionmodule_impl.cc16
-rw-r--r--chromium/media/blink/webcontentdecryptionmodulesession_impl.cc12
-rw-r--r--chromium/media/blink/webmediaplayer_impl.cc33
-rw-r--r--chromium/media/blink/webmediaplayer_impl.h12
-rw-r--r--chromium/media/blink/webmediaplayer_impl_unittest.cc16
-rw-r--r--chromium/media/blink/webmediaplayer_params.h2
-rw-r--r--chromium/media/capabilities/in_memory_video_decode_stats_db_unittest.cc40
-rw-r--r--chromium/media/capabilities/learning_helper.cc6
-rw-r--r--chromium/media/capabilities/video_decode_stats_db_impl.cc78
-rw-r--r--chromium/media/capabilities/video_decode_stats_db_impl.h16
-rw-r--r--chromium/media/capabilities/video_decode_stats_db_impl_unittest.cc48
-rw-r--r--chromium/media/capture/BUILD.gn25
-rw-r--r--chromium/media/capture/mojom/BUILD.gn8
-rw-r--r--chromium/media/capture/mojom/image_capture.mojom8
-rw-r--r--chromium/media/capture/mojom/image_capture_types.cc2
-rw-r--r--chromium/media/capture/mojom/video_capture.mojom29
-rw-r--r--chromium/media/capture/mojom/video_capture_types.mojom10
-rw-r--r--chromium/media/capture/mojom/video_capture_types.typemap26
-rw-r--r--chromium/media/capture/mojom/video_capture_types_for_blink.typemap32
-rw-r--r--chromium/media/capture/mojom/video_capture_types_mojom_traits.cc13
-rw-r--r--chromium/media/capture/run_all_unittests.cc4
-rw-r--r--chromium/media/capture/video/android/video_capture_device_android.cc3
-rw-r--r--chromium/media/capture/video/android/video_capture_device_android.h2
-rw-r--r--chromium/media/capture/video/chromeos/camera_3a_controller.cc3
-rw-r--r--chromium/media/capture/video/chromeos/camera_3a_controller.h4
-rw-r--r--chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.cc80
-rw-r--r--chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.h65
-rw-r--r--chromium/media/capture/video/chromeos/camera_app_device_impl.cc256
-rw-r--r--chromium/media/capture/video/chromeos/camera_app_device_impl.h146
-rw-r--r--chromium/media/capture/video/chromeos/camera_app_device_provider_impl.cc51
-rw-r--r--chromium/media/capture/video/chromeos/camera_app_device_provider_impl.h50
-rw-r--r--chromium/media/capture/video/chromeos/camera_app_helper_impl.cc21
-rw-r--r--chromium/media/capture/video/chromeos/camera_app_helper_impl.h38
-rw-r--r--chromium/media/capture/video/chromeos/camera_buffer_factory.h2
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_delegate.cc71
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_delegate.h12
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc23
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_delegate.cc34
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_delegate.h8
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc4
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc2
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h2
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl_unittest.cc8
-rw-r--r--chromium/media/capture/video/chromeos/camera_metadata_utils.cc1
-rw-r--r--chromium/media/capture/video/chromeos/camera_metadata_utils.h2
-rw-r--r--chromium/media/capture/video/chromeos/cros_image_capture_impl.cc61
-rw-r--r--chromium/media/capture/video/chromeos/cros_image_capture_impl.h56
-rw-r--r--chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.cc11
-rw-r--r--chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.h4
-rw-r--r--chromium/media/capture/video/chromeos/local_gpu_memory_buffer_manager.cc57
-rw-r--r--chromium/media/capture/video/chromeos/local_gpu_memory_buffer_manager.h5
-rw-r--r--chromium/media/capture/video/chromeos/mock_camera_module.h4
-rw-r--r--chromium/media/capture/video/chromeos/mock_vendor_tag_ops.h2
-rw-r--r--chromium/media/capture/video/chromeos/mojo/cros_image_capture.mojom64
-rw-r--r--chromium/media/capture/video/chromeos/mojom/BUILD.gn (renamed from chromium/media/capture/video/chromeos/mojo/BUILD.gn)9
-rw-r--r--chromium/media/capture/video/chromeos/mojom/OWNERS (renamed from chromium/media/capture/video/chromeos/mojo/OWNERS)0
-rw-r--r--chromium/media/capture/video/chromeos/mojom/camera3.mojom (renamed from chromium/media/capture/video/chromeos/mojo/camera3.mojom)2
-rw-r--r--chromium/media/capture/video/chromeos/mojom/camera_app.mojom135
-rw-r--r--chromium/media/capture/video/chromeos/mojom/camera_common.mojom (renamed from chromium/media/capture/video/chromeos/mojo/camera_common.mojom)4
-rw-r--r--chromium/media/capture/video/chromeos/mojom/camera_metadata.mojom (renamed from chromium/media/capture/video/chromeos/mojo/camera_metadata.mojom)2
-rw-r--r--chromium/media/capture/video/chromeos/mojom/camera_metadata_tags.mojom (renamed from chromium/media/capture/video/chromeos/mojo/camera_metadata_tags.mojom)0
-rw-r--r--chromium/media/capture/video/chromeos/mojom/cros_camera_service.mojom (renamed from chromium/media/capture/video/chromeos/mojo/cros_camera_service.mojom)2
-rw-r--r--chromium/media/capture/video/chromeos/pixel_format_utils.h2
-rw-r--r--chromium/media/capture/video/chromeos/renderer_facing_cros_image_capture.cc111
-rw-r--r--chromium/media/capture/video/chromeos/renderer_facing_cros_image_capture.h85
-rw-r--r--chromium/media/capture/video/chromeos/reprocess_manager.cc260
-rw-r--r--chromium/media/capture/video/chromeos/reprocess_manager.h162
-rw-r--r--chromium/media/capture/video/chromeos/request_builder.h2
-rw-r--r--chromium/media/capture/video/chromeos/request_manager.cc51
-rw-r--r--chromium/media/capture/video/chromeos/request_manager.h21
-rw-r--r--chromium/media/capture/video/chromeos/request_manager_unittest.cc18
-rw-r--r--chromium/media/capture/video/chromeos/stream_buffer_manager.cc10
-rw-r--r--chromium/media/capture/video/chromeos/stream_buffer_manager.h4
-rw-r--r--chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.h2
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc14
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h11
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.cc43
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h22
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.cc111
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.h12
-rw-r--r--chromium/media/capture/video/create_video_capture_device_factory.cc84
-rw-r--r--chromium/media/capture/video/create_video_capture_device_factory.h9
-rw-r--r--chromium/media/capture/video/fake_video_capture_device.cc29
-rw-r--r--chromium/media/capture/video/fake_video_capture_device.h10
-rw-r--r--chromium/media/capture/video/fake_video_capture_device_factory.cc7
-rw-r--r--chromium/media/capture/video/fake_video_capture_device_unittest.cc27
-rw-r--r--chromium/media/capture/video/file_video_capture_device.cc4
-rw-r--r--chromium/media/capture/video/file_video_capture_device_unittest.cc4
-rw-r--r--chromium/media/capture/video/linux/v4l2_capture_delegate.cc131
-rw-r--r--chromium/media/capture/video/linux/v4l2_capture_delegate.h2
-rw-r--r--chromium/media/capture/video/linux/v4l2_capture_delegate_unittest.cc4
-rw-r--r--chromium/media/capture/video/linux/video_capture_device_factory_linux_unittest.cc5
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_factory_mac_unittest.mm7
-rw-r--r--chromium/media/capture/video/shared_memory_buffer_tracker.cc55
-rw-r--r--chromium/media/capture/video/shared_memory_buffer_tracker.h14
-rw-r--r--chromium/media/capture/video/shared_memory_handle_provider.cc169
-rw-r--r--chromium/media/capture/video/shared_memory_handle_provider.h104
-rw-r--r--chromium/media/capture/video/shared_memory_handle_provider_unittest.cc78
-rw-r--r--chromium/media/capture/video/video_capture_buffer_handle.cc2
-rw-r--r--chromium/media/capture/video/video_capture_buffer_handle.h2
-rw-r--r--chromium/media/capture/video/video_capture_buffer_pool.h16
-rw-r--r--chromium/media/capture/video/video_capture_buffer_pool_impl.cc35
-rw-r--r--chromium/media/capture/video/video_capture_buffer_pool_impl.h8
-rw-r--r--chromium/media/capture/video/video_capture_buffer_tracker.h12
-rw-r--r--chromium/media/capture/video/video_capture_device.h17
-rw-r--r--chromium/media/capture/video/video_capture_device_client.cc28
-rw-r--r--chromium/media/capture/video/video_capture_device_factory.cc6
-rw-r--r--chromium/media/capture/video/video_capture_device_factory.h4
-rw-r--r--chromium/media/capture/video/video_capture_device_unittest.cc7
-rw-r--r--chromium/media/capture/video/video_capture_system.h10
-rw-r--r--chromium/media/capture/video/video_capture_system_impl.cc21
-rw-r--r--chromium/media/capture/video/video_capture_system_impl.h9
-rw-r--r--chromium/media/capture/video/win/video_capture_device_factory_win.cc5
-rw-r--r--chromium/media/capture/video/win/video_capture_device_factory_win.h2
-rw-r--r--chromium/media/capture/video_capture_types.h5
-rw-r--r--chromium/media/cast/cast_config.h4
-rw-r--r--chromium/media/cast/net/udp_packet_pipe_unittest.cc12
-rw-r--r--chromium/media/cast/net/udp_transport_unittest.cc11
-rw-r--r--chromium/media/cast/sender/external_video_encoder.cc115
-rw-r--r--chromium/media/cast/sender/fake_video_encode_accelerator_factory.cc13
-rw-r--r--chromium/media/cast/sender/fake_video_encode_accelerator_factory.h18
-rw-r--r--chromium/media/cast/sender/h264_vt_encoder_unittest.cc10
-rw-r--r--chromium/media/cdm/BUILD.gn9
-rw-r--r--chromium/media/cdm/aes_decryptor_unittest.cc4
-rw-r--r--chromium/media/cdm/cdm_adapter_unittest.cc6
-rw-r--r--chromium/media/cdm/cdm_paths.cc62
-rw-r--r--chromium/media/cdm/cdm_paths_unittest.cc110
-rw-r--r--chromium/media/cdm/external_clear_key_test_helper.cc6
-rw-r--r--chromium/media/cdm/library_cdm/cdm_paths.gni24
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/cdm_file_io_test.cc109
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/cdm_file_io_test.h29
-rw-r--r--chromium/media/device_monitors/device_monitor_udev.cc5
-rw-r--r--chromium/media/device_monitors/system_message_window_win_unittest.cc6
-rw-r--r--chromium/media/filters/BUILD.gn12
-rw-r--r--chromium/media/filters/android/media_codec_audio_decoder.cc3
-rw-r--r--chromium/media/filters/android/media_codec_audio_decoder.h2
-rw-r--r--chromium/media/filters/android/video_frame_extractor.cc2
-rw-r--r--chromium/media/filters/android/video_frame_extractor.h2
-rw-r--r--chromium/media/filters/android/video_frame_extractor_unittest.cc4
-rw-r--r--chromium/media/filters/aom_video_decoder_unittest.cc4
-rw-r--r--chromium/media/filters/audio_decoder_stream_unittest.cc4
-rw-r--r--chromium/media/filters/audio_decoder_unittest.cc8
-rw-r--r--chromium/media/filters/chunk_demuxer_unittest.cc14
-rw-r--r--chromium/media/filters/dav1d_video_decoder_unittest.cc4
-rw-r--r--chromium/media/filters/decoder_selector_unittest.cc10
-rw-r--r--chromium/media/filters/decrypting_audio_decoder_unittest.cc6
-rw-r--r--chromium/media/filters/decrypting_demuxer_stream.cc69
-rw-r--r--chromium/media/filters/decrypting_demuxer_stream.h21
-rw-r--r--chromium/media/filters/decrypting_demuxer_stream_unittest.cc6
-rw-r--r--chromium/media/filters/decrypting_media_resource_unittest.cc18
-rw-r--r--chromium/media/filters/decrypting_video_decoder_unittest.cc6
-rw-r--r--chromium/media/filters/demuxer_perftest.cc4
-rw-r--r--chromium/media/filters/fake_video_decoder_unittest.cc5
-rw-r--r--chromium/media/filters/ffmpeg_demuxer.cc5
-rw-r--r--chromium/media/filters/ffmpeg_demuxer_unittest.cc31
-rw-r--r--chromium/media/filters/ffmpeg_video_decoder_unittest.cc4
-rw-r--r--chromium/media/filters/frame_processor_unittest.cc4
-rw-r--r--chromium/media/filters/fuchsia/OWNERS3
-rw-r--r--chromium/media/filters/fuchsia/fuchsia_video_decoder.cc823
-rw-r--r--chromium/media/filters/fuchsia/fuchsia_video_decoder.h12
-rw-r--r--chromium/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc180
-rw-r--r--chromium/media/filters/offloading_video_decoder.cc4
-rw-r--r--chromium/media/filters/offloading_video_decoder_unittest.cc10
-rw-r--r--chromium/media/filters/pipeline_controller_unittest.cc4
-rw-r--r--chromium/media/filters/stream_parser_factory.cc67
-rw-r--r--chromium/media/filters/stream_parser_factory.h13
-rw-r--r--chromium/media/filters/video_decoder_stream_unittest.cc10
-rw-r--r--chromium/media/filters/vp9_parser.cc6
-rw-r--r--chromium/media/filters/vp9_parser.h4
-rw-r--r--chromium/media/filters/vp9_parser_encrypted_fuzzertest.cc6
-rw-r--r--chromium/media/filters/vp9_parser_fuzzertest.cc2
-rw-r--r--chromium/media/filters/vp9_parser_unittest.cc12
-rw-r--r--chromium/media/filters/vpx_video_decoder_unittest.cc4
-rw-r--r--chromium/media/fuchsia/OWNERS3
-rw-r--r--chromium/media/fuchsia/cdm/BUILD.gn22
-rw-r--r--chromium/media/fuchsia/cdm/DEPS4
-rw-r--r--chromium/media/fuchsia/cdm/fuchsia_cdm.cc398
-rw-r--r--chromium/media/fuchsia/cdm/fuchsia_cdm.h94
-rw-r--r--chromium/media/fuchsia/cdm/fuchsia_cdm_factory.cc (renamed from chromium/media/cdm/fuchsia/fuchsia_cdm_factory.cc)34
-rw-r--r--chromium/media/fuchsia/cdm/fuchsia_cdm_factory.h (renamed from chromium/media/cdm/fuchsia/fuchsia_cdm_factory.h)18
-rw-r--r--chromium/media/fuchsia/cdm/service/BUILD.gn23
-rw-r--r--chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager.cc282
-rw-r--r--chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager.h87
-rw-r--r--chromium/media/fuchsia/mojom/BUILD.gn11
-rw-r--r--chromium/media/fuchsia/mojom/DEPS3
-rw-r--r--chromium/media/fuchsia/mojom/OWNERS (renamed from chromium/media/mojo/interfaces/OWNERS)2
-rw-r--r--chromium/media/fuchsia/mojom/cdm_request.typemap16
-rw-r--r--chromium/media/fuchsia/mojom/cdm_request_mojom_traits.h24
-rw-r--r--chromium/media/fuchsia/mojom/fuchsia_cdm_provider.mojom21
-rw-r--r--chromium/media/fuchsia/mojom/typemaps.gni5
-rw-r--r--chromium/media/gpu/BUILD.gn29
-rw-r--r--chromium/media/gpu/OWNERS9
-rw-r--r--chromium/media/gpu/android/android_video_surface_chooser_impl.cc4
-rw-r--r--chromium/media/gpu/android/android_video_surface_chooser_impl.h2
-rw-r--r--chromium/media/gpu/android/codec_allocator.cc4
-rw-r--r--chromium/media/gpu/android/codec_allocator_unittest.cc18
-rw-r--r--chromium/media/gpu/android/codec_buffer_wait_coordinator.cc86
-rw-r--r--chromium/media/gpu/android/codec_buffer_wait_coordinator.h66
-rw-r--r--chromium/media/gpu/android/codec_image.cc123
-rw-r--r--chromium/media/gpu/android/codec_image.h61
-rw-r--r--chromium/media/gpu/android/codec_image_group.cc17
-rw-r--r--chromium/media/gpu/android/codec_image_group.h5
-rw-r--r--chromium/media/gpu/android/codec_image_group_unittest.cc4
-rw-r--r--chromium/media/gpu/android/codec_image_unittest.cc88
-rw-r--r--chromium/media/gpu/android/codec_surface_bundle.cc26
-rw-r--r--chromium/media/gpu/android/codec_surface_bundle.h17
-rw-r--r--chromium/media/gpu/android/codec_wrapper_unittest.cc4
-rw-r--r--chromium/media/gpu/android/direct_shared_image_video_provider.cc15
-rw-r--r--chromium/media/gpu/android/direct_shared_image_video_provider.h10
-rw-r--r--chromium/media/gpu/android/image_reader_gl_owner.cc485
-rw-r--r--chromium/media/gpu/android/image_reader_gl_owner.h137
-rw-r--r--chromium/media/gpu/android/image_reader_gl_owner_unittest.cc205
-rw-r--r--chromium/media/gpu/android/maybe_render_early_manager.cc12
-rw-r--r--chromium/media/gpu/android/media_codec_video_decoder.cc52
-rw-r--r--chromium/media/gpu/android/media_codec_video_decoder.h15
-rw-r--r--chromium/media/gpu/android/media_codec_video_decoder_unittest.cc22
-rw-r--r--chromium/media/gpu/android/mock_abstract_texture.cc19
-rw-r--r--chromium/media/gpu/android/mock_abstract_texture.h45
-rw-r--r--chromium/media/gpu/android/mock_codec_buffer_wait_coordinator.cc33
-rw-r--r--chromium/media/gpu/android/mock_codec_buffer_wait_coordinator.h39
-rw-r--r--chromium/media/gpu/android/mock_texture_owner.cc47
-rw-r--r--chromium/media/gpu/android/mock_texture_owner.h66
-rw-r--r--chromium/media/gpu/android/promotion_hint_aggregator_impl.cc3
-rw-r--r--chromium/media/gpu/android/promotion_hint_aggregator_impl.h2
-rw-r--r--chromium/media/gpu/android/shared_image_video.cc524
-rw-r--r--chromium/media/gpu/android/shared_image_video.h98
-rw-r--r--chromium/media/gpu/android/shared_image_video_provider.h2
-rw-r--r--chromium/media/gpu/android/surface_texture_gl_owner.cc164
-rw-r--r--chromium/media/gpu/android/surface_texture_gl_owner.h72
-rw-r--r--chromium/media/gpu/android/surface_texture_gl_owner_unittest.cc128
-rw-r--r--chromium/media/gpu/android/texture_owner.cc92
-rw-r--r--chromium/media/gpu/android/texture_owner.h153
-rw-r--r--chromium/media/gpu/android/texture_pool_unittest.cc4
-rw-r--r--chromium/media/gpu/android/video_frame_factory.h4
-rw-r--r--chromium/media/gpu/android/video_frame_factory_impl.cc76
-rw-r--r--chromium/media/gpu/android/video_frame_factory_impl.h16
-rw-r--r--chromium/media/gpu/android/video_frame_factory_impl_unittest.cc26
-rw-r--r--chromium/media/gpu/chromeos/chromeos_video_decoder_factory.cc61
-rw-r--r--chromium/media/gpu/gpu_video_decode_accelerator_factory.cc18
-rw-r--r--chromium/media/gpu/image_processor.cc11
-rw-r--r--chromium/media/gpu/image_processor.h13
-rw-r--r--chromium/media/gpu/image_processor_test.cc144
-rw-r--r--chromium/media/gpu/ipc/service/BUILD.gn1
-rw-r--r--chromium/media/gpu/ipc/service/picture_buffer_manager_unittest.cc4
-rw-r--r--chromium/media/gpu/ipc/service/vda_video_decoder_unittest.cc4
-rw-r--r--chromium/media/gpu/linux/mailbox_video_frame_converter.cc234
-rw-r--r--chromium/media/gpu/linux/mailbox_video_frame_converter.h82
-rw-r--r--chromium/media/gpu/linux/platform_video_frame_pool_unittest.cc23
-rw-r--r--chromium/media/gpu/linux/video_decoder_pipeline.cc295
-rw-r--r--chromium/media/gpu/linux/video_decoder_pipeline.h96
-rw-r--r--chromium/media/gpu/v4l2/BUILD.gn11
-rw-r--r--chromium/media/gpu/v4l2/tegra_v4l2_device.cc25
-rw-r--r--chromium/media/gpu/v4l2/tegra_v4l2_device.h5
-rw-r--r--chromium/media/gpu/v4l2/v4l2_decode_surface.cc69
-rw-r--r--chromium/media/gpu/v4l2/v4l2_decode_surface.h56
-rw-r--r--chromium/media/gpu/v4l2/v4l2_device.cc92
-rw-r--r--chromium/media/gpu/v4l2/v4l2_device.h23
-rw-r--r--chromium/media/gpu/v4l2/v4l2_h264_accelerator.cc31
-rw-r--r--chromium/media/gpu/v4l2/v4l2_h264_accelerator_legacy.cc484
-rw-r--r--chromium/media/gpu/v4l2/v4l2_h264_accelerator_legacy.h77
-rw-r--r--chromium/media/gpu/v4l2/v4l2_image_processor.cc11
-rw-r--r--chromium/media/gpu/v4l2/v4l2_jpeg_encode_accelerator.cc5
-rw-r--r--chromium/media/gpu/v4l2/v4l2_mjpeg_decode_accelerator.cc503
-rw-r--r--chromium/media/gpu/v4l2/v4l2_mjpeg_decode_accelerator.h60
-rw-r--r--chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc1167
-rw-r--r--chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.h115
-rw-r--r--chromium/media/gpu/v4l2/v4l2_slice_video_decoder.cc101
-rw-r--r--chromium/media/gpu/v4l2/v4l2_slice_video_decoder.h22
-rw-r--r--chromium/media/gpu/v4l2/v4l2_stateful_workaround.cc4
-rw-r--r--chromium/media/gpu/v4l2/v4l2_vda_helpers.cc152
-rw-r--r--chromium/media/gpu/v4l2/v4l2_vda_helpers.h58
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.cc123
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.h4
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc603
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.h28
-rw-r--r--chromium/media/gpu/v4l2/v4l2_vp8_accelerator.cc103
-rw-r--r--chromium/media/gpu/v4l2/v4l2_vp8_accelerator_legacy.cc261
-rw-r--r--chromium/media/gpu/v4l2/v4l2_vp8_accelerator_legacy.h44
-rw-r--r--chromium/media/gpu/vaapi/BUILD.gn12
-rw-r--r--chromium/media/gpu/vaapi/OWNERS1
-rw-r--r--chromium/media/gpu/vaapi/h264_encoder.cc23
-rw-r--r--chromium/media/gpu/vaapi/h264_encoder.h2
-rw-r--r--chromium/media/gpu/vaapi/h264_encoder_unittest.cc95
-rw-r--r--chromium/media/gpu/vaapi/test_utils.cc7
-rw-r--r--chromium/media/gpu/vaapi/vaapi_image_decode_accelerator_worker.cc211
-rw-r--r--chromium/media/gpu/vaapi/vaapi_image_decode_accelerator_worker.h78
-rw-r--r--chromium/media/gpu/vaapi/vaapi_image_decode_accelerator_worker_unittest.cc282
-rw-r--r--chromium/media/gpu/vaapi/vaapi_image_decoder.cc20
-rw-r--r--chromium/media/gpu/vaapi/vaapi_image_decoder.h21
-rw-r--r--chromium/media/gpu/vaapi/vaapi_jpeg_decode_accelerator_worker.cc127
-rw-r--r--chromium/media/gpu/vaapi/vaapi_jpeg_decode_accelerator_worker.h65
-rw-r--r--chromium/media/gpu/vaapi/vaapi_jpeg_decoder.cc42
-rw-r--r--chromium/media/gpu/vaapi/vaapi_jpeg_decoder.h1
-rw-r--r--chromium/media/gpu/vaapi/vaapi_jpeg_decoder_unittest.cc130
-rw-r--r--chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc108
-rw-r--r--chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.cc511
-rw-r--r--chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.h61
-rw-r--r--chromium/media/gpu/vaapi/vaapi_utils_unittest.cc15
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc15
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc4
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decoder.cc54
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decoder.h17
-rw-r--r--chromium/media/gpu/vaapi/vaapi_webp_decoder.cc4
-rw-r--r--chromium/media/gpu/vaapi/vaapi_webp_decoder.h1
-rw-r--r--chromium/media/gpu/vaapi/vaapi_webp_decoder_unittest.cc51
-rw-r--r--chromium/media/gpu/vaapi/vaapi_wrapper.cc124
-rw-r--r--chromium/media/gpu/vaapi/vaapi_wrapper.h76
-rw-r--r--chromium/media/gpu/vaapi/vp8_encoder.cc10
-rw-r--r--chromium/media/gpu/video_decode_accelerator_perf_tests.cc36
-rw-r--r--chromium/media/gpu/video_decode_accelerator_tests.cc66
-rw-r--r--chromium/media/gpu/video_decode_accelerator_unittest.cc46
-rw-r--r--chromium/media/gpu/video_encode_accelerator_unittest.cc193
-rw-r--r--chromium/media/gpu/video_frame_converter.cc33
-rw-r--r--chromium/media/gpu/video_frame_converter.h55
-rw-r--r--chromium/media/gpu/video_frame_mapper_factory.cc8
-rw-r--r--chromium/media/gpu/vp9_decoder.cc4
-rw-r--r--chromium/media/gpu/windows/d3d11_cdm_proxy.cc7
-rw-r--r--chromium/media/gpu/windows/d3d11_cdm_proxy.h2
-rw-r--r--chromium/media/gpu/windows/d3d11_cdm_proxy_unittest.cc4
-rw-r--r--chromium/media/gpu/windows/d3d11_decryptor.cc2
-rw-r--r--chromium/media/gpu/windows/d3d11_decryptor.h2
-rw-r--r--chromium/media/gpu/windows/d3d11_picture_buffer.h1
-rw-r--r--chromium/media/gpu/windows/d3d11_texture_selector.cc5
-rw-r--r--chromium/media/gpu/windows/d3d11_video_context_wrapper.cc2
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder.cc34
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder.h5
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder_impl.cc3
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder_impl.h2
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc4
-rw-r--r--chromium/media/gpu/windows/d3d11_vp9_accelerator.h1
-rw-r--r--chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc5
-rw-r--r--chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h2
-rw-r--r--chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.cc3
-rw-r--r--chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.h2
-rw-r--r--chromium/media/learning/common/BUILD.gn3
-rw-r--r--chromium/media/learning/common/feature_dictionary.cc38
-rw-r--r--chromium/media/learning/common/feature_dictionary.h52
-rw-r--r--chromium/media/learning/common/feature_dictionary_unittest.cc45
-rw-r--r--chromium/media/learning/common/learning_session.h6
-rw-r--r--chromium/media/learning/common/learning_task.cc7
-rw-r--r--chromium/media/learning/common/learning_task.h3
-rw-r--r--chromium/media/learning/common/learning_task_controller.h3
-rw-r--r--chromium/media/learning/impl/distribution_reporter_unittest.cc4
-rw-r--r--chromium/media/learning/impl/extra_trees_trainer_unittest.cc6
-rw-r--r--chromium/media/learning/impl/learning_fuzzertest.cc9
-rw-r--r--chromium/media/learning/impl/learning_session_impl.cc22
-rw-r--r--chromium/media/learning/impl/learning_session_impl.h7
-rw-r--r--chromium/media/learning/impl/learning_session_impl_unittest.cc40
-rw-r--r--chromium/media/learning/impl/learning_task_controller_helper_unittest.cc28
-rw-r--r--chromium/media/learning/impl/learning_task_controller_impl.cc4
-rw-r--r--chromium/media/learning/impl/learning_task_controller_impl.h1
-rw-r--r--chromium/media/learning/impl/learning_task_controller_impl_unittest.cc8
-rw-r--r--chromium/media/learning/impl/lookup_table_trainer_unittest.cc6
-rw-r--r--chromium/media/learning/impl/random_tree_trainer_unittest.cc6
-rw-r--r--chromium/media/learning/mojo/mojo_learning_task_controller_service_unittest.cc8
-rw-r--r--chromium/media/learning/mojo/public/cpp/mojo_learning_task_controller.cc4
-rw-r--r--chromium/media/learning/mojo/public/cpp/mojo_learning_task_controller.h1
-rw-r--r--chromium/media/learning/mojo/public/cpp/mojo_learning_task_controller_unittest.cc10
-rw-r--r--chromium/media/learning/mojo/public/mojom/learning_types.typemap8
-rw-r--r--chromium/media/media_options.gni13
-rw-r--r--chromium/media/midi/midi_input_port_android.cc1
-rw-r--r--chromium/media/midi/midi_input_port_android.h1
-rw-r--r--chromium/media/midi/midi_manager_android.cc7
-rw-r--r--chromium/media/midi/midi_manager_android.h7
-rw-r--r--chromium/media/midi/midi_manager_unittest.cc6
-rw-r--r--chromium/media/midi/midi_message_queue.h11
-rw-r--r--chromium/media/midi/midi_service.mojom3
-rw-r--r--chromium/media/midi/task_service.cc4
-rw-r--r--chromium/media/midi/usb_midi_device_android.cc1
-rw-r--r--chromium/media/midi/usb_midi_device_android.h1
-rw-r--r--chromium/media/midi/usb_midi_device_factory_android.cc3
-rw-r--r--chromium/media/midi/usb_midi_device_factory_android.h3
-rw-r--r--chromium/media/mojo/BUILD.gn6
-rw-r--r--chromium/media/mojo/clients/BUILD.gn4
-rw-r--r--chromium/media/mojo/clients/mojo_android_overlay.h2
-rw-r--r--chromium/media/mojo/clients/mojo_android_overlay_unittest.cc4
-rw-r--r--chromium/media/mojo/clients/mojo_audio_decoder.h4
-rw-r--r--chromium/media/mojo/clients/mojo_audio_decoder_unittest.cc8
-rw-r--r--chromium/media/mojo/clients/mojo_cdm.cc4
-rw-r--r--chromium/media/mojo/clients/mojo_cdm.h2
-rw-r--r--chromium/media/mojo/clients/mojo_cdm_factory.cc2
-rw-r--r--chromium/media/mojo/clients/mojo_cdm_unittest.cc2
-rw-r--r--chromium/media/mojo/clients/mojo_decoder_factory.cc4
-rw-r--r--chromium/media/mojo/clients/mojo_decryptor.cc2
-rw-r--r--chromium/media/mojo/clients/mojo_decryptor.h2
-rw-r--r--chromium/media/mojo/clients/mojo_decryptor_unittest.cc2
-rw-r--r--chromium/media/mojo/clients/mojo_demuxer_stream_impl.h2
-rw-r--r--chromium/media/mojo/clients/mojo_media_log_service.h2
-rw-r--r--chromium/media/mojo/clients/mojo_renderer.cc5
-rw-r--r--chromium/media/mojo/clients/mojo_renderer.h2
-rw-r--r--chromium/media/mojo/clients/mojo_renderer_factory.cc20
-rw-r--r--chromium/media/mojo/clients/mojo_renderer_factory.h11
-rw-r--r--chromium/media/mojo/clients/mojo_renderer_unittest.cc4
-rw-r--r--chromium/media/mojo/clients/mojo_video_decoder.cc39
-rw-r--r--chromium/media/mojo/clients/mojo_video_decoder.h9
-rw-r--r--chromium/media/mojo/clients/mojo_video_encode_accelerator.cc41
-rw-r--r--chromium/media/mojo/clients/mojo_video_encode_accelerator.h2
-rw-r--r--chromium/media/mojo/clients/mojo_video_encode_accelerator_unittest.cc23
-rw-r--r--chromium/media/mojo/common/BUILD.gn8
-rw-r--r--chromium/media/mojo/common/OWNERS4
-rw-r--r--chromium/media/mojo/common/media_type_converters.h4
-rw-r--r--chromium/media/mojo/common/mojo_data_pipe_read_write_unittest.cc10
-rw-r--r--chromium/media/mojo/common/mojo_decoder_buffer_converter.h2
-rw-r--r--chromium/media/mojo/common/mojo_decoder_buffer_converter_unittest.cc30
-rw-r--r--chromium/media/mojo/common/mojo_shared_buffer_video_frame.h2
-rw-r--r--chromium/media/mojo/interfaces/cdm_key_information.typemap23
-rw-r--r--chromium/media/mojo/interfaces/hdr_metadata.typemap11
-rw-r--r--chromium/media/mojo/interfaces/media_types.typemap69
-rw-r--r--chromium/media/mojo/interfaces/pipeline_status.typemap11
-rw-r--r--chromium/media/mojo/interfaces/typemaps.gni23
-rw-r--r--chromium/media/mojo/interfaces/video_color_space.typemap17
-rw-r--r--chromium/media/mojo/interfaces/video_decoder.typemap30
-rw-r--r--chromium/media/mojo/interfaces/video_encode_accelerator.typemap35
-rw-r--r--chromium/media/mojo/mojom/BUILD.gn (renamed from chromium/media/mojo/interfaces/BUILD.gn)17
-rw-r--r--chromium/media/mojo/mojom/OWNERS6
-rw-r--r--chromium/media/mojo/mojom/android_overlay.mojom (renamed from chromium/media/mojo/interfaces/android_overlay.mojom)4
-rw-r--r--chromium/media/mojo/mojom/audio_data_pipe.mojom (renamed from chromium/media/mojo/interfaces/audio_data_pipe.mojom)0
-rw-r--r--chromium/media/mojo/mojom/audio_decoder.mojom (renamed from chromium/media/mojo/interfaces/audio_decoder.mojom)2
-rw-r--r--chromium/media/mojo/mojom/audio_decoder_config.typemap (renamed from chromium/media/mojo/interfaces/audio_decoder_config.typemap)7
-rw-r--r--chromium/media/mojo/mojom/audio_decoder_config_mojom_traits.cc (renamed from chromium/media/mojo/interfaces/audio_decoder_config_struct_traits.cc)2
-rw-r--r--chromium/media/mojo/mojom/audio_decoder_config_mojom_traits.h (renamed from chromium/media/mojo/interfaces/audio_decoder_config_struct_traits.h)10
-rw-r--r--chromium/media/mojo/mojom/audio_decoder_config_mojom_traits_unittest.cc (renamed from chromium/media/mojo/interfaces/audio_decoder_config_struct_traits_unittest.cc)2
-rw-r--r--chromium/media/mojo/mojom/audio_input_stream.mojom (renamed from chromium/media/mojo/interfaces/audio_input_stream.mojom)4
-rw-r--r--chromium/media/mojo/mojom/audio_logging.mojom (renamed from chromium/media/mojo/interfaces/audio_logging.mojom)2
-rw-r--r--chromium/media/mojo/mojom/audio_output_stream.mojom (renamed from chromium/media/mojo/interfaces/audio_output_stream.mojom)6
-rw-r--r--chromium/media/mojo/mojom/audio_parameters.mojom (renamed from chromium/media/mojo/interfaces/audio_parameters.mojom)0
-rw-r--r--chromium/media/mojo/mojom/audio_parameters.typemap (renamed from chromium/media/mojo/interfaces/audio_parameters.typemap)6
-rw-r--r--chromium/media/mojo/mojom/cast_application_media_info_manager.mojom (renamed from chromium/media/mojo/interfaces/cast_application_media_info_manager.mojom)0
-rw-r--r--chromium/media/mojo/mojom/cdm_key_information.typemap22
-rw-r--r--chromium/media/mojo/mojom/cdm_key_information_mojom_traits.cc (renamed from chromium/media/mojo/interfaces/cdm_key_information_mojom_traits.cc)2
-rw-r--r--chromium/media/mojo/mojom/cdm_key_information_mojom_traits.h (renamed from chromium/media/mojo/interfaces/cdm_key_information_mojom_traits.h)8
-rw-r--r--chromium/media/mojo/mojom/cdm_key_information_mojom_traits_unittest.cc (renamed from chromium/media/mojo/interfaces/cdm_key_information_mojom_traits_unittest.cc)2
-rw-r--r--chromium/media/mojo/mojom/cdm_proxy.mojom (renamed from chromium/media/mojo/interfaces/cdm_proxy.mojom)0
-rw-r--r--chromium/media/mojo/mojom/cdm_proxy.typemap (renamed from chromium/media/mojo/interfaces/cdm_proxy.typemap)10
-rw-r--r--chromium/media/mojo/mojom/cdm_service.mojom (renamed from chromium/media/mojo/interfaces/cdm_service.mojom)2
-rw-r--r--chromium/media/mojo/mojom/cdm_storage.mojom (renamed from chromium/media/mojo/interfaces/cdm_storage.mojom)3
-rw-r--r--chromium/media/mojo/mojom/constants.mojom (renamed from chromium/media/mojo/interfaces/constants.mojom)0
-rw-r--r--chromium/media/mojo/mojom/content_decryption_module.mojom (renamed from chromium/media/mojo/interfaces/content_decryption_module.mojom)2
-rw-r--r--chromium/media/mojo/mojom/content_decryption_module.typemap (renamed from chromium/media/mojo/interfaces/content_decryption_module.typemap)14
-rw-r--r--chromium/media/mojo/mojom/decryptor.mojom (renamed from chromium/media/mojo/interfaces/decryptor.mojom)2
-rw-r--r--chromium/media/mojo/mojom/decryptor.typemap (renamed from chromium/media/mojo/interfaces/decryptor.typemap)6
-rw-r--r--chromium/media/mojo/mojom/demuxer_stream.mojom (renamed from chromium/media/mojo/interfaces/demuxer_stream.mojom)2
-rw-r--r--chromium/media/mojo/mojom/demuxer_stream.typemap (renamed from chromium/media/mojo/interfaces/demuxer_stream.typemap)6
-rw-r--r--chromium/media/mojo/mojom/display_media_information.mojom (renamed from chromium/media/mojo/interfaces/display_media_information.mojom)0
-rw-r--r--chromium/media/mojo/mojom/encryption_scheme.typemap (renamed from chromium/media/mojo/interfaces/encryption_scheme.typemap)10
-rw-r--r--chromium/media/mojo/mojom/encryption_scheme_mojom_traits.cc (renamed from chromium/media/mojo/interfaces/encryption_scheme_struct_traits.cc)2
-rw-r--r--chromium/media/mojo/mojom/encryption_scheme_mojom_traits.h (renamed from chromium/media/mojo/interfaces/encryption_scheme_struct_traits.h)8
-rw-r--r--chromium/media/mojo/mojom/encryption_scheme_mojom_traits_unittest.cc (renamed from chromium/media/mojo/interfaces/encryption_scheme_struct_traits_unittest.cc)2
-rw-r--r--chromium/media/mojo/mojom/hdr_metadata.typemap11
-rw-r--r--chromium/media/mojo/mojom/hdr_metadata_mojom_traits.h (renamed from chromium/media/mojo/interfaces/hdr_metadata_struct_traits.h)8
-rw-r--r--chromium/media/mojo/mojom/interface_factory.mojom (renamed from chromium/media/mojo/interfaces/interface_factory.mojom)14
-rw-r--r--chromium/media/mojo/mojom/key_system_support.mojom (renamed from chromium/media/mojo/interfaces/key_system_support.mojom)4
-rw-r--r--chromium/media/mojo/mojom/media_drm_storage.mojom (renamed from chromium/media/mojo/interfaces/media_drm_storage.mojom)0
-rw-r--r--chromium/media/mojo/mojom/media_drm_storage.typemap (renamed from chromium/media/mojo/interfaces/media_drm_storage.typemap)4
-rw-r--r--chromium/media/mojo/mojom/media_log.mojom (renamed from chromium/media/mojo/interfaces/media_log.mojom)2
-rw-r--r--chromium/media/mojo/mojom/media_metrics_provider.mojom (renamed from chromium/media/mojo/interfaces/media_metrics_provider.mojom)13
-rw-r--r--chromium/media/mojo/mojom/media_service.mojom (renamed from chromium/media/mojo/interfaces/media_service.mojom)2
-rw-r--r--chromium/media/mojo/mojom/media_types.mojom (renamed from chromium/media/mojo/interfaces/media_types.mojom)4
-rw-r--r--chromium/media/mojo/mojom/media_types.typemap69
-rw-r--r--chromium/media/mojo/mojom/mirror_service_remoting.mojom (renamed from chromium/media/mojo/interfaces/mirror_service_remoting.mojom)2
-rw-r--r--chromium/media/mojo/mojom/output_protection.mojom (renamed from chromium/media/mojo/interfaces/output_protection.mojom)0
-rw-r--r--chromium/media/mojo/mojom/pipeline_status.typemap11
-rw-r--r--chromium/media/mojo/mojom/pipeline_status_mojom_traits.h (renamed from chromium/media/mojo/interfaces/pipeline_status_struct_traits.h)8
-rw-r--r--chromium/media/mojo/mojom/platform_verification.mojom (renamed from chromium/media/mojo/interfaces/platform_verification.mojom)0
-rw-r--r--chromium/media/mojo/mojom/provision_fetcher.mojom (renamed from chromium/media/mojo/interfaces/provision_fetcher.mojom)0
-rw-r--r--chromium/media/mojo/mojom/remoting.mojom (renamed from chromium/media/mojo/interfaces/remoting.mojom)2
-rw-r--r--chromium/media/mojo/mojom/remoting_common.mojom (renamed from chromium/media/mojo/interfaces/remoting_common.mojom)0
-rw-r--r--chromium/media/mojo/mojom/renderer.mojom (renamed from chromium/media/mojo/interfaces/renderer.mojom)8
-rw-r--r--chromium/media/mojo/mojom/renderer_extensions.mojom (renamed from chromium/media/mojo/interfaces/renderer_extensions.mojom)4
-rw-r--r--chromium/media/mojo/mojom/supported_video_decoder_config_mojom_traits.cc (renamed from chromium/media/mojo/interfaces/supported_video_decoder_config_struct_traits.cc)2
-rw-r--r--chromium/media/mojo/mojom/supported_video_decoder_config_mojom_traits.h (renamed from chromium/media/mojo/interfaces/supported_video_decoder_config_struct_traits.h)12
-rw-r--r--chromium/media/mojo/mojom/traits_test_service.mojom (renamed from chromium/media/mojo/interfaces/traits_test_service.mojom)2
-rw-r--r--chromium/media/mojo/mojom/typemaps.gni23
-rw-r--r--chromium/media/mojo/mojom/video_color_space.typemap17
-rw-r--r--chromium/media/mojo/mojom/video_color_space_mojom_traits.h (renamed from chromium/media/mojo/interfaces/video_color_space_struct_traits.h)8
-rw-r--r--chromium/media/mojo/mojom/video_decode_perf_history.mojom (renamed from chromium/media/mojo/interfaces/video_decode_perf_history.mojom)4
-rw-r--r--chromium/media/mojo/mojom/video_decode_stats_recorder.mojom (renamed from chromium/media/mojo/interfaces/video_decode_stats_recorder.mojom)4
-rw-r--r--chromium/media/mojo/mojom/video_decoder.mojom (renamed from chromium/media/mojo/interfaces/video_decoder.mojom)8
-rw-r--r--chromium/media/mojo/mojom/video_decoder.typemap30
-rw-r--r--chromium/media/mojo/mojom/video_decoder_config.typemap (renamed from chromium/media/mojo/interfaces/video_decoder_config.typemap)11
-rw-r--r--chromium/media/mojo/mojom/video_decoder_config_mojom_traits.cc (renamed from chromium/media/mojo/interfaces/video_decoder_config_struct_traits.cc)2
-rw-r--r--chromium/media/mojo/mojom/video_decoder_config_mojom_traits.h (renamed from chromium/media/mojo/interfaces/video_decoder_config_struct_traits.h)18
-rw-r--r--chromium/media/mojo/mojom/video_decoder_config_mojom_traits_unittest.cc (renamed from chromium/media/mojo/interfaces/video_decoder_config_struct_traits_unittest.cc)2
-rw-r--r--chromium/media/mojo/mojom/video_encode_accelerator.mojom (renamed from chromium/media/mojo/interfaces/video_encode_accelerator.mojom)4
-rw-r--r--chromium/media/mojo/mojom/video_encode_accelerator.typemap35
-rw-r--r--chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits.cc (renamed from chromium/media/mojo/interfaces/video_encode_accelerator_mojom_traits.cc)2
-rw-r--r--chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits.h (renamed from chromium/media/mojo/interfaces/video_encode_accelerator_mojom_traits.h)12
-rw-r--r--chromium/media/mojo/mojom/video_frame.typemap (renamed from chromium/media/mojo/interfaces/video_frame.typemap)12
-rw-r--r--chromium/media/mojo/mojom/video_frame_mojom_traits.cc (renamed from chromium/media/mojo/interfaces/video_frame_struct_traits.cc)13
-rw-r--r--chromium/media/mojo/mojom/video_frame_mojom_traits.h (renamed from chromium/media/mojo/interfaces/video_frame_struct_traits.h)12
-rw-r--r--chromium/media/mojo/mojom/video_frame_mojom_traits_unittest.cc (renamed from chromium/media/mojo/interfaces/video_frame_struct_traits_unittest.cc)8
-rw-r--r--chromium/media/mojo/mojom/video_transformation_mojom_traits.cc (renamed from chromium/media/mojo/interfaces/video_transformation_mojom_traits.cc)2
-rw-r--r--chromium/media/mojo/mojom/video_transformation_mojom_traits.h (renamed from chromium/media/mojo/interfaces/video_transformation_mojom_traits.h)8
-rw-r--r--chromium/media/mojo/mojom/watch_time_recorder.mojom (renamed from chromium/media/mojo/interfaces/watch_time_recorder.mojom)4
-rw-r--r--chromium/media/mojo/services/BUILD.gn15
-rw-r--r--chromium/media/mojo/services/android_mojo_media_client.cc4
-rw-r--r--chromium/media/mojo/services/cdm_manifest.cc4
-rw-r--r--chromium/media/mojo/services/cdm_service.h4
-rw-r--r--chromium/media/mojo/services/cdm_service_unittest.cc10
-rw-r--r--chromium/media/mojo/services/deferred_destroy_strong_binding_set_unittest.cc4
-rw-r--r--chromium/media/mojo/services/gpu_mojo_media_client.cc15
-rw-r--r--chromium/media/mojo/services/interface_factory_impl.cc2
-rw-r--r--chromium/media/mojo/services/interface_factory_impl.h14
-rw-r--r--chromium/media/mojo/services/media_manifest.cc4
-rw-r--r--chromium/media/mojo/services/media_metrics_provider.cc44
-rw-r--r--chromium/media/mojo/services/media_metrics_provider.h14
-rw-r--r--chromium/media/mojo/services/media_metrics_provider_unittest.cc4
-rw-r--r--chromium/media/mojo/services/media_service.h4
-rw-r--r--chromium/media/mojo/services/media_service_unittest.cc20
-rw-r--r--chromium/media/mojo/services/mojo_audio_decoder_service.h2
-rw-r--r--chromium/media/mojo/services/mojo_audio_input_stream.h4
-rw-r--r--chromium/media/mojo/services/mojo_audio_input_stream_unittest.cc4
-rw-r--r--chromium/media/mojo/services/mojo_audio_output_stream.cc2
-rw-r--r--chromium/media/mojo/services/mojo_audio_output_stream.h2
-rw-r--r--chromium/media/mojo/services/mojo_audio_output_stream_provider.cc2
-rw-r--r--chromium/media/mojo/services/mojo_audio_output_stream_provider.h2
-rw-r--r--chromium/media/mojo/services/mojo_audio_output_stream_provider_unittest.cc6
-rw-r--r--chromium/media/mojo/services/mojo_audio_output_stream_unittest.cc6
-rw-r--r--chromium/media/mojo/services/mojo_cdm_file_io.h2
-rw-r--r--chromium/media/mojo/services/mojo_cdm_file_io_unittest.cc4
-rw-r--r--chromium/media/mojo/services/mojo_cdm_helper.h8
-rw-r--r--chromium/media/mojo/services/mojo_cdm_helper_unittest.cc6
-rw-r--r--chromium/media/mojo/services/mojo_cdm_promise.h2
-rw-r--r--chromium/media/mojo/services/mojo_cdm_proxy.h2
-rw-r--r--chromium/media/mojo/services/mojo_cdm_proxy_service.cc7
-rw-r--r--chromium/media/mojo/services/mojo_cdm_proxy_service.h4
-rw-r--r--chromium/media/mojo/services/mojo_cdm_proxy_unittest.cc8
-rw-r--r--chromium/media/mojo/services/mojo_cdm_service.h2
-rw-r--r--chromium/media/mojo/services/mojo_decryptor_service.cc2
-rw-r--r--chromium/media/mojo/services/mojo_decryptor_service.h2
-rw-r--r--chromium/media/mojo/services/mojo_demuxer_stream_adapter.h2
-rw-r--r--chromium/media/mojo/services/mojo_media_client.h2
-rw-r--r--chromium/media/mojo/services/mojo_media_drm_storage.cc3
-rw-r--r--chromium/media/mojo/services/mojo_media_drm_storage.h4
-rw-r--r--chromium/media/mojo/services/mojo_media_log.h2
-rw-r--r--chromium/media/mojo/services/mojo_provision_fetcher.h2
-rw-r--r--chromium/media/mojo/services/mojo_renderer_service.cc4
-rw-r--r--chromium/media/mojo/services/mojo_renderer_service.h2
-rw-r--r--chromium/media/mojo/services/mojo_video_decoder_service.h2
-rw-r--r--chromium/media/mojo/services/mojo_video_encode_accelerator_provider.h2
-rw-r--r--chromium/media/mojo/services/mojo_video_encode_accelerator_service.h2
-rw-r--r--chromium/media/mojo/services/mojo_video_encode_accelerator_service_unittest.cc6
-rw-r--r--chromium/media/mojo/services/test_helpers.h2
-rw-r--r--chromium/media/mojo/services/test_mojo_media_client.cc11
-rw-r--r--chromium/media/mojo/services/test_mojo_media_client.h7
-rw-r--r--chromium/media/mojo/services/video_decode_perf_history.cc33
-rw-r--r--chromium/media/mojo/services/video_decode_perf_history.h8
-rw-r--r--chromium/media/mojo/services/video_decode_perf_history_unittest.cc175
-rw-r--r--chromium/media/mojo/services/video_decode_stats_recorder.h4
-rw-r--r--chromium/media/mojo/services/video_decode_stats_recorder_unittest.cc2
-rw-r--r--chromium/media/mojo/services/watch_time_recorder.cc3
-rw-r--r--chromium/media/mojo/services/watch_time_recorder.h2
-rw-r--r--chromium/media/mojo/services/watch_time_recorder_unittest.cc10
-rw-r--r--chromium/media/remoting/BUILD.gn4
-rw-r--r--chromium/media/remoting/courier_renderer.h2
-rw-r--r--chromium/media/remoting/courier_renderer_unittest.cc4
-rw-r--r--chromium/media/remoting/demuxer_stream_adapter.h2
-rw-r--r--chromium/media/remoting/end2end_test_renderer.cc2
-rw-r--r--chromium/media/remoting/fake_remoter.h2
-rw-r--r--chromium/media/remoting/media_remoting_rpc.proto1
-rw-r--r--chromium/media/remoting/proto_enum_utils.cc1
-rw-r--r--chromium/media/remoting/renderer_controller.h4
-rw-r--r--chromium/media/renderers/audio_renderer_impl.cc7
-rw-r--r--chromium/media/renderers/audio_renderer_impl_unittest.cc4
-rw-r--r--chromium/media/renderers/decrypting_renderer_unittest.cc26
-rw-r--r--chromium/media/renderers/default_decoder_factory.cc7
-rw-r--r--chromium/media/renderers/paint_canvas_video_renderer.cc400
-rw-r--r--chromium/media/renderers/paint_canvas_video_renderer.h25
-rw-r--r--chromium/media/renderers/paint_canvas_video_renderer_unittest.cc4
-rw-r--r--chromium/media/renderers/renderer_impl.cc15
-rw-r--r--chromium/media/renderers/renderer_impl_unittest.cc6
-rw-r--r--chromium/media/renderers/video_renderer_impl_unittest.cc4
-rw-r--r--chromium/media/renderers/video_resource_updater.cc33
-rw-r--r--chromium/media/renderers/video_resource_updater_unittest.cc4
-rw-r--r--chromium/media/test/BUILD.gn4
-rw-r--r--chromium/media/video/gpu_memory_buffer_video_frame_pool.cc66
-rw-r--r--chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc20
-rw-r--r--chromium/media/video/gpu_video_accelerator_factories.h11
-rw-r--r--chromium/media/video/h264_level_limits.cc24
-rw-r--r--chromium/media/video/h264_level_limits.h9
-rw-r--r--chromium/media/video/mock_gpu_video_accelerator_factories.cc7
-rw-r--r--chromium/media/video/mock_gpu_video_accelerator_factories.h6
-rw-r--r--chromium/media/video/video_encode_accelerator.cc3
-rw-r--r--chromium/media/video/video_encode_accelerator.h15
-rw-r--r--chromium/media/webrtc/OWNERS1
-rw-r--r--chromium/media/webrtc/audio_processor.cc11
-rw-r--r--chromium/media/webrtc/audio_processor_unittest.cc4
716 files changed, 13185 insertions, 8983 deletions
diff --git a/chromium/media/BUILD.gn b/chromium/media/BUILD.gn
index d53a0e58025..d88a4887d33 100644
--- a/chromium/media/BUILD.gn
+++ b/chromium/media/BUILD.gn
@@ -8,6 +8,7 @@ import("//build/config/arm.gni")
import("//build/config/features.gni")
import("//build/config/linux/pkg_config.gni")
import("//build/config/ui.gni")
+import("//media/cdm/library_cdm/cdm_paths.gni")
import("//media/media_options.gni")
import("//testing/libfuzzer/fuzzer_test.gni")
import("//testing/test.gni")
@@ -17,7 +18,8 @@ buildflag_header("media_buildflags") {
header = "media_buildflags.h"
flags = [
- "ALTERNATE_CDM_STORAGE_ID_KEY=$alternate_cdm_storage_id_key",
+ "ALTERNATE_CDM_STORAGE_ID_KEY=\"$alternate_cdm_storage_id_key\"",
+ "CDM_PLATFORM_SPECIFIC_PATH=\"$cdm_platform_specific_path\"",
"ENABLE_AC3_EAC3_AUDIO_DEMUXING=$enable_ac3_eac3_audio_demuxing",
"ENABLE_CBCS_ENCRYPTION_SCHEME=$enable_cbcs_encryption_scheme",
"ENABLE_CDM_HOST_VERIFICATION=$enable_cdm_host_verification",
@@ -129,7 +131,6 @@ source_set("test_support") {
"//media/base/android:test_support",
"//media/filters:test_support",
"//media/formats:test_support",
- "//media/gpu:test_support",
"//media/video:test_support",
]
}
@@ -171,6 +172,7 @@ test("media_unittests") {
deps += [
# The test needs the java dependencies to add the java classes for their
# native counterparts to the test apk.
+ "//gpu/ipc/common:android_texture_owner_unittests",
"//media/base/android:media_java",
"//media/base/android:unit_tests",
"//media/gpu:android_video_decode_accelerator_unittests",
diff --git a/chromium/media/DEPS b/chromium/media/DEPS
index f7a21ac5b33..ac7984ec9a2 100644
--- a/chromium/media/DEPS
+++ b/chromium/media/DEPS
@@ -40,15 +40,13 @@ specific_include_rules = {
"cras_unified_unittest.cc": [
"+chromeos/dbus"
],
+ "fuchsia_video_decoder_unittest.cc": [
+ "+components/viz/test/test_context_support.h",
+ ],
"gpu_memory_buffer_video_frame_pool_unittest.cc": [
"+components/viz/test/test_context_provider.h",
],
"null_video_sink_unittest.cc": [
"+components/viz/common/frame_sinks/begin_frame_args.h",
],
-
- # Dependencies specific for fuzz targets and other fuzzing-related code.
- ".*fuzz.*": [
- "+third_party/libFuzzer/src/utils", # This contains FuzzedDataProvider.
- ],
}
diff --git a/chromium/media/audio/BUILD.gn b/chromium/media/audio/BUILD.gn
index d8ae83ce63a..35fd1d7da0a 100644
--- a/chromium/media/audio/BUILD.gn
+++ b/chromium/media/audio/BUILD.gn
@@ -279,6 +279,8 @@ source_set("audio") {
"pulse/pulse_util.h",
]
+ deps += [ "//build:branding_buildflags" ]
+
if (link_pulseaudio) {
configs += [ ":libpulse" ]
} else {
diff --git a/chromium/media/audio/OWNERS b/chromium/media/audio/OWNERS
index b85a81bc9b5..c13e013dfd7 100644
--- a/chromium/media/audio/OWNERS
+++ b/chromium/media/audio/OWNERS
@@ -1,6 +1,5 @@
tommi@chromium.org
olka@chromium.org
-maxmorin@chromium.org
# Windows
henrika@chromium.org
diff --git a/chromium/media/audio/alive_checker_unittest.cc b/chromium/media/audio/alive_checker_unittest.cc
index ac2bf7342bf..a2f13bd3129 100644
--- a/chromium/media/audio/alive_checker_unittest.cc
+++ b/chromium/media/audio/alive_checker_unittest.cc
@@ -8,7 +8,7 @@
#include "base/bind.h"
#include "base/sequenced_task_runner.h"
#include "base/synchronization/waitable_event.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/thread.h"
#include "media/audio/alive_checker.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -159,7 +159,7 @@ class AliveCheckerTest : public testing::Test {
}
// The test task environment.
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
// The thread the checker is run on.
base::Thread alive_checker_thread_;
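Most of the unit-test churn in this commit follows the same mechanical pattern: base::test::ScopedTaskEnvironment becomes base::test::TaskEnvironment (or SingleThreadTaskEnvironment where only a main-thread loop is needed) and the member is renamed accordingly. A minimal sketch of the post-rename fixture, assuming Chromium's //base test support at this revision; the fixture and test names are invented for illustration:

    #include "base/bind.h"
    #include "base/test/task_environment.h"
    #include "base/threading/thread_task_runner_handle.h"
    #include "testing/gtest/include/gtest/gtest.h"

    namespace media {

    class ExampleMediaTest : public testing::Test {
     protected:
      // Replaces the old base::test::ScopedTaskEnvironment member.
      base::test::TaskEnvironment task_environment_;
    };

    TEST_F(ExampleMediaTest, RunsPostedTasksToCompletion) {
      bool ran = false;
      base::ThreadTaskRunnerHandle::Get()->PostTask(
          FROM_HERE, base::BindOnce([](bool* flag) { *flag = true; }, &ran));
      // Drains the main thread and the thread pool, as the updated tests do.
      task_environment_.RunUntilIdle();
      EXPECT_TRUE(ran);
    }

    }  // namespace media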
diff --git a/chromium/media/audio/android/audio_manager_android.cc b/chromium/media/audio/android/audio_manager_android.cc
index 79bebf1a8ff..5d74f7e662b 100644
--- a/chromium/media/audio/android/audio_manager_android.cc
+++ b/chromium/media/audio/android/audio_manager_android.cc
@@ -311,6 +311,13 @@ bool AudioManagerAndroid::HasOutputVolumeOverride(double* out_volume) const {
return output_volume_override_set_;
}
+base::TimeDelta AudioManagerAndroid::GetOutputLatency() {
+ DCHECK(GetTaskRunner()->BelongsToCurrentThread());
+ JNIEnv* env = AttachCurrentThread();
+ return base::TimeDelta::FromMilliseconds(
+ Java_AudioManagerAndroid_getOutputLatency(env, GetJavaAudioManager()));
+}
+
AudioParameters AudioManagerAndroid::GetPreferredOutputStreamParameters(
const std::string& output_device_id,
const AudioParameters& input_params) {
diff --git a/chromium/media/audio/android/audio_manager_android.h b/chromium/media/audio/android/audio_manager_android.h
index 5075ed514f1..df27f8fafe2 100644
--- a/chromium/media/audio/android/audio_manager_android.h
+++ b/chromium/media/audio/android/audio_manager_android.h
@@ -81,6 +81,13 @@ class MEDIA_EXPORT AudioManagerAndroid : public AudioManagerBase {
void SetOutputVolumeOverride(double volume);
bool HasOutputVolumeOverride(double* out_volume) const;
+ // Get the latency introduced by the hardware. It relies on
+ // AudioManager.getOutputLatency, which is both (a) hidden and (b) not
+ // guaranteed to be meaningful. Do not use this, except in the context of
+ // b/80326798 to adjust (hackily) for hardware latency that OpenSLES isn't
+ // otherwise accounting for.
+ base::TimeDelta GetOutputLatency();
+
protected:
void ShutdownOnAudioThread() override;
AudioParameters GetPreferredOutputStreamParameters(
diff --git a/chromium/media/audio/android/opensles_output.cc b/chromium/media/audio/android/opensles_output.cc
index 594174c6667..e4e9274e0cf 100644
--- a/chromium/media/audio/android/opensles_output.cc
+++ b/chromium/media/audio/android/opensles_output.cc
@@ -5,6 +5,7 @@
#include "media/audio/android/opensles_output.h"
#include "base/android/build_info.h"
+#include "base/feature_list.h"
#include "base/logging.h"
#include "base/stl_util.h"
#include "base/strings/string_util.h"
@@ -13,6 +14,7 @@
#include "media/audio/android/audio_manager_android.h"
#include "media/base/audio_sample_types.h"
#include "media/base/audio_timestamp_helper.h"
+#include "media/base/media_switches.h"
#define LOG_ON_FAILURE_AND_RETURN(op, ...) \
do { \
@@ -136,6 +138,8 @@ void OpenSLESOutputStream::Start(AudioSourceCallback* callback) {
DCHECK(!callback_);
callback_ = callback;
+ CacheHardwareLatencyIfNeeded();
+
// Fill audio data with silence to avoid start-up glitches. Don't use
// FillBufferQueueNoLock() since it can trigger recursive entry if an error
// occurs while writing into the stream. See http://crbug.com/624877.
@@ -400,6 +404,7 @@ void OpenSLESOutputStream::FillBufferQueueNoLock() {
// Calculate the position relative to the number of frames written.
uint32_t position_in_ms = 0;
SLresult err = (*player_)->GetPosition(player_, &position_in_ms);
+
// Given the position of the playback head, compute the approximate number of
// frames that have been queued to the buffer but not yet played out.
// Note that the value returned by GetFramesToTarget() is negative because
@@ -409,7 +414,7 @@ void OpenSLESOutputStream::FillBufferQueueNoLock() {
const int delay_frames =
err == SL_RESULT_SUCCESS
? -delay_calculator_.GetFramesToTarget(
- base::TimeDelta::FromMilliseconds(position_in_ms))
+ AdjustPositionForHardwareLatency(position_in_ms))
: 0;
DCHECK_GE(delay_frames, 0);
@@ -480,4 +485,23 @@ void OpenSLESOutputStream::HandleError(SLresult error) {
callback_->OnError();
}
+void OpenSLESOutputStream::CacheHardwareLatencyIfNeeded() {
+  // If the feature is turned off, leave |hardware_latency_| at its default
+  // (zero) value. In general, GetOutputLatency() is not reliable.
+ if (!base::FeatureList::IsEnabled(kUseAudioLatencyFromHAL))
+ return;
+
+ hardware_latency_ = audio_manager_->GetOutputLatency();
+}
+
+base::TimeDelta OpenSLESOutputStream::AdjustPositionForHardwareLatency(
+ uint32_t position_in_ms) {
+ base::TimeDelta position = base::TimeDelta::FromMilliseconds(position_in_ms);
+
+ if (position <= hardware_latency_)
+ return base::TimeDelta::FromMilliseconds(0);
+
+ return position - hardware_latency_;
+}
+
} // namespace media
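The hardware-latency adjustment added above is a clamped subtraction: the position reported by GetPosition() is reduced by the cached HAL latency and never allowed to go negative. A standalone sketch of the same arithmetic, using std::chrono in place of base::TimeDelta so it compiles outside Chromium; the values are illustrative only:

    #include <algorithm>
    #include <chrono>
    #include <cstdint>
    #include <iostream>

    // Mirrors AdjustPositionForHardwareLatency(): subtract the cached hardware
    // latency from the reported position, clamping the result at zero.
    std::chrono::milliseconds AdjustPosition(uint32_t position_in_ms,
                                             std::chrono::milliseconds latency) {
      const std::chrono::milliseconds position(position_in_ms);
      return std::max(std::chrono::milliseconds(0), position - latency);
    }

    int main() {
      using std::chrono::milliseconds;
      // With 40 ms of HAL latency, 100 ms of reported position becomes 60 ms...
      std::cout << AdjustPosition(100, milliseconds(40)).count() << "\n";
      // ...and 25 ms clamps to 0 instead of going negative.
      std::cout << AdjustPosition(25, milliseconds(40)).count() << "\n";
      return 0;
    }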
diff --git a/chromium/media/audio/android/opensles_output.h b/chromium/media/audio/android/opensles_output.h
index 4c1cec16350..aafc81657d5 100644
--- a/chromium/media/audio/android/opensles_output.h
+++ b/chromium/media/audio/android/opensles_output.h
@@ -77,6 +77,13 @@ class OpenSLESOutputStream : public MuteableAudioOutputStream {
// the attached AudioOutputCallback::OnError().
void HandleError(SLresult error);
+  // Cache |hardware_latency_| by asking |audio_manager_| for it, if the
+  // kUseAudioLatencyFromHAL feature is enabled.
+ void CacheHardwareLatencyIfNeeded();
+
+ // Adjust |position_in_ms| for hardware latency, and return the result.
+ base::TimeDelta AdjustPositionForHardwareLatency(uint32_t position_in_ms);
+
base::ThreadChecker thread_checker_;
// Protects |callback_|, |active_buffer_index_|, |audio_data_|,
@@ -140,6 +147,10 @@ class OpenSLESOutputStream : public MuteableAudioOutputStream {
// Container for retrieving data from AudioSourceCallback::OnMoreData().
std::unique_ptr<AudioBus> audio_bus_;
+ // Adjustment for hardware latency. Needed for some cast targets, since
+ // OpenSLES's GetPosition doesn't properly account for HAL latency.
+ base::TimeDelta hardware_latency_;
+
DISALLOW_COPY_AND_ASSIGN(OpenSLESOutputStream);
};
diff --git a/chromium/media/audio/audio_debug_file_writer.h b/chromium/media/audio/audio_debug_file_writer.h
index 66f0f91d910..b128025dc04 100644
--- a/chromium/media/audio/audio_debug_file_writer.h
+++ b/chromium/media/audio/audio_debug_file_writer.h
@@ -63,8 +63,9 @@ class MEDIA_EXPORT AudioDebugFileWriter {
// The task runner to do file output operations on.
const scoped_refptr<base::SequencedTaskRunner> file_task_runner_ =
- base::CreateSequencedTaskRunnerWithTraits(
- {base::MayBlock(), base::TaskPriority::BEST_EFFORT,
+ base::CreateSequencedTaskRunner(
+ {base::ThreadPool(), base::MayBlock(),
+ base::TaskPriority::BEST_EFFORT,
base::TaskShutdownBehavior::BLOCK_SHUTDOWN});
AudioFileWriterUniquePtr file_writer_;
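The task-runner change here is part of the //base migration away from the *WithTraits helpers: the thread pool is now requested explicitly via a base::ThreadPool() trait. A small sketch of posting blocking file work to a runner created this way, assuming the //base task APIs at this revision; the helper function is invented for illustration:

    #include "base/bind.h"
    #include "base/files/file_path.h"
    #include "base/memory/scoped_refptr.h"
    #include "base/sequenced_task_runner.h"
    #include "base/task/post_task.h"
    #include "base/task/task_traits.h"

    namespace {

    // Hypothetical helper: create the same kind of sequenced runner that
    // AudioDebugFileWriter uses and post blocking file I/O to it.
    void PostDebugFileWork(const base::FilePath& path) {
      scoped_refptr<base::SequencedTaskRunner> runner =
          base::CreateSequencedTaskRunner(
              {base::ThreadPool(), base::MayBlock(),
               base::TaskPriority::BEST_EFFORT,
               base::TaskShutdownBehavior::BLOCK_SHUTDOWN});
      runner->PostTask(FROM_HERE, base::BindOnce(
                                      [](const base::FilePath& p) {
                                        // Blocking file I/O is permitted here
                                        // because of the MayBlock() trait.
                                      },
                                      path));
    }

    }  // namespace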
diff --git a/chromium/media/audio/audio_debug_file_writer_unittest.cc b/chromium/media/audio/audio_debug_file_writer_unittest.cc
index 4d498690b43..5f8974da252 100644
--- a/chromium/media/audio/audio_debug_file_writer_unittest.cc
+++ b/chromium/media/audio/audio_debug_file_writer_unittest.cc
@@ -11,7 +11,7 @@
#include "base/memory/ptr_util.h"
#include "base/synchronization/waitable_event.h"
#include "base/sys_byteorder.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/thread.h"
#include "media/audio/audio_debug_file_writer.h"
#include "media/base/audio_bus.h"
@@ -50,10 +50,9 @@ class AudioDebugFileWriterTest
: public testing::TestWithParam<AudioDebugFileWriterTestData> {
public:
explicit AudioDebugFileWriterTest(
- base::test::ScopedTaskEnvironment::ThreadPoolExecutionMode execution_mode)
- : scoped_task_environment_(
- base::test::ScopedTaskEnvironment::MainThreadType::DEFAULT,
- execution_mode),
+ base::test::TaskEnvironment::ThreadPoolExecutionMode execution_mode)
+ : task_environment_(base::test::TaskEnvironment::MainThreadType::DEFAULT,
+ execution_mode),
params_(AudioParameters::Format::AUDIO_PCM_LINEAR,
std::get<0>(GetParam()),
std::get<1>(GetParam()),
@@ -67,8 +66,7 @@ class AudioDebugFileWriterTest
}
AudioDebugFileWriterTest()
: AudioDebugFileWriterTest(
- base::test::ScopedTaskEnvironment::ThreadPoolExecutionMode::ASYNC) {
- }
+ base::test::TaskEnvironment::ThreadPoolExecutionMode::ASYNC) {}
protected:
virtual ~AudioDebugFileWriterTest() = default;
@@ -192,7 +190,7 @@ class AudioDebugFileWriterTest
debug_writer_->Stop();
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
VerifyRecording(file_path);
@@ -206,7 +204,7 @@ class AudioDebugFileWriterTest
protected:
// The test task environment.
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
// Writer under test.
std::unique_ptr<AudioDebugFileWriter> debug_writer_;
@@ -232,8 +230,8 @@ class AudioDebugFileWriterBehavioralTest : public AudioDebugFileWriterTest {};
class AudioDebugFileWriterSingleThreadTest : public AudioDebugFileWriterTest {
public:
AudioDebugFileWriterSingleThreadTest()
- : AudioDebugFileWriterTest(base::test::ScopedTaskEnvironment::
- ThreadPoolExecutionMode::QUEUED) {}
+ : AudioDebugFileWriterTest(
+ base::test::TaskEnvironment::ThreadPoolExecutionMode::QUEUED) {}
};
TEST_P(AudioDebugFileWriterTest, WaveRecordingTest) {
@@ -256,7 +254,7 @@ TEST_P(AudioDebugFileWriterSingleThreadTest,
debug_writer_.reset();
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
VerifyRecording(file_path);
diff --git a/chromium/media/audio/audio_debug_recording_helper_unittest.cc b/chromium/media/audio/audio_debug_recording_helper_unittest.cc
index 9bd581d8f1a..8798fb00fe2 100644
--- a/chromium/media/audio/audio_debug_recording_helper_unittest.cc
+++ b/chromium/media/audio/audio_debug_recording_helper_unittest.cc
@@ -15,7 +15,7 @@
#include "base/memory/ptr_util.h"
#include "base/run_loop.h"
#include "base/single_thread_task_runner.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/base/audio_bus.h"
#include "media/base/audio_sample_types.h"
#include "testing/gmock/include/gmock/gmock.h"
@@ -112,7 +112,7 @@ class AudioDebugRecordingHelperTest : public ::testing::Test {
const AudioParameters& params,
base::OnceClosure on_destruction_closure) {
return std::make_unique<AudioDebugRecordingHelperUnderTest>(
- params, scoped_task_environment_.GetMainThreadTaskRunner(),
+ params, task_environment_.GetMainThreadTaskRunner(),
std::move(on_destruction_closure));
}
@@ -146,7 +146,7 @@ class AudioDebugRecordingHelperTest : public ::testing::Test {
const uint32_t id_ = 1;
// The test task environment.
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
private:
DISALLOW_COPY_AND_ASSIGN(AudioDebugRecordingHelperTest);
diff --git a/chromium/media/audio/audio_debug_recording_manager_unittest.cc b/chromium/media/audio/audio_debug_recording_manager_unittest.cc
index 014491b59de..46b977fd92e 100644
--- a/chromium/media/audio/audio_debug_recording_manager_unittest.cc
+++ b/chromium/media/audio/audio_debug_recording_manager_unittest.cc
@@ -9,7 +9,7 @@
#include "base/bind.h"
#include "base/single_thread_task_runner.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/audio/audio_debug_recording_helper.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -114,7 +114,7 @@ class AudioDebugRecordingManagerUnderTest : public AudioDebugRecordingManager {
class AudioDebugRecordingManagerTest : public ::testing::Test {
public:
AudioDebugRecordingManagerTest()
- : manager_(scoped_task_environment_.GetMainThreadTaskRunner()) {}
+ : manager_(task_environment_.GetMainThreadTaskRunner()) {}
~AudioDebugRecordingManagerTest() override = default;
@@ -127,7 +127,7 @@ class AudioDebugRecordingManagerTest : public ::testing::Test {
protected:
// The test task environment.
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
AudioDebugRecordingManagerUnderTest manager_;
diff --git a/chromium/media/audio/audio_debug_recording_session_impl.cc b/chromium/media/audio/audio_debug_recording_session_impl.cc
index 125e7a15f2f..2d433cd61d0 100644
--- a/chromium/media/audio/audio_debug_recording_session_impl.cc
+++ b/chromium/media/audio/audio_debug_recording_session_impl.cc
@@ -58,9 +58,9 @@ void CreateWavFile(const base::FilePath& debug_recording_file_path,
return;
}
- base::PostTaskWithTraitsAndReplyWithResult(
+ base::PostTaskAndReplyWithResult(
FROM_HERE,
- {base::MayBlock(), base::TaskPriority::BEST_EFFORT,
+ {base::ThreadPool(), base::MayBlock(), base::TaskPriority::BEST_EFFORT,
base::TaskShutdownBehavior::SKIP_ON_SHUTDOWN},
base::BindOnce(
[](const base::FilePath& file_name) {
diff --git a/chromium/media/audio/audio_debug_recording_session_impl_unittest.cc b/chromium/media/audio/audio_debug_recording_session_impl_unittest.cc
index 47067985e49..c5f7149ff2a 100644
--- a/chromium/media/audio/audio_debug_recording_session_impl_unittest.cc
+++ b/chromium/media/audio/audio_debug_recording_session_impl_unittest.cc
@@ -10,7 +10,7 @@
#include "base/files/file_util.h"
#include "base/files/scoped_temp_dir.h"
#include "base/strings/string_number_conversions.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "build/build_config.h"
#include "media/audio/audio_debug_recording_test.h"
#include "media/audio/mock_audio_debug_recording_manager.h"
@@ -125,7 +125,7 @@ TEST_F(AudioDebugRecordingSessionImplTest, CreateWavFileCreatesExpectedFiles) {
CreateDebugRecordingSession();
// Wait for files to be created.
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
// Check that expected files were created.
base::FilePath input_recording_filename(GetFileName(kInput, kId));
diff --git a/chromium/media/audio/audio_debug_recording_test.h b/chromium/media/audio/audio_debug_recording_test.h
index 4191b937399..1ccae83206e 100644
--- a/chromium/media/audio/audio_debug_recording_test.h
+++ b/chromium/media/audio/audio_debug_recording_test.h
@@ -7,7 +7,7 @@
#include <memory>
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/base/media_export.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -28,7 +28,7 @@ class AudioDebugRecordingTest : public testing::Test {
void ShutdownAudioManager();
void InitializeAudioDebugRecordingManager();
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
std::unique_ptr<MockAudioManager> mock_audio_manager_;
MockAudioDebugRecordingManager* mock_debug_recording_manager_;
diff --git a/chromium/media/audio/audio_device_description.cc b/chromium/media/audio/audio_device_description.cc
index 87f8f37d366..3bdc7be1612 100644
--- a/chromium/media/audio/audio_device_description.cc
+++ b/chromium/media/audio/audio_device_description.cc
@@ -37,9 +37,9 @@ bool AudioDeviceDescription::IsLoopbackDevice(const std::string& device_id) {
// static
bool AudioDeviceDescription::UseSessionIdToSelectDevice(
- int session_id,
+ const base::UnguessableToken& session_id,
const std::string& device_id) {
- return session_id && device_id.empty();
+ return !session_id.is_empty() && device_id.empty();
}
// static
diff --git a/chromium/media/audio/audio_device_description.h b/chromium/media/audio/audio_device_description.h
index 970ec0e822d..ccba18edbbb 100644
--- a/chromium/media/audio/audio_device_description.h
+++ b/chromium/media/audio/audio_device_description.h
@@ -8,6 +8,7 @@
#include <string>
#include <vector>
+#include "base/unguessable_token.h"
#include "media/base/media_export.h"
namespace media {
@@ -50,8 +51,9 @@ struct MEDIA_EXPORT AudioDeviceDescription {
// If |device_id| is empty and |session_id| is nonzero, output device
// associated with the opened input device designated by |session_id| should
// be used.
- static bool UseSessionIdToSelectDevice(int session_id,
- const std::string& device_id);
+ static bool UseSessionIdToSelectDevice(
+ const base::UnguessableToken& session_id,
+ const std::string& device_id);
// The functions dealing with localization are not reliable in the audio
// service, and should be avoided there.
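The int-to-base::UnguessableToken migration of |session_id| recurs throughout this diff; the selection rule itself is unchanged, with an empty token playing the role of the old zero. A hedged sketch of how a caller observes this, assuming the //media and //base headers shown above; Create() is used purely for illustration:

    #include <string>

    #include "base/unguessable_token.h"
    #include "media/audio/audio_device_description.h"

    void ExampleSessionSelection() {
      // A default-constructed token is empty and behaves like session_id == 0.
      const base::UnguessableToken no_session;
      // A real session id comes from an opened input device.
      const base::UnguessableToken session = base::UnguessableToken::Create();

      // false: empty token, no device id.
      media::AudioDeviceDescription::UseSessionIdToSelectDevice(no_session,
                                                                std::string());
      // true: token set and no explicit device id, so the session picks the
      // output device.
      media::AudioDeviceDescription::UseSessionIdToSelectDevice(session,
                                                                std::string());
      // false: an explicit device id always wins over the session.
      media::AudioDeviceDescription::UseSessionIdToSelectDevice(session,
                                                                "a-device-id");
    }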
diff --git a/chromium/media/audio/audio_features.cc b/chromium/media/audio/audio_features.cc
index a36d1fcc840..dfbeaad8e21 100644
--- a/chromium/media/audio/audio_features.cc
+++ b/chromium/media/audio/audio_features.cc
@@ -9,7 +9,13 @@ namespace features {
// When the audio service in a separate process, kill it when a hang is
// detected. It will be restarted when needed.
const base::Feature kAudioServiceOutOfProcessKillAtHang{
- "AudioServiceOutOfProcessKillAtHang", base::FEATURE_DISABLED_BY_DEFAULT};
+ "AudioServiceOutOfProcessKillAtHang",
+#if defined(OS_WIN) || defined(OS_MACOSX)
+ base::FEATURE_ENABLED_BY_DEFAULT
+#else
+ base::FEATURE_DISABLED_BY_DEFAULT
+#endif
+};
// If enabled, base::DumpWithoutCrashing is called whenever an audio service
// hang is detected.
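After this change the hang-kill behavior defaults on only for Windows and macOS, but callers query it the same way everywhere. A minimal sketch of the runtime check, assuming the features:: declaration from this file:

    #include "base/feature_list.h"
    #include "media/audio/audio_features.h"

    bool ShouldKillAudioServiceOnHang() {
      // Enabled by default on OS_WIN and OS_MACOSX after this change; other
      // platforms keep it disabled unless overridden, e.g. via
      // --enable-features=AudioServiceOutOfProcessKillAtHang.
      return base::FeatureList::IsEnabled(
          features::kAudioServiceOutOfProcessKillAtHang);
    }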
diff --git a/chromium/media/audio/audio_input_controller_unittest.cc b/chromium/media/audio/audio_input_controller_unittest.cc
index 00a3b5ab6c0..dbf5e29b31e 100644
--- a/chromium/media/audio/audio_input_controller_unittest.cc
+++ b/chromium/media/audio/audio_input_controller_unittest.cc
@@ -8,7 +8,7 @@
#include "base/macros.h"
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/timer/timer.h"
#include "media/audio/audio_manager.h"
#include "media/audio/fake_audio_input_stream.h"
@@ -144,7 +144,7 @@ class AudioInputControllerTest : public testing::TestWithParam<bool> {
run_loop.Run();
}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::SingleThreadTaskEnvironment task_environment_;
// Parameterize tests to run AudioInputController either on audio thread
// (synchronously), or on a different thread (non-blocking).
diff --git a/chromium/media/audio/audio_input_device_unittest.cc b/chromium/media/audio/audio_input_device_unittest.cc
index 01fa2add753..942097505b8 100644
--- a/chromium/media/audio/audio_input_device_unittest.cc
+++ b/chromium/media/audio/audio_input_device_unittest.cc
@@ -8,12 +8,11 @@
#include "base/memory/ptr_util.h"
#include "base/memory/shared_memory.h"
-#include "base/message_loop/message_loop.h"
#include "base/process/process_handle.h"
#include "base/run_loop.h"
#include "base/single_thread_task_runner.h"
#include "base/sync_socket.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gmock_mutant.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -67,7 +66,8 @@ class MockCaptureCallback : public AudioCapturerSource::CaptureCallback {
// Regular construction.
TEST(AudioInputDeviceTest, Noop) {
- base::MessageLoopForIO io_loop;
+ base::test::SingleThreadTaskEnvironment task_environment(
+ base::test::SingleThreadTaskEnvironment::MainThreadType::IO);
MockAudioInputIPC* input_ipc = new MockAudioInputIPC();
scoped_refptr<AudioInputDevice> device(new AudioInputDevice(
base::WrapUnique(input_ipc), AudioInputDevice::Purpose::kUserInput));
@@ -117,7 +117,7 @@ TEST(AudioInputDeviceTest, CreateStream) {
shared_memory.region.Duplicate();
ASSERT_TRUE(duplicated_shared_memory_region.IsValid());
- base::test::ScopedTaskEnvironment ste;
+ base::test::TaskEnvironment ste;
MockCaptureCallback callback;
MockAudioInputIPC* input_ipc = new MockAudioInputIPC();
scoped_refptr<AudioInputDevice> device(new AudioInputDevice(
diff --git a/chromium/media/audio/audio_input_ipc.h b/chromium/media/audio/audio_input_ipc.h
index 4abfcf01b6d..b8defb609bd 100644
--- a/chromium/media/audio/audio_input_ipc.h
+++ b/chromium/media/audio/audio_input_ipc.h
@@ -22,7 +22,7 @@ class AudioProcessorControls;
class MEDIA_EXPORT AudioInputIPCDelegate {
public:
// Called when an AudioInputController has been created.
- // See media/mojo/interfaces/audio_data_pipe.mojom for documentation of
+ // See media/mojo/mojom/audio_data_pipe.mojom for documentation of
// |handle| and |socket_handle|.
virtual void OnStreamCreated(
base::ReadOnlySharedMemoryRegion shared_memory_region,
diff --git a/chromium/media/audio/audio_input_sync_writer_unittest.cc b/chromium/media/audio/audio_input_sync_writer_unittest.cc
index c887c331c1b..79583f6759b 100644
--- a/chromium/media/audio/audio_input_sync_writer_unittest.cc
+++ b/chromium/media/audio/audio_input_sync_writer_unittest.cc
@@ -17,7 +17,7 @@
#include "base/memory/read_only_shared_memory_region.h"
#include "base/sync_socket.h"
#include "base/test/mock_callback.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/time/time.h"
#include "build/build_config.h"
#include "media/base/audio_bus.h"
@@ -155,7 +155,7 @@ class AudioInputSyncWriterTest : public testing::Test {
using MockLogger =
base::MockCallback<base::RepeatingCallback<void(const std::string&)>>;
- base::test::ScopedTaskEnvironment env_;
+ base::test::TaskEnvironment env_;
MockLogger mock_logger_;
std::unique_ptr<AudioInputSyncWriter> writer_;
MockCancelableSyncSocket* socket_;
diff --git a/chromium/media/audio/audio_input_unittest.cc b/chromium/media/audio/audio_input_unittest.cc
index 2e908188daf..3763e798e06 100644
--- a/chromium/media/audio/audio_input_unittest.cc
+++ b/chromium/media/audio/audio_input_unittest.cc
@@ -8,7 +8,7 @@
#include "base/callback.h"
#include "base/environment.h"
#include "base/macros.h"
-#include "base/message_loop/message_loop.h"
+#include "base/message_loop/message_pump_type.h"
#include "base/run_loop.h"
#include "base/test/test_message_loop.h"
#include "base/threading/platform_thread.h"
@@ -69,7 +69,7 @@ class TestInputCallback : public AudioInputStream::AudioInputCallback {
class AudioInputTest : public testing::Test {
public:
AudioInputTest()
- : message_loop_(base::MessageLoop::TYPE_UI),
+ : message_loop_(base::MessagePumpType::UI),
audio_manager_(AudioManager::CreateForTesting(
std::make_unique<TestAudioThread>())),
audio_input_stream_(NULL) {
diff --git a/chromium/media/audio/audio_io.h b/chromium/media/audio/audio_io.h
index 8f56ef08512..5ceb326205c 100644
--- a/chromium/media/audio/audio_io.h
+++ b/chromium/media/audio/audio_io.h
@@ -110,6 +110,9 @@ class MEDIA_EXPORT AudioOutputStream {
// Close the stream.
// After calling this method, the object should not be used anymore.
+ // After calling this method, no further AudioSourceCallback methods
+ // should be called on the callback object that was supplied to Start()
+ // by the AudioOutputStream implementation.
virtual void Close() = 0;
// Flushes the stream. This should only be called if the stream is not
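The new comment on Close() pins down a teardown contract: once Close() returns, the implementation must not invoke the AudioSourceCallback passed to Start(), so the owner may safely destroy that callback afterwards. A hedged sketch of the intended shutdown order; the owning function and ownership model are hypothetical:

    #include <memory>

    #include "media/audio/audio_io.h"

    namespace media {

    void ShutDownStream(
        AudioOutputStream* stream,
        std::unique_ptr<AudioOutputStream::AudioSourceCallback> callback) {
      stream->Stop();    // No further OnMoreData() calls once this returns.
      stream->Close();   // The stream may delete itself; it must not touch the
                         // callback after this point.
      callback.reset();  // Safe because of the guarantee documented above.
    }

    }  // namespace media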
diff --git a/chromium/media/audio/audio_low_latency_input_output_unittest.cc b/chromium/media/audio/audio_low_latency_input_output_unittest.cc
index 4ed02eccb21..c1042ccbdbc 100644
--- a/chromium/media/audio/audio_low_latency_input_output_unittest.cc
+++ b/chromium/media/audio/audio_low_latency_input_output_unittest.cc
@@ -16,7 +16,7 @@
#include "base/run_loop.h"
#include "base/single_thread_task_runner.h"
#include "base/synchronization/lock.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/test/test_timeouts.h"
#include "base/threading/thread_task_runner_handle.h"
#include "base/time/time.h"
@@ -87,8 +87,8 @@ class AudioLowLatencyInputOutputTest : public testing::Test {
}
private:
- base::test::ScopedTaskEnvironment task_environment_{
- base::test::ScopedTaskEnvironment::MainThreadType::UI};
+ base::test::TaskEnvironment task_environment_{
+ base::test::TaskEnvironment::MainThreadType::UI};
std::unique_ptr<AudioManager> audio_manager_;
DISALLOW_COPY_AND_ASSIGN(AudioLowLatencyInputOutputTest);
diff --git a/chromium/media/audio/audio_output_device.h b/chromium/media/audio/audio_output_device.h
index ff0ea95e6c8..0a305332677 100644
--- a/chromium/media/audio/audio_output_device.h
+++ b/chromium/media/audio/audio_output_device.h
@@ -199,7 +199,7 @@ class MEDIA_EXPORT AudioOutputDevice : public AudioRendererSink,
// The media session ID used to identify which input device to be started.
// Only used by Unified IO.
- int session_id_;
+ base::UnguessableToken session_id_;
// ID of hardware output device to be used (provided |session_id_| is zero)
const std::string device_id_;
diff --git a/chromium/media/audio/audio_output_device_unittest.cc b/chromium/media/audio/audio_output_device_unittest.cc
index 90a51bdfe97..88ec75c89ca 100644
--- a/chromium/media/audio/audio_output_device_unittest.cc
+++ b/chromium/media/audio/audio_output_device_unittest.cc
@@ -19,7 +19,7 @@
#include "base/single_thread_task_runner.h"
#include "base/sync_socket.h"
#include "base/task_runner.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/thread_task_runner_handle.h"
#include "build/build_config.h"
#include "media/audio/audio_sync_reader.h"
@@ -76,7 +76,7 @@ class MockAudioOutputIPC : public AudioOutputIPC {
MOCK_METHOD3(RequestDeviceAuthorization,
void(AudioOutputIPCDelegate* delegate,
- int session_id,
+ const base::UnguessableToken& session_id,
const std::string& device_id));
MOCK_METHOD3(
CreateStream,
@@ -109,8 +109,8 @@ class AudioOutputDeviceTest : public testing::Test {
MOCK_METHOD1(OnDeviceInfoReceived, void(OutputDeviceInfo));
protected:
- base::test::ScopedTaskEnvironment task_env_{
- base::test::ScopedTaskEnvironment::TimeSource::MOCK_TIME};
+ base::test::TaskEnvironment task_env_{
+ base::test::TaskEnvironment::TimeSource::MOCK_TIME};
AudioParameters default_audio_parameters_;
StrictMock<MockRenderCallback> callback_;
MockAudioOutputIPC* audio_output_ipc_; // owned by audio_device_
@@ -148,13 +148,14 @@ void AudioOutputDeviceTest::CreateDevice(const std::string& device_id,
audio_output_ipc_ = new NiceMock<MockAudioOutputIPC>();
audio_device_ = new AudioOutputDevice(
base::WrapUnique(audio_output_ipc_), task_env_.GetMainThreadTaskRunner(),
- AudioSinkParameters(0, device_id), timeout);
+ AudioSinkParameters(base::UnguessableToken(), device_id), timeout);
}
void AudioOutputDeviceTest::SetDevice(const std::string& device_id) {
CreateDevice(device_id);
EXPECT_CALL(*audio_output_ipc_,
- RequestDeviceAuthorization(audio_device_.get(), 0, device_id));
+ RequestDeviceAuthorization(audio_device_.get(),
+ base::UnguessableToken(), device_id));
audio_device_->RequestDeviceAuthorization();
task_env_.FastForwardBy(base::TimeDelta());
@@ -277,7 +278,8 @@ TEST_F(AudioOutputDeviceTest, NonDefaultStartStopStartStop) {
StopAudioDevice();
EXPECT_CALL(*audio_output_ipc_,
- RequestDeviceAuthorization(audio_device_.get(), 0, _));
+ RequestDeviceAuthorization(audio_device_.get(),
+ base::UnguessableToken(), _));
StartAudioDevice();
// Simulate reply from browser
ReceiveAuthorization(OUTPUT_DEVICE_STATUS_OK);
@@ -307,10 +309,12 @@ TEST_F(AudioOutputDeviceTest, AuthorizationFailsBeforeInitialize_NoError) {
audio_output_ipc_ = new NiceMock<MockAudioOutputIPC>();
audio_device_ = new AudioOutputDevice(
base::WrapUnique(audio_output_ipc_), task_env_.GetMainThreadTaskRunner(),
- AudioSinkParameters(0, kDefaultDeviceId), kAuthTimeout);
+ AudioSinkParameters(base::UnguessableToken(), kDefaultDeviceId),
+ kAuthTimeout);
EXPECT_CALL(
*audio_output_ipc_,
- RequestDeviceAuthorization(audio_device_.get(), 0, kDefaultDeviceId));
+ RequestDeviceAuthorization(audio_device_.get(), base::UnguessableToken(),
+ kDefaultDeviceId));
audio_device_->RequestDeviceAuthorization();
audio_device_->Initialize(default_audio_parameters_, &callback_);
@@ -329,7 +333,8 @@ TEST_F(AudioOutputDeviceTest, AuthorizationTimedOut) {
CreateDevice(kNonDefaultDeviceId);
EXPECT_CALL(
*audio_output_ipc_,
- RequestDeviceAuthorization(audio_device_.get(), 0, kNonDefaultDeviceId));
+ RequestDeviceAuthorization(audio_device_.get(), base::UnguessableToken(),
+ kNonDefaultDeviceId));
EXPECT_CALL(*audio_output_ipc_, CloseStream());
// Request authorization; no reply from the browser.
@@ -344,9 +349,10 @@ TEST_F(AudioOutputDeviceTest, AuthorizationTimedOut) {
TEST_F(AudioOutputDeviceTest, GetOutputDeviceInfoAsync_Error) {
CreateDevice(kUnauthorizedDeviceId, base::TimeDelta());
- EXPECT_CALL(*audio_output_ipc_,
- RequestDeviceAuthorization(audio_device_.get(), 0,
- kUnauthorizedDeviceId));
+ EXPECT_CALL(
+ *audio_output_ipc_,
+ RequestDeviceAuthorization(audio_device_.get(), base::UnguessableToken(),
+ kUnauthorizedDeviceId));
audio_device_->RequestDeviceAuthorization();
audio_device_->GetOutputDeviceInfoAsync(base::BindOnce(
&AudioOutputDeviceTest::OnDeviceInfoReceived, base::Unretained(this)));
@@ -372,7 +378,8 @@ TEST_F(AudioOutputDeviceTest, GetOutputDeviceInfoAsync_Okay) {
CreateDevice(kDefaultDeviceId, base::TimeDelta());
EXPECT_CALL(
*audio_output_ipc_,
- RequestDeviceAuthorization(audio_device_.get(), 0, kDefaultDeviceId));
+ RequestDeviceAuthorization(audio_device_.get(), base::UnguessableToken(),
+ kDefaultDeviceId));
audio_device_->RequestDeviceAuthorization();
audio_device_->GetOutputDeviceInfoAsync(base::BindOnce(
&AudioOutputDeviceTest::OnDeviceInfoReceived, base::Unretained(this)));
@@ -455,13 +462,15 @@ TEST_F(AudioOutputDeviceTest, MAYBE_VerifyDataFlow) {
auto* ipc = new MockAudioOutputIPC(); // owned by |audio_device|.
auto audio_device = base::MakeRefCounted<AudioOutputDevice>(
base::WrapUnique(ipc), task_env_.GetMainThreadTaskRunner(),
- AudioSinkParameters(0, kDefaultDeviceId), kAuthTimeout);
+ AudioSinkParameters(base::UnguessableToken(), kDefaultDeviceId),
+ kAuthTimeout);
// Start a stream.
audio_device->RequestDeviceAuthorization();
audio_device->Initialize(params, &env.callback);
audio_device->Start();
- EXPECT_CALL(*ipc, RequestDeviceAuthorization(audio_device.get(), 0,
+ EXPECT_CALL(*ipc, RequestDeviceAuthorization(audio_device.get(),
+ base::UnguessableToken(),
kDefaultDeviceId));
EXPECT_CALL(*ipc, CreateStream(audio_device.get(), _, _));
EXPECT_CALL(*ipc, PlayStream());
@@ -517,12 +526,14 @@ TEST_F(AudioOutputDeviceTest, CreateNondefaultDevice) {
auto* ipc = new MockAudioOutputIPC(); // owned by |audio_device|.
auto audio_device = base::MakeRefCounted<AudioOutputDevice>(
base::WrapUnique(ipc), task_env_.GetMainThreadTaskRunner(),
- AudioSinkParameters(0, kNonDefaultDeviceId), kAuthTimeout);
+ AudioSinkParameters(base::UnguessableToken(), kNonDefaultDeviceId),
+ kAuthTimeout);
audio_device->RequestDeviceAuthorization();
audio_device->Initialize(params, &env.callback);
audio_device->Start();
- EXPECT_CALL(*ipc, RequestDeviceAuthorization(audio_device.get(), 0,
+ EXPECT_CALL(*ipc, RequestDeviceAuthorization(audio_device.get(),
+ base::UnguessableToken(),
kNonDefaultDeviceId));
EXPECT_CALL(*ipc, CreateStream(audio_device.get(), _, _));
EXPECT_CALL(*ipc, PlayStream());
@@ -552,13 +563,15 @@ TEST_F(AudioOutputDeviceTest, CreateBitStreamStream) {
auto* ipc = new MockAudioOutputIPC(); // owned by |audio_device|.
auto audio_device = base::MakeRefCounted<AudioOutputDevice>(
base::WrapUnique(ipc), task_env_.GetMainThreadTaskRunner(),
- AudioSinkParameters(0, kNonDefaultDeviceId), kAuthTimeout);
+ AudioSinkParameters(base::UnguessableToken(), kNonDefaultDeviceId),
+ kAuthTimeout);
// Start a stream.
audio_device->RequestDeviceAuthorization();
audio_device->Initialize(params, &env.callback);
audio_device->Start();
- EXPECT_CALL(*ipc, RequestDeviceAuthorization(audio_device.get(), 0,
+ EXPECT_CALL(*ipc, RequestDeviceAuthorization(audio_device.get(),
+ base::UnguessableToken(),
kNonDefaultDeviceId));
EXPECT_CALL(*ipc, CreateStream(audio_device.get(), _, _));
EXPECT_CALL(*ipc, PlayStream());
diff --git a/chromium/media/audio/audio_output_ipc.h b/chromium/media/audio/audio_output_ipc.h
index f41054e573b..abe70531502 100644
--- a/chromium/media/audio/audio_output_ipc.h
+++ b/chromium/media/audio/audio_output_ipc.h
@@ -32,7 +32,7 @@ class MEDIA_EXPORT AudioOutputIPCDelegate {
const std::string& matched_device_id) = 0;
// Called when an audio stream has been created.
- // See media/mojo/interfaces/audio_data_pipe.mojom for documentation of
+ // See media/mojo/mojom/audio_data_pipe.mojom for documentation of
// |handle| and |socket_handle|. |playing_automatically| indicates if the
// AudioOutputIPCDelegate is playing right away due to an earlier call to
// Play();
@@ -71,9 +71,10 @@ class MEDIA_EXPORT AudioOutputIPC {
// the default device.
// Once the authorization process is complete, the implementation will
// notify |delegate| by calling OnDeviceAuthorized().
- virtual void RequestDeviceAuthorization(AudioOutputIPCDelegate* delegate,
- int session_id,
- const std::string& device_id) = 0;
+ virtual void RequestDeviceAuthorization(
+ AudioOutputIPCDelegate* delegate,
+ const base::UnguessableToken& session_id,
+ const std::string& device_id) = 0;
// Sends a request to create an AudioOutputController object in the peer
// process and configures it to use the specified audio |params| including
diff --git a/chromium/media/audio/audio_output_proxy_unittest.cc b/chromium/media/audio/audio_output_proxy_unittest.cc
index e4ba36f2852..e56d1cef484 100644
--- a/chromium/media/audio/audio_output_proxy_unittest.cc
+++ b/chromium/media/audio/audio_output_proxy_unittest.cc
@@ -11,7 +11,7 @@
#include "base/bind.h"
#include "base/run_loop.h"
#include "base/single_thread_task_runner.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/thread_task_runner_handle.h"
#include "build/build_config.h"
#include "media/audio/audio_manager.h"
@@ -488,7 +488,7 @@ class AudioOutputProxyTest : public testing::Test {
proxy->Close();
}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::SingleThreadTaskEnvironment task_environment_;
MockAudioManager manager_;
std::unique_ptr<AudioOutputDispatcherImpl> dispatcher_impl_;
MockAudioSourceCallback callback_;
@@ -511,7 +511,7 @@ class AudioOutputResamplerTest : public AudioOutputProxyTest {
void OnStart() override {
// Let Start() run for a bit.
base::RunLoop run_loop;
- scoped_task_environment_.GetMainThreadTaskRunner()->PostDelayedTask(
+ task_environment_.GetMainThreadTaskRunner()->PostDelayedTask(
FROM_HERE, run_loop.QuitClosure(),
base::TimeDelta::FromMilliseconds(kStartRunTimeMs));
run_loop.Run();
@@ -824,7 +824,7 @@ TEST_F(AudioOutputResamplerTest, FallbackRecovery) {
// Once all proxies have been closed, AudioOutputResampler will start the
// reinitialization timer and execute it after the close delay elapses.
base::RunLoop run_loop;
- scoped_task_environment_.GetMainThreadTaskRunner()->PostDelayedTask(
+ task_environment_.GetMainThreadTaskRunner()->PostDelayedTask(
FROM_HERE, run_loop.QuitClosure(),
base::TimeDelta::FromMilliseconds(2 * kTestCloseDelayMs));
run_loop.Run();
diff --git a/chromium/media/audio/audio_output_unittest.cc b/chromium/media/audio/audio_output_unittest.cc
index 41e4aeceb06..dd67725698d 100644
--- a/chromium/media/audio/audio_output_unittest.cc
+++ b/chromium/media/audio/audio_output_unittest.cc
@@ -7,9 +7,9 @@
#include <memory>
#include "base/memory/aligned_memory.h"
-#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
#include "base/test/bind_test_util.h"
+#include "base/test/task_environment.h"
#include "base/test/test_timeouts.h"
#include "base/threading/thread_task_runner_handle.h"
#include "base/time/time.h"
@@ -56,7 +56,8 @@ class AudioOutputTest : public ::testing::Test {
}
protected:
- base::MessageLoopForIO message_loop_;
+ base::test::SingleThreadTaskEnvironment task_environment_{
+ base::test::SingleThreadTaskEnvironment::MainThreadType::IO};
std::unique_ptr<AudioManager> audio_manager_;
std::unique_ptr<AudioDeviceInfoAccessorForTests> audio_manager_device_info_;
AudioParameters stream_params_;
diff --git a/chromium/media/audio/audio_sink_parameters.cc b/chromium/media/audio/audio_sink_parameters.cc
index 2d0b4e8a2d1..8287fa07779 100644
--- a/chromium/media/audio/audio_sink_parameters.cc
+++ b/chromium/media/audio/audio_sink_parameters.cc
@@ -7,8 +7,9 @@
namespace media {
AudioSinkParameters::AudioSinkParameters() = default;
-AudioSinkParameters::AudioSinkParameters(int session_id,
- const std::string& device_id)
+AudioSinkParameters::AudioSinkParameters(
+ const base::UnguessableToken& session_id,
+ const std::string& device_id)
: session_id(session_id), device_id(device_id) {}
AudioSinkParameters::AudioSinkParameters(const AudioSinkParameters& params) =
default;
diff --git a/chromium/media/audio/audio_sink_parameters.h b/chromium/media/audio/audio_sink_parameters.h
index 88e3fab21d3..a51f1b82bc7 100644
--- a/chromium/media/audio/audio_sink_parameters.h
+++ b/chromium/media/audio/audio_sink_parameters.h
@@ -24,11 +24,12 @@ namespace media {
// association.
struct MEDIA_EXPORT AudioSinkParameters final {
AudioSinkParameters();
- AudioSinkParameters(int session_id, const std::string& device_id);
+ AudioSinkParameters(const base::UnguessableToken& session_id,
+ const std::string& device_id);
AudioSinkParameters(const AudioSinkParameters& params);
~AudioSinkParameters();
- int session_id = 0;
+ base::UnguessableToken session_id;
std::string device_id;
base::Optional<base::UnguessableToken> processing_id;
};
diff --git a/chromium/media/audio/audio_source_parameters.cc b/chromium/media/audio/audio_source_parameters.cc
index 3f05147eeeb..b8c631af30b 100644
--- a/chromium/media/audio/audio_source_parameters.cc
+++ b/chromium/media/audio/audio_source_parameters.cc
@@ -7,7 +7,8 @@
namespace media {
AudioSourceParameters::AudioSourceParameters() = default;
-AudioSourceParameters::AudioSourceParameters(int session_id)
+AudioSourceParameters::AudioSourceParameters(
+ const base::UnguessableToken& session_id)
: session_id(session_id) {}
AudioSourceParameters::AudioSourceParameters(
const AudioSourceParameters& params) = default;
diff --git a/chromium/media/audio/audio_source_parameters.h b/chromium/media/audio/audio_source_parameters.h
index 3de2dbbbb2b..96e1c1960ab 100644
--- a/chromium/media/audio/audio_source_parameters.h
+++ b/chromium/media/audio/audio_source_parameters.h
@@ -20,11 +20,11 @@ namespace media {
// input device will be selected. This is the state when default constructed.
struct MEDIA_EXPORT AudioSourceParameters final {
AudioSourceParameters();
- explicit AudioSourceParameters(int session_id);
+ explicit AudioSourceParameters(const base::UnguessableToken& session_id);
AudioSourceParameters(const AudioSourceParameters& params);
~AudioSourceParameters();
- int session_id = 0;
+ base::UnguessableToken session_id;
struct MEDIA_EXPORT ProcessingConfig {
ProcessingConfig(base::UnguessableToken id,
diff --git a/chromium/media/audio/audio_sync_reader_unittest.cc b/chromium/media/audio/audio_sync_reader_unittest.cc
index f92eaba5794..b1416d8d7e5 100644
--- a/chromium/media/audio/audio_sync_reader_unittest.cc
+++ b/chromium/media/audio/audio_sync_reader_unittest.cc
@@ -13,7 +13,7 @@
#include "base/bind.h"
#include "base/memory/shared_memory.h"
#include "base/sync_socket.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/time/time.h"
#include "media/base/audio_bus.h"
#include "media/base/audio_parameters.h"
@@ -49,7 +49,7 @@ class AudioSyncReaderBitstreamTest : public TestWithParam<OverflowTestCase> {
~AudioSyncReaderBitstreamTest() override {}
private:
- base::test::ScopedTaskEnvironment env_;
+ base::test::TaskEnvironment env_;
};
TEST_P(AudioSyncReaderBitstreamTest, BitstreamBufferOverflow_DoesNotWriteOOB) {
diff --git a/chromium/media/audio/audio_system_impl_unittest.cc b/chromium/media/audio/audio_system_impl_unittest.cc
index cf85ff0036c..8375d35908c 100644
--- a/chromium/media/audio/audio_system_impl_unittest.cc
+++ b/chromium/media/audio/audio_system_impl_unittest.cc
@@ -3,7 +3,8 @@
// found in the LICENSE file.
#include "media/audio/audio_system_impl.h"
-#include "base/test/scoped_task_environment.h"
+
+#include "base/test/task_environment.h"
#include "media/audio/audio_system_test_util.h"
#include "media/audio/audio_thread_impl.h"
#include "media/audio/mock_audio_manager.h"
@@ -35,7 +36,7 @@ class AudioSystemImplTestBase : public testing::Test {
MockAudioManager* audio_manager() { return audio_manager_.get(); }
AudioSystem* audio_system() { return audio_system_.get(); }
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::SingleThreadTaskEnvironment task_environment_;
std::unique_ptr<MockAudioManager> audio_manager_;
std::unique_ptr<AudioSystem> audio_system_;
// AudioSystemTester tester_;
diff --git a/chromium/media/audio/audio_thread_hang_monitor.cc b/chromium/media/audio/audio_thread_hang_monitor.cc
index 727e198fafa..37351e1a60a 100644
--- a/chromium/media/audio/audio_thread_hang_monitor.cc
+++ b/chromium/media/audio/audio_thread_hang_monitor.cc
@@ -47,7 +47,7 @@ AudioThreadHangMonitor::Ptr AudioThreadHangMonitor::Create(
scoped_refptr<base::SingleThreadTaskRunner> audio_thread_task_runner,
scoped_refptr<base::SequencedTaskRunner> monitor_task_runner) {
if (!monitor_task_runner)
- monitor_task_runner = base::CreateSequencedTaskRunnerWithTraits({});
+ monitor_task_runner = base::CreateSequencedTaskRunner({base::ThreadPool()});
auto monitor =
Ptr(new AudioThreadHangMonitor(hang_action, hang_deadline, clock,
diff --git a/chromium/media/audio/audio_thread_hang_monitor_unittest.cc b/chromium/media/audio/audio_thread_hang_monitor_unittest.cc
index ac5b3de6c32..a173c726d5a 100644
--- a/chromium/media/audio/audio_thread_hang_monitor_unittest.cc
+++ b/chromium/media/audio/audio_thread_hang_monitor_unittest.cc
@@ -10,7 +10,7 @@
#include "base/synchronization/waitable_event.h"
#include "base/task/post_task.h"
#include "base/test/metrics/histogram_tester.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/thread.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -38,13 +38,13 @@ constexpr base::TimeDelta kLongHangDeadline = base::TimeDelta::FromMinutes(30);
class AudioThreadHangMonitorTest : public Test {
public:
AudioThreadHangMonitorTest()
- : task_env_(base::test::ScopedTaskEnvironment::TimeSource::MOCK_TIME),
+ : task_env_(base::test::TaskEnvironment::TimeSource::MOCK_TIME),
histograms_(),
audio_thread_("Audio thread"),
hang_monitor_({nullptr, base::OnTaskRunnerDeleter(nullptr)}) {
CHECK(audio_thread_.Start());
// We must inject the main thread task runner as the hang monitor task
- // runner since ScopedTaskEnvironment::FastForwardBy only works for the main
+ // runner since TaskEnvironment::FastForwardBy only works for the main
// thread.
hang_monitor_ = AudioThreadHangMonitor::Create(
HangAction::kDoNothing, base::nullopt, task_env_.GetMockTickClock(),
@@ -87,7 +87,7 @@ class AudioThreadHangMonitorTest : public Test {
MOCK_METHOD0(HangActionTerminate, void());
base::WaitableEvent event_;
- base::test::ScopedTaskEnvironment task_env_;
+ base::test::TaskEnvironment task_env_;
base::HistogramTester histograms_;
base::Thread audio_thread_;
AudioThreadHangMonitor::Ptr hang_monitor_;
diff --git a/chromium/media/audio/audio_thread_impl.cc b/chromium/media/audio/audio_thread_impl.cc
index c3dfddf7335..4f7b07c135c 100644
--- a/chromium/media/audio/audio_thread_impl.cc
+++ b/chromium/media/audio/audio_thread_impl.cc
@@ -4,6 +4,7 @@
#include "media/audio/audio_thread_impl.h"
+#include "base/message_loop/message_pump_type.h"
#include "base/optional.h"
#include "base/threading/thread_task_runner_handle.h"
#include "base/time/default_tick_clock.h"
@@ -20,7 +21,7 @@ AudioThreadImpl::AudioThreadImpl()
thread_.init_com_with_mta(true);
#elif defined(OS_FUCHSIA)
// FIDL-based APIs require async_t, which is initialized on IO thread.
- thread_options.message_loop_type = base::MessageLoop::TYPE_IO;
+ thread_options.message_pump_type = base::MessagePumpType::IO;
#endif
CHECK(thread_.StartWithOptions(thread_options));
diff --git a/chromium/media/audio/cras/cras_input.cc b/chromium/media/audio/cras/cras_input.cc
index 4b4276f2fae..954fc2ad001 100644
--- a/chromium/media/audio/cras/cras_input.cc
+++ b/chromium/media/audio/cras/cras_input.cc
@@ -210,6 +210,9 @@ void CrasInputStream::Start(AudioInputCallback* callback) {
return;
}
+ cras_client_stream_params_set_client_type(stream_params,
+ CRAS_CLIENT_TYPE_CHROME);
+
if (UseCrasAec())
cras_client_stream_params_enable_aec(stream_params);
diff --git a/chromium/media/audio/cras/cras_unified.cc b/chromium/media/audio/cras/cras_unified.cc
index 0719abca359..002d611146b 100644
--- a/chromium/media/audio/cras/cras_unified.cc
+++ b/chromium/media/audio/cras/cras_unified.cc
@@ -202,6 +202,9 @@ void CrasUnifiedStream::Start(AudioSourceCallback* callback) {
return;
}
+ cras_client_stream_params_set_client_type(stream_params,
+ CRAS_CLIENT_TYPE_CHROME);
+
// Before starting the stream, save the number of bytes in a frame for use in
// the callback.
bytes_per_frame_ = cras_client_format_bytes_per_frame(audio_format);
diff --git a/chromium/media/audio/fake_audio_input_stream.cc b/chromium/media/audio/fake_audio_input_stream.cc
index 0c03c657972..ce75d1e175e 100644
--- a/chromium/media/audio/fake_audio_input_stream.cc
+++ b/chromium/media/audio/fake_audio_input_stream.cc
@@ -4,18 +4,29 @@
#include "media/audio/fake_audio_input_stream.h"
+#include <memory>
+#include <string>
+
#include "base/atomicops.h"
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/command_line.h"
#include "base/files/file_path.h"
+#include "base/macros.h"
#include "base/memory/ptr_util.h"
+#include "base/memory/ref_counted.h"
+#include "base/memory/scoped_refptr.h"
#include "base/single_thread_task_runner.h"
#include "base/strings/string_split.h"
+#include "base/synchronization/lock.h"
+#include "base/thread_annotations.h"
+#include "base/threading/platform_thread.h"
+#include "base/threading/thread.h"
#include "base/time/time.h"
#include "media/audio/audio_manager_base.h"
#include "media/audio/simple_sources.h"
#include "media/base/audio_bus.h"
+#include "media/base/audio_parameters.h"
#include "media/base/media_switches.h"
namespace media {
@@ -33,15 +44,20 @@ AudioInputStream* FakeAudioInputStream::MakeFakeStream(
FakeAudioInputStream::FakeAudioInputStream(AudioManagerBase* manager,
const AudioParameters& params)
: audio_manager_(manager),
- callback_(NULL),
- fake_audio_worker_(manager->GetWorkerTaskRunner(), params),
+ callback_(nullptr),
params_(params),
- audio_bus_(AudioBus::Create(params)) {
+ audio_bus_(AudioBus::Create(params)),
+ capture_thread_(
+ nullptr,
+ base::OnTaskRunnerDeleter(manager->GetWorkerTaskRunner())) {
DCHECK(audio_manager_->GetTaskRunner()->BelongsToCurrentThread());
}
FakeAudioInputStream::~FakeAudioInputStream() {
+  // |capture_thread_| and |fake_audio_worker_| should be null here; Stop()
+  // should have been called before the stream is destroyed.
+ DCHECK(!capture_thread_);
DCHECK(!callback_);
+ DCHECK(!fake_audio_worker_);
}
bool FakeAudioInputStream::Open() {
@@ -51,17 +67,49 @@ bool FakeAudioInputStream::Open() {
return true;
}
-void FakeAudioInputStream::Start(AudioInputCallback* callback) {
+void FakeAudioInputStream::Start(AudioInputCallback* callback) {
DCHECK(audio_manager_->GetTaskRunner()->BelongsToCurrentThread());
- callback_ = callback;
- fake_audio_worker_.Start(base::BindRepeating(
+ DCHECK(!capture_thread_);
+ DCHECK(callback);
+ DCHECK(!fake_audio_worker_);
+
+ capture_thread_.reset(new base::Thread("FakeAudioInput"));
+ base::Thread::Options options;
+ // REALTIME_AUDIO priority is needed to avoid audio playout delays.
+ // See crbug.com/971265
+ options.priority = base::ThreadPriority::REALTIME_AUDIO;
+ CHECK(capture_thread_->StartWithOptions(options));
+
+ {
+ base::AutoLock lock(callback_lock_);
+ DCHECK(!callback_);
+ callback_ = callback;
+ }
+
+ fake_audio_worker_ = std::make_unique<FakeAudioWorker>(
+ capture_thread_->task_runner(), params_);
+ fake_audio_worker_->Start(base::BindRepeating(
&FakeAudioInputStream::ReadAudioFromSource, base::Unretained(this)));
}
void FakeAudioInputStream::Stop() {
DCHECK(audio_manager_->GetTaskRunner()->BelongsToCurrentThread());
- fake_audio_worker_.Stop();
- callback_ = NULL;
+ // Start has not been called yet.
+ if (!capture_thread_) {
+ return;
+ }
+
+ {
+ base::AutoLock lock(callback_lock_);
+ DCHECK(callback_);
+ callback_ = nullptr;
+ }
+
+ DCHECK(fake_audio_worker_);
+ fake_audio_worker_->Stop();
+ fake_audio_worker_.reset();
+
+ capture_thread_.reset();
}
void FakeAudioInputStream::Close() {
@@ -104,8 +152,7 @@ void FakeAudioInputStream::SetOutputDeviceForAec(
void FakeAudioInputStream::ReadAudioFromSource(base::TimeTicks ideal_time,
base::TimeTicks now) {
- DCHECK(audio_manager_->GetWorkerTaskRunner()->BelongsToCurrentThread());
- DCHECK(callback_);
+ DCHECK(capture_thread_->task_runner()->BelongsToCurrentThread());
if (!audio_source_)
audio_source_ = ChooseSource();
@@ -121,13 +168,19 @@ void FakeAudioInputStream::ReadAudioFromSource(base::TimeTicks ideal_time,
//
// However, it would be pointless to add a FIFO queue here to delay the signal
// in this "fake" implementation. So, just hack the timing and carry-on.
- audio_source_->OnMoreData(base::TimeDelta(), ideal_time, 0, audio_bus_.get());
- callback_->OnData(audio_bus_.get(), ideal_time, 1.0);
+ {
+ base::AutoLock lock(callback_lock_);
+ if (audio_bus_ && callback_) {
+ audio_source_->OnMoreData(base::TimeDelta(), ideal_time, 0,
+ audio_bus_.get());
+ callback_->OnData(audio_bus_.get(), ideal_time, 1.0);
+ }
+ }
}
using AudioSourceCallback = AudioOutputStream::AudioSourceCallback;
std::unique_ptr<AudioSourceCallback> FakeAudioInputStream::ChooseSource() {
- DCHECK(audio_manager_->GetWorkerTaskRunner()->BelongsToCurrentThread());
+ DCHECK(capture_thread_->task_runner()->BelongsToCurrentThread());
if (base::CommandLine::ForCurrentProcess()->HasSwitch(
switches::kUseFileForFakeAudioCapture)) {
diff --git a/chromium/media/audio/fake_audio_input_stream.h b/chromium/media/audio/fake_audio_input_stream.h
index 29f74dfed22..b76f12fdde5 100644
--- a/chromium/media/audio/fake_audio_input_stream.h
+++ b/chromium/media/audio/fake_audio_input_stream.h
@@ -8,10 +8,13 @@
#define MEDIA_AUDIO_FAKE_AUDIO_INPUT_STREAM_H_
#include <memory>
+#include <string>
#include <vector>
#include "base/callback_forward.h"
#include "base/macros.h"
+#include "base/memory/scoped_refptr.h"
+#include "base/threading/thread.h"
#include "media/audio/audio_io.h"
#include "media/base/audio_parameters.h"
#include "media/base/fake_audio_worker.h"
@@ -68,12 +71,18 @@ class MEDIA_EXPORT FakeAudioInputStream
void ReadAudioFromSource(base::TimeTicks ideal_time, base::TimeTicks now);
AudioManagerBase* audio_manager_;
- AudioInputCallback* callback_;
- FakeAudioWorker fake_audio_worker_;
+  // |callback_| needs the lock because ReadAudioFromSource() reads |callback_|
+  // on the capture thread, while |callback_| is set on the audio thread.
+ base::Lock callback_lock_;
+ AudioInputCallback* callback_ GUARDED_BY(callback_lock_);
AudioParameters params_;
+ std::unique_ptr<FakeAudioWorker> fake_audio_worker_;
std::unique_ptr<AudioOutputStream::AudioSourceCallback> audio_source_;
std::unique_ptr<media::AudioBus> audio_bus_;
+ // We will delete the capture thread on the AudioManager worker task runner
+ // since the audio thread is the main UI thread on Mac.
+ std::unique_ptr<base::Thread, base::OnTaskRunnerDeleter> capture_thread_;
DISALLOW_COPY_AND_ASSIGN(FakeAudioInputStream);
};
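The fake stream now drives capture from a dedicated REALTIME_AUDIO thread, so |callback_| is written on the audio thread but read on the capture thread; the lock plus GUARDED_BY annotation make that hand-off safe. A reduced sketch of the same pattern, assuming //base locking primitives; the class and the function-pointer callback type are invented for illustration:

    #include "base/synchronization/lock.h"
    #include "base/thread_annotations.h"

    // Hypothetical reduction of the FakeAudioInputStream locking scheme: one
    // thread installs or clears the callback, another thread consumes it.
    class CallbackHolder {
     public:
      void SetCallback(void (*callback)(int)) {
        base::AutoLock lock(lock_);
        callback_ = callback;
      }

      void ClearCallback() {
        base::AutoLock lock(lock_);
        callback_ = nullptr;
      }

      // Runs on a different thread; only invokes the callback if it is still
      // installed, mirroring ReadAudioFromSource().
      void Dispatch(int value) {
        base::AutoLock lock(lock_);
        if (callback_)
          callback_(value);
      }

     private:
      base::Lock lock_;
      void (*callback_)(int) GUARDED_BY(lock_) = nullptr;
    };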
diff --git a/chromium/media/audio/fuchsia/OWNERS b/chromium/media/audio/fuchsia/OWNERS
index e7034eabb1e..c1b584511a6 100644
--- a/chromium/media/audio/fuchsia/OWNERS
+++ b/chromium/media/audio/fuchsia/OWNERS
@@ -1 +1,4 @@
file://build/fuchsia/OWNERS
+# COMPONENT: Fuchsia
+# OS: Fuchsia
+# TEAM: cr-fuchsia@chromium.org
diff --git a/chromium/media/audio/mac/audio_auhal_mac_unittest.cc b/chromium/media/audio/mac/audio_auhal_mac_unittest.cc
index 03eb7544f46..281b9415219 100644
--- a/chromium/media/audio/mac/audio_auhal_mac_unittest.cc
+++ b/chromium/media/audio/mac/audio_auhal_mac_unittest.cc
@@ -4,7 +4,7 @@
#include "base/bind.h"
#include "base/macros.h"
-#include "base/message_loop/message_loop.h"
+#include "base/message_loop/message_pump_type.h"
#include "base/run_loop.h"
#include "base/synchronization/waitable_event.h"
#include "base/test/test_message_loop.h"
@@ -39,7 +39,7 @@ ACTION_P3(MaybeSignalEvent, counter, signal_at_count, event) {
class AUHALStreamTest : public testing::Test {
public:
AUHALStreamTest()
- : message_loop_(base::MessageLoop::TYPE_UI),
+ : message_loop_(base::MessagePumpType::UI),
manager_(AudioManager::CreateForTesting(
std::make_unique<TestAudioThread>())),
manager_device_info_(manager_.get()) {
diff --git a/chromium/media/audio/mac/audio_device_listener_mac_unittest.cc b/chromium/media/audio/mac/audio_device_listener_mac_unittest.cc
index b222c5932a0..245a2cd4714 100644
--- a/chromium/media/audio/mac/audio_device_listener_mac_unittest.cc
+++ b/chromium/media/audio/mac/audio_device_listener_mac_unittest.cc
@@ -13,7 +13,7 @@
#include "base/macros.h"
#include "base/run_loop.h"
#include "base/single_thread_task_runner.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/base/bind_to_current_loop.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -25,7 +25,7 @@ class AudioDeviceListenerMacTest : public testing::Test {
AudioDeviceListenerMacTest() {
// It's important to create the device listener from the message loop in
// order to ensure we don't end up with unbalanced TaskObserver calls.
- scoped_task_environment_.GetMainThreadTaskRunner()->PostTask(
+ task_environment_.GetMainThreadTaskRunner()->PostTask(
FROM_HERE,
base::BindOnce(&AudioDeviceListenerMacTest::CreateDeviceListener,
base::Unretained(this)));
@@ -35,7 +35,7 @@ class AudioDeviceListenerMacTest : public testing::Test {
virtual ~AudioDeviceListenerMacTest() {
// It's important to destroy the device listener from the message loop in
// order to ensure we don't end up with unbalanced TaskObserver calls.
- scoped_task_environment_.GetMainThreadTaskRunner()->PostTask(
+ task_environment_.GetMainThreadTaskRunner()->PostTask(
FROM_HERE,
base::BindOnce(&AudioDeviceListenerMacTest::DestroyDeviceListener,
base::Unretained(this)));
@@ -98,7 +98,7 @@ class AudioDeviceListenerMacTest : public testing::Test {
MOCK_METHOD0(OnDeviceChange, void());
protected:
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::SingleThreadTaskEnvironment task_environment_;
std::unique_ptr<AudioDeviceListenerMac> device_listener_;
DISALLOW_COPY_AND_ASSIGN(AudioDeviceListenerMacTest);
diff --git a/chromium/media/audio/mac/audio_low_latency_input_mac_unittest.cc b/chromium/media/audio/mac/audio_low_latency_input_mac_unittest.cc
index 6f46b9f2358..cb47dae8204 100644
--- a/chromium/media/audio/mac/audio_low_latency_input_mac_unittest.cc
+++ b/chromium/media/audio/mac/audio_low_latency_input_mac_unittest.cc
@@ -11,7 +11,7 @@
#include "base/memory/ptr_util.h"
#include "base/run_loop.h"
#include "base/single_thread_task_runner.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/test/test_timeouts.h"
#include "base/threading/platform_thread.h"
#include "media/audio/audio_device_description.h"
@@ -113,8 +113,8 @@ class WriteToFileAudioSink : public AudioInputStream::AudioInputCallback {
class MacAudioInputTest : public testing::Test {
protected:
MacAudioInputTest()
- : scoped_task_environment_(
- base::test::ScopedTaskEnvironment::MainThreadType::UI),
+ : task_environment_(
+ base::test::SingleThreadTaskEnvironment::MainThreadType::UI),
audio_manager_(AudioManager::CreateForTesting(
std::make_unique<TestAudioThread>())) {
// Wait for the AudioManager to finish any initialization on the audio loop.
@@ -159,7 +159,7 @@ class MacAudioInputTest : public testing::Test {
void OnLogMessage(const std::string& message) { log_message_ = message; }
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::SingleThreadTaskEnvironment task_environment_;
std::unique_ptr<AudioManager> audio_manager_;
std::string log_message_;
};
@@ -219,7 +219,7 @@ TEST_F(MacAudioInputTest, AUAudioInputStreamVerifyMonoRecording) {
EXPECT_CALL(sink, OnData(NotNull(), _, _))
.Times(AtLeast(10))
.WillRepeatedly(CheckCountAndPostQuitTask(
- &count, 10, scoped_task_environment_.GetMainThreadTaskRunner(),
+ &count, 10, task_environment_.GetMainThreadTaskRunner(),
run_loop.QuitClosure()));
ais->Start(&sink);
run_loop.Run();
@@ -255,7 +255,7 @@ TEST_F(MacAudioInputTest, AUAudioInputStreamVerifyStereoRecording) {
EXPECT_CALL(sink, OnData(NotNull(), _, _))
.Times(AtLeast(10))
.WillRepeatedly(CheckCountAndPostQuitTask(
- &count, 10, scoped_task_environment_.GetMainThreadTaskRunner(),
+ &count, 10, task_environment_.GetMainThreadTaskRunner(),
run_loop.QuitClosure()));
ais->Start(&sink);
run_loop.Run();
diff --git a/chromium/media/audio/mac/coreaudio_dispatch_override.cc b/chromium/media/audio/mac/coreaudio_dispatch_override.cc
index 2e555e16e4d..68a9618819c 100644
--- a/chromium/media/audio/mac/coreaudio_dispatch_override.cc
+++ b/chromium/media/audio/mac/coreaudio_dispatch_override.cc
@@ -97,14 +97,23 @@ bool AddressIsPauseOrResume(intptr_t address) {
DCHECK_EQ(strcmp(info.dli_fname, kCoreAudioPath), 0);
+  // Before macOS 10.10, this code is not applied because dyld is not
+  // available.
+  // From macOS 10.10 up to (but not including) 10.15, the target functions
+  // that trigger the interposition are HALC_IOContext_ResumeIO and
+  // HALC_IOContext_PauseIO, for resume and pause respectively.
+  // Starting with macOS 10.15, the target functions are _XIOContext_ResumeIO
+  // and _XIOContext_PauseIO, for resume and pause respectively.
if (!resumeio_callsite && info.dli_sname &&
- strcmp(info.dli_sname, "HALC_IOContext_ResumeIO") == 0) {
+ (strcmp(info.dli_sname, "HALC_IOContext_ResumeIO") == 0 ||
+ strcmp(info.dli_sname, "_XIOContext_ResumeIO") == 0)) {
resumeio_callsite = address;
base::subtle::NoBarrier_CompareAndSwap(&g_resumeio_callsite, 0,
resumeio_callsite);
LogCallsiteLookupEvent(LOOKUP_RESUMEIO_CALLSITE_FOUND);
} else if (!pauseio_callsite && info.dli_sname &&
- strcmp(info.dli_sname, "HALC_IOContext_PauseIO") == 0) {
+ (strcmp(info.dli_sname, "HALC_IOContext_PauseIO") == 0 ||
+ strcmp(info.dli_sname, "_XIOContext_PauseIO") == 0)) {
pauseio_callsite = address;
base::subtle::NoBarrier_CompareAndSwap(&g_pauseio_callsite, 0,
pauseio_callsite);
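
    For context on the lookup above: the call-site address is resolved with
    dladdr(), and the resulting dli_sname decides which callsite global gets set.
    A standalone sketch of that resolution step (DescribeCallsite is a hypothetical
    helper, not part of this change):

    #include <dlfcn.h>
    #include <cstdint>
    #include <cstdio>

    static void DescribeCallsite(intptr_t address) {
      Dl_info info = {};
      // dladdr() maps an address to its containing image (dli_fname) and the
      // nearest exported symbol (dli_sname).
      if (dladdr(reinterpret_cast<void*>(address), &info) && info.dli_sname)
        std::printf("%s: %s\n", info.dli_fname, info.dli_sname);
    }
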
diff --git a/chromium/media/audio/power_observer_helper_unittest.cc b/chromium/media/audio/power_observer_helper_unittest.cc
index 5a1f0b41c55..0d68d4ea207 100644
--- a/chromium/media/audio/power_observer_helper_unittest.cc
+++ b/chromium/media/audio/power_observer_helper_unittest.cc
@@ -6,7 +6,7 @@
#include "base/bind.h"
#include "base/synchronization/waitable_event.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/thread.h"
#include "media/audio/power_observer_helper.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -102,7 +102,7 @@ class PowerObserverHelperTest : public testing::Test {
}
// The test task environment.
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
// The thread the helper is run on.
base::Thread power_observer_helper_thread_;
diff --git a/chromium/media/audio/pulse/pulse_util.cc b/chromium/media/audio/pulse/pulse_util.cc
index 97debbdbc07..66d7149bfec 100644
--- a/chromium/media/audio/pulse/pulse_util.cc
+++ b/chromium/media/audio/pulse/pulse_util.cc
@@ -13,6 +13,7 @@
#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/ptr_util.h"
+#include "build/branding_buildflags.h"
#include "media/audio/audio_device_description.h"
#include "media/base/audio_timestamp_helper.h"
@@ -30,7 +31,7 @@ namespace pulse {
namespace {
-#if defined(GOOGLE_CHROME_BUILD)
+#if BUILDFLAG(GOOGLE_CHROME_BRANDING)
static const char kBrowserDisplayName[] = "google-chrome";
#else
static const char kBrowserDisplayName[] = "chromium-browser";
diff --git a/chromium/media/audio/win/audio_low_latency_input_win.cc b/chromium/media/audio/win/audio_low_latency_input_win.cc
index bacb183b324..8fc6929e4be 100644
--- a/chromium/media/audio/win/audio_low_latency_input_win.cc
+++ b/chromium/media/audio/win/audio_low_latency_input_win.cc
@@ -32,6 +32,8 @@ namespace media {
namespace {
+constexpr uint32_t KSAUDIO_SPEAKER_UNSUPPORTED = 0;
+
// Errors when initializing the audio client related to the audio format. Split
// by whether we're using format conversion or not. Used for reporting stats -
// do not renumber entries.
@@ -67,6 +69,25 @@ bool IsSupportedFormatForConversion(WAVEFORMATEXTENSIBLE* format_ex) {
return true;
}
+// Converts a ChannelLayout to Microsoft's channel configuration. Only discrete,
+// mono and stereo layouts are currently supported; all other multi-channel
+// layouts return KSAUDIO_SPEAKER_UNSUPPORTED.
+ChannelConfig ChannelLayoutToChannelConfig(ChannelLayout layout) {
+ switch (layout) {
+ case CHANNEL_LAYOUT_DISCRETE:
+ return KSAUDIO_SPEAKER_DIRECTOUT;
+ case CHANNEL_LAYOUT_MONO:
+ return KSAUDIO_SPEAKER_MONO;
+ case CHANNEL_LAYOUT_STEREO:
+ return KSAUDIO_SPEAKER_STEREO;
+ default:
+ LOG(WARNING) << "Unsupported channel layout: " << layout;
+ // KSAUDIO_SPEAKER_UNSUPPORTED equals 0 and corresponds to "no specific
+ // channel order".
+ return KSAUDIO_SPEAKER_UNSUPPORTED;
+ }
+}
+
} // namespace
WASAPIAudioInputStream::WASAPIAudioInputStream(
@@ -78,6 +99,10 @@ WASAPIAudioInputStream::WASAPIAudioInputStream(
DCHECK(manager_);
DCHECK(!device_id_.empty());
DCHECK(!log_callback_.is_null());
+ DCHECK_LE(params.channels(), 2);
+ DCHECK(params.channel_layout() == CHANNEL_LAYOUT_MONO ||
+ params.channel_layout() == CHANNEL_LAYOUT_STEREO ||
+ params.channel_layout() == CHANNEL_LAYOUT_DISCRETE);
// Load the Avrt DLL if not already loaded. Required to support MMCSS.
bool avrt_init = avrt::Initialize();
@@ -106,7 +131,7 @@ WASAPIAudioInputStream::WASAPIAudioInputStream(
format->cbSize = sizeof(WAVEFORMATEXTENSIBLE) - sizeof(WAVEFORMATEX);
input_format_.Samples.wValidBitsPerSample = format->wBitsPerSample;
input_format_.dwChannelMask =
- CoreAudioUtil::GetChannelConfig(device_id, eCapture);
+ ChannelLayoutToChannelConfig(params.channel_layout());
input_format_.SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
DVLOG(1) << "Input: " << CoreAudioUtil::WaveFormatToString(&input_format_);
@@ -676,7 +701,7 @@ HRESULT WASAPIAudioInputStream::GetAudioEngineStreamFormat() {
hr = audio_client_->GetMixFormat(&format);
if (FAILED(hr))
return hr;
- DVLOG(2) << CoreAudioUtil::WaveFormatToString(format.get());
+ DVLOG(1) << CoreAudioUtil::WaveFormatToString(format.get());
#endif
return hr;
}
@@ -803,6 +828,8 @@ HRESULT WASAPIAudioInputStream::InitializeAudioEngine() {
// however cases when there are glitches anyway and it's avoided by setting a
// larger buffer size. The larger size does not create higher latency for
// properly implemented drivers.
+ DVLOG(1) << "Audio format used in IAudioClient::Initialize: "
+ << CoreAudioUtil::WaveFormatToString(&input_format_);
HRESULT hr = audio_client_->Initialize(
AUDCLNT_SHAREMODE_SHARED, flags,
100 * 1000 * 10, // Buffer duration, 100 ms expressed in 100-ns units.
diff --git a/chromium/media/audio/win/audio_low_latency_input_win_unittest.cc b/chromium/media/audio/win/audio_low_latency_input_win_unittest.cc
index 3f8b8fcadf2..81cf90abdab 100644
--- a/chromium/media/audio/win/audio_low_latency_input_win_unittest.cc
+++ b/chromium/media/audio/win/audio_low_latency_input_win_unittest.cc
@@ -4,10 +4,10 @@
#include "media/audio/win/audio_low_latency_input_win.h"
+#include <windows.h>
#include <mmsystem.h>
#include <stddef.h>
#include <stdint.h>
-#include <windows.h>
#include <memory>
@@ -19,7 +19,7 @@
#include "base/run_loop.h"
#include "base/single_thread_task_runner.h"
#include "base/strings/stringprintf.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/test/test_timeouts.h"
#include "base/win/scoped_com_initializer.h"
#include "media/audio/audio_device_description.h"
@@ -268,7 +268,7 @@ class WinAudioInputTest : public ::testing::Test {
~WinAudioInputTest() override { audio_manager_->Shutdown(); }
protected:
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::SingleThreadTaskEnvironment task_environment_;
std::unique_ptr<AudioManager> audio_manager_;
};
@@ -414,7 +414,7 @@ TEST_F(WinAudioInputTest, WASAPIAudioInputStreamTestPacketSizes) {
EXPECT_CALL(sink, OnData(NotNull(), _, _))
.Times(AtLeast(10))
.WillRepeatedly(CheckCountAndPostQuitTask(
- &count, 10, scoped_task_environment_.GetMainThreadTaskRunner(),
+ &count, 10, task_environment_.GetMainThreadTaskRunner(),
run_loop.QuitWhenIdleClosure()));
ais->Start(&sink);
run_loop.Run();
@@ -439,7 +439,7 @@ TEST_F(WinAudioInputTest, WASAPIAudioInputStreamTestPacketSizes) {
EXPECT_CALL(sink, OnData(NotNull(), _, _))
.Times(AtLeast(10))
.WillRepeatedly(CheckCountAndPostQuitTask(
- &count, 10, scoped_task_environment_.GetMainThreadTaskRunner(),
+ &count, 10, task_environment_.GetMainThreadTaskRunner(),
run_loop.QuitWhenIdleClosure()));
ais->Start(&sink);
run_loop.Run();
@@ -460,7 +460,7 @@ TEST_F(WinAudioInputTest, WASAPIAudioInputStreamTestPacketSizes) {
EXPECT_CALL(sink, OnData(NotNull(), _, _))
.Times(AtLeast(10))
.WillRepeatedly(CheckCountAndPostQuitTask(
- &count, 10, scoped_task_environment_.GetMainThreadTaskRunner(),
+ &count, 10, task_environment_.GetMainThreadTaskRunner(),
run_loop.QuitWhenIdleClosure()));
ais->Start(&sink);
run_loop.Run();
diff --git a/chromium/media/audio/win/audio_output_win_unittest.cc b/chromium/media/audio/win/audio_output_win_unittest.cc
index 07c797390b0..e4d6c633f29 100644
--- a/chromium/media/audio/win/audio_output_win_unittest.cc
+++ b/chromium/media/audio/win/audio_output_win_unittest.cc
@@ -14,7 +14,7 @@
#include "base/memory/ptr_util.h"
#include "base/run_loop.h"
#include "base/sync_socket.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/time/time.h"
#include "base/win/scoped_com_initializer.h"
#include "media/audio/audio_device_info_accessor_for_tests.h"
@@ -164,7 +164,7 @@ class WinAudioTest : public ::testing::Test {
~WinAudioTest() override { audio_manager_->Shutdown(); }
protected:
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::SingleThreadTaskEnvironment task_environment_;
std::unique_ptr<AudioManager> audio_manager_;
std::unique_ptr<AudioDeviceInfoAccessorForTests> audio_manager_device_info_;
};
diff --git a/chromium/media/audio/win/core_audio_util_win.cc b/chromium/media/audio/win/core_audio_util_win.cc
index 9062a3646cc..289b4042a20 100644
--- a/chromium/media/audio/win/core_audio_util_win.cc
+++ b/chromium/media/audio/win/core_audio_util_win.cc
@@ -41,7 +41,7 @@ const GUID kCommunicationsSessionId = {
namespace {
-enum { KSAUDIO_SPEAKER_UNSUPPORTED = 0 };
+constexpr uint32_t KSAUDIO_SPEAKER_UNSUPPORTED = 0xFFFFFFFF;
// Used for mapping UMA histograms with corresponding source of logging.
enum class UmaLogStep {
diff --git a/chromium/media/base/BUILD.gn b/chromium/media/base/BUILD.gn
index f7fea81c7db..849abc81b7c 100644
--- a/chromium/media/base/BUILD.gn
+++ b/chromium/media/base/BUILD.gn
@@ -28,6 +28,10 @@ jumbo_source_set("base") {
visibility += [ "//media/base/mac" ]
}
+ if (is_android) {
+ visibility += [ "//gpu/ipc/common:android_texture_owner_unittests" ]
+ }
+
sources = [
"android_overlay_config.cc",
"android_overlay_config.h",
@@ -187,6 +191,7 @@ jumbo_source_set("base") {
"media_types.h",
"media_url_demuxer.cc",
"media_url_demuxer.h",
+ "media_url_params.cc",
"media_url_params.h",
"media_util.cc",
"media_util.h",
@@ -220,6 +225,7 @@ jumbo_source_set("base") {
"reentrancy_checker.h",
"renderer.cc",
"renderer.h",
+ "renderer_client.cc",
"renderer_client.h",
"renderer_factory.cc",
"renderer_factory.h",
@@ -238,6 +244,8 @@ jumbo_source_set("base") {
"silent_sink_suspender.h",
"simple_sync_token_client.cc",
"simple_sync_token_client.h",
+ "simple_watch_timer.cc",
+ "simple_watch_timer.h",
"sinc_resampler.cc",
"sinc_resampler.h",
"stream_parser.cc",
@@ -383,7 +391,7 @@ jumbo_source_set("base") {
public_deps += [ "//media/base/win:d3d11" ]
}
- if (is_chromecast) {
+ if (is_chromecast || is_fuchsia) {
sources += [ "demuxer_memory_limit_low.cc" ]
} else if (is_android) {
sources += [ "demuxer_memory_limit_android.cc" ]
diff --git a/chromium/media/base/android/android_cdm_factory.cc b/chromium/media/base/android/android_cdm_factory.cc
index f671470195f..4844d1316eb 100644
--- a/chromium/media/base/android/android_cdm_factory.cc
+++ b/chromium/media/base/android/android_cdm_factory.cc
@@ -31,8 +31,7 @@ void ReportMediaDrmBridgeKeySystemSupport(bool supported) {
AndroidCdmFactory::AndroidCdmFactory(const CreateFetcherCB& create_fetcher_cb,
const CreateStorageCB& create_storage_cb)
: create_fetcher_cb_(create_fetcher_cb),
- create_storage_cb_(create_storage_cb),
- weak_factory_(this) {}
+ create_storage_cb_(create_storage_cb) {}
AndroidCdmFactory::~AndroidCdmFactory() {
weak_factory_.InvalidateWeakPtrs();
diff --git a/chromium/media/base/android/android_cdm_factory.h b/chromium/media/base/android/android_cdm_factory.h
index 854b6d9bba1..1ca1c77943f 100644
--- a/chromium/media/base/android/android_cdm_factory.h
+++ b/chromium/media/base/android/android_cdm_factory.h
@@ -53,7 +53,7 @@ class MEDIA_EXPORT AndroidCdmFactory : public CdmFactory {
std::pair<std::unique_ptr<MediaDrmBridgeFactory>, CdmCreatedCB>;
base::flat_map<uint32_t, PendingCreation> pending_creations_;
- base::WeakPtrFactory<AndroidCdmFactory> weak_factory_;
+ base::WeakPtrFactory<AndroidCdmFactory> weak_factory_{this};
DISALLOW_COPY_AND_ASSIGN(AndroidCdmFactory);
};
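
    The weak_factory_ hunks throughout this diff apply one pattern: the factory is
    initialized in-class with {this} instead of in every constructor's init list,
    and stays the last declared member. A minimal sketch under an assumed class
    name (Example is illustrative, not from the change):

    #include "base/memory/weak_ptr.h"

    class Example {
     public:
      Example() = default;  // No weak_factory_(this) needed in the init list.
      base::WeakPtr<Example> AsWeakPtr() { return weak_factory_.GetWeakPtr(); }

     private:
      int some_state_ = 0;
      // NOTE: Weak pointers must be invalidated before all other member variables,
      // so the factory is declared last and destroyed first.
      base::WeakPtrFactory<Example> weak_factory_{this};
    };
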
diff --git a/chromium/media/base/android/android_overlay.cc b/chromium/media/base/android/android_overlay.cc
index fb05573b8d5..22a02a23f97 100644
--- a/chromium/media/base/android/android_overlay.cc
+++ b/chromium/media/base/android/android_overlay.cc
@@ -6,7 +6,7 @@
namespace media {
-AndroidOverlay::AndroidOverlay() : weak_factory_(this) {}
+AndroidOverlay::AndroidOverlay() {}
AndroidOverlay::~AndroidOverlay() {
// Don't permit any other callbacks once we start sending deletion cbs.
weak_factory_.InvalidateWeakPtrs();
diff --git a/chromium/media/base/android/android_overlay.h b/chromium/media/base/android/android_overlay.h
index 22ac77920b7..3f8998e651c 100644
--- a/chromium/media/base/android/android_overlay.h
+++ b/chromium/media/base/android/android_overlay.h
@@ -72,7 +72,7 @@ class MEDIA_EXPORT AndroidOverlay {
std::list<AndroidOverlayConfig::DestroyedCB> destruction_cbs_;
std::list<AndroidOverlayConfig::DeletedCB> deletion_cbs_;
- base::WeakPtrFactory<AndroidOverlay> weak_factory_;
+ base::WeakPtrFactory<AndroidOverlay> weak_factory_{this};
DISALLOW_COPY_AND_ASSIGN(AndroidOverlay);
};
diff --git a/chromium/media/base/android/media_codec_loop.cc b/chromium/media/base/android/media_codec_loop.cc
index 1505cc43d0a..86f3884aa81 100644
--- a/chromium/media/base/android/media_codec_loop.cc
+++ b/chromium/media/base/android/media_codec_loop.cc
@@ -47,8 +47,7 @@ MediaCodecLoop::MediaCodecLoop(
media_codec_(std::move(media_codec)),
pending_input_buf_index_(kInvalidBufferIndex),
sdk_int_(sdk_int),
- disable_timer_(disable_timer),
- weak_factory_(this) {
+ disable_timer_(disable_timer) {
if (timer_task_runner)
io_timer_.SetTaskRunner(timer_task_runner);
// TODO(liberato): should this DCHECK?
diff --git a/chromium/media/base/android/media_codec_loop.h b/chromium/media/base/android/media_codec_loop.h
index cc400c8d272..a76226941d6 100644
--- a/chromium/media/base/android/media_codec_loop.h
+++ b/chromium/media/base/android/media_codec_loop.h
@@ -334,7 +334,7 @@ class MEDIA_EXPORT MediaCodecLoop {
const bool disable_timer_;
// NOTE: Weak pointers must be invalidated before all other member variables.
- base::WeakPtrFactory<MediaCodecLoop> weak_factory_;
+ base::WeakPtrFactory<MediaCodecLoop> weak_factory_{this};
DISALLOW_COPY_AND_ASSIGN(MediaCodecLoop);
};
diff --git a/chromium/media/base/android/media_drm_bridge.cc b/chromium/media/base/android/media_drm_bridge.cc
index 199df71ada1..d90fc4f7f2e 100644
--- a/chromium/media/base/android/media_drm_bridge.cc
+++ b/chromium/media/base/android/media_drm_bridge.cc
@@ -871,8 +871,7 @@ MediaDrmBridge::MediaDrmBridge(
session_keys_change_cb_(session_keys_change_cb),
session_expiration_update_cb_(session_expiration_update_cb),
task_runner_(base::ThreadTaskRunnerHandle::Get()),
- media_crypto_context_(this),
- weak_factory_(this) {
+ media_crypto_context_(this) {
DVLOG(1) << __func__;
DCHECK(storage_);
diff --git a/chromium/media/base/android/media_drm_bridge.h b/chromium/media/base/android/media_drm_bridge.h
index 79c12136cda..dc0829626ca 100644
--- a/chromium/media/base/android/media_drm_bridge.h
+++ b/chromium/media/base/android/media_drm_bridge.h
@@ -353,7 +353,7 @@ class MEDIA_EXPORT MediaDrmBridge : public ContentDecryptionModule,
MediaCryptoContextImpl media_crypto_context_;
// NOTE: Weak pointers must be invalidated before all other member variables.
- base::WeakPtrFactory<MediaDrmBridge> weak_factory_;
+ base::WeakPtrFactory<MediaDrmBridge> weak_factory_{this};
DISALLOW_COPY_AND_ASSIGN(MediaDrmBridge);
};
diff --git a/chromium/media/base/android/media_drm_bridge_factory.cc b/chromium/media/base/android/media_drm_bridge_factory.cc
index 5899aef9220..dc32b11d8ab 100644
--- a/chromium/media/base/android/media_drm_bridge_factory.cc
+++ b/chromium/media/base/android/media_drm_bridge_factory.cc
@@ -17,8 +17,7 @@ MediaDrmBridgeFactory::MediaDrmBridgeFactory(
const CreateFetcherCB& create_fetcher_cb,
const CreateStorageCB& create_storage_cb)
: create_fetcher_cb_(create_fetcher_cb),
- create_storage_cb_(create_storage_cb),
- weak_factory_(this) {
+ create_storage_cb_(create_storage_cb) {
DCHECK(create_fetcher_cb_);
DCHECK(create_storage_cb_);
}
diff --git a/chromium/media/base/android/media_drm_bridge_factory.h b/chromium/media/base/android/media_drm_bridge_factory.h
index 0d4eb37c8e2..088cbe67ad2 100644
--- a/chromium/media/base/android/media_drm_bridge_factory.h
+++ b/chromium/media/base/android/media_drm_bridge_factory.h
@@ -72,7 +72,7 @@ class MEDIA_EXPORT MediaDrmBridgeFactory : public CdmFactory {
std::unique_ptr<MediaDrmStorageBridge> storage_;
scoped_refptr<MediaDrmBridge> media_drm_bridge_;
- base::WeakPtrFactory<MediaDrmBridgeFactory> weak_factory_;
+ base::WeakPtrFactory<MediaDrmBridgeFactory> weak_factory_{this};
DISALLOW_COPY_AND_ASSIGN(MediaDrmBridgeFactory);
};
diff --git a/chromium/media/base/android/media_drm_bridge_unittest.cc b/chromium/media/base/android/media_drm_bridge_unittest.cc
index 1083e5e094e..ec6bef533aa 100644
--- a/chromium/media/base/android/media_drm_bridge_unittest.cc
+++ b/chromium/media/base/android/media_drm_bridge_unittest.cc
@@ -10,7 +10,7 @@
#include "base/logging.h"
#include "base/memory/ptr_util.h"
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/base/android/media_drm_bridge.h"
#include "media/base/provision_fetcher.h"
#include "testing/gmock/include/gmock/gmock.h"
@@ -116,7 +116,7 @@ class MediaDrmBridgeTest : public ProvisionFetcher, public testing::Test {
return std::make_unique<ProvisionFetcherWrapper>(this);
}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
};
TEST_F(MediaDrmBridgeTest, IsKeySystemSupported_Widevine) {
diff --git a/chromium/media/base/android/media_drm_storage_bridge.cc b/chromium/media/base/android/media_drm_storage_bridge.cc
index 8942c677a64..23c0bfffb7c 100644
--- a/chromium/media/base/android/media_drm_storage_bridge.cc
+++ b/chromium/media/base/android/media_drm_storage_bridge.cc
@@ -34,7 +34,7 @@ using base::android::ToJavaByteArray;
namespace media {
MediaDrmStorageBridge::MediaDrmStorageBridge()
- : task_runner_(base::ThreadTaskRunnerHandle::Get()), weak_factory_(this) {}
+ : task_runner_(base::ThreadTaskRunnerHandle::Get()) {}
MediaDrmStorageBridge::~MediaDrmStorageBridge() = default;
diff --git a/chromium/media/base/android/media_drm_storage_bridge.h b/chromium/media/base/android/media_drm_storage_bridge.h
index 4610bee96bf..8e6dd2bcd61 100644
--- a/chromium/media/base/android/media_drm_storage_bridge.h
+++ b/chromium/media/base/android/media_drm_storage_bridge.h
@@ -89,7 +89,7 @@ class MediaDrmStorageBridge {
scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
- base::WeakPtrFactory<MediaDrmStorageBridge> weak_factory_;
+ base::WeakPtrFactory<MediaDrmStorageBridge> weak_factory_{this};
DISALLOW_COPY_AND_ASSIGN(MediaDrmStorageBridge);
};
diff --git a/chromium/media/base/android/media_player_bridge.cc b/chromium/media/base/android/media_player_bridge.cc
index 6037d2ccc47..85d0cf8c9d5 100644
--- a/chromium/media/base/android/media_player_bridge.cc
+++ b/chromium/media/base/android/media_player_bridge.cc
@@ -38,19 +38,45 @@ enum UMAExitStatus {
const double kDefaultVolume = 1.0;
+const char kWatchTimeHistogram[] = "Media.Android.MediaPlayerWatchTime";
+
+// These values are persisted to logs. Entries should not be renumbered and
+// numeric values should never be reused.
+enum class WatchTimeType {
+ kNonHls = 0,
+ kHlsAudioOnly = 1,
+ kHlsVideo = 2,
+ kMaxValue = kHlsVideo,
+};
+
+void RecordWatchTimeUMA(bool is_hls, bool has_video) {
+ WatchTimeType type = WatchTimeType::kNonHls;
+ if (is_hls) {
+ if (!has_video) {
+ type = WatchTimeType::kHlsAudioOnly;
+ } else {
+ type = WatchTimeType::kHlsVideo;
+ }
+ }
+ UMA_HISTOGRAM_ENUMERATION(kWatchTimeHistogram, type);
+}
+
} // namespace
MediaPlayerBridge::MediaPlayerBridge(const GURL& url,
const GURL& site_for_cookies,
+ const url::Origin& top_frame_origin,
const std::string& user_agent,
bool hide_url_log,
Client* client,
- bool allow_credentials)
+ bool allow_credentials,
+ bool is_hls)
: prepared_(false),
pending_play_(false),
should_seek_on_prepare_(false),
url_(url),
site_for_cookies_(site_for_cookies),
+ top_frame_origin_(top_frame_origin),
user_agent_(user_agent),
hide_url_log_(hide_url_log),
width_(0),
@@ -62,8 +88,12 @@ MediaPlayerBridge::MediaPlayerBridge(const GURL& url,
is_active_(false),
has_error_(false),
has_ever_started_(false),
- client_(client),
- weak_factory_(this) {
+ is_hls_(is_hls),
+ watch_timer_(base::BindRepeating(&MediaPlayerBridge::OnWatchTimerTick,
+ base::Unretained(this)),
+ base::BindRepeating(&MediaPlayerBridge::GetCurrentTime,
+ base::Unretained(this))),
+ client_(client) {
listener_ = std::make_unique<MediaPlayerListener>(
base::ThreadTaskRunnerHandle::Get(), weak_factory_.GetWeakPtr());
}
@@ -95,7 +125,7 @@ void MediaPlayerBridge::Initialize() {
client_->GetMediaResourceGetter();
resource_getter->GetCookies(
- url_, site_for_cookies_,
+ url_, site_for_cookies_, top_frame_origin_,
base::BindOnce(&MediaPlayerBridge::OnCookiesRetrieved,
weak_factory_.GetWeakPtr()));
}
@@ -324,6 +354,7 @@ base::TimeDelta MediaPlayerBridge::GetDuration() {
}
void MediaPlayerBridge::Release() {
+ watch_timer_.Stop();
is_active_ = false;
if (j_media_player_bridge_.is_null())
@@ -446,9 +477,11 @@ void MediaPlayerBridge::UpdateAllowedOperations() {
void MediaPlayerBridge::StartInternal() {
JNIEnv* env = base::android::AttachCurrentThread();
Java_MediaPlayerBridge_start(env, j_media_player_bridge_);
+ watch_timer_.Start();
}
void MediaPlayerBridge::PauseInternal() {
+ watch_timer_.Stop();
JNIEnv* env = base::android::AttachCurrentThread();
Java_MediaPlayerBridge_pause(env, j_media_player_bridge_);
}
@@ -474,6 +507,10 @@ void MediaPlayerBridge::SeekInternal(base::TimeDelta time) {
return;
}
+  // Note: we do not want to count changes in media time due to seeks as watch
+  // time, but tracking pending seeks is not completely trivial. Instead, seeks
+  // larger than kWatchTimeReportingInterval * 2 will be discarded by the sanity
+  // checks, and shorter seeks will be counted.
JNIEnv* env = base::android::AttachCurrentThread();
CHECK(env);
int time_msec = static_cast<int>(time.InMilliseconds());
@@ -488,4 +525,8 @@ GURL MediaPlayerBridge::GetSiteForCookies() {
return site_for_cookies_;
}
+void MediaPlayerBridge::OnWatchTimerTick() {
+ RecordWatchTimeUMA(is_hls_, height_ > 0);
+}
+
} // namespace media
diff --git a/chromium/media/base/android/media_player_bridge.h b/chromium/media/base/android/media_player_bridge.h
index dfe106647a2..8de59c2c59a 100644
--- a/chromium/media/base/android/media_player_bridge.h
+++ b/chromium/media/base/android/media_player_bridge.h
@@ -21,8 +21,10 @@
#include "base/timer/timer.h"
#include "media/base/android/media_player_listener.h"
#include "media/base/media_export.h"
+#include "media/base/simple_watch_timer.h"
#include "ui/gl/android/scoped_java_surface.h"
#include "url/gurl.h"
+#include "url/origin.h"
namespace media {
@@ -77,10 +79,12 @@ class MEDIA_EXPORT MediaPlayerBridge {
// the |manager| when needed.
MediaPlayerBridge(const GURL& url,
const GURL& site_for_cookies,
+ const url::Origin& top_frame_origin,
const std::string& user_agent,
bool hide_url_log,
Client* client,
- bool allow_credentials);
+ bool allow_credentials,
+ bool is_hls);
virtual ~MediaPlayerBridge();
// Initialize this object and extract the metadata from the media.
@@ -188,6 +192,8 @@ class MEDIA_EXPORT MediaPlayerBridge {
// Sets the underlying MediaPlayer's volume.
void UpdateVolumeInternal();
+ void OnWatchTimerTick();
+
base::WeakPtr<MediaPlayerBridge> WeakPtrForUIThread();
// Whether the player is prepared for playback.
@@ -205,9 +211,12 @@ class MEDIA_EXPORT MediaPlayerBridge {
// Url for playback.
GURL url_;
- // First party url for cookies.
+ // Used to determine if cookies are accessed in a third-party context.
GURL site_for_cookies_;
+ // Used to check for cookie content settings.
+ url::Origin top_frame_origin_;
+
// User agent string to be used for media player.
const std::string user_agent_;
@@ -248,6 +257,10 @@ class MEDIA_EXPORT MediaPlayerBridge {
// The flag is set if Start() has been called at least once.
bool has_ever_started_;
+ // State for watch time reporting.
+ bool is_hls_;
+ SimpleWatchTimer watch_timer_;
+
// A reference to the owner of |this|.
Client* client_;
@@ -256,7 +269,7 @@ class MEDIA_EXPORT MediaPlayerBridge {
// Weak pointer passed to |listener_| for callbacks.
// NOTE: Weak pointers must be invalidated before all other member variables.
- base::WeakPtrFactory<MediaPlayerBridge> weak_factory_;
+ base::WeakPtrFactory<MediaPlayerBridge> weak_factory_{this};
DISALLOW_COPY_AND_ASSIGN(MediaPlayerBridge);
};
diff --git a/chromium/media/base/android/media_player_bridge_unittest.cc b/chromium/media/base/android/media_player_bridge_unittest.cc
index a34bfa8fad6..e232fd1d546 100644
--- a/chromium/media/base/android/media_player_bridge_unittest.cc
+++ b/chromium/media/base/android/media_player_bridge_unittest.cc
@@ -3,9 +3,10 @@
// found in the LICENSE file.
#include "media/base/android/media_player_bridge.h"
+
#include "base/bind.h"
#include "base/macros.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -31,7 +32,14 @@ class MockMediaPlayerBridgeClient : public MediaPlayerBridge::Client {
class MediaPlayerBridgeTest : public testing::Test {
public:
MediaPlayerBridgeTest()
- : bridge_(GURL(), GURL(), "", false, &client_, false) {}
+ : bridge_(GURL(),
+ GURL(),
+ url::Origin(),
+ "",
+ false,
+ &client_,
+ false,
+ false) {}
protected:
void SimulateDurationChange(base::TimeDelta duration) {
@@ -46,7 +54,7 @@ class MediaPlayerBridgeTest : public testing::Test {
void SimulatePlaybackCompleted() { bridge_.OnPlaybackComplete(); }
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
StrictMock<MockMediaPlayerBridgeClient> client_;
MediaPlayerBridge bridge_;
diff --git a/chromium/media/base/android/media_resource_getter.h b/chromium/media/base/android/media_resource_getter.h
index 200234bf154..62b9b782bf8 100644
--- a/chromium/media/base/android/media_resource_getter.h
+++ b/chromium/media/base/android/media_resource_getter.h
@@ -15,6 +15,10 @@
#include "media/base/media_export.h"
#include "url/gurl.h"
+namespace url {
+class Origin;
+}
+
namespace media {
// Class for asynchronously retrieving resources for a media URL. All callbacks
@@ -44,6 +48,7 @@ class MEDIA_EXPORT MediaResourceGetter {
// Method for getting the cookies for a given URL.
virtual void GetCookies(const GURL& url,
const GURL& site_for_cookies,
+ const url::Origin& top_frame_origin,
GetCookieCB callback) = 0;
// Method for getting the platform path from a file system URL.
diff --git a/chromium/media/base/android/media_service_throttler_unittest.cc b/chromium/media/base/android/media_service_throttler_unittest.cc
index 5b6f5f94b54..6df147e09c1 100644
--- a/chromium/media/base/android/media_service_throttler_unittest.cc
+++ b/chromium/media/base/android/media_service_throttler_unittest.cc
@@ -4,8 +4,8 @@
#include "media/base/android/media_service_throttler.h"
-#include "base/test/scoped_task_environment.h"
#include "base/test/simple_test_tick_clock.h"
+#include "base/test/task_environment.h"
#include "media/base/android/media_server_crash_listener.h"
#include "media/base/fake_single_thread_task_runner.h"
#include "testing/gmock/include/gmock/gmock.h"
@@ -60,7 +60,7 @@ class MediaServiceThrottlerTest : public testing::Test {
scoped_refptr<FakeSingleThreadTaskRunner> test_task_runner_;
// Necessary, or else base::ThreadTaskRunnerHandle::Get() fails.
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
private:
DISALLOW_COPY_AND_ASSIGN(MediaServiceThrottlerTest);
diff --git a/chromium/media/base/android/mock_android_overlay.cc b/chromium/media/base/android/mock_android_overlay.cc
index e20e5eb852c..67d58540cbc 100644
--- a/chromium/media/base/android/mock_android_overlay.cc
+++ b/chromium/media/base/android/mock_android_overlay.cc
@@ -15,7 +15,7 @@ MockAndroidOverlay::Callbacks::Callbacks() = default;
MockAndroidOverlay::Callbacks::Callbacks(const Callbacks&) = default;
MockAndroidOverlay::Callbacks::~Callbacks() = default;
-MockAndroidOverlay::MockAndroidOverlay() : weak_factory_(this) {}
+MockAndroidOverlay::MockAndroidOverlay() {}
MockAndroidOverlay::~MockAndroidOverlay() {}
diff --git a/chromium/media/base/android/mock_android_overlay.h b/chromium/media/base/android/mock_android_overlay.h
index dde8503d492..121218db983 100644
--- a/chromium/media/base/android/mock_android_overlay.h
+++ b/chromium/media/base/android/mock_android_overlay.h
@@ -63,7 +63,7 @@ class MockAndroidOverlay : public testing::NiceMock<AndroidOverlay>,
// Initial configuration, mostly for callbacks.
std::unique_ptr<AndroidOverlayConfig> config_;
- base::WeakPtrFactory<MockAndroidOverlay> weak_factory_;
+ base::WeakPtrFactory<MockAndroidOverlay> weak_factory_{this};
DISALLOW_COPY_AND_ASSIGN(MockAndroidOverlay);
};
diff --git a/chromium/media/base/android/test_destruction_observable.cc b/chromium/media/base/android/test_destruction_observable.cc
index ee9a8174ff5..cf323942865 100644
--- a/chromium/media/base/android/test_destruction_observable.cc
+++ b/chromium/media/base/android/test_destruction_observable.cc
@@ -19,7 +19,7 @@ DestructionObservable::CreateDestructionObserver() {
}
DestructionObserver::DestructionObserver(DestructionObservable* observable)
- : destructed_(false), weak_factory_(this) {
+ : destructed_(false) {
// Only one observer is allowed.
DCHECK(!observable->destruction_cb.Release());
observable->destruction_cb.ReplaceClosure(
diff --git a/chromium/media/base/android/test_destruction_observable.h b/chromium/media/base/android/test_destruction_observable.h
index aec50cb3faf..c2c0e3fbd27 100644
--- a/chromium/media/base/android/test_destruction_observable.h
+++ b/chromium/media/base/android/test_destruction_observable.h
@@ -54,7 +54,7 @@ class DestructionObserver {
// Whether to expect destruction. Unset if there is no expectation.
base::Optional<bool> expect_destruction_;
- base::WeakPtrFactory<DestructionObserver> weak_factory_;
+ base::WeakPtrFactory<DestructionObserver> weak_factory_{this};
DISALLOW_COPY_AND_ASSIGN(DestructionObserver);
};
diff --git a/chromium/media/base/audio_parameters.h b/chromium/media/base/audio_parameters.h
index c5f9f6bc32c..9d571a3c6e2 100644
--- a/chromium/media/base/audio_parameters.h
+++ b/chromium/media/base/audio_parameters.h
@@ -152,16 +152,17 @@ class MEDIA_SHMEM_EXPORT AudioParameters {
// effects should be enabled.
enum PlatformEffectsMask {
NO_EFFECTS = 0x0,
- ECHO_CANCELLER = 0x1,
- DUCKING = 0x2, // Enables ducking if the OS supports it.
- KEYBOARD_MIC = 0x4,
- HOTWORD = 0x8,
- NOISE_SUPPRESSION = 0x10,
- AUTOMATIC_GAIN_CONTROL = 0x20,
- EXPERIMENTAL_ECHO_CANCELLER = 0x40, // Indicates an echo canceller is
- // available that should only
- // experimentally be enabled.
- MULTIZONE = 0x80,
+ ECHO_CANCELLER = 1 << 0,
+ DUCKING = 1 << 1, // Enables ducking if the OS supports it.
+ KEYBOARD_MIC = 1 << 2,
+ HOTWORD = 1 << 3,
+ NOISE_SUPPRESSION = 1 << 4,
+ AUTOMATIC_GAIN_CONTROL = 1 << 5,
+ EXPERIMENTAL_ECHO_CANCELLER = 1 << 6, // Indicates an echo canceller is
+ // available that should only
+ // experimentally be enabled.
+ MULTIZONE = 1 << 7,
+ AUDIO_PREFETCH = 1 << 8,
};
struct HardwareCapabilities {
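
    Since the PlatformEffectsMask values above are bit flags, they compose with
    bitwise operators. A short illustrative sketch (HasNoiseSuppression is a
    hypothetical helper, not part of the change):

    #include "media/base/audio_parameters.h"

    bool HasNoiseSuppression(const media::AudioParameters& params) {
      // effects() is a bitmask of PlatformEffectsMask values.
      return (params.effects() & media::AudioParameters::NOISE_SUPPRESSION) != 0;
    }
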
diff --git a/chromium/media/base/audio_renderer_mixer_input_unittest.cc b/chromium/media/base/audio_renderer_mixer_input_unittest.cc
index e1606f9ec6a..e71aad698cc 100644
--- a/chromium/media/base/audio_renderer_mixer_input_unittest.cc
+++ b/chromium/media/base/audio_renderer_mixer_input_unittest.cc
@@ -9,7 +9,7 @@
#include "base/bind_helpers.h"
#include "base/macros.h"
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/base/audio_latency.h"
#include "media/base/audio_renderer_mixer.h"
#include "media/base/audio_renderer_mixer_input.h"
@@ -53,7 +53,7 @@ class AudioRendererMixerInputTest : public testing::Test,
mixer_input_ = new AudioRendererMixerInput(this, kRenderFrameId, device_id,
AudioLatency::LATENCY_PLAYBACK);
mixer_input_->GetOutputDeviceInfoAsync(base::DoNothing());
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
AudioRendererMixer* GetMixer(int owner_id,
@@ -109,7 +109,7 @@ class AudioRendererMixerInputTest : public testing::Test,
protected:
~AudioRendererMixerInputTest() override = default;
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::SingleThreadTaskEnvironment task_environment_;
AudioParameters audio_parameters_;
std::unique_ptr<AudioRendererMixer> mixers_[2];
scoped_refptr<AudioRendererMixerInput> mixer_input_;
@@ -166,7 +166,7 @@ TEST_F(AudioRendererMixerInputTest, StartAfterStop) {
mixer_input_->Stop();
mixer_input_->GetOutputDeviceInfoAsync(base::DoNothing());
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
mixer_input_->Start();
mixer_input_->Stop();
}
@@ -178,7 +178,7 @@ TEST_F(AudioRendererMixerInputTest, InitializeAfterStop) {
mixer_input_->Stop();
mixer_input_->GetOutputDeviceInfoAsync(base::DoNothing());
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
mixer_input_->Initialize(audio_parameters_, fake_callback_.get());
mixer_input_->Stop();
}
@@ -363,7 +363,7 @@ TEST_F(AudioRendererMixerInputTest, SwitchOutputDeviceDuringGODIA) {
EXPECT_CALL(*this, OnDeviceInfoReceived(_))
.WillOnce(testing::SaveArg<0>(&info));
EXPECT_CALL(*this, SwitchCallbackCalled(OUTPUT_DEVICE_STATUS_OK));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_EQ(kExpectedStatus, info.device_status());
EXPECT_EQ(kDefaultDeviceId, info.device_id());
}
@@ -394,7 +394,7 @@ TEST_F(AudioRendererMixerInputTest, GODIADuringSwitchOutputDevice) {
constexpr auto kExpectedStatus = OUTPUT_DEVICE_STATUS_OK;
EXPECT_CALL(*this, OnDeviceInfoReceived(_))
.WillOnce(testing::SaveArg<0>(&info));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_EQ(kExpectedStatus, info.device_status());
EXPECT_EQ(kAnotherDeviceId, info.device_id());
}
@@ -426,7 +426,7 @@ TEST_F(AudioRendererMixerInputTest, GODIADuringSwitchOutputDeviceWhichFails) {
constexpr auto kExpectedStatus = OUTPUT_DEVICE_STATUS_OK;
EXPECT_CALL(*this, OnDeviceInfoReceived(_))
.WillOnce(testing::SaveArg<0>(&info));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_EQ(kExpectedStatus, info.device_status());
EXPECT_EQ(kDefaultDeviceId, info.device_id());
}
diff --git a/chromium/media/base/audio_renderer_mixer_unittest.cc b/chromium/media/base/audio_renderer_mixer_unittest.cc
index 6cb998e71e8..c39459e6035 100644
--- a/chromium/media/base/audio_renderer_mixer_unittest.cc
+++ b/chromium/media/base/audio_renderer_mixer_unittest.cc
@@ -16,7 +16,7 @@
#include "base/bind_helpers.h"
#include "base/stl_util.h"
#include "base/synchronization/waitable_event.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/platform_thread.h"
#include "media/base/audio_renderer_mixer_input.h"
#include "media/base/audio_renderer_mixer_pool.h"
@@ -351,7 +351,7 @@ class AudioRendererMixerTest
protected:
virtual ~AudioRendererMixerTest() = default;
- base::test::ScopedTaskEnvironment task_env_;
+ base::test::TaskEnvironment task_env_;
scoped_refptr<MockAudioRendererSink> sink_;
std::unique_ptr<AudioRendererMixer> mixer_;
AudioRendererSink::RenderCallback* mixer_callback_;
diff --git a/chromium/media/base/bind_to_current_loop_unittest.cc b/chromium/media/base/bind_to_current_loop_unittest.cc
index 1a750bb4c90..83bbf24f558 100644
--- a/chromium/media/base/bind_to_current_loop_unittest.cc
+++ b/chromium/media/base/bind_to_current_loop_unittest.cc
@@ -11,7 +11,7 @@
#include "base/memory/free_deleter.h"
#include "base/run_loop.h"
#include "base/synchronization/waitable_event.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/thread.h"
#include "base/threading/thread_checker_impl.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -61,7 +61,7 @@ void ClearReference(base::OnceClosure cb) {}
// on the message loop, not during the original Run.
class BindToCurrentLoopTest : public ::testing::Test {
protected:
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
};
TEST_F(BindToCurrentLoopTest, RepeatingClosure) {
diff --git a/chromium/media/base/bitstream_buffer.cc b/chromium/media/base/bitstream_buffer.cc
index e434157e7f0..40659bc426f 100644
--- a/chromium/media/base/bitstream_buffer.cc
+++ b/chromium/media/base/bitstream_buffer.cc
@@ -24,6 +24,18 @@ BitstreamBuffer::BitstreamBuffer(
presentation_timestamp_(presentation_timestamp) {}
BitstreamBuffer::BitstreamBuffer(int32_t id,
+ base::UnsafeSharedMemoryRegion region,
+ size_t size,
+ off_t offset,
+ base::TimeDelta presentation_timestamp)
+ : id_(id),
+ region_(base::UnsafeSharedMemoryRegion::TakeHandleForSerialization(
+ std::move(region))),
+ size_(size),
+ offset_(offset),
+ presentation_timestamp_(presentation_timestamp) {}
+
+BitstreamBuffer::BitstreamBuffer(int32_t id,
base::SharedMemoryHandle handle,
bool read_only,
size_t size,
diff --git a/chromium/media/base/bitstream_buffer.h b/chromium/media/base/bitstream_buffer.h
index 343ca0700e2..ca5333a68fe 100644
--- a/chromium/media/base/bitstream_buffer.h
+++ b/chromium/media/base/bitstream_buffer.h
@@ -11,6 +11,7 @@
#include "base/macros.h"
#include "base/memory/platform_shared_memory_region.h"
#include "base/memory/scoped_refptr.h"
+#include "base/memory/unsafe_shared_memory_region.h"
#include "base/time/time.h"
#include "media/base/decoder_buffer.h"
#include "media/base/decrypt_config.h"
@@ -42,6 +43,13 @@ class MEDIA_EXPORT BitstreamBuffer {
off_t offset = 0,
base::TimeDelta presentation_timestamp = kNoTimestamp);
+ // As above, creating by unwrapping a base::UnsafeSharedMemoryRegion.
+ BitstreamBuffer(int32_t id,
+ base::UnsafeSharedMemoryRegion region,
+ size_t size,
+ off_t offset = 0,
+ base::TimeDelta presentation_timestamp = kNoTimestamp);
+
// As above, but creates by duplicating a SharedMemoryHandle.
// TODO(https://crbug.com/793446): remove once legacy shared memory has been
// converted.
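
    A hedged usage sketch for the new UnsafeSharedMemoryRegion constructor above
    (the id and size are arbitrary, and MakeExampleBuffer is hypothetical):

    #include <utility>

    #include "base/memory/unsafe_shared_memory_region.h"
    #include "media/base/bitstream_buffer.h"

    void MakeExampleBuffer() {
      base::UnsafeSharedMemoryRegion region =
          base::UnsafeSharedMemoryRegion::Create(64 * 1024);
      // The region is consumed (moved) and its handle is taken for
      // serialization, per the new constructor.
      media::BitstreamBuffer buffer(/*id=*/0, std::move(region),
                                    /*size=*/64 * 1024);
    }
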
diff --git a/chromium/media/base/callback_registry_unittest.cc b/chromium/media/base/callback_registry_unittest.cc
index 6c1ff1e7aed..5a0b47427ec 100644
--- a/chromium/media/base/callback_registry_unittest.cc
+++ b/chromium/media/base/callback_registry_unittest.cc
@@ -7,7 +7,7 @@
#include "base/callback.h"
#include "base/macros.h"
#include "base/test/mock_callback.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -20,7 +20,7 @@ using ::testing::IsNull;
class CallbackRegistryTest : public testing::Test {
protected:
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
};
TEST_F(CallbackRegistryTest, RegisterWithNoParam) {
@@ -32,7 +32,7 @@ TEST_F(CallbackRegistryTest, RegisterWithNoParam) {
EXPECT_CALL(callback, Run());
registry.Notify();
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
TEST_F(CallbackRegistryTest, RegisterWithOneParam) {
@@ -44,7 +44,7 @@ TEST_F(CallbackRegistryTest, RegisterWithOneParam) {
EXPECT_CALL(callback, Run(1));
registry.Notify(1);
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
TEST_F(CallbackRegistryTest, RegisterWithTwoParams) {
@@ -56,7 +56,7 @@ TEST_F(CallbackRegistryTest, RegisterWithTwoParams) {
EXPECT_CALL(callback, Run(1, 2));
registry.Notify(1, 2);
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
TEST_F(CallbackRegistryTest, RegisterWithMoveOnlyParam) {
@@ -69,7 +69,7 @@ TEST_F(CallbackRegistryTest, RegisterWithMoveOnlyParam) {
EXPECT_CALL(callback, Run(_));
registry.Notify(std::make_unique<int>(1));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
TEST_F(CallbackRegistryTest, RegisterWithPointerParam) {
@@ -81,7 +81,7 @@ TEST_F(CallbackRegistryTest, RegisterWithPointerParam) {
EXPECT_CALL(callback, Run(IsNull()));
registry.Notify(nullptr);
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
TEST_F(CallbackRegistryTest, RegisterWithReferenceParam) {
@@ -94,7 +94,7 @@ TEST_F(CallbackRegistryTest, RegisterWithReferenceParam) {
int i = 1;
EXPECT_CALL(callback, Run(i));
registry.Notify(i);
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
TEST_F(CallbackRegistryTest, RegisterAfterNotify) {
@@ -106,7 +106,7 @@ TEST_F(CallbackRegistryTest, RegisterAfterNotify) {
EXPECT_CALL(callback_1, Run());
registry.Notify();
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
base::MockCallback<base::RepeatingClosure> callback_2;
auto registration_2 = registry.Register(callback_2.Get());
@@ -115,7 +115,7 @@ TEST_F(CallbackRegistryTest, RegisterAfterNotify) {
EXPECT_CALL(callback_1, Run());
EXPECT_CALL(callback_2, Run());
registry.Notify();
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
TEST_F(CallbackRegistryTest, EmptyRegistry) {
@@ -136,16 +136,16 @@ TEST_F(CallbackRegistryTest, UnregisterCallback) {
EXPECT_CALL(callback_1, Run());
EXPECT_CALL(callback_2, Run());
registry.Notify();
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
registration_1.reset();
EXPECT_CALL(callback_2, Run());
registry.Notify();
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
registration_2.reset();
registry.Notify();
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
TEST_F(CallbackRegistryTest, RegisterDuringNotification) {
@@ -162,13 +162,13 @@ TEST_F(CallbackRegistryTest, RegisterDuringNotification) {
registration_2 = registry.Register(callback_2.Get());
}));
registry.Notify();
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_TRUE(registration_2);
EXPECT_CALL(callback_1, Run());
EXPECT_CALL(callback_2, Run());
registry.Notify();
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
} // namespace
diff --git a/chromium/media/base/fake_audio_worker_unittest.cc b/chromium/media/base/fake_audio_worker_unittest.cc
index be8a9863ca3..d52b640ee54 100644
--- a/chromium/media/base/fake_audio_worker_unittest.cc
+++ b/chromium/media/base/fake_audio_worker_unittest.cc
@@ -4,14 +4,20 @@
#include "media/base/fake_audio_worker.h"
+#include <limits>
+#include <memory>
+#include <utility>
#include <vector>
#include "base/bind.h"
#include "base/macros.h"
#include "base/memory/scoped_refptr.h"
#include "base/single_thread_task_runner.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
+#include "base/test/test_mock_time_task_runner.h"
+#include "base/time/tick_clock.h"
#include "base/time/time.h"
+#include "base/time/time_override.h"
#include "build/build_config.h"
#include "media/base/audio_parameters.h"
#include "testing/gmock/include/gmock/gmock-matchers.h"
@@ -28,8 +34,7 @@ class FakeAudioWorkerTest : public testing::Test {
public:
FakeAudioWorkerTest()
: params_(AudioParameters::AUDIO_FAKE, CHANNEL_LAYOUT_STEREO, 44100, 128),
- fake_worker_(scoped_task_environment_.GetMainThreadTaskRunner(),
- params_) {
+ fake_worker_(task_environment_.GetMainThreadTaskRunner(), params_) {
time_between_callbacks_ = base::TimeDelta::FromMicroseconds(
params_.frames_per_buffer() * base::Time::kMicrosecondsPerSecond /
static_cast<float>(params_.sample_rate()));
@@ -78,12 +83,12 @@ class FakeAudioWorkerTest : public testing::Test {
}
scoped_refptr<base::SingleThreadTaskRunner> TaskRunner() {
- return scoped_task_environment_.GetMainThreadTaskRunner();
+ return task_environment_.GetMainThreadTaskRunner();
}
protected:
- base::test::ScopedTaskEnvironment scoped_task_environment_{
- base::test::ScopedTaskEnvironment::TimeSource::MOCK_TIME_AND_NOW};
+ base::test::TaskEnvironment task_environment_{
+ base::test::TaskEnvironment::TimeSource::MOCK_TIME};
AudioParameters params_;
FakeAudioWorker fake_worker_;
base::TimeDelta time_between_callbacks_;
@@ -105,10 +110,10 @@ TEST_F(FakeAudioWorkerTest, FakeBasicCallback) {
// PostTaskAndReply because we want to end_test after run_on_audio_thread is
// finished. This is because RunOnAudioThread may post other tasks which
// should run before we end_test.
- scoped_task_environment_.GetMainThreadTaskRunner()->PostTaskAndReply(
+ task_environment_.GetMainThreadTaskRunner()->PostTaskAndReply(
FROM_HERE, std::move(run_on_audio_thread), std::move(end_test));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_THAT(callbacks_, SizeIs(1));
}
@@ -119,7 +124,7 @@ TEST_F(FakeAudioWorkerTest, TimeBetweenCallbacks) {
FROM_HERE,
base::BindOnce(&FakeAudioWorkerTest::TimeCallbacksOnAudioThread,
base::Unretained(this), kTestCallbacks));
- scoped_task_environment_.FastForwardUntilNoTasksRemain();
+ task_environment_.FastForwardUntilNoTasksRemain();
EXPECT_THAT(callbacks_, SizeIs(Eq(kTestCallbacks)));
@@ -150,13 +155,13 @@ TEST_F(FakeAudioWorkerTest, StartStopClearsCallbacks) {
// Issuing a Stop() / Start() in the middle of the callback period should not
// trigger a callback.
- scoped_task_environment_.FastForwardBy(time_between_callbacks_ / 2);
+ task_environment_.FastForwardBy(time_between_callbacks_ / 2);
EXPECT_THAT(callbacks_, SizeIs(1));
TaskRunner()->PostTask(
FROM_HERE, base::BindOnce(&FakeAudioWorkerTest::StopStartOnAudioThread,
base::Unretained(this)));
- scoped_task_environment_.FastForwardBy(time_between_callbacks_);
+ task_environment_.FastForwardBy(time_between_callbacks_);
// We expect 3 callbacks: First Start(), Second Start(), and one for the
// period. If the first callback was not cancelled, we would get 4 callbacks,
// two on the first period.
@@ -165,7 +170,95 @@ TEST_F(FakeAudioWorkerTest, StartStopClearsCallbacks) {
base::BindOnce(&FakeAudioWorkerTest::EndTest, base::Unretained(this)));
// EndTest() will ensure the proper number of callbacks have occurred.
- scoped_task_environment_.FastForwardUntilNoTasksRemain();
+ task_environment_.FastForwardUntilNoTasksRemain();
EXPECT_THAT(callbacks_, SizeIs(3));
}
+
+class FakeAudioWorkerMockTaskTest : public testing::Test {
+ public:
+ FakeAudioWorkerMockTaskTest()
+ : params_(AudioParameters::AUDIO_FAKE, CHANNEL_LAYOUT_STEREO, 44100, 128),
+ fake_worker_(task_runner_, params_) {
+ DCHECK(!global_clock_);
+ global_clock_ = task_runner_->GetMockTickClock();
+ time_between_callbacks_ = base::TimeDelta::FromMicroseconds(
+ params_.frames_per_buffer() * base::Time::kMicrosecondsPerSecond /
+ static_cast<float>(params_.sample_rate()));
+ clock_overrides_ = std::make_unique<base::subtle::ScopedTimeClockOverrides>(
+ nullptr, TimeTicksOverride, nullptr);
+ }
+
+ ~FakeAudioWorkerMockTaskTest() override { global_clock_ = nullptr; }
+
+ void CalledByFakeWorker(base::TimeTicks ideal_time, base::TimeTicks now) {
+ callbacks_.push_back(base::TimeTicks::Now());
+ }
+
+ void SetUp() override {
+ {
+ base::TestMockTimeTaskRunner::ScopedContext ctx(task_runner_);
+ fake_worker_.Start(
+ base::BindRepeating(&FakeAudioWorkerMockTaskTest::CalledByFakeWorker,
+ base::Unretained(this)));
+ }
+ }
+
+ void TearDown() override {
+ {
+ base::TestMockTimeTaskRunner::ScopedContext ctx(task_runner_);
+ fake_worker_.Stop();
+ }
+ task_runner_->RunUntilIdle();
+ }
+
+ protected:
+ scoped_refptr<base::TestMockTimeTaskRunner> task_runner_ =
+ new base::TestMockTimeTaskRunner();
+ std::unique_ptr<base::subtle::ScopedTimeClockOverrides> clock_overrides_;
+ AudioParameters params_;
+ FakeAudioWorker fake_worker_;
+ base::TimeDelta time_between_callbacks_;
+ std::vector<base::TimeTicks> callbacks_;
+
+ static const base::TickClock* global_clock_;
+ static base::TimeTicks TimeTicksOverride() {
+ DCHECK(global_clock_);
+ return global_clock_->NowTicks();
+ }
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(FakeAudioWorkerMockTaskTest);
+};
+
+const base::TickClock* FakeAudioWorkerMockTaskTest::global_clock_ = nullptr;
+
+// This test is disabled because, when the worker is running late, reads are
+// skipped to maintain compatibility between input and output streams.
+TEST_F(FakeAudioWorkerMockTaskTest, DISABLED_LateCallbackProducesCallback) {
+ task_runner_->RunUntilIdle();
+ EXPECT_THAT(callbacks_, SizeIs(1));
+
+  // Advancing 2 periods will trigger the late logic. It should result in one
+  // callback, and one queued item which will run in 0.5 callback periods.
+ task_runner_->AdvanceMockTickClock(time_between_callbacks_ * 2.5);
+ task_runner_->RunUntilIdle();
+ EXPECT_THAT(callbacks_, SizeIs(2));
+ // Fast-forward to trigger the next time. Note that 0.5 does not work due to
+ // rounding in the next frame logic, since 128 does not divide 44100.
+ task_runner_->FastForwardBy(time_between_callbacks_ * 0.501);
+ EXPECT_THAT(callbacks_, SizeIs(3));
+}
+
+TEST_F(FakeAudioWorkerMockTaskTest, CallbackDelay) {
+ // Initial call only
+ task_runner_->RunUntilIdle();
+
+ // Run the clock forward 1.5 periods and then trigger the callback.
+ // This means we are not behind, but the next callback should occur
+ // in 0.5 periods.
+ task_runner_->AdvanceMockTickClock(time_between_callbacks_ * 1.5);
+ task_runner_->RunUntilIdle();
+ EXPECT_THAT(callbacks_, SizeIs(2));
+ EXPECT_EQ(task_runner_->NextPendingTaskDelay(), time_between_callbacks_ / 2);
+}
} // namespace media
diff --git a/chromium/media/base/fake_demuxer_stream_unittest.cc b/chromium/media/base/fake_demuxer_stream_unittest.cc
index b9a077a6681..0003878df43 100644
--- a/chromium/media/base/fake_demuxer_stream_unittest.cc
+++ b/chromium/media/base/fake_demuxer_stream_unittest.cc
@@ -9,7 +9,7 @@
#include "base/bind.h"
#include "base/macros.h"
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/base/decoder_buffer.h"
#include "media/base/demuxer_stream.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -180,7 +180,7 @@ class FakeDemuxerStreamTest : public testing::Test {
ReadAllBuffers(num_configs, num_buffers_in_one_config);
}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
std::unique_ptr<FakeDemuxerStream> stream_;
DemuxerStream::Status status_;
diff --git a/chromium/media/base/fallback_video_decoder_unittest.cc b/chromium/media/base/fallback_video_decoder_unittest.cc
index e168be0271b..d54bd02a6f1 100644
--- a/chromium/media/base/fallback_video_decoder_unittest.cc
+++ b/chromium/media/base/fallback_video_decoder_unittest.cc
@@ -8,7 +8,7 @@
#include "base/bind_helpers.h"
#include "base/run_loop.h"
#include "base/test/gmock_callback_support.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/base/decoder_buffer.h"
#include "media/base/fallback_video_decoder.h"
#include "media/base/mock_filters.h"
@@ -72,7 +72,7 @@ class FallbackVideoDecoderUnittest : public ::testing::TestWithParam<bool> {
bool PreferredShouldSucceed() { return GetParam(); }
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
StrictMock<MockVideoDecoder>* backup_decoder_;
StrictMock<MockVideoDecoder>* preferred_decoder_;
diff --git a/chromium/media/base/media_resource.cc b/chromium/media/base/media_resource.cc
index 70142e1b110..e36937aa07a 100644
--- a/chromium/media/base/media_resource.cc
+++ b/chromium/media/base/media_resource.cc
@@ -3,6 +3,8 @@
// found in the LICENSE file.
#include "media/base/media_resource.h"
+#include "base/no_destructor.h"
+#include "url/origin.h"
namespace media {
@@ -10,9 +12,11 @@ MediaResource::MediaResource() = default;
MediaResource::~MediaResource() = default;
-MediaUrlParams MediaResource::GetMediaUrlParams() const {
+const MediaUrlParams& MediaResource::GetMediaUrlParams() const {
NOTREACHED();
- return MediaUrlParams{GURL(), GURL(), false, false};
+ static base::NoDestructor<MediaUrlParams> instance{
+ GURL(), GURL(), url::Origin(), false, false};
+ return *instance;
}
MediaResource::Type MediaResource::GetType() const {
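
    The base::NoDestructor idiom used above, as a standalone sketch (DefaultLabel
    is a hypothetical function, not part of the change): a function-local static
    whose destructor never runs, returned by const reference.

    #include <string>

    #include "base/no_destructor.h"

    const std::string& DefaultLabel() {
      // Constructed on first use; intentionally never destroyed at shutdown.
      static const base::NoDestructor<std::string> instance("default");
      return *instance;
    }
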
diff --git a/chromium/media/base/media_resource.h b/chromium/media/base/media_resource.h
index 92c62281c13..0678a365f0a 100644
--- a/chromium/media/base/media_resource.h
+++ b/chromium/media/base/media_resource.h
@@ -55,7 +55,7 @@ class MEDIA_EXPORT MediaResource {
// and should be handled appropriately by the caller.
// Other types:
// Should not be called.
- virtual MediaUrlParams GetMediaUrlParams() const;
+ virtual const MediaUrlParams& GetMediaUrlParams() const;
// This method is only used with the MediaUrlDemuxer, to propagate duration
// changes coming from the MediaPlayerRendereClient.
diff --git a/chromium/media/base/media_switches.cc b/chromium/media/base/media_switches.cc
index 9f184eb0290..19c66820469 100644
--- a/chromium/media/base/media_switches.cc
+++ b/chromium/media/base/media_switches.cc
@@ -260,6 +260,10 @@ const base::Feature kD3D11VideoDecoder{"D3D11VideoDecoder",
const base::Feature kD3D11VideoDecoderIgnoreWorkarounds{
"D3D11VideoDecoderIgnoreWorkarounds", base::FEATURE_DISABLED_BY_DEFAULT};
+// Don't allow use of 11.1 devices, even if supported. They might be more crashy.
+const base::Feature kD3D11LimitTo11_0{"D3D11VideoDecoderLimitTo11_0",
+ base::FEATURE_DISABLED_BY_DEFAULT};
+
// Falls back to other decoders after audio/video decode error happens. The
// implementation may choose different strategies on when to fallback. See
// DecoderStream for details. When disabled, playback will fail immediately
@@ -317,9 +321,11 @@ const base::Feature kVaapiVP8Encoder{"VaapiVP8Encoder",
const base::Feature kVaapiVP9Encoder{"VaapiVP9Encoder",
base::FEATURE_DISABLED_BY_DEFAULT};
+#if defined(ARCH_CPU_X86_FAMILY) && defined(OS_CHROMEOS)
// Enable VP9 k-SVC decoding with HW decoder for webrtc use case on ChromeOS.
const base::Feature kVp9kSVCHWDecoding{"Vp9kSVCHWDecoding",
base::FEATURE_DISABLED_BY_DEFAULT};
+#endif // defined(ARCH_CPU_X86_FAMILY) && defined(OS_CHROMEOS)
// Inform video blitter of video color space.
const base::Feature kVideoBlitColorAccuracy{"video-blit-color-accuracy",
@@ -352,6 +358,12 @@ const base::Feature kHardwareSecureDecryption{
const base::Feature kWidevineAv1{"WidevineAv1",
base::FEATURE_ENABLED_BY_DEFAULT};
+// Forces support for encrypted AV1 in EME requestMediaKeySystemAccess() queries
+// for the Widevine key system, even if the underlying Widevine CDM doesn't
+// support it. Has no effect if the "WidevineAv1" feature is disabled.
+const base::Feature kWidevineAv1ForceSupportForTesting{
+ "WidevineAv1ForceSupportForTesting", base::FEATURE_DISABLED_BY_DEFAULT};
+
// Enables handling of hardware media keys for controlling media.
const base::Feature kHardwareMediaKeyHandling{
"HardwareMediaKeyHandling",
@@ -409,7 +421,7 @@ const base::Feature kMediaDrmPreprovisioningAtStartup{
// Enables the Android Image Reader path for Video decoding(for AVDA and MCVD)
const base::Feature kAImageReaderVideoOutput{"AImageReaderVideoOutput",
- base::FEATURE_DISABLED_BY_DEFAULT};
+ base::FEATURE_ENABLED_BY_DEFAULT};
// Prevents using SurfaceLayer for videos. This is meant to be used by embedders
// that cannot support SurfaceLayer at the moment.
@@ -418,7 +430,7 @@ const base::Feature kDisableSurfaceLayerForVideo{
// Enable picture in picture web api for android.
const base::Feature kPictureInPictureAPI{"PictureInPictureAPI",
- base::FEATURE_DISABLED_BY_DEFAULT};
+ base::FEATURE_ENABLED_BY_DEFAULT};
// Enables CanPlayType() (and other queries) for HLS MIME types. Note that
// disabling this also causes navigation to .m3u8 files to trigger downloading
@@ -429,6 +441,12 @@ const base::Feature kCanPlayHls{"CanPlayHls", base::FEATURE_ENABLED_BY_DEFAULT};
// HLS manifests will fail to load (triggering source fallback or load error).
const base::Feature kHlsPlayer{"HlsPlayer", base::FEATURE_ENABLED_BY_DEFAULT};
+// Use the (hacky) AudioManager.getOutputLatency() call to get the estimated
+// hardware latency for a stream for OpenSLES playback. This is normally not
+// needed, except for some Android TV devices.
+const base::Feature kUseAudioLatencyFromHAL{"UseAudioLatencyFromHAL",
+ base::FEATURE_DISABLED_BY_DEFAULT};
+
#endif // defined(OS_ANDROID)
#if defined(OS_WIN)
@@ -505,11 +523,20 @@ const base::Feature kPreloadMediaEngagementData{
const base::Feature kMediaEngagementHTTPSOnly{
"MediaEngagementHTTPSOnly", base::FEATURE_DISABLED_BY_DEFAULT};
-// Enables experimental local learning for media. Adds reporting only; does not
-// change media behavior.
+// Send events to DevTools rather than to chrome://media-internals.
+const base::Feature kMediaInspectorLogging{"MediaInspectorLogging",
+ base::FEATURE_DISABLED_BY_DEFAULT};
+
+// Enables experimental local learning for media. Used in the context of media
+// capabilities only. Adds reporting only; does not change media behavior.
const base::Feature kMediaLearningExperiment{"MediaLearningExperiment",
base::FEATURE_DISABLED_BY_DEFAULT};
+// Enables the general purpose media machine learning framework. Adds reporting
+// only; does not change media behavior.
+const base::Feature kMediaLearningFramework{"MediaLearningFramework",
+ base::FEATURE_DISABLED_BY_DEFAULT};
+
// Enables flash to be ducked by audio focus. This is enabled on Chrome OS which
// has audio focus enabled.
const base::Feature kAudioFocusDuckFlash {
@@ -539,6 +566,9 @@ const base::Feature kInternalMediaSession {
#endif
};
+const base::Feature kUseFakeDeviceForMediaStream{
+ "use-fake-device-for-media-stream", base::FEATURE_DISABLED_BY_DEFAULT};
+
bool IsVideoCaptureAcceleratedJpegDecodingEnabled() {
if (base::CommandLine::ForCurrentProcess()->HasSwitch(
switches::kDisableAcceleratedMjpegDecode)) {
diff --git a/chromium/media/base/media_switches.h b/chromium/media/base/media_switches.h
index a36309db5e1..5c18a90ee5f 100644
--- a/chromium/media/base/media_switches.h
+++ b/chromium/media/base/media_switches.h
@@ -101,6 +101,7 @@ MEDIA_EXPORT extern const base::Feature kAutoplayWhitelistSettings;
MEDIA_EXPORT extern const base::Feature kBackgroundVideoPauseOptimization;
MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoder;
MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoderIgnoreWorkarounds;
+MEDIA_EXPORT extern const base::Feature kD3D11LimitTo11_0;
MEDIA_EXPORT extern const base::Feature kExternalClearKeyForTesting;
MEDIA_EXPORT extern const base::Feature kFFmpegDecodeOpaqueVP8;
MEDIA_EXPORT extern const base::Feature kFailUrlProvisionFetcherForTesting;
@@ -114,7 +115,9 @@ MEDIA_EXPORT extern const base::Feature kMediaCapabilitiesWithParameters;
MEDIA_EXPORT extern const base::Feature kMediaCastOverlayButton;
MEDIA_EXPORT extern const base::Feature kMediaEngagementBypassAutoplayPolicies;
MEDIA_EXPORT extern const base::Feature kMediaEngagementHTTPSOnly;
+MEDIA_EXPORT extern const base::Feature kMediaInspectorLogging;
MEDIA_EXPORT extern const base::Feature kMediaLearningExperiment;
+MEDIA_EXPORT extern const base::Feature kMediaLearningFramework;
MEDIA_EXPORT extern const base::Feature kMemoryPressureBasedSourceBufferGC;
MEDIA_EXPORT extern const base::Feature kChromeosVideoDecoder;
MEDIA_EXPORT extern const base::Feature kNewEncodeCpuLoadEstimator;
@@ -131,6 +134,7 @@ MEDIA_EXPORT extern const base::Feature kSpecCompliantCanPlayThrough;
MEDIA_EXPORT extern const base::Feature kUnifiedAutoplay;
MEDIA_EXPORT extern const base::Feature kUseAndroidOverlay;
MEDIA_EXPORT extern const base::Feature kUseAndroidOverlayAggressively;
+MEDIA_EXPORT extern const base::Feature kUseFakeDeviceForMediaStream;
MEDIA_EXPORT extern const base::Feature kUseNewMediaCache;
MEDIA_EXPORT extern const base::Feature kUseR16Texture;
MEDIA_EXPORT extern const base::Feature kUseSurfaceLayerForVideo;
@@ -138,9 +142,13 @@ MEDIA_EXPORT extern const base::Feature kVaapiH264AMDEncoder;
MEDIA_EXPORT extern const base::Feature kVaapiLowPowerEncoder;
MEDIA_EXPORT extern const base::Feature kVaapiVP8Encoder;
MEDIA_EXPORT extern const base::Feature kVaapiVP9Encoder;
-MEDIA_EXPORT extern const base::Feature kVp9kSVCHWDecoding;
MEDIA_EXPORT extern const base::Feature kVideoBlitColorAccuracy;
MEDIA_EXPORT extern const base::Feature kWidevineAv1;
+MEDIA_EXPORT extern const base::Feature kWidevineAv1ForceSupportForTesting;
+
+#if defined(ARCH_CPU_X86_FAMILY) && defined(OS_CHROMEOS)
+MEDIA_EXPORT extern const base::Feature kVp9kSVCHWDecoding;
+#endif // defined(ARCH_CPU_X86_FAMILY) && defined(OS_CHROMEOS)
#if defined(OS_ANDROID)
MEDIA_EXPORT extern const base::Feature kMediaControlsExpandGesture;
@@ -152,6 +160,7 @@ MEDIA_EXPORT extern const base::Feature kDisableSurfaceLayerForVideo;
MEDIA_EXPORT extern const base::Feature kCanPlayHls;
MEDIA_EXPORT extern const base::Feature kPictureInPictureAPI;
MEDIA_EXPORT extern const base::Feature kHlsPlayer;
+MEDIA_EXPORT extern const base::Feature kUseAudioLatencyFromHAL;
#endif // defined(OS_ANDROID)
#if defined(OS_WIN)
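The feature flags added above all follow the standard base::Feature pattern, so call sites simply gate behavior on base::FeatureList::IsEnabled(). A minimal sketch of how the new Android-only kUseAudioLatencyFromHAL flag might be consulted (the helper function and its fallback value are illustrative, not part of this patch):

#include "base/feature_list.h"
#include "media/base/media_switches.h"

namespace media {

// Illustrative only: decide whether to trust the HAL-reported output latency
// on Android, based on the kUseAudioLatencyFromHAL feature.
int GetOutputLatencyHintMs(int hal_latency_ms) {
  if (base::FeatureList::IsEnabled(kUseAudioLatencyFromHAL))
    return hal_latency_ms;
  return 0;  // Fall back to the default latency estimate.
}

}  // namespace media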
diff --git a/chromium/media/base/media_url_demuxer.cc b/chromium/media/base/media_url_demuxer.cc
index c2f645b56ab..2ee66cffb51 100644
--- a/chromium/media/base/media_url_demuxer.cc
+++ b/chromium/media/base/media_url_demuxer.cc
@@ -13,9 +13,11 @@ MediaUrlDemuxer::MediaUrlDemuxer(
const scoped_refptr<base::SingleThreadTaskRunner>& task_runner,
const GURL& media_url,
const GURL& site_for_cookies,
+ const url::Origin& top_frame_origin,
bool allow_credentials,
bool is_hls)
- : params_{media_url, site_for_cookies, allow_credentials, is_hls},
+ : params_{media_url, site_for_cookies, top_frame_origin, allow_credentials,
+ is_hls},
task_runner_(task_runner) {}
MediaUrlDemuxer::~MediaUrlDemuxer() = default;
@@ -26,7 +28,7 @@ std::vector<DemuxerStream*> MediaUrlDemuxer::GetAllStreams() {
return std::vector<DemuxerStream*>();
}
-MediaUrlParams MediaUrlDemuxer::GetMediaUrlParams() const {
+const MediaUrlParams& MediaUrlDemuxer::GetMediaUrlParams() const {
return params_;
}
diff --git a/chromium/media/base/media_url_demuxer.h b/chromium/media/base/media_url_demuxer.h
index 9e8a044b643..408c5b0e837 100644
--- a/chromium/media/base/media_url_demuxer.h
+++ b/chromium/media/base/media_url_demuxer.h
@@ -36,13 +36,14 @@ class MEDIA_EXPORT MediaUrlDemuxer : public Demuxer {
const scoped_refptr<base::SingleThreadTaskRunner>& task_runner,
const GURL& media_url,
const GURL& site_for_cookies,
+ const url::Origin& top_frame_origin,
bool allow_credentials,
bool is_hls);
~MediaUrlDemuxer() override;
// MediaResource interface.
std::vector<DemuxerStream*> GetAllStreams() override;
- MediaUrlParams GetMediaUrlParams() const override;
+ const MediaUrlParams& GetMediaUrlParams() const override;
MediaResource::Type GetType() const override;
void ForwardDurationChangeToDemuxerHost(base::TimeDelta duration) override;
diff --git a/chromium/media/base/media_url_demuxer_unittest.cc b/chromium/media/base/media_url_demuxer_unittest.cc
index 56a8bb04a7f..3329df49a50 100644
--- a/chromium/media/base/media_url_demuxer_unittest.cc
+++ b/chromium/media/base/media_url_demuxer_unittest.cc
@@ -8,7 +8,7 @@
#include "base/bind_helpers.h"
#include "base/macros.h"
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/thread_task_runner_handle.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -24,9 +24,9 @@ class MediaUrlDemuxerTest : public testing::Test {
void InitializeTest(const GURL& media_url,
const GURL& first_party,
bool allow_credentials) {
- demuxer_.reset(new MediaUrlDemuxer(base::ThreadTaskRunnerHandle::Get(),
- media_url, first_party,
- allow_credentials, false));
+ demuxer_.reset(new MediaUrlDemuxer(
+ base::ThreadTaskRunnerHandle::Get(), media_url, first_party,
+ url::Origin::Create(first_party), allow_credentials, false));
}
void InitializeTest() {
@@ -42,7 +42,7 @@ class MediaUrlDemuxerTest : public testing::Test {
std::unique_ptr<Demuxer> demuxer_;
// Necessary, or else base::ThreadTaskRunnerHandle::Get() fails.
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
private:
DISALLOW_COPY_AND_ASSIGN(MediaUrlDemuxerTest);
@@ -53,7 +53,7 @@ TEST_F(MediaUrlDemuxerTest, BaseCase) {
EXPECT_EQ(MediaResource::Type::URL, demuxer_->GetType());
- MediaUrlParams params = demuxer_->GetMediaUrlParams();
+ const MediaUrlParams& params = demuxer_->GetMediaUrlParams();
EXPECT_EQ(default_media_url_, params.media_url);
EXPECT_EQ(default_first_party_url_, params.site_for_cookies);
EXPECT_EQ(true, params.allow_credentials);
@@ -62,7 +62,7 @@ TEST_F(MediaUrlDemuxerTest, BaseCase) {
TEST_F(MediaUrlDemuxerTest, AcceptsEmptyStrings) {
InitializeTest(GURL(), GURL(), false);
- MediaUrlParams params = demuxer_->GetMediaUrlParams();
+ const MediaUrlParams& params = demuxer_->GetMediaUrlParams();
EXPECT_EQ(GURL::EmptyGURL(), params.media_url);
EXPECT_EQ(GURL::EmptyGURL(), params.site_for_cookies);
EXPECT_EQ(false, params.allow_credentials);
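The base::test::ScopedTaskEnvironment to base::test::TaskEnvironment rename seen here recurs throughout the rest of this patch and is purely mechanical. A hypothetical fixture illustrating the pattern (not part of the patch):

#include "base/test/task_environment.h"
#include "base/threading/thread_task_runner_handle.h"
#include "testing/gtest/include/gtest/gtest.h"

class ExampleMediaTest : public testing::Test {
 protected:
  // Replaces base::test::ScopedTaskEnvironment; still provides the main
  // thread task runner so base::ThreadTaskRunnerHandle::Get() works in tests.
  base::test::TaskEnvironment task_environment_;
};

TEST_F(ExampleMediaTest, HasMainThreadTaskRunner) {
  EXPECT_TRUE(base::ThreadTaskRunnerHandle::Get());
}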
diff --git a/chromium/media/base/media_url_params.cc b/chromium/media/base/media_url_params.cc
new file mode 100644
index 00000000000..ed47b3941bb
--- /dev/null
+++ b/chromium/media/base/media_url_params.cc
@@ -0,0 +1,23 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/media_url_params.h"
+
+namespace media {
+
+MediaUrlParams::MediaUrlParams(GURL media_url,
+ GURL site_for_cookies,
+ url::Origin top_frame_origin,
+ bool allow_credentials,
+ bool is_hls)
+ : media_url(std::move(media_url)),
+ site_for_cookies(std::move(site_for_cookies)),
+ top_frame_origin(std::move(top_frame_origin)),
+ allow_credentials(allow_credentials),
+ is_hls(is_hls) {}
+
+MediaUrlParams::MediaUrlParams(const MediaUrlParams& other) = default;
+
+MediaUrlParams::~MediaUrlParams() = default;
+} // namespace media
diff --git a/chromium/media/base/media_url_params.h b/chromium/media/base/media_url_params.h
index c59e1ef67e5..ffd0af7c382 100644
--- a/chromium/media/base/media_url_params.h
+++ b/chromium/media/base/media_url_params.h
@@ -7,6 +7,7 @@
#include "media/base/media_export.h"
#include "url/gurl.h"
+#include "url/origin.h"
namespace media {
@@ -14,17 +15,26 @@ namespace media {
// playback (as opposed to stream based).
// See MediaUrlDemuxer and MediaPlayerRenderer.
struct MEDIA_EXPORT MediaUrlParams {
+ MediaUrlParams(GURL media_url,
+ GURL site_for_cookies,
+ url::Origin top_frame_origin,
+ bool allow_credentials,
+ bool is_hls);
+ MediaUrlParams(const MediaUrlParams& other);
+ ~MediaUrlParams();
+
// URL of the media to be played.
GURL media_url;
// Used to play media in authenticated scenarios.
- // NOTE: This URL is not the first party cookies, but the first party URL
- // returned by blink::WebDocument::firstPartyForCookies().
// In the MediaPlayerRenderer case, it will ultimately be used in
// MediaResourceGetterTask::CheckPolicyForCookies, to limit the scope of the
// cookies that the MediaPlayerRenderer has access to.
GURL site_for_cookies;
+ // Used to check for cookie content settings.
+ url::Origin top_frame_origin;
+
// True when the crossorigin mode is unspecified or set to "use-credentials",
// false when it's "anonymous".
//
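With the new top_frame_origin field, every MediaUrlParams (and therefore every MediaUrlDemuxer) must now be constructed with an explicit origin. A sketch of a call site, mirroring the updated unit test above (the URLs and helper function are illustrative):

#include <memory>

#include "base/threading/thread_task_runner_handle.h"
#include "media/base/media_url_demuxer.h"
#include "url/gurl.h"
#include "url/origin.h"

std::unique_ptr<media::MediaUrlDemuxer> CreateUrlDemuxer() {
  GURL media_url("https://example.com/video.mp4");
  GURL site_for_cookies("https://example.com/");
  return std::make_unique<media::MediaUrlDemuxer>(
      base::ThreadTaskRunnerHandle::Get(), media_url, site_for_cookies,
      url::Origin::Create(site_for_cookies), /*allow_credentials=*/true,
      /*is_hls=*/false);
}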
diff --git a/chromium/media/base/mock_filters.h b/chromium/media/base/mock_filters.h
index cf2c9b35ceb..24286cc4e89 100644
--- a/chromium/media/base/mock_filters.h
+++ b/chromium/media/base/mock_filters.h
@@ -141,7 +141,7 @@ class MockMediaResource : public MediaResource {
MOCK_CONST_METHOD0(GetType, MediaResource::Type());
MOCK_METHOD0(GetAllStreams, std::vector<DemuxerStream*>());
MOCK_METHOD1(GetFirstStream, DemuxerStream*(DemuxerStream::Type type));
- MOCK_CONST_METHOD0(GetMediaUrlParams, MediaUrlParams());
+ MOCK_CONST_METHOD0(GetMediaUrlParams, const MediaUrlParams&());
};
class MockDemuxer : public Demuxer {
@@ -290,6 +290,7 @@ class MockRendererClient : public RendererClient {
MOCK_METHOD1(OnVideoOpacityChange, void(bool));
MOCK_METHOD1(OnDurationChange, void(base::TimeDelta));
MOCK_METHOD1(OnRemotePlayStateChange, void(MediaStatus::State state));
+ MOCK_METHOD0(IsVideoStreamAvailable, bool());
};
class MockVideoRenderer : public VideoRenderer {
diff --git a/chromium/media/base/null_video_sink_unittest.cc b/chromium/media/base/null_video_sink_unittest.cc
index ce91e58a904..98d9b821fcf 100644
--- a/chromium/media/base/null_video_sink_unittest.cc
+++ b/chromium/media/base/null_video_sink_unittest.cc
@@ -8,8 +8,8 @@
#include "base/callback_helpers.h"
#include "base/macros.h"
#include "base/test/gmock_callback_support.h"
-#include "base/test/scoped_task_environment.h"
#include "base/test/simple_test_tick_clock.h"
+#include "base/test/task_environment.h"
#include "components/viz/common/frame_sinks/begin_frame_args.h"
#include "media/base/null_video_sink.h"
#include "media/base/test_helpers.h"
@@ -37,7 +37,7 @@ class NullVideoSinkTest : public testing::Test,
std::unique_ptr<NullVideoSink> new_sink(new NullVideoSink(
clockless, interval,
base::Bind(&NullVideoSinkTest::FrameReceived, base::Unretained(this)),
- scoped_task_environment_.GetMainThreadTaskRunner()));
+ task_environment_.GetMainThreadTaskRunner()));
new_sink->set_tick_clock_for_testing(&tick_clock_);
return new_sink;
}
@@ -62,7 +62,7 @@ class NullVideoSinkTest : public testing::Test,
MOCK_METHOD1(FrameReceived, void(scoped_refptr<VideoFrame>));
protected:
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
base::SimpleTestTickClock tick_clock_;
DISALLOW_COPY_AND_ASSIGN(NullVideoSinkTest);
diff --git a/chromium/media/base/pipeline_impl_unittest.cc b/chromium/media/base/pipeline_impl_unittest.cc
index bb015f54e53..8b85276aca9 100644
--- a/chromium/media/base/pipeline_impl_unittest.cc
+++ b/chromium/media/base/pipeline_impl_unittest.cc
@@ -16,8 +16,8 @@
#include "base/single_thread_task_runner.h"
#include "base/stl_util.h"
#include "base/test/gmock_callback_support.h"
-#include "base/test/scoped_task_environment.h"
#include "base/test/simple_test_tick_clock.h"
+#include "base/test/task_environment.h"
#include "base/threading/simple_thread.h"
#include "base/threading/thread_task_runner_handle.h"
#include "base/time/clock.h"
@@ -103,10 +103,9 @@ class PipelineImplTest : public ::testing::Test {
};
PipelineImplTest()
- : pipeline_(
- new PipelineImpl(scoped_task_environment_.GetMainThreadTaskRunner(),
- scoped_task_environment_.GetMainThreadTaskRunner(),
- &media_log_)),
+ : pipeline_(new PipelineImpl(task_environment_.GetMainThreadTaskRunner(),
+ task_environment_.GetMainThreadTaskRunner(),
+ &media_log_)),
demuxer_(new StrictMock<MockDemuxer>()),
demuxer_host_(nullptr),
scoped_renderer_(new StrictMock<MockRenderer>()),
@@ -329,7 +328,7 @@ class PipelineImplTest : public ::testing::Test {
// Fixture members.
StrictMock<CallbackHelper> callbacks_;
base::SimpleTestTickClock test_tick_clock_;
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
NullMediaLog media_log_;
std::unique_ptr<PipelineImpl> pipeline_;
@@ -718,10 +717,9 @@ TEST_F(PipelineImplTest, ErrorDuringSeek) {
// Invoked function OnError. This asserts that the pipeline does not enqueue
// non-teardown related tasks while tearing down.
-static void TestNoCallsAfterError(
- PipelineImpl* pipeline,
- base::test::ScopedTaskEnvironment* task_environment,
- PipelineStatus /* status */) {
+static void TestNoCallsAfterError(PipelineImpl* pipeline,
+ base::test::TaskEnvironment* task_environment,
+ PipelineStatus /* status */) {
CHECK(pipeline);
CHECK(task_environment);
@@ -746,8 +744,8 @@ TEST_F(PipelineImplTest, NoMessageDuringTearDownFromError) {
StartPipelineAndExpect(PIPELINE_OK);
// Trigger additional requests on the pipeline during tear down from error.
- base::Callback<void(PipelineStatus)> cb = base::Bind(
- &TestNoCallsAfterError, pipeline_.get(), &scoped_task_environment_);
+ base::Callback<void(PipelineStatus)> cb =
+ base::Bind(&TestNoCallsAfterError, pipeline_.get(), &task_environment_);
ON_CALL(callbacks_, OnError(_)).WillByDefault(Invoke(CreateFunctor(cb)));
base::TimeDelta seek_time = base::TimeDelta::FromSeconds(5);
diff --git a/chromium/media/base/renderer_client.cc b/chromium/media/base/renderer_client.cc
new file mode 100644
index 00000000000..ff7c1d57aef
--- /dev/null
+++ b/chromium/media/base/renderer_client.cc
@@ -0,0 +1,13 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/renderer_client.h"
+
+namespace media {
+
+bool RendererClient::IsVideoStreamAvailable() {
+ return true;
+}
+
+} // namespace media
diff --git a/chromium/media/base/renderer_client.h b/chromium/media/base/renderer_client.h
index a86c934c878..249c5ee76da 100644
--- a/chromium/media/base/renderer_client.h
+++ b/chromium/media/base/renderer_client.h
@@ -7,6 +7,7 @@
#include "base/time/time.h"
#include "media/base/audio_decoder_config.h"
+#include "media/base/buffering_state.h"
#include "media/base/media_status.h"
#include "media/base/pipeline_status.h"
#include "media/base/video_decoder_config.h"
@@ -17,7 +18,7 @@ namespace media {
// Interface used by Renderer, AudioRenderer, VideoRenderer and
// MediaPlayerRenderer implementations to notify their clients.
-class RendererClient {
+class MEDIA_EXPORT RendererClient {
public:
// Executed if any error was encountered after Renderer initialization.
virtual void OnError(PipelineStatus status) = 0;
@@ -48,6 +49,11 @@ class RendererClient {
// Executed for the first video frame and whenever opacity changes.
// Only used if media stream contains a video track.
virtual void OnVideoOpacityChange(bool opaque) = 0;
+
+ // Returns true if video stream is available in the media resource.
+ // TODO(crbug.com/988535): Used by AudioRendererImpl. This can be removed
+ // when the bug is resolved.
+ virtual bool IsVideoStreamAvailable();
};
} // namespace media
diff --git a/chromium/media/base/renderer_factory_selector.cc b/chromium/media/base/renderer_factory_selector.cc
index ec9681935cf..521feec734a 100644
--- a/chromium/media/base/renderer_factory_selector.cc
+++ b/chromium/media/base/renderer_factory_selector.cc
@@ -25,7 +25,8 @@ void RendererFactorySelector::SetBaseFactoryType(FactoryType type) {
base_factory_type_ = type;
}
-RendererFactory* RendererFactorySelector::GetCurrentFactory() {
+RendererFactorySelector::FactoryType
+RendererFactorySelector::GetCurrentFactoryType() {
DCHECK(base_factory_type_);
FactoryType next_factory_type = base_factory_type_.value();
@@ -38,10 +39,14 @@ RendererFactory* RendererFactorySelector::GetCurrentFactory() {
if (query_is_flinging_active_cb_ && query_is_flinging_active_cb_.Run())
next_factory_type = FactoryType::FLINGING;
- DVLOG(1) << __func__ << " Selecting factory type: " << next_factory_type;
+ return next_factory_type;
+}
- RendererFactory* current_factory = factories_[next_factory_type].get();
+RendererFactory* RendererFactorySelector::GetCurrentFactory() {
+ FactoryType next_factory_type = GetCurrentFactoryType();
+ DVLOG(1) << __func__ << " Selecting factory type: " << next_factory_type;
+ RendererFactory* current_factory = factories_[next_factory_type].get();
DCHECK(current_factory);
return current_factory;
diff --git a/chromium/media/base/renderer_factory_selector.h b/chromium/media/base/renderer_factory_selector.h
index d6f055e5500..2c68f1eefa0 100644
--- a/chromium/media/base/renderer_factory_selector.h
+++ b/chromium/media/base/renderer_factory_selector.h
@@ -21,13 +21,16 @@ class MEDIA_EXPORT RendererFactorySelector {
using QueryIsRemotingActiveCB = base::Callback<bool()>;
using QueryIsFlingingActiveCB = base::Callback<bool()>;
+ // These values are persisted to logs. Entries should not be renumbered and
+ // numeric values should never be reused.
enum FactoryType {
- DEFAULT, // DefaultRendererFactory.
- MOJO, // MojoRendererFactory.
- MEDIA_PLAYER, // MediaPlayerRendererClientFactory.
- COURIER, // CourierRendererFactory.
- FLINGING, // FlingingRendererClientFactory
- FACTORY_TYPE_MAX = FLINGING,
+ DEFAULT = 0, // DefaultRendererFactory.
+ MOJO = 1, // MojoRendererFactory.
+ MEDIA_PLAYER = 2, // MediaPlayerRendererClientFactory.
+ COURIER = 3, // CourierRendererFactory.
+ FLINGING = 4, // FlingingRendererClientFactory.
+ CAST = 5, // CastRendererClientFactory.
+ FACTORY_TYPE_MAX = CAST,
};
RendererFactorySelector();
@@ -43,6 +46,10 @@ class MEDIA_EXPORT RendererFactorySelector {
// be used by default.
void SetBaseFactoryType(FactoryType type);
+ // Returns the type of the factory that GetCurrentFactory() would return.
+ // NOTE: SetBaseFactoryType() must be called before calling this method.
+ FactoryType GetCurrentFactoryType();
+
// Updates |current_factory_| if necessary, and returns its value.
// NOTE: SetBaseFactoryType() must be called before calling this method.
RendererFactory* GetCurrentFactory();
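Splitting GetCurrentFactoryType() out of GetCurrentFactory() lets callers observe which renderer factory would be selected without instantiating it, for example to record the choice in a histogram now that the enum values are stable. A sketch under that assumption (the histogram name and helper are made up for illustration):

#include "base/metrics/histogram_macros.h"
#include "media/base/renderer_factory_selector.h"

void ReportSelectedRendererFactory(media::RendererFactorySelector* selector) {
  // SetBaseFactoryType() must already have been called on |selector|.
  const media::RendererFactorySelector::FactoryType type =
      selector->GetCurrentFactoryType();
  UMA_HISTOGRAM_ENUMERATION(
      "Media.Example.SelectedRendererFactory", type,
      media::RendererFactorySelector::FACTORY_TYPE_MAX + 1);
}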
diff --git a/chromium/media/base/serial_runner_unittest.cc b/chromium/media/base/serial_runner_unittest.cc
index 354f8f8f06d..ff1ed2b2684 100644
--- a/chromium/media/base/serial_runner_unittest.cc
+++ b/chromium/media/base/serial_runner_unittest.cc
@@ -10,7 +10,7 @@
#include "base/macros.h"
#include "base/run_loop.h"
#include "base/single_thread_task_runner.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/base/pipeline_status.h"
#include "media/base/serial_runner.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -24,7 +24,7 @@ class SerialRunnerTest : public ::testing::Test {
~SerialRunnerTest() override = default;
void RunSerialRunner() {
- scoped_task_environment_.GetMainThreadTaskRunner()->PostTask(
+ task_environment_.GetMainThreadTaskRunner()->PostTask(
FROM_HERE, base::BindOnce(&SerialRunnerTest::StartRunnerInternal,
base::Unretained(this), bound_fns_));
base::RunLoop().RunUntilIdle();
@@ -119,7 +119,7 @@ class SerialRunnerTest : public ::testing::Test {
void CancelSerialRunner(const PipelineStatusCB& status_cb) {
// Tasks run by |runner_| shouldn't reset it, hence we post a task to do so.
- scoped_task_environment_.GetMainThreadTaskRunner()->PostTask(
+ task_environment_.GetMainThreadTaskRunner()->PostTask(
FROM_HERE, base::BindOnce(&SerialRunnerTest::ResetSerialRunner,
base::Unretained(this)));
status_cb.Run(PIPELINE_OK);
@@ -129,7 +129,7 @@ class SerialRunnerTest : public ::testing::Test {
runner_.reset();
}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
SerialRunner::Queue bound_fns_;
std::unique_ptr<SerialRunner> runner_;
diff --git a/chromium/media/base/simple_watch_timer.cc b/chromium/media/base/simple_watch_timer.cc
new file mode 100644
index 00000000000..83668dfd7a2
--- /dev/null
+++ b/chromium/media/base/simple_watch_timer.cc
@@ -0,0 +1,66 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/simple_watch_timer.h"
+
+#include "base/location.h"
+#include "media/base/timestamp_constants.h"
+
+namespace media {
+
+namespace {
+
+constexpr base::TimeDelta kQueryInterval =
+ base::TimeDelta::FromMilliseconds(750);
+
+} // namespace
+
+SimpleWatchTimer::SimpleWatchTimer(TickCB tick_cb,
+ GetCurrentTimeCB get_current_time_cb)
+ : tick_cb_(std::move(tick_cb)),
+ get_current_time_cb_(std::move(get_current_time_cb)) {
+ DCHECK(!tick_cb_.is_null());
+ DCHECK(!get_current_time_cb_.is_null());
+}
+
+SimpleWatchTimer::~SimpleWatchTimer() {}
+
+void SimpleWatchTimer::Start() {
+ if (timer_.IsRunning())
+ return;
+
+ last_current_time_ = get_current_time_cb_.Run();
+ timer_.Start(FROM_HERE, kQueryInterval, this, &SimpleWatchTimer::Tick);
+}
+
+void SimpleWatchTimer::Stop() {
+ if (!timer_.IsRunning())
+ return;
+
+ timer_.Stop();
+ Tick();
+}
+
+void SimpleWatchTimer::Tick() {
+ base::TimeDelta current_time = get_current_time_cb_.Run();
+ base::TimeDelta duration;
+ if (last_current_time_ != kNoTimestamp &&
+ last_current_time_ != kInfiniteDuration) {
+ duration = current_time - last_current_time_;
+ }
+ last_current_time_ = current_time;
+
+ // Accumulate watch time if the duration is reasonable.
+ if (duration > base::TimeDelta() && duration < kQueryInterval * 2) {
+ unreported_ms_ += duration.InMilliseconds();
+ }
+
+ // Tick if the accumulated time is about a second.
+ if (unreported_ms_ >= 500) {
+ unreported_ms_ -= 1000;
+ tick_cb_.Run();
+ }
+}
+
+} // namespace media
diff --git a/chromium/media/base/simple_watch_timer.h b/chromium/media/base/simple_watch_timer.h
new file mode 100644
index 00000000000..777fc9b5ed2
--- /dev/null
+++ b/chromium/media/base/simple_watch_timer.h
@@ -0,0 +1,56 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_SIMPLE_WATCH_TIMER_H_
+#define MEDIA_BASE_SIMPLE_WATCH_TIMER_H_
+
+#include "base/callback_forward.h"
+#include "base/macros.h"
+#include "base/time/time.h"
+#include "base/timer/timer.h"
+#include "media/base/media_export.h"
+
+namespace media {
+
+// SimpleWatchTimer aids in recording UMA counts that accumulate media watch
+// time in seconds. It will fire its callback about once per second during
+// active playback.
+//
+// Active playback is a duration after Start() and before Stop() in
+// which current time progresses. Large jumps in current time are not considered
+// to be progress; they are assumed to be seeks or media errors.
+//
+// Start() and Stop() may be called repeatedly. It is recommended to call Stop()
+// before destructing a SimpleWatchTimer so that |tick_cb| can be fired at an
+// opportune time.
+//
+// Note: SimpleWatchTimer does not understand playbackRate and will discard
+// durations with high rates.
+class MEDIA_EXPORT SimpleWatchTimer {
+ public:
+ using TickCB = base::RepeatingClosure;
+ using GetCurrentTimeCB = base::RepeatingCallback<base::TimeDelta()>;
+
+ SimpleWatchTimer(TickCB tick_cb, GetCurrentTimeCB get_current_time_cb);
+ ~SimpleWatchTimer();
+
+ void Start();
+ void Stop();
+
+ private:
+ void Tick();
+
+ TickCB tick_cb_;
+ GetCurrentTimeCB get_current_time_cb_;
+
+ int unreported_ms_ = 0;
+ base::TimeDelta last_current_time_;
+ base::RepeatingTimer timer_;
+
+ DISALLOW_COPY_AND_ASSIGN(SimpleWatchTimer);
+};
+
+} // namespace media
+
+#endif // MEDIA_BASE_SIMPLE_WATCH_TIMER_H_
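A minimal usage sketch for the new SimpleWatchTimer (not part of the patch; the player class and callbacks are hypothetical):

#include "base/bind.h"
#include "media/base/simple_watch_timer.h"

class ExamplePlayer {
 public:
  ExamplePlayer()
      : watch_timer_(base::BindRepeating(&ExamplePlayer::OnWatchTimeTick,
                                         base::Unretained(this)),
                     base::BindRepeating(&ExamplePlayer::GetCurrentTime,
                                         base::Unretained(this))) {}

  void OnPlaying() { watch_timer_.Start(); }
  void OnPaused() { watch_timer_.Stop(); }  // Flushes a final tick if due.

 private:
  void OnWatchTimeTick() { ++watched_seconds_; }  // e.g. report to UMA.
  base::TimeDelta GetCurrentTime() { return current_time_; }

  base::TimeDelta current_time_;
  int watched_seconds_ = 0;
  media::SimpleWatchTimer watch_timer_;
};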
diff --git a/chromium/media/base/text_renderer_unittest.cc b/chromium/media/base/text_renderer_unittest.cc
index dd0df329579..f5c6b09bf4b 100644
--- a/chromium/media/base/text_renderer_unittest.cc
+++ b/chromium/media/base/text_renderer_unittest.cc
@@ -13,7 +13,7 @@
#include "base/callback_helpers.h"
#include "base/macros.h"
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/base/audio_decoder_config.h"
#include "media/base/decoder_buffer.h"
#include "media/base/demuxer_stream.h"
@@ -55,7 +55,7 @@ class TextRendererTest : public testing::Test {
DCHECK(!text_renderer_);
text_renderer_.reset(new TextRenderer(
- scoped_task_environment_.GetMainThreadTaskRunner(),
+ task_environment_.GetMainThreadTaskRunner(),
base::Bind(&TextRendererTest::OnAddTextTrack, base::Unretained(this))));
text_renderer_->Initialize(
base::Bind(&TextRendererTest::OnEnd, base::Unretained(this)));
@@ -190,7 +190,7 @@ class TextRendererTest : public testing::Test {
MOCK_METHOD0(OnPause, void());
MOCK_METHOD0(OnFlush, void());
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
typedef std::vector<std::unique_ptr<FakeTextTrackStream>> TextTrackStreams;
TextTrackStreams text_track_streams_;
@@ -206,7 +206,7 @@ class TextRendererTest : public testing::Test {
TEST_F(TextRendererTest, CreateTextRendererNoInit) {
text_renderer_.reset(new TextRenderer(
- scoped_task_environment_.GetMainThreadTaskRunner(),
+ task_environment_.GetMainThreadTaskRunner(),
base::Bind(&TextRendererTest::OnAddTextTrack, base::Unretained(this))));
text_renderer_.reset();
}
diff --git a/chromium/media/base/user_input_monitor_unittest.cc b/chromium/media/base/user_input_monitor_unittest.cc
index f174daffefc..1f6bcbef2e5 100644
--- a/chromium/media/base/user_input_monitor_unittest.cc
+++ b/chromium/media/base/user_input_monitor_unittest.cc
@@ -8,7 +8,7 @@
#include <utility>
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/thread_task_runner_handle.h"
#include "build/build_config.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -21,11 +21,11 @@ namespace media {
TEST(UserInputMonitorTest, CreatePlatformSpecific) {
#if defined(OS_LINUX)
- base::test::ScopedTaskEnvironment task_environment(
- base::test::ScopedTaskEnvironment::MainThreadType::IO);
+ base::test::TaskEnvironment task_environment(
+ base::test::TaskEnvironment::MainThreadType::IO);
#else
- base::test::ScopedTaskEnvironment task_environment(
- base::test::ScopedTaskEnvironment::MainThreadType::UI);
+ base::test::TaskEnvironment task_environment(
+ base::test::TaskEnvironment::MainThreadType::UI);
#endif // defined(OS_LINUX)
std::unique_ptr<UserInputMonitor> monitor = UserInputMonitor::Create(
@@ -43,11 +43,11 @@ TEST(UserInputMonitorTest, CreatePlatformSpecific) {
TEST(UserInputMonitorTest, CreatePlatformSpecificWithMapping) {
#if defined(OS_LINUX)
- base::test::ScopedTaskEnvironment task_environment(
- base::test::ScopedTaskEnvironment::MainThreadType::IO);
+ base::test::TaskEnvironment task_environment(
+ base::test::TaskEnvironment::MainThreadType::IO);
#else
- base::test::ScopedTaskEnvironment task_environment(
- base::test::ScopedTaskEnvironment::MainThreadType::UI);
+ base::test::TaskEnvironment task_environment(
+ base::test::TaskEnvironment::MainThreadType::UI);
#endif // defined(OS_LINUX)
std::unique_ptr<UserInputMonitor> monitor = UserInputMonitor::Create(
diff --git a/chromium/media/base/video_codecs.h b/chromium/media/base/video_codecs.h
index 79c5daf1125..3814ad69099 100644
--- a/chromium/media/base/video_codecs.h
+++ b/chromium/media/base/video_codecs.h
@@ -40,7 +40,7 @@ enum VideoCodec {
};
// Video codec profiles. Keep in sync with mojo::VideoCodecProfile (see
-// media/mojo/interfaces/media_types.mojom), gpu::VideoCodecProfile (see
+// media/mojo/mojom/media_types.mojom), gpu::VideoCodecProfile (see
// gpu/config/gpu_info.h), and PP_VideoDecoder_Profile (translation is performed
// in content/renderer/pepper/ppb_video_decoder_impl.cc).
// NOTE: These values are histogrammed over time in UMA so the values must never
diff --git a/chromium/media/base/video_frame.cc b/chromium/media/base/video_frame.cc
index 17329cd5430..d9bb30a5288 100644
--- a/chromium/media/base/video_frame.cc
+++ b/chromium/media/base/video_frame.cc
@@ -103,7 +103,6 @@ static bool RequiresEvenSizeAllocation(VideoPixelFormat format) {
return false;
case PIXEL_FORMAT_NV12:
case PIXEL_FORMAT_NV21:
- case PIXEL_FORMAT_MT21:
case PIXEL_FORMAT_I420:
case PIXEL_FORMAT_MJPEG:
case PIXEL_FORMAT_YUY2:
@@ -347,66 +346,25 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalDataWithLayout(
uint8_t* data,
size_t data_size,
base::TimeDelta timestamp) {
- return WrapExternalStorage(STORAGE_UNOWNED_MEMORY, layout, visible_rect,
- natural_size, data, data_size, timestamp, nullptr,
- nullptr, base::SharedMemoryHandle(), 0);
-}
+ StorageType storage_type = STORAGE_UNOWNED_MEMORY;
-// static
-scoped_refptr<VideoFrame> VideoFrame::WrapExternalReadOnlySharedMemory(
- VideoPixelFormat format,
- const gfx::Size& coded_size,
- const gfx::Rect& visible_rect,
- const gfx::Size& natural_size,
- uint8_t* data,
- size_t data_size,
- base::ReadOnlySharedMemoryRegion* region,
- size_t data_offset,
- base::TimeDelta timestamp) {
- auto layout = GetDefaultLayout(format, coded_size);
- if (!layout)
+ if (!IsValidConfig(layout.format(), storage_type, layout.coded_size(),
+ visible_rect, natural_size)) {
+ DLOG(ERROR) << __func__ << " Invalid config."
+ << ConfigToString(layout.format(), storage_type,
+ layout.coded_size(), visible_rect,
+ natural_size);
return nullptr;
- return WrapExternalStorage(STORAGE_SHMEM, *layout, visible_rect, natural_size,
- data, data_size, timestamp, region, nullptr,
- base::SharedMemoryHandle(), data_offset);
-}
+ }
-// static
-scoped_refptr<VideoFrame> VideoFrame::WrapExternalUnsafeSharedMemory(
- VideoPixelFormat format,
- const gfx::Size& coded_size,
- const gfx::Rect& visible_rect,
- const gfx::Size& natural_size,
- uint8_t* data,
- size_t data_size,
- base::UnsafeSharedMemoryRegion* region,
- size_t data_offset,
- base::TimeDelta timestamp) {
- auto layout = GetDefaultLayout(format, coded_size);
- if (!layout)
- return nullptr;
- return WrapExternalStorage(STORAGE_SHMEM, *layout, visible_rect, natural_size,
- data, data_size, timestamp, nullptr, region,
- base::SharedMemoryHandle(), data_offset);
-}
+ scoped_refptr<VideoFrame> frame = new VideoFrame(
+ layout, storage_type, visible_rect, natural_size, timestamp);
-// static
-scoped_refptr<VideoFrame> VideoFrame::WrapExternalSharedMemory(
- VideoPixelFormat format,
- const gfx::Size& coded_size,
- const gfx::Rect& visible_rect,
- const gfx::Size& natural_size,
- uint8_t* data,
- size_t data_size,
- base::SharedMemoryHandle handle,
- size_t data_offset,
- base::TimeDelta timestamp) {
- auto layout = GetDefaultLayout(format, coded_size);
- if (!layout)
- return nullptr;
- return WrapExternalStorage(STORAGE_SHMEM, *layout, visible_rect, natural_size,
- data, data_size, timestamp, nullptr, nullptr,
- handle, data_offset);
+ for (size_t i = 0; i < layout.planes().size(); ++i) {
+ frame->data_[i] = data + layout.planes()[i].offset;
+ }
+
+ return frame;
}
// static
@@ -613,6 +571,10 @@ scoped_refptr<VideoFrame> VideoFrame::WrapVideoFrame(
CHECK(!frame.HasTextures());
DCHECK(frame.visible_rect().Contains(visible_rect));
+ // The following storage type should not be wrapped as the shared region
+ // cannot be owned by both the wrapped frame and the wrapping frame.
+ DCHECK(frame.storage_type() != STORAGE_MOJO_SHARED_BUFFER);
+
if (!AreValidPixelFormatsForWrap(frame.format(), format)) {
DLOG(ERROR) << __func__ << " Invalid format conversion."
<< VideoPixelFormatToString(frame.format()) << " to "
@@ -649,18 +611,9 @@ scoped_refptr<VideoFrame> VideoFrame::WrapVideoFrame(
#endif
if (frame.storage_type() == STORAGE_SHMEM) {
- if (frame.read_only_shared_memory_region_) {
- DCHECK(frame.read_only_shared_memory_region_->IsValid());
- wrapping_frame->AddReadOnlySharedMemoryRegion(
- frame.read_only_shared_memory_region_);
- } else if (frame.unsafe_shared_memory_region_) {
- DCHECK(frame.unsafe_shared_memory_region_->IsValid());
- wrapping_frame->AddUnsafeSharedMemoryRegion(
- frame.unsafe_shared_memory_region_);
- } else {
- DCHECK(frame.shared_memory_handle_.IsValid());
- wrapping_frame->AddSharedMemoryHandle(frame.shared_memory_handle_);
- }
+ DCHECK(frame.shm_region_ && frame.shm_region_->IsValid());
+ wrapping_frame->BackWithSharedMemory(frame.shm_region_,
+ frame.shared_memory_offset());
}
return wrapping_frame;
@@ -799,8 +752,7 @@ int VideoFrame::BytesPerElement(VideoPixelFormat format, size_t plane) {
case PIXEL_FORMAT_P016LE:
return 2;
case PIXEL_FORMAT_NV12:
- case PIXEL_FORMAT_NV21:
- case PIXEL_FORMAT_MT21: {
+ case PIXEL_FORMAT_NV21: {
static const int bytes_per_element[] = {1, 2};
DCHECK_LT(plane, base::size(bytes_per_element));
return bytes_per_element[plane];
@@ -864,6 +816,37 @@ void VideoFrame::HashFrameForTesting(base::MD5Context* context,
}
}
+void VideoFrame::BackWithSharedMemory(base::UnsafeSharedMemoryRegion* region,
+ size_t offset) {
+ DCHECK(!shm_region_);
+ DCHECK(!owned_shm_region_.IsValid());
+ // Either we should be backing a frame created with WrapExternal*, or we are
+ // wrapping an existing STORAGE_SHMEM, in which case the storage
+ // type has already been set to STORAGE_SHMEM.
+ DCHECK(storage_type_ == STORAGE_UNOWNED_MEMORY ||
+ storage_type_ == STORAGE_SHMEM);
+ DCHECK(region && region->IsValid());
+ storage_type_ = STORAGE_SHMEM;
+ shm_region_ = region;
+ shared_memory_offset_ = offset;
+}
+
+void VideoFrame::BackWithOwnedSharedMemory(
+ base::UnsafeSharedMemoryRegion region,
+ base::WritableSharedMemoryMapping mapping,
+ size_t offset) {
+ DCHECK(!shm_region_);
+ DCHECK(!owned_shm_region_.IsValid());
+ // We should be backing a frame created with WrapExternal*. We cannot be
+ // wrapping an existing STORAGE_SHMEM, as the region is unowned in that case.
+ DCHECK(storage_type_ == STORAGE_UNOWNED_MEMORY);
+ storage_type_ = STORAGE_SHMEM;
+ owned_shm_region_ = std::move(region);
+ shm_region_ = &owned_shm_region_;
+ owned_shm_mapping_ = std::move(mapping);
+ shared_memory_offset_ = offset;
+}
+
bool VideoFrame::IsMappable() const {
return IsStorageTypeMappable(storage_type_);
}
@@ -928,38 +911,6 @@ VideoFrame::mailbox_holder(size_t texture_index) const {
return mailbox_holders_[texture_index];
}
-base::ReadOnlySharedMemoryRegion* VideoFrame::read_only_shared_memory_region()
- const {
- DCHECK_EQ(storage_type_, STORAGE_SHMEM);
- DCHECK(read_only_shared_memory_region_ &&
- read_only_shared_memory_region_->IsValid());
- return read_only_shared_memory_region_;
-}
-
-base::UnsafeSharedMemoryRegion* VideoFrame::unsafe_shared_memory_region()
- const {
- DCHECK_EQ(storage_type_, STORAGE_SHMEM);
- DCHECK(unsafe_shared_memory_region_ &&
- unsafe_shared_memory_region_->IsValid());
- return unsafe_shared_memory_region_;
-}
-
-base::SharedMemoryHandle VideoFrame::shared_memory_handle() const {
- DCHECK_EQ(storage_type_, STORAGE_SHMEM);
- DCHECK(shared_memory_handle_.IsValid());
- return shared_memory_handle_;
-}
-
-size_t VideoFrame::shared_memory_offset() const {
- DCHECK_EQ(storage_type_, STORAGE_SHMEM);
- DCHECK((read_only_shared_memory_region_ &&
- read_only_shared_memory_region_->IsValid()) ||
- (unsafe_shared_memory_region_ &&
- unsafe_shared_memory_region_->IsValid()) ||
- shared_memory_handle_.IsValid());
- return shared_memory_offset_;
-}
-
#if defined(OS_LINUX)
const std::vector<base::ScopedFD>& VideoFrame::DmabufFds() const {
DCHECK_EQ(storage_type_, STORAGE_DMABUFS);
@@ -978,28 +929,6 @@ bool VideoFrame::IsSameDmaBufsAs(const VideoFrame& frame) const {
}
#endif
-void VideoFrame::AddReadOnlySharedMemoryRegion(
- base::ReadOnlySharedMemoryRegion* region) {
- storage_type_ = STORAGE_SHMEM;
- DCHECK(SharedMemoryUninitialized());
- DCHECK(region && region->IsValid());
- read_only_shared_memory_region_ = region;
-}
-
-void VideoFrame::AddUnsafeSharedMemoryRegion(
- base::UnsafeSharedMemoryRegion* region) {
- storage_type_ = STORAGE_SHMEM;
- DCHECK(SharedMemoryUninitialized());
- DCHECK(region && region->IsValid());
- unsafe_shared_memory_region_ = region;
-}
-
-void VideoFrame::AddSharedMemoryHandle(base::SharedMemoryHandle handle) {
- storage_type_ = STORAGE_SHMEM;
- DCHECK(SharedMemoryUninitialized());
- shared_memory_handle_ = handle;
-}
-
#if defined(OS_MACOSX)
CVPixelBufferRef VideoFrame::CvPixelBuffer() const {
return cv_pixel_buffer_.get();
@@ -1048,60 +977,6 @@ size_t VideoFrame::BitDepth() const {
return media::BitDepth(format());
}
-// static
-scoped_refptr<VideoFrame> VideoFrame::WrapExternalStorage(
- StorageType storage_type,
- const VideoFrameLayout& layout,
- const gfx::Rect& visible_rect,
- const gfx::Size& natural_size,
- uint8_t* data,
- size_t data_size,
- base::TimeDelta timestamp,
- base::ReadOnlySharedMemoryRegion* read_only_region,
- base::UnsafeSharedMemoryRegion* unsafe_region,
- base::SharedMemoryHandle handle,
- size_t data_offset) {
- DCHECK(IsStorageTypeMappable(storage_type));
-
- if (!IsValidConfig(layout.format(), storage_type, layout.coded_size(),
- visible_rect, natural_size)) {
- DLOG(ERROR) << __func__ << " Invalid config."
- << ConfigToString(layout.format(), storage_type,
- layout.coded_size(), visible_rect,
- natural_size);
- return nullptr;
- }
-
- scoped_refptr<VideoFrame> frame = new VideoFrame(
- layout, storage_type, visible_rect, natural_size, timestamp);
-
- for (size_t i = 0; i < layout.planes().size(); ++i) {
- frame->data_[i] = data + layout.planes()[i].offset;
- }
-
- if (storage_type == STORAGE_SHMEM) {
- if (read_only_region || unsafe_region) {
- DCHECK(!handle.IsValid());
- DCHECK_NE(!!read_only_region, !!unsafe_region)
- << "Expected exactly one read-only or unsafe region for "
- << "STORAGE_SHMEM VideoFrame";
- if (read_only_region) {
- frame->read_only_shared_memory_region_ = read_only_region;
- DCHECK(frame->read_only_shared_memory_region_->IsValid());
- } else if (unsafe_region) {
- frame->unsafe_shared_memory_region_ = unsafe_region;
- DCHECK(frame->unsafe_shared_memory_region_->IsValid());
- }
- frame->shared_memory_offset_ = data_offset;
- } else {
- frame->AddSharedMemoryHandle(handle);
- frame->shared_memory_offset_ = data_offset;
- }
- }
-
- return frame;
-}
-
VideoFrame::VideoFrame(const VideoFrameLayout& layout,
StorageType storage_type,
const gfx::Rect& visible_rect,
@@ -1111,7 +986,6 @@ VideoFrame::VideoFrame(const VideoFrameLayout& layout,
storage_type_(storage_type),
visible_rect_(Intersection(visible_rect, gfx::Rect(layout.coded_size()))),
natural_size_(natural_size),
- shared_memory_offset_(0),
#if defined(OS_LINUX)
dmabuf_fds_(base::MakeRefCounted<DmabufHolder>()),
#endif
@@ -1217,11 +1091,6 @@ scoped_refptr<VideoFrame> VideoFrame::CreateFrameWithLayout(
return frame;
}
-bool VideoFrame::SharedMemoryUninitialized() {
- return !read_only_shared_memory_region_ && !unsafe_shared_memory_region_ &&
- !shared_memory_handle_.IsValid();
-}
-
// static
gfx::Size VideoFrame::SampleSize(VideoPixelFormat format, size_t plane) {
DCHECK(IsValidPlane(plane, format));
@@ -1252,7 +1121,6 @@ gfx::Size VideoFrame::SampleSize(VideoPixelFormat format, size_t plane) {
case PIXEL_FORMAT_I420A:
case PIXEL_FORMAT_NV12:
case PIXEL_FORMAT_NV21:
- case PIXEL_FORMAT_MT21:
case PIXEL_FORMAT_YUV420P9:
case PIXEL_FORMAT_YUV420P10:
case PIXEL_FORMAT_YUV420P12:
@@ -1310,6 +1178,12 @@ void VideoFrame::AllocateMemory(bool zero_initialize_memory) {
}
}
+bool VideoFrame::IsValidSharedMemoryFrame() const {
+ if (storage_type_ == STORAGE_SHMEM)
+ return shm_region_ && shm_region_->IsValid();
+ return false;
+}
+
std::vector<size_t> VideoFrame::CalculatePlaneSize() const {
// We have two cases for plane size mapping:
// 1) If plane size is specified: use planes' size.
diff --git a/chromium/media/base/video_frame.h b/chromium/media/base/video_frame.h
index 245c51b6ebf..55946ee6b18 100644
--- a/chromium/media/base/video_frame.h
+++ b/chromium/media/base/video_frame.h
@@ -18,10 +18,7 @@
#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/aligned_memory.h"
-#include "base/memory/read_only_shared_memory_region.h"
#include "base/memory/ref_counted.h"
-#include "base/memory/shared_memory.h"
-#include "base/memory/shared_memory_handle.h"
#include "base/memory/unsafe_shared_memory_region.h"
#include "base/optional.h"
#include "base/synchronization/lock.h"
@@ -78,7 +75,7 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
STORAGE_OPAQUE = 1, // We don't know how VideoFrame's pixels are stored.
STORAGE_UNOWNED_MEMORY = 2, // External, non owned data pointers.
STORAGE_OWNED_MEMORY = 3, // VideoFrame has allocated its own data buffer.
- STORAGE_SHMEM = 4, // Pixels are backed by Shared Memory.
+ STORAGE_SHMEM = 4, // Backed by unsafe (writable) shared memory.
#if defined(OS_LINUX)
// TODO(mcasas): Consider turning this type into STORAGE_NATIVE
// based on the idea of using this same enum value for both DMA
@@ -86,6 +83,8 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
// STORAGE_UNOWNED_MEMORY) and handle it appropriately in all cases.
STORAGE_DMABUFS = 5, // Each plane is stored into a DmaBuf.
#endif
+ // Backed by a mojo shared buffer. This should only be used by the
+ // MojoSharedBufferVideoFrame subclass.
STORAGE_MOJO_SHARED_BUFFER = 6,
STORAGE_LAST = STORAGE_MOJO_SHARED_BUFFER,
};
@@ -187,49 +186,6 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
size_t data_size,
base::TimeDelta timestamp);
- // Same as WrapExternalData() with a ReadOnlySharedMemoryRegion and its
- // offset. Neither |region| nor |data| are owned by this VideoFrame. The
- // region and mapping which back |data| must outlive this instance; a
- // destruction observer can be used in this case.
- static scoped_refptr<VideoFrame> WrapExternalReadOnlySharedMemory(
- VideoPixelFormat format,
- const gfx::Size& coded_size,
- const gfx::Rect& visible_rect,
- const gfx::Size& natural_size,
- uint8_t* data,
- size_t data_size,
- base::ReadOnlySharedMemoryRegion* region,
- size_t shared_memory_offset,
- base::TimeDelta timestamp);
-
- // Same as WrapExternalData() with a UnsafeSharedMemoryRegion and its
- // offset. Neither |region| nor |data| are owned by this VideoFrame. The owner
- // of the region and mapping which back |data| must outlive this instance; a
- // destruction observer can be used in this case.
- static scoped_refptr<VideoFrame> WrapExternalUnsafeSharedMemory(
- VideoPixelFormat format,
- const gfx::Size& coded_size,
- const gfx::Rect& visible_rect,
- const gfx::Size& natural_size,
- uint8_t* data,
- size_t data_size,
- base::UnsafeSharedMemoryRegion* region,
- size_t shared_memory_offset,
- base::TimeDelta timestamp);
-
- // Legacy wrapping of old SharedMemoryHandle objects. Deprecated, use one of
- // the shared memory region wrappers above instead.
- static scoped_refptr<VideoFrame> WrapExternalSharedMemory(
- VideoPixelFormat format,
- const gfx::Size& coded_size,
- const gfx::Rect& visible_rect,
- const gfx::Size& natural_size,
- uint8_t* data,
- size_t data_size,
- base::SharedMemoryHandle handle,
- size_t shared_memory_offset,
- base::TimeDelta timestamp);
-
// Wraps external YUV data of the given parameters with a VideoFrame.
// The returned VideoFrame does not own the data passed in.
static scoped_refptr<VideoFrame> WrapExternalYuvData(
@@ -380,6 +336,37 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
// static
static bool IsStorageTypeMappable(VideoFrame::StorageType storage_type);
+ // A video frame wrapping external data may be backed by an unsafe shared
+ // memory region. These methods are used to appropriately transform a
+ // VideoFrame created with WrapExternalData, WrapExternalYuvaData, etc. The
+ // storage type of the VideoFrame will be changed to STORAGE_SHMEM. Once the
+ // backing of a VideoFrame is set, it cannot be changed.
+ //
+ // The region is NOT owned by the video frame. Both the region and its
+ // associated mapping must outlive this instance.
+ void BackWithSharedMemory(base::UnsafeSharedMemoryRegion* region,
+ size_t offset = 0);
+
+ // As above, but the VideoFrame owns the shared memory region as well as the
+ // mapping. They will be destroyed with their VideoFrame.
+ void BackWithOwnedSharedMemory(base::UnsafeSharedMemoryRegion region,
+ base::WritableSharedMemoryMapping mapping,
+ size_t offset = 0);
+
+ // Returns the offset into the shared memory where the frame data begins. Only
+ // valid if the frame is backed by shared memory.
+ size_t shared_memory_offset() const {
+ DCHECK(IsValidSharedMemoryFrame());
+ return shared_memory_offset_;
+ }
+
+ // Valid for shared memory backed VideoFrames.
+ base::UnsafeSharedMemoryRegion* shm_region() {
+ DCHECK(IsValidSharedMemoryFrame());
+ DCHECK(storage_type_ == STORAGE_SHMEM);
+ return shm_region_;
+ }
+
// Returns true if |frame| is accessible and mapped in the VideoFrame memory
// space. If false, clients should refrain from accessing data(),
// visible_data() etc.
@@ -457,18 +444,6 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
// mailbox, the caller must wait for the included sync point.
const gpu::MailboxHolder& mailbox_holder(size_t texture_index) const;
- // Returns a pointer to the read-only shared-memory region, if present.
- base::ReadOnlySharedMemoryRegion* read_only_shared_memory_region() const;
-
- // Returns a pointer to the unsafe shared memory handle, if present.
- base::UnsafeSharedMemoryRegion* unsafe_shared_memory_region() const;
-
- // Retuns the legacy SharedMemoryHandle, if present.
- base::SharedMemoryHandle shared_memory_handle() const;
-
- // Returns the offset into the shared memory where the frame data begins.
- size_t shared_memory_offset() const;
-
#if defined(OS_LINUX)
// Returns a vector containing the backing DmaBufs for this frame. The number
// of returned DmaBufs will be equal or less than the number of planes of
@@ -488,12 +463,6 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
bool IsSameDmaBufsAs(const VideoFrame& frame) const;
#endif
- void AddReadOnlySharedMemoryRegion(base::ReadOnlySharedMemoryRegion* region);
- void AddUnsafeSharedMemoryRegion(base::UnsafeSharedMemoryRegion* region);
-
- // Legacy, use one of the Add*SharedMemoryRegion methods above instead.
- void AddSharedMemoryHandle(base::SharedMemoryHandle handle);
-
#if defined(OS_MACOSX)
// Returns the backing CVPixelBuffer, if present.
CVPixelBufferRef CvPixelBuffer() const;
@@ -592,19 +561,6 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
}
private:
- static scoped_refptr<VideoFrame> WrapExternalStorage(
- StorageType storage_type,
- const VideoFrameLayout& layout,
- const gfx::Rect& visible_rect,
- const gfx::Size& natural_size,
- uint8_t* data,
- size_t data_size,
- base::TimeDelta timestamp,
- base::ReadOnlySharedMemoryRegion* read_only_region,
- base::UnsafeSharedMemoryRegion* unsafe_region,
- base::SharedMemoryHandle handle,
- size_t data_offset);
-
static scoped_refptr<VideoFrame> CreateFrameInternal(
VideoPixelFormat format,
const gfx::Size& coded_size,
@@ -613,8 +569,6 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
base::TimeDelta timestamp,
bool zero_initialize_memory);
- bool SharedMemoryUninitialized();
-
// Returns the pixel size of each subsample for a given |plane| and |format|.
// E.g. 2x2 for the U-plane in PIXEL_FORMAT_I420.
static gfx::Size SampleSize(VideoPixelFormat format, size_t plane);
@@ -634,6 +588,10 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
// based on format, coded size and stride for the plane.
std::vector<size_t> CalculatePlaneSize() const;
+ // Returns true iff the frame has a shared memory storage type, and the
+ // associated regions are valid.
+ bool IsValidSharedMemoryFrame() const;
+
// VideoFrameLayout (includes format, coded_size, and strides).
const VideoFrameLayout layout_;
@@ -659,18 +617,18 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
gpu::MailboxHolder mailbox_holders_[kMaxPlanes];
ReleaseMailboxCB mailbox_holders_release_cb_;
- // Shared memory handle and associated offset inside it, if this frame is a
- // STORAGE_SHMEM one. Pointers to unowned shared memory regions. At most one
- // of the memory regions will be set.
- base::ReadOnlySharedMemoryRegion* read_only_shared_memory_region_ = nullptr;
- base::UnsafeSharedMemoryRegion* unsafe_shared_memory_region_ = nullptr;
-
- // Legacy handle.
- base::SharedMemoryHandle shared_memory_handle_;
+ // Shared memory region, if this frame is STORAGE_SHMEM. The region pointed
+ // to is unowned.
+ base::UnsafeSharedMemoryRegion* shm_region_ = nullptr;
// If this is a STORAGE_SHMEM frame, the offset of the data within the shared
// memory.
- size_t shared_memory_offset_;
+ size_t shared_memory_offset_ = 0;
+
+ // Used if this is a STORAGE_SHMEM frame with owned shared memory. In that
+ // case, shm_region_ will refer to this region.
+ base::UnsafeSharedMemoryRegion owned_shm_region_;
+ base::WritableSharedMemoryMapping owned_shm_mapping_;
#if defined(OS_LINUX)
class DmabufHolder;
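The removed Wrap*SharedMemory factories are replaced by a two-step pattern: create the frame with WrapExternalData() and then attach the region with BackWithSharedMemory() or BackWithOwnedSharedMemory(). The unit test changes further below show the unowned case; a sketch of the owned case (the sizes, pixel format, and helper function are illustrative):

#include <utility>

#include "base/memory/unsafe_shared_memory_region.h"
#include "media/base/video_frame.h"
#include "ui/gfx/geometry/rect.h"
#include "ui/gfx/geometry/size.h"

scoped_refptr<media::VideoFrame> CreateOwnedShmemFrame() {
  const gfx::Size coded_size(320, 240);
  const size_t data_size =
      media::VideoFrame::AllocationSize(media::PIXEL_FORMAT_I420, coded_size);
  auto region = base::UnsafeSharedMemoryRegion::Create(data_size);
  if (!region.IsValid())
    return nullptr;
  auto mapping = region.Map();
  if (!mapping.IsValid())
    return nullptr;
  auto frame = media::VideoFrame::WrapExternalData(
      media::PIXEL_FORMAT_I420, coded_size, gfx::Rect(coded_size), coded_size,
      mapping.GetMemoryAsSpan<uint8_t>().data(), data_size, base::TimeDelta());
  if (!frame)
    return nullptr;
  // The frame now owns both the region and the mapping; its storage type
  // becomes STORAGE_SHMEM.
  frame->BackWithOwnedSharedMemory(std::move(region), std::move(mapping));
  return frame;
}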
diff --git a/chromium/media/base/video_frame_layout.cc b/chromium/media/base/video_frame_layout.cc
index 9f80a039088..5c6485677ca 100644
--- a/chromium/media/base/video_frame_layout.cc
+++ b/chromium/media/base/video_frame_layout.cc
@@ -55,7 +55,6 @@ size_t VideoFrameLayout::NumPlanes(VideoPixelFormat format) {
return 1;
case PIXEL_FORMAT_NV12:
case PIXEL_FORMAT_NV21:
- case PIXEL_FORMAT_MT21:
case PIXEL_FORMAT_P016LE:
return 2;
case PIXEL_FORMAT_I420:
diff --git a/chromium/media/base/video_frame_unittest.cc b/chromium/media/base/video_frame_unittest.cc
index bfb2aa62148..4fb65ef6140 100644
--- a/chromium/media/base/video_frame_unittest.cc
+++ b/chromium/media/base/video_frame_unittest.cc
@@ -12,8 +12,6 @@
#include "base/callback_helpers.h"
#include "base/format_macros.h"
#include "base/memory/aligned_memory.h"
-#include "base/memory/read_only_shared_memory_region.h"
-#include "base/memory/shared_memory.h"
#include "base/memory/unsafe_shared_memory_region.h"
#include "base/stl_util.h"
#include "base/strings/stringprintf.h"
@@ -330,37 +328,21 @@ TEST(VideoFrame, WrapExternalData) {
}
// Create a frame that wraps writable shared memory.
-TEST(VideoFrame, WrapExternalReadOnlySharedMemory) {
+TEST(VideoFrame, WrapSharedMemory) {
const size_t kDataSize = 2 * 256 * 256;
- auto mapped_region = base::ReadOnlySharedMemoryRegion::Create(kDataSize);
- gfx::Size coded_size(256, 256);
- gfx::Rect visible_rect(coded_size);
- CreateTestY16Frame(coded_size, visible_rect, mapped_region.mapping.memory());
- auto timestamp = base::TimeDelta::FromMilliseconds(1);
- auto frame = VideoFrame::WrapExternalReadOnlySharedMemory(
- media::PIXEL_FORMAT_Y16, coded_size, visible_rect, visible_rect.size(),
- static_cast<uint8_t*>(mapped_region.mapping.memory()), kDataSize,
- &mapped_region.region, 0, timestamp);
-
- EXPECT_EQ(frame->coded_size(), coded_size);
- EXPECT_EQ(frame->visible_rect(), visible_rect);
- EXPECT_EQ(frame->timestamp(), timestamp);
- EXPECT_EQ(frame->data(media::VideoFrame::kYPlane)[0], 0xff);
-}
-
-// Create a frame that wraps unsafe shared memory.
-TEST(VideoFrame, WrapExternalUnsafeSharedMemory) {
- const size_t kDataSize = 2 * 256 * 256;
- auto region = base::UnsafeSharedMemoryRegion::Create(kDataSize);
- auto mapping = region.Map();
+ base::UnsafeSharedMemoryRegion region =
+ base::UnsafeSharedMemoryRegion::Create(kDataSize);
+ ASSERT_TRUE(region.IsValid());
+ base::WritableSharedMemoryMapping mapping = region.Map();
+ ASSERT_TRUE(mapping.IsValid());
gfx::Size coded_size(256, 256);
gfx::Rect visible_rect(coded_size);
CreateTestY16Frame(coded_size, visible_rect, mapping.memory());
auto timestamp = base::TimeDelta::FromMilliseconds(1);
- auto frame = VideoFrame::WrapExternalUnsafeSharedMemory(
+ auto frame = VideoFrame::WrapExternalData(
media::PIXEL_FORMAT_Y16, coded_size, visible_rect, visible_rect.size(),
- static_cast<uint8_t*>(mapping.memory()), kDataSize, &region, 0,
- timestamp);
+ mapping.GetMemoryAsSpan<uint8_t>().data(), kDataSize, timestamp);
+ frame->BackWithSharedMemory(&region);
EXPECT_EQ(frame->coded_size(), coded_size);
EXPECT_EQ(frame->visible_rect(), visible_rect);
@@ -368,19 +350,26 @@ TEST(VideoFrame, WrapExternalUnsafeSharedMemory) {
EXPECT_EQ(frame->data(media::VideoFrame::kYPlane)[0], 0xff);
}
-// Create a frame that wraps a legacy shared memory handle.
-TEST(VideoFrame, WrapExternalSharedMemory) {
+// Create a frame that wraps shared memory with an offset.
+TEST(VideoFrame, WrapUnsafeSharedMemoryWithOffset) {
+ const size_t kOffset = 64;
const size_t kDataSize = 2 * 256 * 256;
- base::SharedMemory shm;
- ASSERT_TRUE(shm.CreateAndMapAnonymous(kDataSize));
+ base::UnsafeSharedMemoryRegion region =
+ base::UnsafeSharedMemoryRegion::Create(kDataSize + kOffset);
+ ASSERT_TRUE(region.IsValid());
+ base::WritableSharedMemoryMapping mapping = region.Map();
+ ASSERT_TRUE(mapping.IsValid());
gfx::Size coded_size(256, 256);
gfx::Rect visible_rect(coded_size);
- CreateTestY16Frame(coded_size, visible_rect, shm.memory());
+ CreateTestY16Frame(
+ coded_size, visible_rect,
+ mapping.GetMemoryAsSpan<uint8_t>().subspan(kOffset).data());
auto timestamp = base::TimeDelta::FromMilliseconds(1);
- auto frame = VideoFrame::WrapExternalSharedMemory(
+ auto frame = VideoFrame::WrapExternalData(
media::PIXEL_FORMAT_Y16, coded_size, visible_rect, visible_rect.size(),
- static_cast<uint8_t*>(shm.memory()), kDataSize, shm.handle(), 0,
+ mapping.GetMemoryAsSpan<uint8_t>().subspan(kOffset).data(), kDataSize,
timestamp);
+ frame->BackWithSharedMemory(&region, kOffset);
EXPECT_EQ(frame->coded_size(), coded_size);
EXPECT_EQ(frame->visible_rect(), visible_rect);
@@ -614,7 +603,6 @@ TEST(VideoFrame, AllocationSize_OddSize) {
case PIXEL_FORMAT_I420:
case PIXEL_FORMAT_NV12:
case PIXEL_FORMAT_NV21:
- case PIXEL_FORMAT_MT21:
EXPECT_EQ(36u, VideoFrame::AllocationSize(format, size))
<< VideoPixelFormatToString(format);
break;
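
The hunks above fold the three shared-memory-specific factories (WrapExternalReadOnlySharedMemory, WrapExternalUnsafeSharedMemory and the legacy WrapExternalSharedMemory) into one pattern: wrap the mapped bytes with WrapExternalData() and then attach the owning region via BackWithSharedMemory(), optionally with an offset. A minimal sketch of that pattern, assuming a caller that already owns a suitably sized base::UnsafeSharedMemoryRegion (the helper name is illustrative):

    // Sketch only: wrap caller-owned shared memory as a VideoFrame.
    // |region| and |mapping| must outlive the returned frame.
    scoped_refptr<media::VideoFrame> WrapRegionAsFrame(
        base::UnsafeSharedMemoryRegion* region,
        base::WritableSharedMemoryMapping* mapping,
        const gfx::Size& coded_size,
        base::TimeDelta timestamp) {
      auto frame = media::VideoFrame::WrapExternalData(
          media::PIXEL_FORMAT_Y16, coded_size, gfx::Rect(coded_size),
          coded_size, mapping->GetMemoryAsSpan<uint8_t>().data(),
          mapping->size(), timestamp);
      if (frame)
        frame->BackWithSharedMemory(region);  // Records the shmem backing.
      return frame;
    }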
diff --git a/chromium/media/base/video_thumbnail_decoder_unittest.cc b/chromium/media/base/video_thumbnail_decoder_unittest.cc
index c36cfe4a6e9..5cdf459e4fb 100644
--- a/chromium/media/base/video_thumbnail_decoder_unittest.cc
+++ b/chromium/media/base/video_thumbnail_decoder_unittest.cc
@@ -9,7 +9,7 @@
#include "base/bind_helpers.h"
#include "base/run_loop.h"
#include "base/test/gmock_callback_support.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/base/media_util.h"
#include "media/base/mock_filters.h"
#include "media/base/video_decoder_config.h"
@@ -69,7 +69,7 @@ class VideoThumbnailDecoderTest : public testing::Test {
frame_ = std::move(frame);
}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
MockVideoDecoder* mock_video_decoder_;
std::unique_ptr<VideoThumbnailDecoder> thumbnail_decoder_;
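
This and several later files swap base::test::ScopedTaskEnvironment for the renamed base::test::TaskEnvironment; the fixture usage is otherwise unchanged. A minimal gtest sketch of the renamed type (fixture and test names hypothetical):

    #include "base/test/task_environment.h"
    #include "testing/gtest/include/gtest/gtest.h"

    class SomeMediaTest : public testing::Test {
     protected:
      // Declared first so it outlives members that post tasks.
      base::test::TaskEnvironment task_environment_;
    };

    TEST_F(SomeMediaTest, DrainsPostedTasks) {
      // ... post tasks via bound callbacks ...
      task_environment_.RunUntilIdle();  // Runs everything queued so far.
    }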
diff --git a/chromium/media/base/video_types.cc b/chromium/media/base/video_types.cc
index ac7b7cc4e63..1b8bb8f2555 100644
--- a/chromium/media/base/video_types.cc
+++ b/chromium/media/base/video_types.cc
@@ -39,8 +39,6 @@ std::string VideoPixelFormatToString(VideoPixelFormat format) {
return "PIXEL_FORMAT_RGB24";
case PIXEL_FORMAT_MJPEG:
return "PIXEL_FORMAT_MJPEG";
- case PIXEL_FORMAT_MT21:
- return "PIXEL_FORMAT_MT21";
case PIXEL_FORMAT_YUV420P9:
return "PIXEL_FORMAT_YUV420P9";
case PIXEL_FORMAT_YUV420P10:
@@ -97,7 +95,6 @@ bool IsYuvPlanar(VideoPixelFormat format) {
case PIXEL_FORMAT_I444:
case PIXEL_FORMAT_NV12:
case PIXEL_FORMAT_NV21:
- case PIXEL_FORMAT_MT21:
case PIXEL_FORMAT_YUV420P9:
case PIXEL_FORMAT_YUV420P10:
case PIXEL_FORMAT_YUV422P9:
@@ -139,7 +136,6 @@ bool IsOpaque(VideoPixelFormat format) {
case PIXEL_FORMAT_XRGB:
case PIXEL_FORMAT_RGB24:
case PIXEL_FORMAT_MJPEG:
- case PIXEL_FORMAT_MT21:
case PIXEL_FORMAT_YUV420P9:
case PIXEL_FORMAT_YUV420P10:
case PIXEL_FORMAT_YUV422P9:
@@ -179,7 +175,6 @@ size_t BitDepth(VideoPixelFormat format) {
case PIXEL_FORMAT_XRGB:
case PIXEL_FORMAT_RGB24:
case PIXEL_FORMAT_MJPEG:
- case PIXEL_FORMAT_MT21:
case PIXEL_FORMAT_ABGR:
case PIXEL_FORMAT_XBGR:
return 8;
diff --git a/chromium/media/base/video_types.h b/chromium/media/base/video_types.h
index 22c4a78eaad..0feca5ac103 100644
--- a/chromium/media/base/video_types.h
+++ b/chromium/media/base/video_types.h
@@ -47,15 +47,7 @@ enum VideoPixelFormat {
/* PIXEL_FORMAT_RGB32 = 13, Deprecated */
PIXEL_FORMAT_MJPEG = 14, // MJPEG compressed.
- // MediaTek proprietary format. MT21 is similar to NV21 except the memory
- // layout and pixel layout (swizzles). 12bpp with Y plane followed by a 2x2
- // interleaved VU plane. Each image contains two buffers -- Y plane and VU
- // plane. Two planes can be non-contiguous in memory. The starting addresses
- // of Y plane and VU plane are 4KB alignment.
- // Suppose image dimension is (width, height). For both Y plane and VU plane:
- // Row pitch = ((width+15)/16) * 16.
- // Plane size = Row pitch * (((height+31)/32)*32)
- PIXEL_FORMAT_MT21 = 15,
+ /* PIXEL_FORMAT_MT21 = 15, Deprecated */
// The P* in the formats below designates the number of bits per pixel
// component. I.e. P9 is 9-bits per pixel component, P10 is 10-bits per pixel
diff --git a/chromium/media/blink/BUILD.gn b/chromium/media/blink/BUILD.gn
index d75a490ac27..a43733b809f 100644
--- a/chromium/media/blink/BUILD.gn
+++ b/chromium/media/blink/BUILD.gn
@@ -76,7 +76,7 @@ component("blink") {
"//gpu",
"//media",
"//media:shared_memory_support",
- "//media/mojo/interfaces",
+ "//media/mojo/mojom",
"//net",
"//services/network/public/cpp:cpp",
"//services/service_manager/public/cpp:cpp",
@@ -102,7 +102,7 @@ test("media_blink_unittests") {
"//cc",
"//gin",
"//media:test_support",
- "//media/mojo/interfaces",
+ "//media/mojo/mojom",
"//media/mojo/services",
"//mojo/core/embedder",
"//net",
diff --git a/chromium/media/blink/cdm_result_promise.h b/chromium/media/blink/cdm_result_promise.h
index 06d500356b7..75c2b094dd2 100644
--- a/chromium/media/blink/cdm_result_promise.h
+++ b/chromium/media/blink/cdm_result_promise.h
@@ -8,12 +8,16 @@
#include <stdint.h>
#include "base/macros.h"
+#include "base/metrics/histogram_functions.h"
+#include "base/time/time.h"
#include "media/blink/cdm_result_promise_helper.h"
#include "third_party/blink/public/platform/web_content_decryption_module_result.h"
#include "third_party/blink/public/platform/web_string.h"
namespace media {
+const char kTimeUMAPrefix[] = "TimeTo.";
+
// Used to convert a WebContentDecryptionModuleResult into a CdmPromiseTemplate
// so that it can be passed through Chromium. When resolve(T) is called, the
// appropriate complete...() method on WebContentDecryptionModuleResult will be
@@ -25,6 +29,7 @@ template <typename... T>
class CdmResultPromise : public CdmPromiseTemplate<T...> {
public:
CdmResultPromise(const blink::WebContentDecryptionModuleResult& result,
+ const std::string& key_system_uma_prefix,
const std::string& uma_name);
~CdmResultPromise() override;
@@ -41,17 +46,27 @@ class CdmResultPromise : public CdmPromiseTemplate<T...> {
blink::WebContentDecryptionModuleResult web_cdm_result_;
- // UMA name to report result to.
+ // UMA prefix and name to report result and time to.
+ std::string key_system_uma_prefix_;
std::string uma_name_;
+ // Time when |this| is created.
+ base::TimeTicks creation_time_;
+
DISALLOW_COPY_AND_ASSIGN(CdmResultPromise);
};
template <typename... T>
CdmResultPromise<T...>::CdmResultPromise(
const blink::WebContentDecryptionModuleResult& result,
+ const std::string& key_system_uma_prefix,
const std::string& uma_name)
- : web_cdm_result_(result), uma_name_(uma_name) {
+ : web_cdm_result_(result),
+ key_system_uma_prefix_(key_system_uma_prefix),
+ uma_name_(uma_name),
+ creation_time_(base::TimeTicks::Now()) {
+ DCHECK(!key_system_uma_prefix_.empty());
+ DCHECK(!uma_name_.empty());
}
template <typename... T>
@@ -65,7 +80,12 @@ CdmResultPromise<T...>::~CdmResultPromise() {
template <>
inline void CdmResultPromise<>::resolve() {
MarkPromiseSettled();
- ReportCdmResultUMA(uma_name_, 0, SUCCESS);
+ ReportCdmResultUMA(key_system_uma_prefix_ + uma_name_, 0, SUCCESS);
+
+ // Only report time for promise resolution (not rejection).
+ base::UmaHistogramTimes(key_system_uma_prefix_ + kTimeUMAPrefix + uma_name_,
+ base::TimeTicks::Now() - creation_time_);
+
web_cdm_result_.Complete();
}
@@ -73,7 +93,12 @@ template <>
inline void CdmResultPromise<CdmKeyInformation::KeyStatus>::resolve(
const CdmKeyInformation::KeyStatus& key_status) {
MarkPromiseSettled();
- ReportCdmResultUMA(uma_name_, 0, SUCCESS);
+ ReportCdmResultUMA(key_system_uma_prefix_ + uma_name_, 0, SUCCESS);
+
+ // Only report time for promise resolution (not rejection).
+ base::UmaHistogramTimes(key_system_uma_prefix_ + kTimeUMAPrefix + uma_name_,
+ base::TimeTicks::Now() - creation_time_);
+
web_cdm_result_.CompleteWithKeyStatus(ConvertCdmKeyStatus(key_status));
}
@@ -82,7 +107,7 @@ void CdmResultPromise<T...>::reject(CdmPromise::Exception exception_code,
uint32_t system_code,
const std::string& error_message) {
MarkPromiseSettled();
- ReportCdmResultUMA(uma_name_, system_code,
+ ReportCdmResultUMA(key_system_uma_prefix_ + uma_name_, system_code,
ConvertCdmExceptionToResultForUMA(exception_code));
web_cdm_result_.CompleteWithError(ConvertCdmException(exception_code),
system_code,
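
With the key-system prefix threaded into the promise, the result UMA becomes "<prefix><uma_name>" for both resolve and reject, and resolve additionally records an elapsed-time UMA under "<prefix>TimeTo.<uma_name>". A short sketch of how the names compose, assuming a hypothetical prefix "Media.EME.Widevine." and uma_name "Update":

    // Result histogram, reported on resolve and reject:
    //   "Media.EME.Widevine." + "Update"  ->  Media.EME.Widevine.Update
    ReportCdmResultUMA(key_system_uma_prefix_ + uma_name_, 0, SUCCESS);
    // Timing histogram, reported only on resolve:
    //   "Media.EME.Widevine." + "TimeTo." + "Update"
    //                                     ->  Media.EME.Widevine.TimeTo.Update
    base::UmaHistogramTimes(key_system_uma_prefix_ + kTimeUMAPrefix + uma_name_,
                            base::TimeTicks::Now() - creation_time_);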
diff --git a/chromium/media/blink/multibuffer_data_source.cc b/chromium/media/blink/multibuffer_data_source.cc
index cc7dd78443d..e58acc9b3cf 100644
--- a/chromium/media/blink/multibuffer_data_source.cc
+++ b/chromium/media/blink/multibuffer_data_source.cc
@@ -308,11 +308,16 @@ void MultibufferDataSource::MediaPlaybackRateChanged(double playback_rate) {
void MultibufferDataSource::MediaIsPlaying() {
DCHECK(render_task_runner_->BelongsToCurrentThread());
+
+ // Always clear this since it can be set by OnBufferingHaveEnough() calls at
+ // any point in time.
+ cancel_on_defer_ = false;
+
if (media_has_played_)
return;
media_has_played_ = true;
- cancel_on_defer_ = false;
+
// Once we start playing, we need preloading.
preload_ = AUTO;
UpdateBufferSizes();
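
Clearing cancel_on_defer_ before the media_has_played_ early-return means every transition into playback resets the flag, not just the first one; a later OnBufferingHaveEnough(true) would otherwise leave the loader permanently marked for cancellation on defer. The resulting method, reconstructed from the hunk above for readability:

    void MultibufferDataSource::MediaIsPlaying() {
      DCHECK(render_task_runner_->BelongsToCurrentThread());
      // Always clear this since it can be set by OnBufferingHaveEnough()
      // calls at any point in time.
      cancel_on_defer_ = false;
      if (media_has_played_)
        return;
      media_has_played_ = true;
      // Once we start playing, we need preloading.
      preload_ = AUTO;
      UpdateBufferSizes();
    }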
diff --git a/chromium/media/blink/multibuffer_data_source_unittest.cc b/chromium/media/blink/multibuffer_data_source_unittest.cc
index 02be5cee8fc..eacee0cb133 100644
--- a/chromium/media/blink/multibuffer_data_source_unittest.cc
+++ b/chromium/media/blink/multibuffer_data_source_unittest.cc
@@ -1305,6 +1305,7 @@ TEST_F(MultibufferDataSourceTest,
// Marking the media as playing should prevent deferral. It also tells the
// data source to start buffering beyond the initial load.
+ EXPECT_FALSE(data_source_->cancel_on_defer_for_testing());
data_source_->MediaIsPlaying();
data_source_->OnBufferingHaveEnough(false);
CheckCapacityDefer();
@@ -1316,6 +1317,7 @@ TEST_F(MultibufferDataSourceTest,
ReceiveData(kDataSize);
ASSERT_TRUE(active_loader());
data_source_->OnBufferingHaveEnough(true);
+ EXPECT_TRUE(data_source_->cancel_on_defer_for_testing());
ASSERT_TRUE(active_loader());
ASSERT_FALSE(data_provider()->deferred());
@@ -1329,6 +1331,26 @@ TEST_F(MultibufferDataSourceTest,
EXPECT_GT(bytes_received, 0);
EXPECT_LT(bytes_received + kDataSize, kFileSize);
EXPECT_FALSE(active_loader_allownull());
+
+ // Verify playback resumes correctly too.
+ data_source_->MediaIsPlaying();
+ EXPECT_FALSE(data_source_->cancel_on_defer_for_testing());
+
+ // A read from a previously buffered range won't create a new loader yet.
+ EXPECT_CALL(*this, ReadCallback(kDataSize));
+ ReadAt(kDataSize);
+ EXPECT_FALSE(active_loader_allownull());
+
+ // Reads from an unbuffered range will though...
+ EXPECT_CALL(*this, ReadCallback(kDataSize));
+ ReadAt(kFarReadPosition);
+
+ // Receive enough data to exhaust current capacity which would destroy the
+ // loader upon deferral if the flag hasn't been cleared properly.
+ for (int i = 0; i <= (preload_high() / kDataSize) + 1; ++i) {
+ ReceiveData(kDataSize);
+ ASSERT_TRUE(active_loader());
+ }
}
TEST_F(MultibufferDataSourceTest, SeekPastEOF) {
diff --git a/chromium/media/blink/new_session_cdm_result_promise.cc b/chromium/media/blink/new_session_cdm_result_promise.cc
index a8984c415c8..5254fb9af71 100644
--- a/chromium/media/blink/new_session_cdm_result_promise.cc
+++ b/chromium/media/blink/new_session_cdm_result_promise.cc
@@ -99,7 +99,7 @@ void NewSessionCdmResultPromise::reject(CdmPromise::Exception exception_code,
<< ", error_message = " << error_message;
MarkPromiseSettled();
- ReportCdmResultUMA(uma_name_, system_code,
+ ReportCdmResultUMA(key_system_uma_prefix_ + uma_name_, system_code,
ConvertCdmExceptionToResultForUMA(exception_code));
web_cdm_result_.CompleteWithError(ConvertCdmException(exception_code),
system_code,
diff --git a/chromium/media/blink/run_all_unittests.cc b/chromium/media/blink/run_all_unittests.cc
index 1e61f7d4cbf..f2794dff914 100644
--- a/chromium/media/blink/run_all_unittests.cc
+++ b/chromium/media/blink/run_all_unittests.cc
@@ -5,7 +5,7 @@
#include "base/bind.h"
#include "base/macros.h"
#include "base/test/launcher/unit_test_launcher.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/test/test_suite.h"
#include "build/build_config.h"
#include "media/base/media.h"
@@ -34,7 +34,7 @@ constexpr gin::V8Initializer::V8SnapshotFileType kSnapshotType =
#endif // defined(V8_USE_EXTERNAL_STARTUP_DATA)
// We must use a custom blink::Platform that ensures the main thread scheduler
-// knows about the ScopedTaskEnvironment.
+// knows about the TaskEnvironment.
class BlinkPlatformWithTaskEnvironment : public blink::Platform {
public:
BlinkPlatformWithTaskEnvironment()
@@ -51,7 +51,7 @@ class BlinkPlatformWithTaskEnvironment : public blink::Platform {
}
private:
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
std::unique_ptr<blink::scheduler::WebThreadScheduler> main_thread_scheduler_;
DISALLOW_COPY_AND_ASSIGN(BlinkPlatformWithTaskEnvironment);
diff --git a/chromium/media/blink/video_decode_stats_reporter.cc b/chromium/media/blink/video_decode_stats_reporter.cc
index 0dc4d74d8d9..e4fc8e0ed59 100644
--- a/chromium/media/blink/video_decode_stats_reporter.cc
+++ b/chromium/media/blink/video_decode_stats_reporter.cc
@@ -11,7 +11,7 @@
#include "base/logging.h"
#include "base/macros.h"
#include "media/capabilities/bucket_utility.h"
-#include "media/mojo/interfaces/media_types.mojom.h"
+#include "media/mojo/mojom/media_types.mojom.h"
namespace media {
diff --git a/chromium/media/blink/video_decode_stats_reporter.h b/chromium/media/blink/video_decode_stats_reporter.h
index e07f6481437..463dd9d1b66 100644
--- a/chromium/media/blink/video_decode_stats_reporter.h
+++ b/chromium/media/blink/video_decode_stats_reporter.h
@@ -20,7 +20,7 @@
#include "media/base/pipeline_status.h"
#include "media/base/video_codecs.h"
#include "media/blink/media_blink_export.h"
-#include "media/mojo/interfaces/video_decode_stats_recorder.mojom.h"
+#include "media/mojo/mojom/video_decode_stats_recorder.mojom.h"
namespace media {
diff --git a/chromium/media/blink/video_decode_stats_reporter_unittest.cc b/chromium/media/blink/video_decode_stats_reporter_unittest.cc
index cac5e78b52c..e5262286a9d 100644
--- a/chromium/media/blink/video_decode_stats_reporter_unittest.cc
+++ b/chromium/media/blink/video_decode_stats_reporter_unittest.cc
@@ -19,8 +19,8 @@
#include "media/base/video_types.h"
#include "media/blink/video_decode_stats_reporter.h"
#include "media/capabilities/bucket_utility.h"
-#include "media/mojo/interfaces/media_types.mojom.h"
-#include "media/mojo/interfaces/video_decode_stats_recorder.mojom.h"
+#include "media/mojo/mojom/media_types.mojom.h"
+#include "media/mojo/mojom/video_decode_stats_recorder.mojom.h"
#include "mojo/public/cpp/bindings/strong_binding.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
diff --git a/chromium/media/blink/watch_time_component.h b/chromium/media/blink/watch_time_component.h
index 522e3cf638f..be25696490a 100644
--- a/chromium/media/blink/watch_time_component.h
+++ b/chromium/media/blink/watch_time_component.h
@@ -13,7 +13,7 @@
#include "media/base/timestamp_constants.h"
#include "media/base/watch_time_keys.h"
#include "media/blink/media_blink_export.h"
-#include "media/mojo/interfaces/watch_time_recorder.mojom.h"
+#include "media/mojo/mojom/watch_time_recorder.mojom.h"
namespace media {
diff --git a/chromium/media/blink/watch_time_reporter.h b/chromium/media/blink/watch_time_reporter.h
index eb143afe6b2..122e94eb645 100644
--- a/chromium/media/blink/watch_time_reporter.h
+++ b/chromium/media/blink/watch_time_reporter.h
@@ -18,8 +18,8 @@
#include "media/base/video_codecs.h"
#include "media/blink/media_blink_export.h"
#include "media/blink/watch_time_component.h"
-#include "media/mojo/interfaces/media_metrics_provider.mojom.h"
-#include "media/mojo/interfaces/watch_time_recorder.mojom.h"
+#include "media/mojo/mojom/media_metrics_provider.mojom.h"
+#include "media/mojo/mojom/watch_time_recorder.mojom.h"
#include "third_party/blink/public/platform/web_media_player.h"
#include "ui/gfx/geometry/size.h"
#include "url/origin.h"
diff --git a/chromium/media/blink/watch_time_reporter_unittest.cc b/chromium/media/blink/watch_time_reporter_unittest.cc
index 5a1564f3c5b..4ef3a921a0a 100644
--- a/chromium/media/blink/watch_time_reporter_unittest.cc
+++ b/chromium/media/blink/watch_time_reporter_unittest.cc
@@ -14,8 +14,8 @@
#include "media/base/mock_media_log.h"
#include "media/base/watch_time_keys.h"
#include "media/blink/watch_time_reporter.h"
-#include "media/mojo/interfaces/media_metrics_provider.mojom.h"
-#include "media/mojo/interfaces/watch_time_recorder.mojom.h"
+#include "media/mojo/mojom/media_metrics_provider.mojom.h"
+#include "media/mojo/mojom/watch_time_recorder.mojom.h"
#include "mojo/public/cpp/bindings/strong_binding.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -238,6 +238,10 @@ class WatchTimeReporterTest
mojom::VideoDecodeStatsRecorderRequest request) override {
FAIL();
}
+ void AcquireLearningTaskController(
+ const std::string& taskName,
+ media::learning::mojom::LearningTaskControllerRequest request)
+ override {}
void Initialize(bool is_mse, mojom::MediaURLScheme url_scheme) override {}
void OnError(PipelineStatus status) override {}
void SetIsAdMedia() override {}
diff --git a/chromium/media/blink/webcontentdecryptionmodule_impl.cc b/chromium/media/blink/webcontentdecryptionmodule_impl.cc
index db38b5d0cc2..3ab48c5ef38 100644
--- a/chromium/media/blink/webcontentdecryptionmodule_impl.cc
+++ b/chromium/media/blink/webcontentdecryptionmodule_impl.cc
@@ -27,6 +27,9 @@ namespace media {
namespace {
+const char kSetServerCertificateUMAName[] = "SetServerCertificate";
+const char kGetStatusForPolicyUMAName[] = "GetStatusForPolicy";
+
bool ConvertHdcpVersion(const blink::WebString& hdcp_version_string,
HdcpVersion* hdcp_version) {
if (!hdcp_version_string.ContainsOnlyASCII())
@@ -132,8 +135,9 @@ void WebContentDecryptionModuleImpl::SetServerCertificate(
adapter_->SetServerCertificate(
std::vector<uint8_t>(server_certificate,
server_certificate + server_certificate_length),
- std::unique_ptr<SimpleCdmPromise>(
- new CdmResultPromise<>(result, std::string())));
+ std::make_unique<CdmResultPromise<>>(result,
+ adapter_->GetKeySystemUMAPrefix(),
+ kSetServerCertificateUMAName));
}
void WebContentDecryptionModuleImpl::GetStatusForPolicy(
@@ -147,11 +151,11 @@ void WebContentDecryptionModuleImpl::GetStatusForPolicy(
return;
}
- // TODO(xhwang): Enable UMA reporting for GetStatusForPolicy().
adapter_->GetStatusForPolicy(
- min_hdcp_version, std::unique_ptr<KeyStatusCdmPromise>(
- new CdmResultPromise<CdmKeyInformation::KeyStatus>(
- result, std::string())));
+ min_hdcp_version,
+ std::make_unique<CdmResultPromise<CdmKeyInformation::KeyStatus>>(
+ result, adapter_->GetKeySystemUMAPrefix(),
+ kGetStatusForPolicyUMAName));
}
std::unique_ptr<CdmContextRef>
diff --git a/chromium/media/blink/webcontentdecryptionmodulesession_impl.cc b/chromium/media/blink/webcontentdecryptionmodulesession_impl.cc
index 07bc3e740d4..2244f9e23b1 100644
--- a/chromium/media/blink/webcontentdecryptionmodulesession_impl.cc
+++ b/chromium/media/blink/webcontentdecryptionmodulesession_impl.cc
@@ -411,8 +411,8 @@ void WebContentDecryptionModuleSessionImpl::Update(
adapter_->UpdateSession(
session_id_, sanitized_response,
- std::unique_ptr<SimpleCdmPromise>(new CdmResultPromise<>(
- result, adapter_->GetKeySystemUMAPrefix() + kUpdateSessionUMAName)));
+ std::make_unique<CdmResultPromise<>>(
+ result, adapter_->GetKeySystemUMAPrefix(), kUpdateSessionUMAName));
}
void WebContentDecryptionModuleSessionImpl::Close(
@@ -433,8 +433,8 @@ void WebContentDecryptionModuleSessionImpl::Close(
has_close_been_called_ = true;
adapter_->CloseSession(
session_id_,
- std::unique_ptr<SimpleCdmPromise>(new CdmResultPromise<>(
- result, adapter_->GetKeySystemUMAPrefix() + kCloseSessionUMAName)));
+ std::make_unique<CdmResultPromise<>>(
+ result, adapter_->GetKeySystemUMAPrefix(), kCloseSessionUMAName));
}
void WebContentDecryptionModuleSessionImpl::Remove(
@@ -444,8 +444,8 @@ void WebContentDecryptionModuleSessionImpl::Remove(
adapter_->RemoveSession(
session_id_,
- std::unique_ptr<SimpleCdmPromise>(new CdmResultPromise<>(
- result, adapter_->GetKeySystemUMAPrefix() + kRemoveSessionUMAName)));
+ std::make_unique<CdmResultPromise<>>(
+ result, adapter_->GetKeySystemUMAPrefix(), kRemoveSessionUMAName));
}
void WebContentDecryptionModuleSessionImpl::OnSessionMessage(
diff --git a/chromium/media/blink/webmediaplayer_impl.cc b/chromium/media/blink/webmediaplayer_impl.cc
index 0fe502b37df..8dbcbd45097 100644
--- a/chromium/media/blink/webmediaplayer_impl.cc
+++ b/chromium/media/blink/webmediaplayer_impl.cc
@@ -93,6 +93,13 @@ namespace media {
namespace {
+const char kWatchTimeHistogram[] = "Media.WebMediaPlayerImpl.WatchTime";
+
+void RecordSimpleWatchTimeUMA(RendererFactorySelector::FactoryType type) {
+ UMA_HISTOGRAM_ENUMERATION(kWatchTimeHistogram, type,
+ RendererFactorySelector::FACTORY_TYPE_MAX + 1);
+}
+
void SetSinkIdOnMediaThread(
scoped_refptr<blink::WebAudioSourceProviderImpl> sink,
const std::string& device_id,
@@ -243,9 +250,9 @@ void DestructionHelper(
// used because virtual memory overhead is not considered blocking I/O; and
// CONTINUE_ON_SHUTDOWN is used to allow process termination to not block on
// completing the task.
- base::PostTaskWithTraits(
+ base::PostTask(
FROM_HERE,
- {base::TaskPriority::BEST_EFFORT,
+ {base::ThreadPool(), base::TaskPriority::BEST_EFFORT,
base::TaskShutdownBehavior::CONTINUE_ON_SHUTDOWN},
base::BindOnce(
[](scoped_refptr<base::SingleThreadTaskRunner> main_task_runner,
@@ -314,7 +321,13 @@ WebMediaPlayerImpl::WebMediaPlayerImpl(
is_background_video_playback_enabled_(
params->IsBackgroundVideoPlaybackEnabled()),
is_background_video_track_optimization_supported_(
- params->IsBackgroundVideoTrackOptimizationSupported()) {
+ params->IsBackgroundVideoTrackOptimizationSupported()),
+ reported_renderer_type_(RendererFactorySelector::DEFAULT),
+ simple_watch_timer_(
+ base::BindRepeating(&WebMediaPlayerImpl::OnSimpleWatchTimerTick,
+ base::Unretained(this)),
+ base::BindRepeating(&WebMediaPlayerImpl::GetCurrentTimeInternal,
+ base::Unretained(this))) {
DVLOG(1) << __func__;
DCHECK(adjust_allocated_memory_cb_);
DCHECK(renderer_factory_selector_);
@@ -445,6 +458,7 @@ WebMediaPlayerImpl::~WebMediaPlayerImpl() {
if (!surface_layer_for_video_enabled_ && video_layer_)
video_layer_->StopUsingProvider();
+ simple_watch_timer_.Stop();
media_log_->AddEvent(
media_log_->CreateEvent(MediaLogEvent::WEBMEDIAPLAYER_DESTROYED));
@@ -601,9 +615,9 @@ void WebMediaPlayerImpl::ExitedFullscreen() {
MaybeSendOverlayInfoToDecoder();
}
-void WebMediaPlayerImpl::BecameDominantVisibleContent(bool isDominant) {
+void WebMediaPlayerImpl::BecameDominantVisibleContent(bool is_dominant) {
if (observer_)
- observer_->OnBecameDominantVisibleContent(isDominant);
+ observer_->OnBecameDominantVisibleContent(is_dominant);
}
void WebMediaPlayerImpl::SetIsEffectivelyFullscreen(
@@ -771,6 +785,7 @@ void WebMediaPlayerImpl::Play() {
if (video_decode_stats_reporter_)
video_decode_stats_reporter_->OnPlaying();
+ simple_watch_timer_.Start();
media_metrics_provider_->SetHasPlayed();
media_log_->AddEvent(media_log_->CreateEvent(MediaLogEvent::PLAY));
@@ -809,6 +824,7 @@ void WebMediaPlayerImpl::Pause() {
if (video_decode_stats_reporter_)
video_decode_stats_reporter_->OnPaused();
+ simple_watch_timer_.Stop();
media_log_->AddEvent(media_log_->CreateEvent(MediaLogEvent::PAUSE));
// Paused changed so we should update media position state.
@@ -1763,6 +1779,7 @@ void WebMediaPlayerImpl::OnError(PipelineStatus status) {
MaybeSetContainerName();
ReportPipelineError(load_type_, status, media_log_.get());
+ simple_watch_timer_.Stop();
media_log_->AddEvent(media_log_->CreatePipelineErrorEvent(status));
media_metrics_provider_->OnError(status);
if (watch_time_reporter_)
@@ -2613,6 +2630,7 @@ std::unique_ptr<Renderer> WebMediaPlayerImpl::CreateRenderer() {
request_overlay_info_cb = BindToCurrentLoop(
base::Bind(&WebMediaPlayerImpl::OnOverlayInfoRequested, weak_this_));
#endif
+ reported_renderer_type_ = renderer_factory_selector_->GetCurrentFactoryType();
return renderer_factory_selector_->GetCurrentFactory()->CreateRenderer(
media_task_runner_, worker_task_runner_, audio_source_provider_.get(),
compositor_.get(), request_overlay_info_cb, client_->TargetColorSpace());
@@ -2646,6 +2664,7 @@ void WebMediaPlayerImpl::StartPipeline() {
demuxer_.reset(new MediaUrlDemuxer(
media_task_runner_, loaded_url_, frame_->GetDocument().SiteForCookies(),
+ frame_->GetDocument().TopFrameOrigin(),
allow_media_player_renderer_credentials_, demuxer_found_hls_));
pipeline_controller_->Start(Pipeline::StartType::kNormal, demuxer_.get(),
this, false, false);
@@ -3594,4 +3613,8 @@ void WebMediaPlayerImpl::MaybeUpdateBufferSizesForPlayback() {
UpdateMediaPositionState();
}
+void WebMediaPlayerImpl::OnSimpleWatchTimerTick() {
+ RecordSimpleWatchTimeUMA(reported_renderer_type_);
+}
+
} // namespace media
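
WebMediaPlayerImpl now keeps a SimpleWatchTimer that runs between Play() and Pause()/OnError()/destruction and, on each tick, buckets the current renderer factory type into Media.WebMediaPlayerImpl.WatchTime. A minimal sketch of the same wiring in a toy player, assuming SimpleWatchTimer takes a tick callback and a current-time callback as in the constructor above (class and member names illustrative):

    #include "base/metrics/histogram_macros.h"
    #include "media/base/simple_watch_timer.h"

    class TinyPlayer {
     public:
      TinyPlayer()
          : watch_timer_(base::BindRepeating(&TinyPlayer::OnTick,
                                             base::Unretained(this)),
                         base::BindRepeating(&TinyPlayer::CurrentTime,
                                             base::Unretained(this))) {}
      void Play() { watch_timer_.Start(); }
      void Pause() { watch_timer_.Stop(); }

     private:
      void OnTick() {
        // Mirrors RecordSimpleWatchTimeUMA() above.
        UMA_HISTOGRAM_ENUMERATION(
            "Media.WebMediaPlayerImpl.WatchTime", renderer_type_,
            media::RendererFactorySelector::FACTORY_TYPE_MAX + 1);
      }
      base::TimeDelta CurrentTime() const { return base::TimeDelta(); }

      media::RendererFactorySelector::FactoryType renderer_type_ =
          media::RendererFactorySelector::DEFAULT;
      media::SimpleWatchTimer watch_timer_;
    };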
diff --git a/chromium/media/blink/webmediaplayer_impl.h b/chromium/media/blink/webmediaplayer_impl.h
index ec2b9694dd3..830dbfb2959 100644
--- a/chromium/media/blink/webmediaplayer_impl.h
+++ b/chromium/media/blink/webmediaplayer_impl.h
@@ -32,6 +32,7 @@
#include "media/base/overlay_info.h"
#include "media/base/pipeline_impl.h"
#include "media/base/renderer_factory_selector.h"
+#include "media/base/simple_watch_timer.h"
#include "media/base/text_track.h"
#include "media/blink/buffered_data_source_host_impl.h"
#include "media/blink/media_blink_export.h"
@@ -219,11 +220,11 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
bool SupportsOverlayFullscreenVideo() override;
void EnteredFullscreen() override;
void ExitedFullscreen() override;
- void BecameDominantVisibleContent(bool isDominant) override;
+ void BecameDominantVisibleContent(bool is_dominant) override;
void SetIsEffectivelyFullscreen(
blink::WebFullscreenVideoStatus fullscreen_video_status) override;
void OnHasNativeControlsChanged(bool) override;
- void OnDisplayTypeChanged(WebMediaPlayer::DisplayType) override;
+ void OnDisplayTypeChanged(WebMediaPlayer::DisplayType display_type) override;
// blink::WebMediaPlayerDelegate::Observer implementation.
void OnFrameHidden() override;
@@ -351,6 +352,9 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
void OnAudioDecoderChange(const PipelineDecoderInfo& info) override;
void OnVideoDecoderChange(const PipelineDecoderInfo& info) override;
+ // Simplified watch time reporting.
+ void OnSimpleWatchTimerTick();
+
// Actually seek. Avoids causing |should_notify_time_changed_| to be set when
// |time_updated| is false.
void DoSeek(base::TimeDelta time, bool time_updated);
@@ -1006,6 +1010,10 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
base::CancelableOnceClosure have_enough_after_lazy_load_cb_;
+ // State for simplified watch time reporting.
+ RendererFactorySelector::FactoryType reported_renderer_type_;
+ SimpleWatchTimer simple_watch_timer_;
+
base::WeakPtr<WebMediaPlayerImpl> weak_this_;
base::WeakPtrFactory<WebMediaPlayerImpl> weak_factory_{this};
diff --git a/chromium/media/blink/webmediaplayer_impl_unittest.cc b/chromium/media/blink/webmediaplayer_impl_unittest.cc
index eaa6daee8e8..a0f1494a304 100644
--- a/chromium/media/blink/webmediaplayer_impl_unittest.cc
+++ b/chromium/media/blink/webmediaplayer_impl_unittest.cc
@@ -43,6 +43,7 @@
#include "media/mojo/services/watch_time_recorder.h"
#include "media/renderers/default_decoder_factory.h"
#include "media/renderers/default_renderer_factory.h"
+#include "mojo/public/cpp/bindings/pending_remote.h"
#include "mojo/public/cpp/bindings/strong_binding.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -99,8 +100,9 @@ MATCHER_P2(PlaybackRateChanged, old_rate_string, new_rate_string, "") {
// returns a valid handle that can be passed to WebLocalFrame constructor
mojo::ScopedMessagePipeHandle CreateStubDocumentInterfaceBrokerHandle() {
- blink::mojom::DocumentInterfaceBrokerPtrInfo info;
- return mojo::MakeRequest(&info).PassMessagePipe();
+ return mojo::PendingRemote<blink::mojom::DocumentInterfaceBroker>()
+ .InitWithNewPipeAndPassReceiver()
+ .PassPipe();
}
class MockWebMediaPlayerClient : public blink::WebMediaPlayerClient {
@@ -349,7 +351,9 @@ class WebMediaPlayerImplTest : public testing::Test {
MediaMetricsProvider::FrameStatus::kNotTopFrame,
base::BindRepeating([]() { return ukm::kInvalidSourceId; }),
base::BindRepeating([]() { return learning::FeatureValue(0); }),
- VideoDecodePerfHistory::SaveCallback(), mojo::MakeRequest(&provider));
+ VideoDecodePerfHistory::SaveCallback(),
+ MediaMetricsProvider::GetLearningSessionCallback(),
+ mojo::MakeRequest(&provider));
// Initialize provider since none of the tests below actually go through the
// full loading/pipeline initialize phase. If this ever changes the provider
@@ -400,7 +404,7 @@ class WebMediaPlayerImplTest : public testing::Test {
CycleThreads();
- web_view_->MainFrameWidget()->Close();
+ web_view_->Close();
}
protected:
@@ -659,7 +663,7 @@ class WebMediaPlayerImplTest : public testing::Test {
// This runs until we reach the |ready_state_|. Attempting to wait for ready
// states < kReadyStateHaveCurrentData in non-startup-suspend test cases is
// unreliable due to asynchronous execution of tasks on the
- // base::test:ScopedTaskEnvironment.
+ // base::test::TaskEnvironment.
void LoadAndWaitForReadyState(std::string data_file,
blink::WebMediaPlayer::ReadyState ready_state) {
Load(data_file);
@@ -839,7 +843,7 @@ TEST_F(WebMediaPlayerImplTest, LoadAndDestroyDataUrl) {
// This runs until we reach the have current data state. Attempting to wait
// for states < kReadyStateHaveCurrentData is unreliable due to asynchronous
- // execution of tasks on the base::test:ScopedTaskEnvironment.
+ // execution of tasks on the base::test::TaskEnvironment.
while (wmpi_->GetReadyState() <
blink::WebMediaPlayer::kReadyStateHaveCurrentData) {
base::RunLoop loop;
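
The stub handle helper moves from the deprecated InterfacePtrInfo/MakeRequest pairing to the new-style mojo::PendingRemote: default-construct a PendingRemote, call InitWithNewPipeAndPassReceiver(), and pass the receiver end's pipe while the remote end is simply dropped. The same idiom for an arbitrary interface (interface name hypothetical):

    #include "mojo/public/cpp/bindings/pending_remote.h"

    // Returns the receiver end of a fresh pipe; the remote end is discarded,
    // which is enough for constructors that only require a valid handle.
    mojo::ScopedMessagePipeHandle CreateStubHandle() {
      return mojo::PendingRemote<example::mojom::SomeInterface>()  // hypothetical
          .InitWithNewPipeAndPassReceiver()
          .PassPipe();
    }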
diff --git a/chromium/media/blink/webmediaplayer_params.h b/chromium/media/blink/webmediaplayer_params.h
index dda284c8927..c66321884bc 100644
--- a/chromium/media/blink/webmediaplayer_params.h
+++ b/chromium/media/blink/webmediaplayer_params.h
@@ -20,7 +20,7 @@
#include "media/base/media_switches.h"
#include "media/base/routing_token_callback.h"
#include "media/blink/media_blink_export.h"
-#include "media/mojo/interfaces/media_metrics_provider.mojom.h"
+#include "media/mojo/mojom/media_metrics_provider.mojom.h"
#include "third_party/blink/public/platform/web_media_player.h"
#include "third_party/blink/public/platform/web_video_frame_submitter.h"
diff --git a/chromium/media/capabilities/in_memory_video_decode_stats_db_unittest.cc b/chromium/media/capabilities/in_memory_video_decode_stats_db_unittest.cc
index 33be8b426f9..d64077c95fe 100644
--- a/chromium/media/capabilities/in_memory_video_decode_stats_db_unittest.cc
+++ b/chromium/media/capabilities/in_memory_video_decode_stats_db_unittest.cc
@@ -9,7 +9,7 @@
#include "base/logging.h"
#include "base/memory/ptr_util.h"
#include "base/test/gtest_util.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/capabilities/in_memory_video_decode_stats_db_impl.h"
#include "media/capabilities/video_decode_stats_db_impl.h"
#include "media/capabilities/video_decode_stats_db_provider.h"
@@ -86,7 +86,7 @@ class InMemoryDBTestBase : public testing::Test {
in_memory_db_->Initialize(base::BindOnce(&InMemoryDBTestBase::InitializeCB,
base::Unretained(this)));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
MOCK_METHOD1(InitializeCB, void(bool success));
@@ -101,7 +101,7 @@ class InMemoryDBTestBase : public testing::Test {
using VideoDescKey = media::VideoDecodeStatsDB::VideoDescKey;
using DecodeStatsEntry = media::VideoDecodeStatsDB::DecodeStatsEntry;
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
std::unique_ptr<MockSeedDB> seed_db_;
std::unique_ptr<MockDBProvider> db_provider_;
std::unique_ptr<InMemoryVideoDecodeStatsDBImpl> in_memory_db_;
@@ -122,7 +122,7 @@ TEST_F(SeedlessInMemoryDBTest, ReadExpectingEmpty) {
kTestKey(), base::BindOnce(&InMemoryDBTestBase::GetDecodeStatsCB,
base::Unretained(this)));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
TEST_F(SeededInMemoryDBTest, ReadExpectingEmpty) {
@@ -141,7 +141,7 @@ TEST_F(SeededInMemoryDBTest, ReadExpectingEmpty) {
kTestKey(), base::BindOnce(&InMemoryDBTestBase::GetDecodeStatsCB,
base::Unretained(this)));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
TEST_F(SeededInMemoryDBTest, ReadExpectingSeedData) {
@@ -163,7 +163,7 @@ TEST_F(SeededInMemoryDBTest, ReadExpectingSeedData) {
kTestKey(), base::BindOnce(&InMemoryDBTestBase::GetDecodeStatsCB,
base::Unretained(this)));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
::testing::Mock::VerifyAndClear(this);
// Verify a second GetDecodeStats() call with the same key does not trigger a
@@ -174,7 +174,7 @@ TEST_F(SeededInMemoryDBTest, ReadExpectingSeedData) {
kTestKey(), base::BindOnce(&InMemoryDBTestBase::GetDecodeStatsCB,
base::Unretained(this)));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
TEST_F(SeededInMemoryDBTest, AppendReadAndClear) {
@@ -202,7 +202,7 @@ TEST_F(SeededInMemoryDBTest, AppendReadAndClear) {
base::BindOnce(&InMemoryDBTestBase::AppendDecodeStatsCB,
base::Unretained(this)));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
::testing::Mock::VerifyAndClear(this);
// Seed DB should not be queried again for this key.
@@ -214,7 +214,7 @@ TEST_F(SeededInMemoryDBTest, AppendReadAndClear) {
kTestKey(), base::BindOnce(&InMemoryDBTestBase::GetDecodeStatsCB,
base::Unretained(this)));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
::testing::Mock::VerifyAndClear(this);
// Append the same seed entry again to triple the stats. Additional appends
@@ -236,7 +236,7 @@ TEST_F(SeededInMemoryDBTest, AppendReadAndClear) {
in_memory_db_->ClearStats(base::BindOnce(&InMemoryDBTestBase::ClearStatsCB,
base::Unretained(this)));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
::testing::Mock::VerifyAndClear(this);
// With in-memory stats now gone, GetDecodeStats(kTestKey()) should again
@@ -247,7 +247,7 @@ TEST_F(SeededInMemoryDBTest, AppendReadAndClear) {
kTestKey(), base::BindOnce(&InMemoryDBTestBase::GetDecodeStatsCB,
base::Unretained(this)));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
TEST_F(SeedlessInMemoryDBTest, AppendReadAndClear) {
@@ -269,7 +269,7 @@ TEST_F(SeedlessInMemoryDBTest, AppendReadAndClear) {
kTestKey(), base::BindOnce(&InMemoryDBTestBase::GetDecodeStatsCB,
base::Unretained(this)));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
::testing::Mock::VerifyAndClear(this);
// Append same stats again to test summation.
@@ -285,7 +285,7 @@ TEST_F(SeedlessInMemoryDBTest, AppendReadAndClear) {
kTestKey(), base::BindOnce(&InMemoryDBTestBase::GetDecodeStatsCB,
base::Unretained(this)));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
::testing::Mock::VerifyAndClear(this);
// Now destroy the in-memory stats...
@@ -293,7 +293,7 @@ TEST_F(SeedlessInMemoryDBTest, AppendReadAndClear) {
in_memory_db_->ClearStats(base::BindOnce(&InMemoryDBTestBase::ClearStatsCB,
base::Unretained(this)));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
::testing::Mock::VerifyAndClear(this);
// Verify DB now empty for this key.
@@ -302,7 +302,7 @@ TEST_F(SeedlessInMemoryDBTest, AppendReadAndClear) {
kTestKey(), base::BindOnce(&InMemoryDBTestBase::GetDecodeStatsCB,
base::Unretained(this)));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
TEST_F(SeededInMemoryDBTest, ProvidedNullSeedDB) {
@@ -316,7 +316,7 @@ TEST_F(SeededInMemoryDBTest, ProvidedNullSeedDB) {
in_memory_db_->Initialize(base::BindOnce(&InMemoryDBTestBase::InitializeCB,
base::Unretained(this)));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
::testing::Mock::VerifyAndClear(this);
// Writes still succeed.
@@ -333,7 +333,7 @@ TEST_F(SeededInMemoryDBTest, ProvidedNullSeedDB) {
kTestKey(), base::BindOnce(&InMemoryDBTestBase::GetDecodeStatsCB,
base::Unretained(this)));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
TEST_F(SeededInMemoryDBTest, SeedReadFailureOnGettingStats) {
@@ -354,7 +354,7 @@ TEST_F(SeededInMemoryDBTest, SeedReadFailureOnGettingStats) {
kTestKey(), base::BindOnce(&InMemoryDBTestBase::GetDecodeStatsCB,
base::Unretained(this)));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
TEST_F(SeededInMemoryDBTest, SeedReadFailureOnAppendingingStats) {
@@ -377,7 +377,7 @@ TEST_F(SeededInMemoryDBTest, SeedReadFailureOnAppendingingStats) {
base::BindOnce(&InMemoryDBTestBase::AppendDecodeStatsCB,
base::Unretained(this)));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
::testing::Mock::VerifyAndClear(this);
// Reading the appended data works without issue and does not trigger new
@@ -388,7 +388,7 @@ TEST_F(SeededInMemoryDBTest, SeedReadFailureOnAppendingingStats) {
kTestKey(), base::BindOnce(&InMemoryDBTestBase::GetDecodeStatsCB,
base::Unretained(this)));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
TEST_F(SeededInMemoryDBTest, SeedDBTearDownRace) {
diff --git a/chromium/media/capabilities/learning_helper.cc b/chromium/media/capabilities/learning_helper.cc
index eee53f0532d..8cd115350bf 100644
--- a/chromium/media/capabilities/learning_helper.cc
+++ b/chromium/media/capabilities/learning_helper.cc
@@ -50,9 +50,9 @@ LearningHelper::LearningHelper(FeatureProviderFactoryCB feature_factory) {
// it's likely that the session will live on the main thread, and handle
// delegation of LearningTaskControllers to other threads. However, for now,
// do it here.
- learning_session_ = std::make_unique<LearningSessionImpl>(
- base::CreateSequencedTaskRunnerWithTraits(
- {base::TaskPriority::BEST_EFFORT,
+ learning_session_ =
+ std::make_unique<LearningSessionImpl>(base::CreateSequencedTaskRunner(
+ {base::ThreadPool(), base::TaskPriority::BEST_EFFORT,
base::TaskShutdownBehavior::SKIP_ON_SHUTDOWN}));
// Register a few learning tasks.
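
The task runner is now obtained from base::CreateSequencedTaskRunner() with an explicit base::ThreadPool() trait rather than the retired CreateSequencedTaskRunnerWithTraits() helper. A minimal sketch of the replacement call, assuming the base/task/post_task.h API used in this hunk:

    #include "base/task/post_task.h"

    // Best-effort background sequence whose queued work may be skipped at
    // shutdown, matching the traits used for the learning session above.
    scoped_refptr<base::SequencedTaskRunner> runner =
        base::CreateSequencedTaskRunner(
            {base::ThreadPool(), base::TaskPriority::BEST_EFFORT,
             base::TaskShutdownBehavior::SKIP_ON_SHUTDOWN});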
diff --git a/chromium/media/capabilities/video_decode_stats_db_impl.cc b/chromium/media/capabilities/video_decode_stats_db_impl.cc
index cf08f748c23..81a1b218c37 100644
--- a/chromium/media/capabilities/video_decode_stats_db_impl.cc
+++ b/chromium/media/capabilities/video_decode_stats_db_impl.cc
@@ -8,6 +8,7 @@
#include <tuple>
#include "base/bind.h"
+#include "base/debug/alias.h"
#include "base/files/file_path.h"
#include "base/logging.h"
#include "base/memory/ptr_util.h"
@@ -26,10 +27,6 @@ using ProtoDecodeStatsEntry = leveldb_proto::ProtoDatabase<DecodeStatsProto>;
namespace {
-// Avoid changing client name. Used in UMA.
-// See comments in components/leveldb_proto/leveldb_database.h
-const char kDatabaseClientName[] = "VideoDecodeStatsDB";
-
const int kMaxFramesPerBufferDefault = 2500;
const int kMaxDaysToKeepStatsDefault = 30;
@@ -70,33 +67,30 @@ bool VideoDecodeStatsDBImpl::GetEnableUnweightedEntries() {
// static
std::unique_ptr<VideoDecodeStatsDBImpl> VideoDecodeStatsDBImpl::Create(
- base::FilePath db_dir) {
+ base::FilePath db_dir,
+ leveldb_proto::ProtoDatabaseProvider* db_provider) {
DVLOG(2) << __func__ << " db_dir:" << db_dir;
- auto proto_db =
- leveldb_proto::ProtoDatabaseProvider::CreateUniqueDB<DecodeStatsProto>(
- base::CreateSequencedTaskRunnerWithTraits(
- {base::MayBlock(), base::TaskPriority::BEST_EFFORT,
- base::TaskShutdownBehavior::CONTINUE_ON_SHUTDOWN}));
+ auto proto_db = db_provider->GetDB<DecodeStatsProto>(
+ leveldb_proto::ProtoDbType::VIDEO_DECODE_STATS_DB, db_dir,
+ base::CreateSequencedTaskRunner(
+ {base::ThreadPool(), base::MayBlock(),
+ base::TaskPriority::BEST_EFFORT,
+ base::TaskShutdownBehavior::CONTINUE_ON_SHUTDOWN}));
- return base::WrapUnique(
- new VideoDecodeStatsDBImpl(std::move(proto_db), db_dir));
+ return base::WrapUnique(new VideoDecodeStatsDBImpl(std::move(proto_db)));
}
constexpr char VideoDecodeStatsDBImpl::kDefaultWriteTime[];
VideoDecodeStatsDBImpl::VideoDecodeStatsDBImpl(
- std::unique_ptr<leveldb_proto::ProtoDatabase<DecodeStatsProto>> db,
- const base::FilePath& db_dir)
- : db_(std::move(db)),
- db_dir_(db_dir),
- wall_clock_(base::DefaultClock::GetInstance()) {
+ std::unique_ptr<leveldb_proto::ProtoDatabase<DecodeStatsProto>> db)
+ : db_(std::move(db)), wall_clock_(base::DefaultClock::GetInstance()) {
bool time_parsed =
base::Time::FromString(kDefaultWriteTime, &default_write_time_);
DCHECK(time_parsed);
DCHECK(db_);
- DCHECK(!db_dir_.empty());
}
VideoDecodeStatsDBImpl::~VideoDecodeStatsDBImpl() {
@@ -112,13 +106,15 @@ void VideoDecodeStatsDBImpl::Initialize(InitializeCB init_cb) {
// case our whole DB will be less than 35K, so we aren't worried about
// spamming the cache.
// TODO(chcunningham): Keep an eye on the size as the table evolves.
- db_->Init(kDatabaseClientName, db_dir_, leveldb_proto::CreateSimpleOptions(),
- base::BindOnce(&VideoDecodeStatsDBImpl::OnInit,
+ db_->Init(base::BindOnce(&VideoDecodeStatsDBImpl::OnInit,
weak_ptr_factory_.GetWeakPtr(), std::move(init_cb)));
}
-void VideoDecodeStatsDBImpl::OnInit(InitializeCB init_cb, bool success) {
+void VideoDecodeStatsDBImpl::OnInit(InitializeCB init_cb,
+ leveldb_proto::Enums::InitStatus status) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK_NE(status, leveldb_proto::Enums::InitStatus::kInvalidOperation);
+ bool success = status == leveldb_proto::Enums::InitStatus::kOK;
DVLOG(2) << __func__ << (success ? " succeeded" : " FAILED!");
UMA_HISTOGRAM_BOOLEAN("Media.VideoDecodeStatsDB.OpSuccess.Initialize",
success);
@@ -239,9 +235,20 @@ void VideoDecodeStatsDBImpl::WriteUpdatedEntry(
stats_proto.reset(new DecodeStatsProto());
}
+ // Debug alias the various counts so we can get them in dumps to catch
+ // lingering crashes in http://crbug.com/982009
uint64_t old_frames_decoded = stats_proto->frames_decoded();
uint64_t old_frames_dropped = stats_proto->frames_dropped();
uint64_t old_frames_power_efficient = stats_proto->frames_power_efficient();
+ uint64_t new_frames_decoded = new_entry.frames_decoded;
+ uint64_t new_frames_dropped = new_entry.frames_dropped;
+ uint64_t new_frames_power_efficient = new_entry.frames_power_efficient;
+ base::debug::Alias(&old_frames_decoded);
+ base::debug::Alias(&old_frames_dropped);
+ base::debug::Alias(&old_frames_power_efficient);
+ base::debug::Alias(&new_frames_decoded);
+ base::debug::Alias(&new_frames_dropped);
+ base::debug::Alias(&new_frames_power_efficient);
const uint64_t kMaxFramesPerBuffer = GetMaxFramesPerBuffer();
DCHECK_GT(kMaxFramesPerBuffer, 0UL);
@@ -254,6 +261,10 @@ void VideoDecodeStatsDBImpl::WriteUpdatedEntry(
new_entry_efficient_ratio =
static_cast<double>(new_entry.frames_power_efficient) /
new_entry.frames_decoded;
+ } else {
+ // Callers shouldn't ask DB to save empty records. See
+ // VideoDecodeStatsRecorder.
+ NOTREACHED() << __func__ << " saving empty stats record";
}
if (old_frames_decoded + new_entry.frames_decoded > kMaxFramesPerBuffer) {
@@ -264,16 +275,28 @@ void VideoDecodeStatsDBImpl::WriteUpdatedEntry(
static_cast<double>(new_entry.frames_decoded) / kMaxFramesPerBuffer,
1.0);
- double old_dropped_ratio =
- static_cast<double>(old_frames_dropped) / old_frames_decoded;
- double old_efficient_ratio =
- static_cast<double>(old_frames_power_efficient) / old_frames_decoded;
+ double old_dropped_ratio = 0;
+ double old_efficient_ratio = 0;
+ if (old_frames_decoded) {
+ old_dropped_ratio =
+ static_cast<double>(old_frames_dropped) / old_frames_decoded;
+ old_efficient_ratio =
+ static_cast<double>(old_frames_power_efficient) / old_frames_decoded;
+ }
double agg_dropped_ratio = fill_ratio * new_entry_dropped_ratio +
(1 - fill_ratio) * old_dropped_ratio;
double agg_efficient_ratio = fill_ratio * new_entry_efficient_ratio +
(1 - fill_ratio) * old_efficient_ratio;
+ // Debug alias the various counts so we can get them in dumps to catch
+ // lingering crashes in http://crbug.com/982009
+ base::debug::Alias(&fill_ratio);
+ base::debug::Alias(&old_dropped_ratio);
+ base::debug::Alias(&old_efficient_ratio);
+ base::debug::Alias(&agg_dropped_ratio);
+ base::debug::Alias(&agg_efficient_ratio);
+
stats_proto->set_frames_decoded(kMaxFramesPerBuffer);
stats_proto->set_frames_dropped(
std::round(agg_dropped_ratio * kMaxFramesPerBuffer));
@@ -322,6 +345,11 @@ void VideoDecodeStatsDBImpl::WriteUpdatedEntry(
// Update the time stamp for the current write.
stats_proto->set_last_write_date(wall_clock_->Now().ToJsTime());
+ // Make sure we never write bogus stats into the DB! While it's possible the DB
+ // may experience some corruption (disk), we should have detected that above
+ // and discarded any bad data prior to this upcoming save.
+ DCHECK(AreStatsUsable(stats_proto.get()));
+
// Push the update to the DB.
using DBType = leveldb_proto::ProtoDatabase<DecodeStatsProto>;
std::unique_ptr<DBType::KeyEntryVector> entries =
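
The overfill path blends the old and new dropped/power-efficient ratios by the fraction of kMaxFramesPerBuffer that the new entry supplies, and now guards old_frames_decoded == 0 so overfilling an empty record no longer divides by zero (the case exercised by the OverfillEmptyBuffer test below). A worked sketch of the blend with hypothetical numbers (buffer max 2500, existing 2000 frames with 200 dropped, new 1000 frames with 300 dropped):

    const double fill_ratio = 1000.0 / 2500.0;         // min(new/kMax, 1.0) = 0.4
    const double old_dropped_ratio = 200.0 / 2000.0;   // 0.10 (0 if no old frames)
    const double new_dropped_ratio = 300.0 / 1000.0;   // 0.30
    const double agg_dropped_ratio =
        fill_ratio * new_dropped_ratio + (1 - fill_ratio) * old_dropped_ratio;
    // 0.4 * 0.30 + 0.6 * 0.10 = 0.18; stored as frames_decoded = 2500 and
    // frames_dropped = round(0.18 * 2500) = 450.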
diff --git a/chromium/media/capabilities/video_decode_stats_db_impl.h b/chromium/media/capabilities/video_decode_stats_db_impl.h
index 129493bd4ed..f2c1cae047c 100644
--- a/chromium/media/capabilities/video_decode_stats_db_impl.h
+++ b/chromium/media/capabilities/video_decode_stats_db_impl.h
@@ -20,6 +20,10 @@ class FilePath;
class Clock;
} // namespace base
+namespace leveldb_proto {
+class ProtoDatabaseProvider;
+} // namespace leveldb_proto
+
namespace media {
class DecodeStatsProto;
@@ -36,7 +40,9 @@ class MEDIA_EXPORT VideoDecodeStatsDBImpl : public VideoDecodeStatsDB {
// Create an instance! |db_dir| specifies where to store LevelDB files to
// disk. LevelDB generates a handful of files, so it's recommended to provide a
// dedicated directory to keep them isolated.
- static std::unique_ptr<VideoDecodeStatsDBImpl> Create(base::FilePath db_dir);
+ static std::unique_ptr<VideoDecodeStatsDBImpl> Create(
+ base::FilePath db_dir,
+ leveldb_proto::ProtoDatabaseProvider* db_provider);
~VideoDecodeStatsDBImpl() override;
@@ -55,8 +61,7 @@ class MEDIA_EXPORT VideoDecodeStatsDBImpl : public VideoDecodeStatsDB {
// Private constructor only called by tests (friends). Production code
// should always use the static Create() method.
VideoDecodeStatsDBImpl(
- std::unique_ptr<leveldb_proto::ProtoDatabase<DecodeStatsProto>> db,
- const base::FilePath& dir);
+ std::unique_ptr<leveldb_proto::ProtoDatabase<DecodeStatsProto>> db);
// Default |last_write_time| for DB entries that lack a time stamp due to
// using an earlier version of DecodeStatsProto. Date chosen so old stats from
@@ -79,7 +84,7 @@ class MEDIA_EXPORT VideoDecodeStatsDBImpl : public VideoDecodeStatsDB {
// Called when the database has been initialized. Will immediately call
// |init_cb| to forward |success|.
- void OnInit(InitializeCB init_cb, bool success);
+ void OnInit(InitializeCB init_cb, leveldb_proto::Enums::InitStatus status);
// Returns true if the DB is successfully initialized.
bool IsInitialized();
@@ -134,9 +139,6 @@ class MEDIA_EXPORT VideoDecodeStatsDBImpl : public VideoDecodeStatsDB {
// encountered.
std::unique_ptr<leveldb_proto::ProtoDatabase<DecodeStatsProto>> db_;
- // Directory where levelDB should store database files.
- base::FilePath db_dir_;
-
// For getting wall-clock time. Tests may override via SetClockForTest().
const base::Clock* wall_clock_ = nullptr;
diff --git a/chromium/media/capabilities/video_decode_stats_db_impl_unittest.cc b/chromium/media/capabilities/video_decode_stats_db_impl_unittest.cc
index d01c5888c9e..e3a8c742a0a 100644
--- a/chromium/media/capabilities/video_decode_stats_db_impl_unittest.cc
+++ b/chromium/media/capabilities/video_decode_stats_db_impl_unittest.cc
@@ -14,8 +14,8 @@
#include "base/run_loop.h"
#include "base/strings/string_number_conversions.h"
#include "base/test/scoped_feature_list.h"
-#include "base/test/scoped_task_environment.h"
#include "base/test/simple_test_clock.h"
+#include "base/test/task_environment.h"
#include "components/leveldb_proto/testing/fake_db.h"
#include "media/base/media_switches.h"
#include "media/base/test_data_util.h"
@@ -62,8 +62,7 @@ class VideoDecodeStatsDBImplTest : public ::testing::Test {
// Wrap the fake proto DB with our interface.
stats_db_ = base::WrapUnique(new VideoDecodeStatsDBImpl(
- std::unique_ptr<FakeDB<DecodeStatsProto>>(fake_db_),
- base::FilePath(FILE_PATH_LITERAL("/fake/path"))));
+ std::unique_ptr<FakeDB<DecodeStatsProto>>(fake_db_)));
}
int GetMaxFramesPerBuffer() {
@@ -86,7 +85,7 @@ class VideoDecodeStatsDBImplTest : public ::testing::Test {
stats_db_->Initialize(base::BindOnce(
&VideoDecodeStatsDBImplTest::OnInitialize, base::Unretained(this)));
EXPECT_CALL(*this, OnInitialize(true));
- fake_db_->InitCallback(true);
+ fake_db_->InitStatusCallback(leveldb_proto::Enums::InitStatus::kOK);
testing::Mock::VerifyAndClearExpectations(this);
}
@@ -158,7 +157,7 @@ class VideoDecodeStatsDBImplTest : public ::testing::Test {
MOCK_METHOD0(MockClearStatsCb, void());
protected:
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
const VideoDescKey kStatsKeyVp9;
const VideoDescKey kStatsKeyAvc;
@@ -180,7 +179,7 @@ TEST_F(VideoDecodeStatsDBImplTest, FailedInitialize) {
stats_db_->Initialize(base::BindOnce(
&VideoDecodeStatsDBImplTest::OnInitialize, base::Unretained(this)));
EXPECT_CALL(*this, OnInitialize(false));
- fake_db_->InitCallback(false);
+ fake_db_->InitStatusCallback(leveldb_proto::Enums::InitStatus::kError);
}
TEST_F(VideoDecodeStatsDBImplTest, ReadExpectingNothing) {
@@ -447,6 +446,43 @@ TEST_F(VideoDecodeStatsDBImplTest, FillBufferInMixedIncrements) {
std::round(GetMaxFramesPerBuffer() * kEfficientRateC)));
}
+// Overfilling an empty buffer exercises the codepath that computes weighted
+// dropped and power-efficient ratios when the existing counts are all zero.
+// This test ensures that we never divide by zero when aggregating with that
+// empty data.
+TEST_F(VideoDecodeStatsDBImplTest, OverfillEmptyBuffer) {
+ InitializeDB();
+
+ // Setup DB entry that overflows the buffer max (by 1) with 10% of frames
+ // dropped and 50% of frames power efficient.
+ const int kNumFramesOverfill = GetMaxFramesPerBuffer() + 1;
+ DecodeStatsEntry entryA(kNumFramesOverfill,
+ std::round(0.1 * kNumFramesOverfill),
+ std::round(0.5 * kNumFramesOverfill));
+
+ // Append entry to completely fill the buffer and verify read.
+ AppendStats(kStatsKeyVp9, entryA);
+ // Read-back stats should have same ratios, but scaled such that
+ // frames_decoded = GetMaxFramesPerBuffer().
+ DecodeStatsEntry readBackEntryA(GetMaxFramesPerBuffer(),
+ std::round(0.1 * GetMaxFramesPerBuffer()),
+ std::round(0.5 * GetMaxFramesPerBuffer()));
+ VerifyReadStats(kStatsKeyVp9, readBackEntryA);
+
+ // Append another entry that again overfills with different dropped and power
+ // efficient ratios. Verify that read-back only reflects latest entry.
+ DecodeStatsEntry entryB(kNumFramesOverfill,
+ std::round(0.2 * kNumFramesOverfill),
+ std::round(0.6 * kNumFramesOverfill));
+ AppendStats(kStatsKeyVp9, entryB);
+ // Read-back stats should have same ratios, but scaled such that
+ // frames_decoded = GetMaxFramesPerBuffer().
+ DecodeStatsEntry readBackEntryB(GetMaxFramesPerBuffer(),
+ std::round(0.2 * GetMaxFramesPerBuffer()),
+ std::round(0.6 * GetMaxFramesPerBuffer()));
+ VerifyReadStats(kStatsKeyVp9, readBackEntryB);
+}
+
TEST_F(VideoDecodeStatsDBImplTest, NoWriteDateReadAndExpire) {
InitializeDB();
diff --git a/chromium/media/capture/BUILD.gn b/chromium/media/capture/BUILD.gn
index 5bdda82d4d1..97c40033b7a 100644
--- a/chromium/media/capture/BUILD.gn
+++ b/chromium/media/capture/BUILD.gn
@@ -86,7 +86,7 @@ jumbo_source_set("capture_device_specific") {
"//media",
"//media/capture/mojom:image_capture",
"//media/capture/mojom:image_capture_types",
- "//media/mojo/interfaces:interfaces",
+ "//media/mojo/mojom",
"//media/parsers",
"//third_party/libyuv",
"//ui/gfx",
@@ -101,8 +101,6 @@ jumbo_component("capture_lib") {
"video/scoped_buffer_pool_reservation.h",
"video/shared_memory_buffer_tracker.cc",
"video/shared_memory_buffer_tracker.h",
- "video/shared_memory_handle_provider.cc",
- "video/shared_memory_handle_provider.h",
"video/video_capture_buffer_pool.h",
"video/video_capture_buffer_pool_impl.cc",
"video/video_capture_buffer_pool_impl.h",
@@ -134,7 +132,7 @@ jumbo_component("capture_lib") {
"//media/capture/mojom:image_capture",
"//media/capture/mojom:image_capture_types",
"//media/capture/mojom:video_capture",
- "//media/mojo/interfaces:interfaces",
+ "//media/mojo/mojom",
"//services/service_manager/public/cpp",
"//third_party/libyuv",
"//ui/display",
@@ -243,6 +241,14 @@ jumbo_component("capture_lib") {
sources += [
"video/chromeos/camera_3a_controller.cc",
"video/chromeos/camera_3a_controller.h",
+ "video/chromeos/camera_app_device_bridge_impl.cc",
+ "video/chromeos/camera_app_device_bridge_impl.h",
+ "video/chromeos/camera_app_device_impl.cc",
+ "video/chromeos/camera_app_device_impl.h",
+ "video/chromeos/camera_app_device_provider_impl.cc",
+ "video/chromeos/camera_app_device_provider_impl.h",
+ "video/chromeos/camera_app_helper_impl.cc",
+ "video/chromeos/camera_app_helper_impl.h",
"video/chromeos/camera_buffer_factory.cc",
"video/chromeos/camera_buffer_factory.h",
"video/chromeos/camera_device_context.cc",
@@ -255,18 +261,12 @@ jumbo_component("capture_lib") {
"video/chromeos/camera_hal_dispatcher_impl.h",
"video/chromeos/camera_metadata_utils.cc",
"video/chromeos/camera_metadata_utils.h",
- "video/chromeos/cros_image_capture_impl.cc",
- "video/chromeos/cros_image_capture_impl.h",
"video/chromeos/display_rotation_observer.cc",
"video/chromeos/display_rotation_observer.h",
"video/chromeos/gpu_memory_buffer_tracker.cc",
"video/chromeos/gpu_memory_buffer_tracker.h",
"video/chromeos/pixel_format_utils.cc",
"video/chromeos/pixel_format_utils.h",
- "video/chromeos/renderer_facing_cros_image_capture.cc",
- "video/chromeos/renderer_facing_cros_image_capture.h",
- "video/chromeos/reprocess_manager.cc",
- "video/chromeos/reprocess_manager.h",
"video/chromeos/request_builder.cc",
"video/chromeos/request_builder.h",
"video/chromeos/request_manager.cc",
@@ -292,7 +292,7 @@ jumbo_component("capture_lib") {
"//components/chromeos_camera:mojo_mjpeg_decode_accelerator",
"//components/chromeos_camera/common",
"//gpu/ipc/common:common",
- "//media/capture/video/chromeos/mojo:cros_camera",
+ "//media/capture/video/chromeos/mojom:cros_camera",
"//third_party/libsync",
]
}
@@ -366,7 +366,6 @@ test("capture_unittests") {
"video/mac/video_capture_device_factory_mac_unittest.mm",
"video/mock_gpu_memory_buffer_manager.cc",
"video/mock_gpu_memory_buffer_manager.h",
- "video/shared_memory_handle_provider_unittest.cc",
"video/video_capture_device_client_unittest.cc",
"video/video_capture_device_unittest.cc",
"video_capture_types_unittest.cc",
@@ -436,7 +435,7 @@ test("capture_unittests") {
":chromeos_test_utils",
"//build/config/linux/libdrm",
"//chromeos/dbus/power",
- "//media/capture/video/chromeos/mojo:cros_camera",
+ "//media/capture/video/chromeos/mojom:cros_camera",
"//media/capture/video/chromeos/public",
"//mojo/core/embedder",
"//third_party/libsync",
diff --git a/chromium/media/capture/mojom/BUILD.gn b/chromium/media/capture/mojom/BUILD.gn
index adf9fe4088f..ebadbd0dbf0 100644
--- a/chromium/media/capture/mojom/BUILD.gn
+++ b/chromium/media/capture/mojom/BUILD.gn
@@ -13,9 +13,13 @@ mojom("video_capture") {
public_deps = [
"//gpu/ipc/common:interfaces",
"//mojo/public/mojom/base",
- "//ui/gfx/geometry/mojo",
- "//ui/gfx/mojo",
+ "//ui/gfx/geometry/mojom",
+ "//ui/gfx/mojom",
]
+
+ export_class_attribute_blink = "BLINK_PLATFORM_EXPORT"
+ export_define_blink = "BLINK_PLATFORM_IMPLEMENTATION=1"
+ export_header_blink = "third_party/blink/public/platform/web_common.h"
}
mojom("image_capture") {
diff --git a/chromium/media/capture/mojom/image_capture.mojom b/chromium/media/capture/mojom/image_capture.mojom
index 720faa9b8dd..b71f4c54ba5 100644
--- a/chromium/media/capture/mojom/image_capture.mojom
+++ b/chromium/media/capture/mojom/image_capture.mojom
@@ -47,6 +47,9 @@ struct PhotoState {
Range sharpness;
Range focus_distance;
+
+ Range pan;
+ Range tilt;
Range zoom;
bool supports_torch;
@@ -101,6 +104,11 @@ struct PhotoSettings {
bool has_focus_distance;
double focus_distance;
+
+ bool has_pan;
+ double pan;
+ bool has_tilt;
+ double tilt;
bool has_zoom;
double zoom;
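
PhotoState and PhotoSettings gain pan and tilt alongside zoom: a capability Range each, plus has_pan/pan and has_tilt/tilt on the settings struct. A minimal C++ sketch of requesting a pan/tilt position through the generated bindings (values hypothetical; valid ranges come from the device's PhotoState):

    media::mojom::PhotoSettingsPtr settings = media::mojom::PhotoSettings::New();
    settings->has_pan = true;
    settings->pan = 45.0;
    settings->has_tilt = true;
    settings->tilt = -10.0;
    // Other has_* flags stay false, so those controls are left untouched.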
diff --git a/chromium/media/capture/mojom/image_capture_types.cc b/chromium/media/capture/mojom/image_capture_types.cc
index 89ba6e7be26..fd20b92bb52 100644
--- a/chromium/media/capture/mojom/image_capture_types.cc
+++ b/chromium/media/capture/mojom/image_capture_types.cc
@@ -19,6 +19,8 @@ media::mojom::PhotoStatePtr CreateEmptyPhotoState() {
photo_capabilities->contrast = media::mojom::Range::New();
photo_capabilities->saturation = media::mojom::Range::New();
photo_capabilities->sharpness = media::mojom::Range::New();
+ photo_capabilities->pan = media::mojom::Range::New();
+ photo_capabilities->tilt = media::mojom::Range::New();
photo_capabilities->zoom = media::mojom::Range::New();
photo_capabilities->focus_distance = media::mojom::Range::New();
photo_capabilities->torch = false;
diff --git a/chromium/media/capture/mojom/video_capture.mojom b/chromium/media/capture/mojom/video_capture.mojom
index e074393c38d..16a8a3bd143 100644
--- a/chromium/media/capture/mojom/video_capture.mojom
+++ b/chromium/media/capture/mojom/video_capture.mojom
@@ -5,7 +5,8 @@
module media.mojom;
import "media/capture/mojom/video_capture_types.mojom";
-import "ui/gfx/geometry/mojo/geometry.mojom";
+import "ui/gfx/geometry/mojom/geometry.mojom";
+import "mojo/public/mojom/base/unguessable_token.mojom";
// This file describes the communication between a given Renderer Host interface
// implementation (VideoCaptureHost) and a remote VideoCaptureObserver.
@@ -81,38 +82,44 @@ interface VideoCaptureHost {
// Start the |session_id| session with |params|. The video capture will be
// identified as |device_id|, a new id picked by the renderer process.
// |observer| will be used for notifications.
- Start(int32 device_id, int32 session_id, VideoCaptureParams params,
+ Start(mojo_base.mojom.UnguessableToken device_id,
+ mojo_base.mojom.UnguessableToken session_id,
+ VideoCaptureParams params,
VideoCaptureObserver observer);
// Closes the video capture specified by |device_id|.
- Stop(int32 device_id);
+ Stop(mojo_base.mojom.UnguessableToken device_id);
// Pauses the video capture specified by |device_id|.
- Pause(int32 device_id);
+ Pause(mojo_base.mojom.UnguessableToken device_id);
// Resume |device_id| video capture, in |session_id| and with |params|.
- Resume(int32 device_id, int32 session_id, VideoCaptureParams params);
+ Resume(mojo_base.mojom.UnguessableToken device_id,
+ mojo_base.mojom.UnguessableToken session_id,
+ VideoCaptureParams params);
// Requests that the video capturer send a frame "soon" (e.g., to resolve
// picture loss or quality issues).
- RequestRefreshFrame(int32 device_id);
+ RequestRefreshFrame(mojo_base.mojom.UnguessableToken device_id);
// Indicates that a renderer has finished using a previously shared buffer.
- ReleaseBuffer(int32 device_id, int32 buffer_id,
+ ReleaseBuffer(mojo_base.mojom.UnguessableToken device_id, int32 buffer_id,
double consumer_resource_utilization);
// Get the formats supported by a device referenced by |session_id|.
- GetDeviceSupportedFormats(int32 device_id, int32 session_id)
+ GetDeviceSupportedFormats(mojo_base.mojom.UnguessableToken device_id,
+ mojo_base.mojom.UnguessableToken session_id)
=> (array<VideoCaptureFormat> formats_supported);
// Get the format(s) in use by a device referenced by |session_id|.
- GetDeviceFormatsInUse(int32 device_id, int32 session_id)
+ GetDeviceFormatsInUse(mojo_base.mojom.UnguessableToken device_id,
+ mojo_base.mojom.UnguessableToken session_id)
=> (array<VideoCaptureFormat> formats_in_use);
// Notifies the host about a frame being dropped.
- OnFrameDropped(int32 device_id,
+ OnFrameDropped(mojo_base.mojom.UnguessableToken device_id,
media.mojom.VideoCaptureFrameDropReason reason);
// Sends a log message to the VideoCaptureHost.
- OnLog(int32 device_id, string message);
+ OnLog(mojo_base.mojom.UnguessableToken device_id, string message);
};
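With this change every VideoCaptureHost call is keyed on a base::UnguessableToken rather than an int32 id. A rough caller-side sketch, assuming the usual generated C++ bindings; the host remote, params, and observer plumbing are assumptions and not part of this patch:

  const base::UnguessableToken device_id = base::UnguessableToken::Create();
  const base::UnguessableToken session_id = base::UnguessableToken::Create();
  video_capture_host->Start(device_id, session_id, std::move(params),
                            std::move(observer));
  // Later teardown uses the same token:
  video_capture_host->Stop(device_id);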
diff --git a/chromium/media/capture/mojom/video_capture_types.mojom b/chromium/media/capture/mojom/video_capture_types.mojom
index 0da1c597139..2f661557377 100644
--- a/chromium/media/capture/mojom/video_capture_types.mojom
+++ b/chromium/media/capture/mojom/video_capture_types.mojom
@@ -8,10 +8,9 @@ import "gpu/ipc/common/mailbox_holder.mojom";
import "mojo/public/mojom/base/shared_memory.mojom";
import "mojo/public/mojom/base/time.mojom";
import "mojo/public/mojom/base/values.mojom";
-import "ui/gfx/geometry/mojo/geometry.mojom";
-[EnableIf=is_chromeos]
-import "ui/gfx/mojo/buffer_types.mojom";
-import "ui/gfx/mojo/color_space.mojom";
+import "ui/gfx/geometry/mojom/geometry.mojom";
+import "ui/gfx/mojom/buffer_types.mojom";
+import "ui/gfx/mojom/color_space.mojom";
enum VideoCapturePixelFormat {
UNKNOWN,
@@ -28,7 +27,6 @@ enum VideoCapturePixelFormat {
XRGB,
RGB24,
MJPEG,
- MT21,
YUV420P9,
YUV420P10,
YUV422P9,
@@ -94,7 +92,6 @@ enum VideoCaptureBufferType {
kMailboxHolder,
- [EnableIf=is_chromeos]
kGpuMemoryBuffer
};
@@ -314,6 +311,5 @@ union VideoBufferHandle {
mojo_base.mojom.ReadOnlySharedMemoryRegion read_only_shmem_region;
SharedMemoryViaRawFileDescriptor shared_memory_via_raw_file_descriptor;
MailboxBufferHandleSet mailbox_handles;
- [EnableIf=is_chromeos]
gfx.mojom.GpuMemoryBufferHandle gpu_memory_buffer_handle;
};
diff --git a/chromium/media/capture/mojom/video_capture_types.typemap b/chromium/media/capture/mojom/video_capture_types.typemap
index d2425818f51..bbe592ffbc1 100644
--- a/chromium/media/capture/mojom/video_capture_types.typemap
+++ b/chromium/media/capture/mojom/video_capture_types.typemap
@@ -19,8 +19,8 @@ sources = [
deps = [
"//media",
"//media/capture:capture_base",
- "//media/mojo/interfaces",
- "//ui/gfx/geometry/mojo:struct_traits",
+ "//media/mojo/mojom",
+ "//ui/gfx/geometry/mojom:mojom_traits",
]
public_deps = [
@@ -28,15 +28,15 @@ public_deps = [
]
type_mappings = [
- "media.mojom.ResolutionChangePolicy=media::ResolutionChangePolicy",
- "media.mojom.PowerLineFrequency=media::PowerLineFrequency",
- "media.mojom.VideoCapturePixelFormat=media::VideoPixelFormat",
- "media.mojom.VideoCaptureBufferType=media::VideoCaptureBufferType",
- "media.mojom.VideoCaptureError=media::VideoCaptureError",
- "media.mojom.VideoCaptureFrameDropReason=media::VideoCaptureFrameDropReason",
- "media.mojom.VideoCaptureFormat=media::VideoCaptureFormat",
- "media.mojom.VideoCaptureParams=media::VideoCaptureParams",
- "media.mojom.VideoCaptureDeviceDescriptor=media::VideoCaptureDeviceDescriptor",
- "media.mojom.VideoCaptureDeviceInfo=media::VideoCaptureDeviceInfo",
- "media.mojom.VideoFacingMode=media::VideoFacingMode",
+ "media.mojom.ResolutionChangePolicy=::media::ResolutionChangePolicy",
+ "media.mojom.PowerLineFrequency=::media::PowerLineFrequency",
+ "media.mojom.VideoCapturePixelFormat=::media::VideoPixelFormat",
+ "media.mojom.VideoCaptureBufferType=::media::VideoCaptureBufferType",
+ "media.mojom.VideoCaptureError=::media::VideoCaptureError",
+ "media.mojom.VideoCaptureFrameDropReason=::media::VideoCaptureFrameDropReason",
+ "media.mojom.VideoCaptureFormat=::media::VideoCaptureFormat",
+ "media.mojom.VideoCaptureParams=::media::VideoCaptureParams",
+ "media.mojom.VideoCaptureDeviceDescriptor=::media::VideoCaptureDeviceDescriptor",
+ "media.mojom.VideoCaptureDeviceInfo=::media::VideoCaptureDeviceInfo",
+ "media.mojom.VideoFacingMode=::media::VideoFacingMode",
]
diff --git a/chromium/media/capture/mojom/video_capture_types_for_blink.typemap b/chromium/media/capture/mojom/video_capture_types_for_blink.typemap
new file mode 100644
index 00000000000..485c4179a83
--- /dev/null
+++ b/chromium/media/capture/mojom/video_capture_types_for_blink.typemap
@@ -0,0 +1,32 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+mojom = "//media/capture/mojom/video_capture_types.mojom"
+
+public_headers = [
+ "//media/capture/video_capture_types.h",
+ "//media/capture/video/video_capture_device_descriptor.h",
+ "//media/capture/video/video_capture_device_info.h",
+]
+
+traits_headers = [ "//media/capture/mojom/video_capture_types_mojom_traits.h" ]
+
+deps = [
+ "//media",
+ "//media/capture:capture_base",
+]
+
+type_mappings = [
+ "media.mojom.ResolutionChangePolicy=::media::ResolutionChangePolicy",
+ "media.mojom.PowerLineFrequency=::media::PowerLineFrequency",
+ "media.mojom.VideoCapturePixelFormat=::media::VideoPixelFormat",
+ "media.mojom.VideoCaptureBufferType=::media::VideoCaptureBufferType",
+ "media.mojom.VideoCaptureError=::media::VideoCaptureError",
+ "media.mojom.VideoCaptureFrameDropReason=::media::VideoCaptureFrameDropReason",
+ "media.mojom.VideoCaptureFormat=::media::VideoCaptureFormat",
+ "media.mojom.VideoCaptureParams=::media::VideoCaptureParams",
+ "media.mojom.VideoCaptureDeviceDescriptor=::media::VideoCaptureDeviceDescriptor",
+ "media.mojom.VideoCaptureDeviceInfo=::media::VideoCaptureDeviceInfo",
+ "media.mojom.VideoFacingMode=::media::VideoFacingMode",
+]
diff --git a/chromium/media/capture/mojom/video_capture_types_mojom_traits.cc b/chromium/media/capture/mojom/video_capture_types_mojom_traits.cc
index 7018febc993..d1dc614d167 100644
--- a/chromium/media/capture/mojom/video_capture_types_mojom_traits.cc
+++ b/chromium/media/capture/mojom/video_capture_types_mojom_traits.cc
@@ -5,8 +5,8 @@
#include "media/capture/mojom/video_capture_types_mojom_traits.h"
#include "media/base/ipc/media_param_traits_macros.h"
-#include "ui/gfx/geometry/mojo/geometry.mojom.h"
-#include "ui/gfx/geometry/mojo/geometry_struct_traits.h"
+#include "ui/gfx/geometry/mojom/geometry.mojom.h"
+#include "ui/gfx/geometry/mojom/geometry_mojom_traits.h"
namespace mojo {
@@ -115,8 +115,6 @@ EnumTraits<media::mojom::VideoCapturePixelFormat,
return media::mojom::VideoCapturePixelFormat::RGB24;
case media::VideoPixelFormat::PIXEL_FORMAT_MJPEG:
return media::mojom::VideoCapturePixelFormat::MJPEG;
- case media::VideoPixelFormat::PIXEL_FORMAT_MT21:
- return media::mojom::VideoCapturePixelFormat::MT21;
case media::VideoPixelFormat::PIXEL_FORMAT_YUV420P9:
return media::mojom::VideoCapturePixelFormat::YUV420P9;
case media::VideoPixelFormat::PIXEL_FORMAT_YUV420P10:
@@ -196,9 +194,6 @@ bool EnumTraits<media::mojom::VideoCapturePixelFormat,
case media::mojom::VideoCapturePixelFormat::MJPEG:
*output = media::PIXEL_FORMAT_MJPEG;
return true;
- case media::mojom::VideoCapturePixelFormat::MT21:
- *output = media::PIXEL_FORMAT_MT21;
- return true;
case media::mojom::VideoCapturePixelFormat::YUV420P9:
*output = media::PIXEL_FORMAT_YUV420P9;
return true;
@@ -256,10 +251,8 @@ EnumTraits<media::mojom::VideoCaptureBufferType,
kSharedMemoryViaRawFileDescriptor;
case media::VideoCaptureBufferType::kMailboxHolder:
return media::mojom::VideoCaptureBufferType::kMailboxHolder;
-#if defined(OS_CHROMEOS)
case media::VideoCaptureBufferType::kGpuMemoryBuffer:
return media::mojom::VideoCaptureBufferType::kGpuMemoryBuffer;
-#endif
}
NOTREACHED();
return media::mojom::VideoCaptureBufferType::kSharedMemory;
@@ -282,11 +275,9 @@ bool EnumTraits<media::mojom::VideoCaptureBufferType,
case media::mojom::VideoCaptureBufferType::kMailboxHolder:
*output = media::VideoCaptureBufferType::kMailboxHolder;
return true;
-#if defined(OS_CHROMEOS)
case media::mojom::VideoCaptureBufferType::kGpuMemoryBuffer:
*output = media::VideoCaptureBufferType::kGpuMemoryBuffer;
return true;
-#endif
}
NOTREACHED();
return false;
diff --git a/chromium/media/capture/run_all_unittests.cc b/chromium/media/capture/run_all_unittests.cc
index 9c4b2770c53..b3173506411 100644
--- a/chromium/media/capture/run_all_unittests.cc
+++ b/chromium/media/capture/run_all_unittests.cc
@@ -5,7 +5,7 @@
#include <stdio.h>
#include "base/bind.h"
-#include "base/message_loop/message_loop.h"
+#include "base/message_loop/message_pump_type.h"
#include "base/test/launcher/unit_test_launcher.h"
#include "base/test/test_suite.h"
#include "base/threading/thread.h"
@@ -22,7 +22,7 @@ class MojoEnabledTestEnvironment final : public testing::Environment {
void SetUp() final {
mojo::core::Init();
mojo_ipc_thread_.StartWithOptions(
- base::Thread::Options(base::MessageLoop::TYPE_IO, 0));
+ base::Thread::Options(base::MessagePumpType::IO, 0));
mojo_ipc_support_.reset(new mojo::core::ScopedIPCSupport(
mojo_ipc_thread_.task_runner(),
mojo::core::ScopedIPCSupport::ShutdownPolicy::FAST));
diff --git a/chromium/media/capture/video/android/video_capture_device_android.cc b/chromium/media/capture/video/android/video_capture_device_android.cc
index c2c7640f469..6807041d262 100644
--- a/chromium/media/capture/video/android/video_capture_device_android.cc
+++ b/chromium/media/capture/video/android/video_capture_device_android.cc
@@ -103,8 +103,7 @@ PhotoCapabilities::AndroidFillLightMode ToAndroidFillLightMode(
VideoCaptureDeviceAndroid::VideoCaptureDeviceAndroid(
const VideoCaptureDeviceDescriptor& device_descriptor)
: main_task_runner_(base::ThreadTaskRunnerHandle::Get()),
- device_descriptor_(device_descriptor),
- weak_ptr_factory_(this) {}
+ device_descriptor_(device_descriptor) {}
VideoCaptureDeviceAndroid::~VideoCaptureDeviceAndroid() {
DCHECK(main_task_runner_->BelongsToCurrentThread());
diff --git a/chromium/media/capture/video/android/video_capture_device_android.h b/chromium/media/capture/video/android/video_capture_device_android.h
index 587ec58d057..961e85b062c 100644
--- a/chromium/media/capture/video/android/video_capture_device_android.h
+++ b/chromium/media/capture/video/android/video_capture_device_android.h
@@ -206,7 +206,7 @@ class CAPTURE_EXPORT VideoCaptureDeviceAndroid : public VideoCaptureDevice {
// Java VideoCaptureAndroid instance.
base::android::ScopedJavaLocalRef<jobject> j_capture_;
- base::WeakPtrFactory<VideoCaptureDeviceAndroid> weak_ptr_factory_;
+ base::WeakPtrFactory<VideoCaptureDeviceAndroid> weak_ptr_factory_{this};
DISALLOW_IMPLICIT_CONSTRUCTORS(VideoCaptureDeviceAndroid);
};
diff --git a/chromium/media/capture/video/chromeos/camera_3a_controller.cc b/chromium/media/capture/video/chromeos/camera_3a_controller.cc
index f89b2b14e1f..c1dc5caa03a 100644
--- a/chromium/media/capture/video/chromeos/camera_3a_controller.cc
+++ b/chromium/media/capture/video/chromeos/camera_3a_controller.cc
@@ -49,8 +49,7 @@ Camera3AController::Camera3AController(
ANDROID_CONTROL_AWB_STATE_INACTIVE),
awb_mode_set_(false),
set_point_of_interest_running_(false),
- ae_locked_for_point_of_interest_(false),
- weak_ptr_factory_(this) {
+ ae_locked_for_point_of_interest_(false) {
DCHECK(task_runner_->BelongsToCurrentThread());
capture_metadata_dispatcher_->AddResultMetadataObserver(this);
diff --git a/chromium/media/capture/video/chromeos/camera_3a_controller.h b/chromium/media/capture/video/chromeos/camera_3a_controller.h
index 681638aabe4..baf506adcae 100644
--- a/chromium/media/capture/video/chromeos/camera_3a_controller.h
+++ b/chromium/media/capture/video/chromeos/camera_3a_controller.h
@@ -9,7 +9,7 @@
#include "base/cancelable_callback.h"
#include "media/base/media_export.h"
-#include "media/capture/video/chromeos/mojo/camera3.mojom.h"
+#include "media/capture/video/chromeos/mojom/camera3.mojom.h"
#include "media/capture/video/chromeos/request_manager.h"
namespace media {
@@ -134,7 +134,7 @@ class CAPTURE_EXPORT Camera3AController
base::CancelableOnceClosure delayed_ae_unlock_callback_;
- base::WeakPtrFactory<Camera3AController> weak_ptr_factory_;
+ base::WeakPtrFactory<Camera3AController> weak_ptr_factory_{this};
DISALLOW_IMPLICIT_CONSTRUCTORS(Camera3AController);
};
diff --git a/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.cc b/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.cc
new file mode 100644
index 00000000000..b2fa774a4af
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.cc
@@ -0,0 +1,80 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/chromeos/camera_app_device_bridge_impl.h"
+
+#include <string>
+
+#include "base/command_line.h"
+#include "media/base/media_switches.h"
+#include "media/capture/video/chromeos/public/cros_features.h"
+#include "media/capture/video/chromeos/video_capture_device_chromeos_halv3.h"
+
+namespace media {
+
+CameraAppDeviceBridgeImpl::CameraAppDeviceBridgeImpl() {}
+
+CameraAppDeviceBridgeImpl::~CameraAppDeviceBridgeImpl() = default;
+
+void CameraAppDeviceBridgeImpl::SetIsSupported(bool is_supported) {
+ is_supported_ = is_supported;
+}
+
+void CameraAppDeviceBridgeImpl::BindReceiver(
+ mojo::PendingReceiver<cros::mojom::CameraAppDeviceBridge> receiver) {
+ receivers_.Add(this, std::move(receiver));
+}
+
+void CameraAppDeviceBridgeImpl::OnDeviceClosed(const std::string& device_id) {
+ auto it = camera_app_devices_.find(device_id);
+ if (it != camera_app_devices_.end()) {
+ camera_app_devices_.erase(it);
+ }
+}
+
+void CameraAppDeviceBridgeImpl::SetCameraInfoGetter(
+ CameraInfoGetter camera_info_getter) {
+ camera_info_getter_ = std::move(camera_info_getter);
+}
+
+void CameraAppDeviceBridgeImpl::UnsetCameraInfoGetter() {
+ camera_info_getter_ = {};
+}
+
+CameraAppDeviceImpl* CameraAppDeviceBridgeImpl::GetCameraAppDevice(
+ const std::string& device_id) {
+ auto it = camera_app_devices_.find(device_id);
+ if (it != camera_app_devices_.end()) {
+ return it->second.get();
+ }
+ return CreateCameraAppDevice(device_id);
+}
+
+void CameraAppDeviceBridgeImpl::GetCameraAppDevice(
+ const std::string& device_id,
+ GetCameraAppDeviceCallback callback) {
+ DCHECK(is_supported_);
+
+ mojo::PendingRemote<cros::mojom::CameraAppDevice> device;
+ GetCameraAppDevice(device_id)->BindReceiver(
+ device.InitWithNewPipeAndPassReceiver());
+ std::move(callback).Run(cros::mojom::GetCameraAppDeviceStatus::SUCCESS,
+ std::move(device));
+}
+
+media::CameraAppDeviceImpl* CameraAppDeviceBridgeImpl::CreateCameraAppDevice(
+ const std::string& device_id) {
+ DCHECK(camera_info_getter_);
+ auto device_info = camera_info_getter_.Run(device_id);
+ auto device_impl = std::make_unique<media::CameraAppDeviceImpl>(
+ device_id, std::move(device_info));
+ auto result = camera_app_devices_.emplace(device_id, std::move(device_impl));
+ return result.first->second.get();
+}
+
+void CameraAppDeviceBridgeImpl::IsSupported(IsSupportedCallback callback) {
+ std::move(callback).Run(is_supported_);
+}
+
+} // namespace media
diff --git a/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.h b/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.h
new file mode 100644
index 00000000000..42a1972d1d9
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/camera_app_device_bridge_impl.h
@@ -0,0 +1,65 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_APP_DEVICE_BRIDGE_IMPL_H_
+#define MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_APP_DEVICE_BRIDGE_IMPL_H_
+
+#include <string>
+
+#include "media/capture/capture_export.h"
+#include "media/capture/video/chromeos/camera_app_device_impl.h"
+#include "media/capture/video/chromeos/mojom/camera_app.mojom.h"
+#include "mojo/public/cpp/bindings/receiver_set.h"
+
+namespace media {
+
+// A bridge class that helps establish the CameraAppDevice connection between
+// the remote side (Chrome) and the receiver side (Video Capture Service).
+class CAPTURE_EXPORT CameraAppDeviceBridgeImpl
+ : public cros::mojom::CameraAppDeviceBridge {
+ public:
+ using CameraInfoGetter =
+ base::RepeatingCallback<cros::mojom::CameraInfoPtr(const std::string&)>;
+
+ CameraAppDeviceBridgeImpl();
+
+ ~CameraAppDeviceBridgeImpl() override;
+
+ void SetIsSupported(bool is_supported);
+
+ void BindReceiver(
+ mojo::PendingReceiver<cros::mojom::CameraAppDeviceBridge> receiver);
+
+ void OnDeviceClosed(const std::string& device_id);
+
+ void SetCameraInfoGetter(CameraInfoGetter camera_info_getter);
+
+ void UnsetCameraInfoGetter();
+
+ CameraAppDeviceImpl* GetCameraAppDevice(const std::string& device_id);
+
+ // cros::mojom::CameraAppDeviceBridge implementations.
+ void GetCameraAppDevice(const std::string& device_id,
+ GetCameraAppDeviceCallback callback) override;
+
+ void IsSupported(IsSupportedCallback callback) override;
+
+ private:
+ CameraAppDeviceImpl* CreateCameraAppDevice(const std::string& device_id);
+
+ bool is_supported_;
+
+ CameraInfoGetter camera_info_getter_;
+
+ mojo::ReceiverSet<cros::mojom::CameraAppDeviceBridge> receivers_;
+
+ base::flat_map<std::string, std::unique_ptr<media::CameraAppDeviceImpl>>
+ camera_app_devices_;
+
+ DISALLOW_COPY_AND_ASSIGN(CameraAppDeviceBridgeImpl);
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_APP_DEVICE_BRIDGE_IMPL_H_
\ No newline at end of file
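The bridge is meant to live on the video capture service side and hand out per-device CameraAppDevice instances. A rough usage sketch under that assumption (the owning object and the info-getter wiring are hypothetical; GetCameraInfoFromDeviceId is the delegate method also referenced by the unittest changes in this patch):

  media::CameraAppDeviceBridgeImpl bridge;
  bridge.SetIsSupported(true);
  bridge.SetCameraInfoGetter(base::BindRepeating(
      &media::CameraHalDelegate::GetCameraInfoFromDeviceId,
      camera_hal_delegate));
  bridge.BindReceiver(std::move(pending_receiver));
  // When the capture device for |device_id| is torn down:
  bridge.OnDeviceClosed(device_id);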
diff --git a/chromium/media/capture/video/chromeos/camera_app_device_impl.cc b/chromium/media/capture/video/chromeos/camera_app_device_impl.cc
new file mode 100644
index 00000000000..166b2507df3
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/camera_app_device_impl.cc
@@ -0,0 +1,256 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/chromeos/camera_app_device_impl.h"
+
+#include "media/capture/video/chromeos/camera_metadata_utils.h"
+
+namespace media {
+
+namespace {
+
+void OnStillCaptureDone(media::mojom::ImageCapture::TakePhotoCallback callback,
+ int status,
+ mojom::BlobPtr blob) {
+ DCHECK_EQ(status, kReprocessSuccess);
+ std::move(callback).Run(std::move(blob));
+}
+
+} // namespace
+
+ReprocessTask::ReprocessTask() = default;
+
+ReprocessTask::ReprocessTask(ReprocessTask&& other)
+ : effect(other.effect),
+ callback(std::move(other.callback)),
+ extra_metadata(std::move(other.extra_metadata)) {}
+
+ReprocessTask::~ReprocessTask() = default;
+
+bool CameraAppDeviceImpl::SizeComparator::operator()(
+ const gfx::Size& size_1,
+ const gfx::Size& size_2) const {
+ return size_1.width() < size_2.width() || (size_1.width() == size_2.width() &&
+ size_1.height() < size_2.height());
+}
+
+// static
+int CameraAppDeviceImpl::GetReprocessReturnCode(
+ cros::mojom::Effect effect,
+ const cros::mojom::CameraMetadataPtr* metadata) {
+ if (effect == cros::mojom::Effect::PORTRAIT_MODE) {
+ auto portrait_mode_segmentation_result = GetMetadataEntryAsSpan<uint8_t>(
+ *metadata, static_cast<cros::mojom::CameraMetadataTag>(
+ kPortraitModeSegmentationResultVendorKey));
+ DCHECK(!portrait_mode_segmentation_result.empty());
+ return static_cast<int>(portrait_mode_segmentation_result[0]);
+ }
+ return kReprocessSuccess;
+}
+
+// static
+ReprocessTaskQueue CameraAppDeviceImpl::GetSingleShotReprocessOptions(
+ media::mojom::ImageCapture::TakePhotoCallback take_photo_callback) {
+ ReprocessTaskQueue result_task_queue;
+ ReprocessTask still_capture_task;
+ still_capture_task.effect = cros::mojom::Effect::NO_EFFECT;
+ still_capture_task.callback =
+ base::BindOnce(&OnStillCaptureDone, std::move(take_photo_callback));
+ result_task_queue.push(std::move(still_capture_task));
+ return result_task_queue;
+}
+
+CameraAppDeviceImpl::CameraAppDeviceImpl(const std::string& device_id,
+ cros::mojom::CameraInfoPtr camera_info)
+ : device_id_(device_id),
+ camera_info_(std::move(camera_info)),
+ task_runner_(base::ThreadTaskRunnerHandle::Get()),
+ capture_intent_(cros::mojom::CaptureIntent::DEFAULT),
+ next_observer_id_(0),
+ weak_ptr_factory_(
+ std::make_unique<base::WeakPtrFactory<CameraAppDeviceImpl>>(this)) {}
+
+CameraAppDeviceImpl::~CameraAppDeviceImpl() {
+ task_runner_->DeleteSoon(FROM_HERE, std::move(weak_ptr_factory_));
+}
+
+void CameraAppDeviceImpl::BindReceiver(
+ mojo::PendingReceiver<cros::mojom::CameraAppDevice> receiver) {
+ receivers_.Add(this, std::move(receiver));
+}
+
+void CameraAppDeviceImpl::ConsumeReprocessOptions(
+ media::mojom::ImageCapture::TakePhotoCallback take_photo_callback,
+ base::OnceCallback<void(ReprocessTaskQueue)> consumption_callback) {
+ ReprocessTaskQueue result_task_queue;
+
+ ReprocessTask still_capture_task;
+ still_capture_task.effect = cros::mojom::Effect::NO_EFFECT;
+ still_capture_task.callback =
+ base::BindOnce(&OnStillCaptureDone, std::move(take_photo_callback));
+ result_task_queue.push(std::move(still_capture_task));
+
+ base::AutoLock lock(reprocess_tasks_lock_);
+
+ while (!reprocess_task_queue_.empty()) {
+ result_task_queue.push(std::move(reprocess_task_queue_.front()));
+ reprocess_task_queue_.pop();
+ }
+ std::move(consumption_callback).Run(std::move(result_task_queue));
+}
+
+void CameraAppDeviceImpl::GetFpsRange(const gfx::Size& resolution,
+ GetFpsRangeCallback callback) {
+ base::AutoLock lock(fps_ranges_lock_);
+
+ auto it = resolution_fps_range_map_.find(resolution);
+ if (it == resolution_fps_range_map_.end()) {
+ std::move(callback).Run({});
+ return;
+ }
+ std::move(callback).Run(it->second);
+}
+
+cros::mojom::CaptureIntent CameraAppDeviceImpl::GetCaptureIntent() {
+ base::AutoLock lock(capture_intent_lock_);
+ return capture_intent_;
+}
+
+void CameraAppDeviceImpl::OnResultMetadataAvailable(
+ const cros::mojom::CameraMetadataPtr& metadata,
+ cros::mojom::StreamType streamType) {
+ base::AutoLock lock(observers_lock_);
+
+ const auto& observer_ids = stream_observer_ids_[streamType];
+
+ for (auto& id : observer_ids) {
+ observers_[id]->OnMetadataAvailable(metadata.Clone());
+ }
+}
+
+void CameraAppDeviceImpl::SetReprocessResult(
+ SetReprocessOptionCallback callback,
+ const int32_t status,
+ media::mojom::BlobPtr blob) {
+ auto callback_on_mojo_thread = base::BindOnce(
+ [](const int32_t status, media::mojom::BlobPtr blob,
+ SetReprocessOptionCallback callback) {
+ std::move(callback).Run(status, std::move(blob));
+ },
+ status, std::move(blob), std::move(callback));
+ task_runner_->PostTask(FROM_HERE, std::move(callback_on_mojo_thread));
+}
+
+void CameraAppDeviceImpl::GetCameraInfo(GetCameraInfoCallback callback) {
+ DCHECK(camera_info_);
+ std::move(callback).Run(camera_info_.Clone());
+}
+
+void CameraAppDeviceImpl::SetReprocessOption(
+ cros::mojom::Effect effect,
+ SetReprocessOptionCallback reprocess_result_callback) {
+ ReprocessTask task;
+ task.effect = effect;
+ task.callback = base::BindOnce(&CameraAppDeviceImpl::SetReprocessResult,
+ weak_ptr_factory_->GetWeakPtr(),
+ std::move(reprocess_result_callback));
+
+ if (effect == cros::mojom::Effect::PORTRAIT_MODE) {
+ std::vector<uint8_t> portrait_mode_value(sizeof(int32_t));
+ *reinterpret_cast<int32_t*>(portrait_mode_value.data()) = 1;
+ cros::mojom::CameraMetadataEntryPtr e =
+ cros::mojom::CameraMetadataEntry::New();
+ e->tag =
+ static_cast<cros::mojom::CameraMetadataTag>(kPortraitModeVendorKey);
+ e->type = cros::mojom::EntryType::TYPE_INT32;
+ e->count = 1;
+ e->data = std::move(portrait_mode_value);
+ task.extra_metadata.push_back(std::move(e));
+ }
+
+ base::AutoLock lock(reprocess_tasks_lock_);
+
+ reprocess_task_queue_.push(std::move(task));
+}
+
+void CameraAppDeviceImpl::SetFpsRange(const gfx::Size& resolution,
+ const gfx::Range& fps_range,
+ SetFpsRangeCallback callback) {
+ const int entry_length = 2;
+
+ auto& static_metadata = camera_info_->static_camera_characteristics;
+ auto available_fps_range_entries = GetMetadataEntryAsSpan<int32_t>(
+ static_metadata, cros::mojom::CameraMetadataTag::
+ ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
+ DCHECK(available_fps_range_entries.size() % entry_length == 0);
+
+ bool is_valid = false;
+ int min_fps = static_cast<int>(fps_range.GetMin());
+ int max_fps = static_cast<int>(fps_range.GetMax());
+ for (size_t i = 0; i < available_fps_range_entries.size();
+ i += entry_length) {
+ if (available_fps_range_entries[i] == min_fps &&
+ available_fps_range_entries[i + 1] == max_fps) {
+ is_valid = true;
+ break;
+ }
+ }
+
+ base::AutoLock lock(fps_ranges_lock_);
+
+ if (!is_valid) {
+ // If the input range is invalid, we should still clear the cached range so
+ // that capture falls back to the default fps range rather than the cached one.
+ auto it = resolution_fps_range_map_.find(resolution);
+ if (it != resolution_fps_range_map_.end()) {
+ resolution_fps_range_map_.erase(it);
+ }
+ std::move(callback).Run(false);
+ return;
+ }
+
+ resolution_fps_range_map_[resolution] = fps_range;
+ std::move(callback).Run(true);
+}
+
+void CameraAppDeviceImpl::SetCaptureIntent(
+ cros::mojom::CaptureIntent capture_intent,
+ SetCaptureIntentCallback callback) {
+ base::AutoLock lock(capture_intent_lock_);
+ capture_intent_ = capture_intent;
+ std::move(callback).Run();
+}
+
+void CameraAppDeviceImpl::AddResultMetadataObserver(
+ mojo::PendingRemote<cros::mojom::ResultMetadataObserver> observer,
+ cros::mojom::StreamType stream_type,
+ AddResultMetadataObserverCallback callback) {
+ base::AutoLock lock(observers_lock_);
+
+ uint32_t id = next_observer_id_++;
+ observers_[id] =
+ mojo::Remote<cros::mojom::ResultMetadataObserver>(std::move(observer));
+ stream_observer_ids_[stream_type].insert(id);
+
+ std::move(callback).Run(id);
+}
+
+void CameraAppDeviceImpl::RemoveResultMetadataObserver(
+ uint32_t id,
+ RemoveResultMetadataObserverCallback callback) {
+ base::AutoLock lock(observers_lock_);
+
+ if (observers_.erase(id) == 0) {
+ std::move(callback).Run(false);
+ return;
+ }
+
+ for (auto& kv : stream_observer_ids_) {
+ auto& observer_ids = kv.second;
+ observer_ids.erase(id);
+ }
+ std::move(callback).Run(true);
+}
+
+} // namespace media
diff --git a/chromium/media/capture/video/chromeos/camera_app_device_impl.h b/chromium/media/capture/video/chromeos/camera_app_device_impl.h
new file mode 100644
index 00000000000..e3804b5bba0
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/camera_app_device_impl.h
@@ -0,0 +1,146 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_APP_DEVICE_IMPL_H_
+#define MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_APP_DEVICE_IMPL_H_
+
+#include <queue>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "base/containers/flat_set.h"
+#include "base/memory/weak_ptr.h"
+#include "base/single_thread_task_runner.h"
+#include "base/synchronization/lock.h"
+#include "media/capture/capture_export.h"
+#include "media/capture/mojom/image_capture.mojom.h"
+#include "media/capture/video/chromeos/mojom/camera3.mojom.h"
+#include "media/capture/video/chromeos/mojom/camera_app.mojom.h"
+#include "media/capture/video/chromeos/mojom/camera_common.mojom.h"
+#include "mojo/public/cpp/bindings/receiver_set.h"
+#include "ui/gfx/geometry/size.h"
+#include "ui/gfx/range/range.h"
+
+namespace media {
+
+struct ReprocessTask {
+ public:
+ ReprocessTask();
+ ReprocessTask(ReprocessTask&& other);
+ ~ReprocessTask();
+ cros::mojom::Effect effect;
+ cros::mojom::CameraAppDevice::SetReprocessOptionCallback callback;
+ std::vector<cros::mojom::CameraMetadataEntryPtr> extra_metadata;
+};
+
+using ReprocessTaskQueue = base::queue<ReprocessTask>;
+
+// TODO(shik): Get the keys from VendorTagOps by names instead (b/130774415).
+constexpr uint32_t kPortraitModeVendorKey = 0x80000000;
+constexpr uint32_t kPortraitModeSegmentationResultVendorKey = 0x80000001;
+constexpr int32_t kReprocessSuccess = 0;
+
+class CAPTURE_EXPORT CameraAppDeviceImpl : public cros::mojom::CameraAppDevice {
+ public:
+ struct SizeComparator {
+ bool operator()(const gfx::Size& size_1, const gfx::Size& size_2) const;
+ };
+
+ using GetFpsRangeCallback =
+ base::OnceCallback<void(base::Optional<gfx::Range>)>;
+
+ using ResolutionFpsRangeMap =
+ base::flat_map<gfx::Size, gfx::Range, SizeComparator>;
+
+ static int GetReprocessReturnCode(
+ cros::mojom::Effect effect,
+ const cros::mojom::CameraMetadataPtr* metadata);
+
+ static ReprocessTaskQueue GetSingleShotReprocessOptions(
+ media::mojom::ImageCapture::TakePhotoCallback take_photo_callback);
+
+ CameraAppDeviceImpl(const std::string& device_id,
+ cros::mojom::CameraInfoPtr camera_info);
+ ~CameraAppDeviceImpl() override;
+
+ void BindReceiver(
+ mojo::PendingReceiver<cros::mojom::CameraAppDevice> receiver);
+
+ void ConsumeReprocessOptions(
+ media::mojom::ImageCapture::TakePhotoCallback take_photo_callback,
+ base::OnceCallback<void(ReprocessTaskQueue)> consumption_callback);
+
+ void GetFpsRange(const gfx::Size& resolution, GetFpsRangeCallback callback);
+
+ cros::mojom::CaptureIntent GetCaptureIntent();
+
+ void OnResultMetadataAvailable(const cros::mojom::CameraMetadataPtr& metadata,
+ const cros::mojom::StreamType stream_type);
+
+ void SetReprocessResult(SetReprocessOptionCallback callback,
+ const int32_t status,
+ media::mojom::BlobPtr blob);
+
+ // cros::mojom::CameraAppDevice implementations.
+ void GetCameraInfo(GetCameraInfoCallback callback) override;
+
+ void SetReprocessOption(cros::mojom::Effect effect,
+ SetReprocessOptionCallback callback) override;
+
+ void SetFpsRange(const gfx::Size& resolution,
+ const gfx::Range& fps_range,
+ SetFpsRangeCallback callback) override;
+
+ void SetCaptureIntent(cros::mojom::CaptureIntent capture_intent,
+ SetCaptureIntentCallback callback) override;
+
+ void AddResultMetadataObserver(
+ mojo::PendingRemote<cros::mojom::ResultMetadataObserver> observer,
+ cros::mojom::StreamType streamType,
+ AddResultMetadataObserverCallback callback) override;
+
+ void RemoveResultMetadataObserver(
+ uint32_t id,
+ RemoveResultMetadataObserverCallback callback) override;
+
+ private:
+ std::string device_id_;
+
+ mojo::ReceiverSet<cros::mojom::CameraAppDevice> receivers_;
+
+ cros::mojom::CameraInfoPtr camera_info_;
+
+ const scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
+
+ base::Lock reprocess_tasks_lock_;
+
+ base::queue<ReprocessTask> reprocess_task_queue_;
+
+ base::Lock fps_ranges_lock_;
+
+ ResolutionFpsRangeMap resolution_fps_range_map_;
+
+ base::Lock capture_intent_lock_;
+
+ cros::mojom::CaptureIntent capture_intent_;
+
+ base::Lock observers_lock_;
+
+ uint32_t next_observer_id_;
+
+ base::flat_map<uint32_t, mojo::Remote<cros::mojom::ResultMetadataObserver>>
+ observers_;
+
+ base::flat_map<cros::mojom::StreamType, base::flat_set<uint32_t>>
+ stream_observer_ids_;
+
+ std::unique_ptr<base::WeakPtrFactory<CameraAppDeviceImpl>> weak_ptr_factory_;
+
+ DISALLOW_COPY_AND_ASSIGN(CameraAppDeviceImpl);
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_APP_DEVICE_IMPL_H_
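A hypothetical client-side sketch of queuing a portrait-mode reprocess request over the CameraAppDevice remote; the remote setup is assumed, and the callback signature follows SetReprocessOptionCallback (an int32_t status plus a Blob):

  camera_app_device->SetReprocessOption(
      cros::mojom::Effect::PORTRAIT_MODE,
      base::BindOnce([](int32_t status, media::mojom::BlobPtr blob) {
        // |status| is kReprocessSuccess (0) or a segmentation result code.
      }));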
diff --git a/chromium/media/capture/video/chromeos/camera_app_device_provider_impl.cc b/chromium/media/capture/video/chromeos/camera_app_device_provider_impl.cc
new file mode 100644
index 00000000000..c99e1950258
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/camera_app_device_provider_impl.cc
@@ -0,0 +1,51 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/chromeos/camera_app_device_provider_impl.h"
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "media/base/bind_to_current_loop.h"
+
+namespace media {
+
+CameraAppDeviceProviderImpl::CameraAppDeviceProviderImpl(
+ mojo::PendingRemote<cros::mojom::CameraAppDeviceBridge> bridge,
+ DeviceIdMappingCallback mapping_callback)
+ : bridge_(std::move(bridge)),
+ mapping_callback_(std::move(mapping_callback)),
+ weak_ptr_factory_(this) {}
+
+CameraAppDeviceProviderImpl::~CameraAppDeviceProviderImpl() = default;
+
+void CameraAppDeviceProviderImpl::GetCameraAppDevice(
+ const std::string& source_id,
+ GetCameraAppDeviceCallback callback) {
+ mapping_callback_.Run(
+ source_id,
+ media::BindToCurrentLoop(base::BindOnce(
+ &CameraAppDeviceProviderImpl::GetCameraAppDeviceWithDeviceId,
+ weak_ptr_factory_.GetWeakPtr(), std::move(callback))));
+}
+
+void CameraAppDeviceProviderImpl::GetCameraAppDeviceWithDeviceId(
+ GetCameraAppDeviceCallback callback,
+ const base::Optional<std::string>& device_id) {
+ if (!device_id.has_value()) {
+ std::move(callback).Run(
+ cros::mojom::GetCameraAppDeviceStatus::ERROR_INVALID_ID,
+ mojo::NullRemote());
+ return;
+ }
+
+ bridge_->GetCameraAppDevice(*device_id, std::move(callback));
+}
+
+void CameraAppDeviceProviderImpl::IsSupported(IsSupportedCallback callback) {
+ bridge_->IsSupported(std::move(callback));
+}
+
+} // namespace media
\ No newline at end of file
diff --git a/chromium/media/capture/video/chromeos/camera_app_device_provider_impl.h b/chromium/media/capture/video/chromeos/camera_app_device_provider_impl.h
new file mode 100644
index 00000000000..b7c13e37aa4
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/camera_app_device_provider_impl.h
@@ -0,0 +1,50 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_APP_DEVICE_PROVIDER_IMPL_H_
+#define MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_APP_DEVICE_PROVIDER_IMPL_H_
+
+#include <string>
+
+#include "media/capture/capture_export.h"
+#include "media/capture/video/chromeos/mojom/camera_app.mojom.h"
+#include "mojo/public/cpp/bindings/receiver_set.h"
+
+namespace media {
+
+class CAPTURE_EXPORT CameraAppDeviceProviderImpl
+ : public cros::mojom::CameraAppDeviceProvider {
+ public:
+ using WithRealIdCallback =
+ base::OnceCallback<void(const base::Optional<std::string>&)>;
+ using DeviceIdMappingCallback =
+ base::RepeatingCallback<void(const std::string&, WithRealIdCallback)>;
+
+ CameraAppDeviceProviderImpl(
+ mojo::PendingRemote<cros::mojom::CameraAppDeviceBridge> bridge,
+ DeviceIdMappingCallback mapping_callback);
+ ~CameraAppDeviceProviderImpl() override;
+
+ // cros::mojom::CameraAppDeviceProvider implementations.
+ void GetCameraAppDevice(const std::string& source_id,
+ GetCameraAppDeviceCallback callback) override;
+ void IsSupported(IsSupportedCallback callback) override;
+
+ private:
+ void GetCameraAppDeviceWithDeviceId(
+ GetCameraAppDeviceCallback callback,
+ const base::Optional<std::string>& device_id);
+
+ mojo::Remote<cros::mojom::CameraAppDeviceBridge> bridge_;
+
+ DeviceIdMappingCallback mapping_callback_;
+
+ base::WeakPtrFactory<CameraAppDeviceProviderImpl> weak_ptr_factory_;
+
+ DISALLOW_COPY_AND_ASSIGN(CameraAppDeviceProviderImpl);
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_APP_DEVICE_PROVIDER_IMPL_H_
\ No newline at end of file
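A rough construction sketch for the provider on the browser/renderer side; MapSourceIdToDeviceId is an assumed helper that asynchronously resolves a blink source id to the real device id, and the remotes/callbacks shown are placeholders:

  auto provider = std::make_unique<media::CameraAppDeviceProviderImpl>(
      std::move(bridge_pending_remote),
      base::BindRepeating(&MapSourceIdToDeviceId));
  provider->GetCameraAppDevice(source_id, std::move(get_device_callback));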
diff --git a/chromium/media/capture/video/chromeos/camera_app_helper_impl.cc b/chromium/media/capture/video/chromeos/camera_app_helper_impl.cc
new file mode 100644
index 00000000000..69168abffb5
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/camera_app_helper_impl.cc
@@ -0,0 +1,21 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/chromeos/camera_app_helper_impl.h"
+
+namespace media {
+
+CameraAppHelperImpl::CameraAppHelperImpl(IntentCallback intent_callback)
+ : intent_callback_(std::move(intent_callback)) {}
+
+CameraAppHelperImpl::~CameraAppHelperImpl() = default;
+
+void CameraAppHelperImpl::OnIntentHandled(
+ uint32_t intent_id,
+ bool is_success,
+ const std::vector<uint8_t>& captured_data) {
+ intent_callback_.Run(intent_id, is_success, captured_data);
+}
+
+} // namespace media
\ No newline at end of file
diff --git a/chromium/media/capture/video/chromeos/camera_app_helper_impl.h b/chromium/media/capture/video/chromeos/camera_app_helper_impl.h
new file mode 100644
index 00000000000..2fd18f5455d
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/camera_app_helper_impl.h
@@ -0,0 +1,38 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_APP_HELPER_IMPL_H_
+#define MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_APP_HELPER_IMPL_H_
+
+#include <string>
+#include <vector>
+
+#include "media/capture/capture_export.h"
+#include "media/capture/video/chromeos/mojom/camera_app.mojom.h"
+#include "mojo/public/cpp/bindings/binding_set.h"
+
+namespace media {
+
+class CAPTURE_EXPORT CameraAppHelperImpl : public cros::mojom::CameraAppHelper {
+ public:
+ using IntentCallback = base::RepeatingCallback<
+ void(uint32_t, bool, const std::vector<uint8_t>&)>;
+
+ explicit CameraAppHelperImpl(IntentCallback intent_callback);
+ ~CameraAppHelperImpl() override;
+
+ // cros::mojom::CameraAppHelper implementations.
+ void OnIntentHandled(uint32_t intent_id,
+ bool is_success,
+ const std::vector<uint8_t>& captured_data) override;
+
+ private:
+ IntentCallback intent_callback_;
+
+ DISALLOW_COPY_AND_ASSIGN(CameraAppHelperImpl);
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_APP_HELPER_IMPL_H_
\ No newline at end of file
diff --git a/chromium/media/capture/video/chromeos/camera_buffer_factory.h b/chromium/media/capture/video/chromeos/camera_buffer_factory.h
index f17893b31f5..ded3d310755 100644
--- a/chromium/media/capture/video/chromeos/camera_buffer_factory.h
+++ b/chromium/media/capture/video/chromeos/camera_buffer_factory.h
@@ -8,7 +8,7 @@
#include <memory>
#include <unordered_map>
-#include "media/capture/video/chromeos/mojo/camera3.mojom.h"
+#include "media/capture/video/chromeos/mojom/camera3.mojom.h"
#include "media/capture/video/chromeos/pixel_format_utils.h"
#include "media/capture/video_capture_types.h"
#include "ui/gfx/buffer_types.h"
diff --git a/chromium/media/capture/video/chromeos/camera_device_delegate.cc b/chromium/media/capture/video/chromeos/camera_device_delegate.cc
index 2144cf8b7c8..84dbb8ee5f4 100644
--- a/chromium/media/capture/video/chromeos/camera_device_delegate.cc
+++ b/chromium/media/capture/video/chromeos/camera_device_delegate.cc
@@ -24,7 +24,6 @@
#include "media/capture/video/chromeos/camera_device_context.h"
#include "media/capture/video/chromeos/camera_hal_delegate.h"
#include "media/capture/video/chromeos/camera_metadata_utils.h"
-#include "media/capture/video/chromeos/reprocess_manager.h"
#include "media/capture/video/chromeos/request_manager.h"
namespace media {
@@ -162,12 +161,11 @@ CameraDeviceDelegate::CameraDeviceDelegate(
VideoCaptureDeviceDescriptor device_descriptor,
scoped_refptr<CameraHalDelegate> camera_hal_delegate,
scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner,
- ReprocessManager* reprocess_manager)
+ CameraAppDeviceImpl* camera_app_device)
: device_descriptor_(device_descriptor),
camera_hal_delegate_(std::move(camera_hal_delegate)),
ipc_task_runner_(std::move(ipc_task_runner)),
- reprocess_manager_(reprocess_manager),
- weak_ptr_factory_(this) {}
+ camera_app_device_(camera_app_device) {}
CameraDeviceDelegate::~CameraDeviceDelegate() = default;
@@ -221,8 +219,6 @@ void CameraDeviceDelegate::StopAndDeAllocate(
base::OnceClosure device_close_callback) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
- reprocess_manager_->Flush(device_descriptor_.device_id);
-
if (!device_context_ ||
device_context_->GetState() == CameraDeviceContext::State::kStopped ||
(device_context_->GetState() == CameraDeviceContext::State::kError &&
@@ -490,7 +486,8 @@ void CameraDeviceDelegate::Initialize() {
std::make_unique<StreamCaptureInterfaceImpl>(GetWeakPtr()),
device_context_, chrome_capture_params_.buffer_type,
std::make_unique<CameraBufferFactory>(),
- base::BindRepeating(&RotateAndBlobify), ipc_task_runner_);
+ base::BindRepeating(&RotateAndBlobify), ipc_task_runner_,
+ camera_app_device_);
camera_3a_controller_ = std::make_unique<Camera3AController>(
static_metadata_, request_manager_.get(), ipc_task_runner_);
device_ops_->Initialize(
@@ -516,7 +513,24 @@ void CameraDeviceDelegate::OnInitialized(int32_t result) {
return;
}
device_context_->SetState(CameraDeviceContext::State::kInitialized);
- ConfigureStreams(false, base::nullopt);
+ bool require_photo = [&] {
+ if (camera_app_device_ == nullptr) {
+ return false;
+ }
+ auto capture_intent = camera_app_device_->GetCaptureIntent();
+ switch (capture_intent) {
+ case cros::mojom::CaptureIntent::DEFAULT:
+ return false;
+ case cros::mojom::CaptureIntent::STILL_CAPTURE:
+ return true;
+ case cros::mojom::CaptureIntent::VIDEO_RECORD:
+ return false;
+ default:
+ NOTREACHED() << "Unknown capture intent: " << capture_intent;
+ return false;
+ }
+ }();
+ ConfigureStreams(require_photo, base::nullopt);
}
void CameraDeviceDelegate::ConfigureStreams(
@@ -782,13 +796,16 @@ void CameraDeviceDelegate::OnConstructedDefaultPreviewRequestSettings(
FROM_HERE, "Failed to get default request settings");
return;
}
- reprocess_manager_->GetFpsRange(
- device_descriptor_.device_id,
- chrome_capture_params_.requested_format.frame_size.width(),
- chrome_capture_params_.requested_format.frame_size.height(),
- media::BindToCurrentLoop(
- base::BindOnce(&CameraDeviceDelegate::OnGotFpsRange, GetWeakPtr(),
- std::move(settings))));
+
+ if (camera_app_device_) {
+ camera_app_device_->GetFpsRange(
+ chrome_capture_params_.requested_format.frame_size,
+ media::BindToCurrentLoop(
+ base::BindOnce(&CameraDeviceDelegate::OnGotFpsRange, GetWeakPtr(),
+ std::move(settings))));
+ } else {
+ OnGotFpsRange(std::move(settings), {});
+ }
}
void CameraDeviceDelegate::OnConstructedDefaultStillCaptureRequestSettings(
@@ -796,15 +813,21 @@ void CameraDeviceDelegate::OnConstructedDefaultStillCaptureRequestSettings(
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
while (!take_photo_callbacks_.empty()) {
- reprocess_manager_->ConsumeReprocessOptions(
- device_descriptor_.device_id,
- base::BindOnce(
- &TakePhotoCallbackBundle, std::move(take_photo_callbacks_.front()),
- base::BindOnce(&Camera3AController::SetAutoFocusModeForStillCapture,
- camera_3a_controller_->GetWeakPtr())),
- media::BindToCurrentLoop(base::BindOnce(&RequestManager::TakePhoto,
- request_manager_->GetWeakPtr(),
- settings.Clone())));
+ auto take_photo_callback = base::BindOnce(
+ &TakePhotoCallbackBundle, std::move(take_photo_callbacks_.front()),
+ base::BindOnce(&Camera3AController::SetAutoFocusModeForStillCapture,
+ camera_3a_controller_->GetWeakPtr()));
+ if (camera_app_device_) {
+ camera_app_device_->ConsumeReprocessOptions(
+ std::move(take_photo_callback),
+ media::BindToCurrentLoop(base::BindOnce(
+ &RequestManager::TakePhoto, request_manager_->GetWeakPtr(),
+ settings.Clone())));
+ } else {
+ request_manager_->TakePhoto(
+ settings.Clone(), CameraAppDeviceImpl::GetSingleShotReprocessOptions(
+ std::move(take_photo_callback)));
+ }
take_photo_callbacks_.pop();
}
}
diff --git a/chromium/media/capture/video/chromeos/camera_device_delegate.h b/chromium/media/capture/video/chromeos/camera_device_delegate.h
index b1cca24168c..36adf9f5eeb 100644
--- a/chromium/media/capture/video/chromeos/camera_device_delegate.h
+++ b/chromium/media/capture/video/chromeos/camera_device_delegate.h
@@ -11,8 +11,8 @@
#include "base/memory/weak_ptr.h"
#include "base/optional.h"
#include "base/single_thread_task_runner.h"
-#include "media/capture/video/chromeos/mojo/camera3.mojom.h"
-#include "media/capture/video/chromeos/mojo/camera_common.mojom.h"
+#include "media/capture/video/chromeos/mojom/camera3.mojom.h"
+#include "media/capture/video/chromeos/mojom/camera_common.mojom.h"
#include "media/capture/video/video_capture_device.h"
#include "media/capture/video_capture_types.h"
#include "ui/gfx/geometry/size.h"
@@ -21,9 +21,9 @@
namespace media {
class Camera3AController;
+class CameraAppDeviceImpl;
class CameraDeviceContext;
class CameraHalDelegate;
-class ReprocessManager;
class RequestManager;
enum class StreamType : uint64_t {
@@ -76,7 +76,7 @@ class CAPTURE_EXPORT CameraDeviceDelegate final {
VideoCaptureDeviceDescriptor device_descriptor,
scoped_refptr<CameraHalDelegate> camera_hal_delegate,
scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner,
- ReprocessManager* reprocess_manager);
+ CameraAppDeviceImpl* camera_app_device);
~CameraDeviceDelegate();
@@ -203,9 +203,9 @@ class CAPTURE_EXPORT CameraDeviceDelegate final {
VideoCaptureDevice::SetPhotoOptionsCallback set_photo_option_callback_;
- ReprocessManager* reprocess_manager_; // weak
+ CameraAppDeviceImpl* camera_app_device_; // Weak.
- base::WeakPtrFactory<CameraDeviceDelegate> weak_ptr_factory_;
+ base::WeakPtrFactory<CameraDeviceDelegate> weak_ptr_factory_{this};
DISALLOW_IMPLICIT_CONSTRUCTORS(CameraDeviceDelegate);
};
diff --git a/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc b/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc
index 6fa474bd4e1..78325727604 100644
--- a/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc
+++ b/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc
@@ -13,7 +13,7 @@
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/base/bind_to_current_loop.h"
#include "media/capture/video/chromeos/camera_buffer_factory.h"
#include "media/capture/video/chromeos/camera_device_context.h"
@@ -21,7 +21,6 @@
#include "media/capture/video/chromeos/mock_camera_module.h"
#include "media/capture/video/chromeos/mock_vendor_tag_ops.h"
#include "media/capture/video/chromeos/mock_video_capture_client.h"
-#include "media/capture/video/chromeos/reprocess_manager.h"
#include "media/capture/video/chromeos/video_capture_device_factory_chromeos.h"
#include "media/capture/video/mock_gpu_memory_buffer_manager.h"
#include "testing/gmock/include/gmock/gmock.h"
@@ -128,13 +127,11 @@ class CameraDeviceDelegateTest : public ::testing::Test {
new CameraHalDelegate(hal_delegate_thread_.task_runner());
auto get_camera_info = base::BindRepeating(
&CameraHalDelegate::GetCameraInfoFromDeviceId, camera_hal_delegate_);
- reprocess_manager_ = std::make_unique<ReprocessManager>(get_camera_info);
camera_hal_delegate_->SetCameraModule(
mock_camera_module_.GetInterfacePtrInfo());
}
void TearDown() override {
- reprocess_manager_.reset();
camera_hal_delegate_->Reset();
hal_delegate_thread_.Stop();
}
@@ -146,9 +143,10 @@ class CameraDeviceDelegateTest : public ::testing::Test {
camera_hal_delegate_->GetDeviceDescriptors(&descriptors);
ASSERT_EQ(descriptors.size(), 1u);
device_delegate_thread_.Start();
+
camera_device_delegate_ = std::make_unique<CameraDeviceDelegate>(
descriptors[0], camera_hal_delegate_,
- device_delegate_thread_.task_runner(), reprocess_manager_.get());
+ device_delegate_thread_.task_runner(), nullptr);
}
void GetNumberOfFakeCameras(
@@ -221,6 +219,17 @@ class CameraDeviceDelegateTest : public ::testing::Test {
entry->data.assign(as_int8, as_int8 + entry->count * sizeof(int32_t));
static_metadata->entries->push_back(std::move(entry));
+ entry = cros::mojom::CameraMetadataEntry::New();
+ entry->index = 3;
+ entry->tag =
+ cros::mojom::CameraMetadataTag::ANDROID_REQUEST_PIPELINE_MAX_DEPTH;
+ entry->type = cros::mojom::EntryType::TYPE_BYTE;
+ entry->count = 1;
+ uint8_t pipeline_max_depth = 1;
+ entry->data.assign(&pipeline_max_depth,
+ &pipeline_max_depth + entry->count * sizeof(uint8_t));
+ static_metadata->entries->push_back(std::move(entry));
+
switch (camera_id) {
case 0:
camera_info->facing = cros::mojom::CameraFacing::CAMERA_FACING_FRONT;
@@ -453,7 +462,7 @@ class CameraDeviceDelegateTest : public ::testing::Test {
}
protected:
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
scoped_refptr<CameraHalDelegate> camera_hal_delegate_;
std::unique_ptr<CameraDeviceDelegate> camera_device_delegate_;
@@ -465,8 +474,6 @@ class CameraDeviceDelegateTest : public ::testing::Test {
mojo::Binding<cros::mojom::Camera3DeviceOps> mock_camera_device_binding_;
cros::mojom::Camera3CallbackOpsPtr callback_ops_;
- std::unique_ptr<ReprocessManager> reprocess_manager_;
-
base::Thread device_delegate_thread_;
std::unique_ptr<CameraDeviceContext> device_context_;
diff --git a/chromium/media/capture/video/chromeos/camera_hal_delegate.cc b/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
index 6990532c9b1..ed97f81e0b4 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
+++ b/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
@@ -19,10 +19,10 @@
#include "base/strings/string_piece.h"
#include "base/strings/string_split.h"
#include "base/system/system_monitor.h"
+#include "media/capture/video/chromeos/camera_app_device_bridge_impl.h"
#include "media/capture/video/chromeos/camera_buffer_factory.h"
#include "media/capture/video/chromeos/camera_hal_dispatcher_impl.h"
#include "media/capture/video/chromeos/camera_metadata_utils.h"
-#include "media/capture/video/chromeos/reprocess_manager.h"
#include "media/capture/video/chromeos/video_capture_device_chromeos_halv3.h"
namespace media {
@@ -129,21 +129,37 @@ void CameraHalDelegate::Reset() {
std::unique_ptr<VideoCaptureDevice> CameraHalDelegate::CreateDevice(
scoped_refptr<base::SingleThreadTaskRunner> task_runner_for_screen_observer,
const VideoCaptureDeviceDescriptor& device_descriptor,
- ReprocessManager* reprocess_manager) {
+ CameraAppDeviceBridgeImpl* camera_app_device_bridge) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- std::unique_ptr<VideoCaptureDevice> capture_device;
if (!UpdateBuiltInCameraInfo()) {
- return capture_device;
+ return nullptr;
}
int camera_id = GetCameraIdFromDeviceId(device_descriptor.device_id);
if (camera_id == -1) {
LOG(ERROR) << "Invalid camera device: " << device_descriptor.device_id;
- return capture_device;
+ return nullptr;
+ }
+
+ if (camera_app_device_bridge) {
+ auto* camera_app_device = camera_app_device_bridge->GetCameraAppDevice(
+ device_descriptor.device_id);
+ // Since the cleanup callback will be triggered when the VideoCaptureDevice dies
+ // and |camera_app_device_bridge| is actually owned by
+ // VideoCaptureServiceImpl, it should be safe to assume
+ // |camera_app_device_bridge| is still valid here.
+ auto cleanup_callback = base::BindOnce(
+ [](const std::string& device_id, CameraAppDeviceBridgeImpl* bridge) {
+ bridge->OnDeviceClosed(device_id);
+ },
+ device_descriptor.device_id, camera_app_device_bridge);
+ return std::make_unique<VideoCaptureDeviceChromeOSHalv3>(
+ std::move(task_runner_for_screen_observer), device_descriptor, this,
+ camera_app_device, std::move(cleanup_callback));
+ } else {
+ return std::make_unique<VideoCaptureDeviceChromeOSHalv3>(
+ std::move(task_runner_for_screen_observer), device_descriptor, this,
+ nullptr, base::DoNothing());
}
- capture_device.reset(new VideoCaptureDeviceChromeOSHalv3(
- std::move(task_runner_for_screen_observer), device_descriptor, this,
- reprocess_manager));
- return capture_device;
}
void CameraHalDelegate::GetSupportedFormats(
diff --git a/chromium/media/capture/video/chromeos/camera_hal_delegate.h b/chromium/media/capture/video/chromeos/camera_hal_delegate.h
index f8034c787de..83795c3b4a9 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_delegate.h
+++ b/chromium/media/capture/video/chromeos/camera_hal_delegate.h
@@ -15,8 +15,8 @@
#include "base/synchronization/lock.h"
#include "base/synchronization/waitable_event.h"
#include "base/threading/thread.h"
-#include "media/capture/video/chromeos/mojo/camera3.mojom.h"
-#include "media/capture/video/chromeos/mojo/camera_common.mojom.h"
+#include "media/capture/video/chromeos/mojom/camera3.mojom.h"
+#include "media/capture/video/chromeos/mojom/camera_common.mojom.h"
#include "media/capture/video/chromeos/vendor_tag_ops_delegate.h"
#include "media/capture/video/video_capture_device_factory.h"
#include "media/capture/video_capture_types.h"
@@ -24,8 +24,8 @@
namespace media {
+class CameraAppDeviceBridgeImpl;
class CameraBufferFactory;
-class ReprocessManager;
// CameraHalDelegate is the component which does Mojo IPCs to the camera HAL
// process on Chrome OS to access the module-level camera functionalities such
@@ -59,7 +59,7 @@ class CAPTURE_EXPORT CameraHalDelegate final
scoped_refptr<base::SingleThreadTaskRunner>
task_runner_for_screen_observer,
const VideoCaptureDeviceDescriptor& device_descriptor,
- ReprocessManager* reprocess_manager);
+ CameraAppDeviceBridgeImpl* app_device_bridge);
void GetSupportedFormats(
const VideoCaptureDeviceDescriptor& device_descriptor,
VideoCaptureFormats* supported_formats);
diff --git a/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc b/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc
index 7c38d0ac083..c36ec68489f 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc
+++ b/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc
@@ -12,7 +12,7 @@
#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/capture/video/chromeos/mock_camera_module.h"
#include "media/capture/video/chromeos/mock_vendor_tag_ops.h"
#include "media/capture/video/chromeos/video_capture_device_factory_chromeos.h"
@@ -59,7 +59,7 @@ class CameraHalDelegateTest : public ::testing::Test {
}
protected:
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
scoped_refptr<CameraHalDelegate> camera_hal_delegate_;
testing::StrictMock<unittest_internal::MockCameraModule> mock_camera_module_;
testing::StrictMock<unittest_internal::MockVendorTagOps> mock_vendor_tag_ops_;
diff --git a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc
index bb42d28cbb4..ba1634c2ed0 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc
+++ b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.cc
@@ -21,7 +21,7 @@
#include "base/strings/string_number_conversions.h"
#include "base/synchronization/waitable_event.h"
#include "base/trace_event/trace_event.h"
-#include "media/capture/video/chromeos/mojo/camera_common.mojom.h"
+#include "media/capture/video/chromeos/mojom/camera_common.mojom.h"
#include "mojo/public/cpp/platform/named_platform_channel.h"
#include "mojo/public/cpp/platform/platform_channel.h"
#include "mojo/public/cpp/platform/socket_utils_posix.h"
diff --git a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h
index 828a9d35283..07d57e20528 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h
+++ b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl.h
@@ -15,7 +15,7 @@
#include "components/chromeos_camera/common/jpeg_encode_accelerator.mojom.h"
#include "components/chromeos_camera/common/mjpeg_decode_accelerator.mojom.h"
#include "media/capture/capture_export.h"
-#include "media/capture/video/chromeos/mojo/cros_camera_service.mojom.h"
+#include "media/capture/video/chromeos/mojom/cros_camera_service.mojom.h"
#include "media/capture/video/chromeos/video_capture_device_factory_chromeos.h"
#include "media/capture/video/video_capture_device_factory.h"
#include "mojo/public/cpp/bindings/binding_set.h"
diff --git a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl_unittest.cc b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl_unittest.cc
index e558eb40a56..80aa63cd8b2 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl_unittest.cc
+++ b/chromium/media/capture/video/chromeos/camera_hal_dispatcher_impl_unittest.cc
@@ -10,9 +10,9 @@
#include "base/bind.h"
#include "base/run_loop.h"
#include "base/single_thread_task_runner.h"
-#include "base/test/scoped_task_environment.h"
-#include "media/capture/video/chromeos/mojo/camera_common.mojom.h"
-#include "media/capture/video/chromeos/mojo/cros_camera_service.mojom.h"
+#include "base/test/task_environment.h"
+#include "media/capture/video/chromeos/mojom/camera_common.mojom.h"
+#include "media/capture/video/chromeos/mojom/cros_camera_service.mojom.h"
#include "mojo/public/cpp/bindings/strong_binding.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -122,7 +122,7 @@ class CameraHalDispatcherImplTest : public ::testing::Test {
CameraHalDispatcherImpl* dispatcher_;
private:
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
std::unique_ptr<base::RunLoop> run_loop_;
DISALLOW_COPY_AND_ASSIGN(CameraHalDispatcherImplTest);
};
diff --git a/chromium/media/capture/video/chromeos/camera_metadata_utils.cc b/chromium/media/capture/video/chromeos/camera_metadata_utils.cc
index cfd2fded87c..4b2fc0c910c 100644
--- a/chromium/media/capture/video/chromeos/camera_metadata_utils.cc
+++ b/chromium/media/capture/video/chromeos/camera_metadata_utils.cc
@@ -121,6 +121,7 @@ void MergeMetadata(cros::mojom::CameraMetadataPtr* to,
tags.insert(entry->tag);
(*to)->entries->push_back(entry->Clone());
}
+ SortCameraMetadata(to);
}
} // namespace media
diff --git a/chromium/media/capture/video/chromeos/camera_metadata_utils.h b/chromium/media/capture/video/chromeos/camera_metadata_utils.h
index fcf7b874120..43fe76d86db 100644
--- a/chromium/media/capture/video/chromeos/camera_metadata_utils.h
+++ b/chromium/media/capture/video/chromeos/camera_metadata_utils.h
@@ -6,7 +6,7 @@
#define MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_METADATA_UTILS_H_
#include "media/capture/capture_export.h"
-#include "media/capture/video/chromeos/mojo/camera_metadata.mojom.h"
+#include "media/capture/video/chromeos/mojom/camera_metadata.mojom.h"
namespace media {
diff --git a/chromium/media/capture/video/chromeos/cros_image_capture_impl.cc b/chromium/media/capture/video/chromeos/cros_image_capture_impl.cc
deleted file mode 100644
index e8f161ff326..00000000000
--- a/chromium/media/capture/video/chromeos/cros_image_capture_impl.cc
+++ /dev/null
@@ -1,61 +0,0 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/capture/video/chromeos/cros_image_capture_impl.h"
-
-#include <string>
-#include <utility>
-#include <vector>
-
-#include "base/task/post_task.h"
-#include "media/base/bind_to_current_loop.h"
-
-namespace media {
-
-CrosImageCaptureImpl::CrosImageCaptureImpl(ReprocessManager* reprocess_manager)
- : reprocess_manager_(reprocess_manager) {}
-
-CrosImageCaptureImpl::~CrosImageCaptureImpl() = default;
-
-void CrosImageCaptureImpl::GetCameraInfo(const std::string& device_id,
- GetCameraInfoCallback callback) {
- reprocess_manager_->GetCameraInfo(
- device_id, media::BindToCurrentLoop(base::BindOnce(
- &CrosImageCaptureImpl::OnGotCameraInfo,
- base::Unretained(this), std::move(callback))));
-}
-
-void CrosImageCaptureImpl::SetReprocessOption(
- const std::string& device_id,
- cros::mojom::Effect effect,
- SetReprocessOptionCallback callback) {
- reprocess_manager_->SetReprocessOption(
- device_id, effect, media::BindToCurrentLoop(std::move(callback)));
-}
-
-void CrosImageCaptureImpl::SetFpsRange(const std::string& device_id,
- const uint32_t stream_width,
- const uint32_t stream_height,
- const int32_t min_fps,
- const int32_t max_fps,
- SetFpsRangeCallback callback) {
- reprocess_manager_->SetFpsRange(
- device_id, stream_width, stream_height, min_fps, max_fps,
- media::BindToCurrentLoop(std::move(callback)));
-}
-
-void CrosImageCaptureImpl::OnGotCameraInfo(
- GetCameraInfoCallback callback,
- cros::mojom::CameraInfoPtr camera_info) {
- std::move(callback).Run(std::move(camera_info));
-}
-
-void CrosImageCaptureImpl::OnIntentHandled(
- uint32_t intent_id,
- bool is_success,
- const std::vector<uint8_t>& captured_data) {
- NOTREACHED() << "Should be handled in RendererFacingCrosImageCapture";
-}
-
-} // namespace media
diff --git a/chromium/media/capture/video/chromeos/cros_image_capture_impl.h b/chromium/media/capture/video/chromeos/cros_image_capture_impl.h
deleted file mode 100644
index c122c2edf49..00000000000
--- a/chromium/media/capture/video/chromeos/cros_image_capture_impl.h
+++ /dev/null
@@ -1,56 +0,0 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_CROS_IMAGE_CAPTURE_IMPL_H_
-#define MEDIA_CAPTURE_VIDEO_CHROMEOS_CROS_IMAGE_CAPTURE_IMPL_H_
-
-#include <string>
-
-#include "media/capture/video/chromeos/mojo/camera_common.mojom.h"
-#include "media/capture/video/chromeos/mojo/cros_image_capture.mojom.h"
-#include "media/capture/video/chromeos/reprocess_manager.h"
-#include "mojo/public/cpp/bindings/binding_set.h"
-
-namespace media {
-
-class CrosImageCaptureImpl : public cros::mojom::CrosImageCapture {
- public:
- explicit CrosImageCaptureImpl(ReprocessManager* reprocess_manager);
-
- ~CrosImageCaptureImpl() override;
-
- void BindRequest(cros::mojom::CrosImageCaptureRequest request);
-
- // cros::mojom::CrosImageCapture implementations.
-
- void GetCameraInfo(const std::string& device_id,
- GetCameraInfoCallback callback) override;
-
- void SetReprocessOption(const std::string& device_id,
- cros::mojom::Effect effect,
- SetReprocessOptionCallback callback) override;
-
- void SetFpsRange(const std::string& device_id,
- const uint32_t stream_width,
- const uint32_t stream_height,
- const int32_t min_fps,
- const int32_t max_fps,
- SetFpsRangeCallback callback) override;
-
- void OnIntentHandled(uint32_t intent_id,
- bool is_success,
- const std::vector<uint8_t>& captured_data) override;
-
- private:
- void OnGotCameraInfo(GetCameraInfoCallback callback,
- cros::mojom::CameraInfoPtr camera_info);
-
- ReprocessManager* reprocess_manager_; // weak
-
- DISALLOW_COPY_AND_ASSIGN(CrosImageCaptureImpl);
-};
-
-} // namespace media
-
-#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_CROS_IMAGE_CAPTURE_IMPL_H_
diff --git a/chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.cc b/chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.cc
index 949ba343c25..dc3b45df656 100644
--- a/chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.cc
+++ b/chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.cc
@@ -50,16 +50,15 @@ GpuMemoryBufferTracker::GetMemoryMappedAccess() {
return std::make_unique<NullHandle>();
}
-mojo::ScopedSharedBufferHandle GpuMemoryBufferTracker::GetHandleForTransit(
- bool read_only) {
+base::UnsafeSharedMemoryRegion
+GpuMemoryBufferTracker::DuplicateAsUnsafeRegion() {
NOTREACHED() << "Unsupported operation";
- return mojo::ScopedSharedBufferHandle();
+ return base::UnsafeSharedMemoryRegion();
}
-base::SharedMemoryHandle
-GpuMemoryBufferTracker::GetNonOwnedSharedMemoryHandleForLegacyIPC() {
+mojo::ScopedSharedBufferHandle GpuMemoryBufferTracker::DuplicateAsMojoBuffer() {
NOTREACHED() << "Unsupported operation";
- return base::SharedMemoryHandle();
+ return mojo::ScopedSharedBufferHandle();
}
gfx::GpuMemoryBufferHandle GpuMemoryBufferTracker::GetGpuMemoryBufferHandle() {
diff --git a/chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.h b/chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.h
index 048ba8f9548..e0817af43e2 100644
--- a/chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.h
+++ b/chromium/media/capture/video/chromeos/gpu_memory_buffer_tracker.h
@@ -30,8 +30,8 @@ class CAPTURE_EXPORT GpuMemoryBufferTracker final
const mojom::PlaneStridesPtr& strides) override;
uint32_t GetMemorySizeInBytes() override;
std::unique_ptr<VideoCaptureBufferHandle> GetMemoryMappedAccess() override;
- mojo::ScopedSharedBufferHandle GetHandleForTransit(bool read_only) override;
- base::SharedMemoryHandle GetNonOwnedSharedMemoryHandleForLegacyIPC() override;
+ base::UnsafeSharedMemoryRegion DuplicateAsUnsafeRegion() override;
+ mojo::ScopedSharedBufferHandle DuplicateAsMojoBuffer() override;
gfx::GpuMemoryBufferHandle GetGpuMemoryBufferHandle() override;
private:
diff --git a/chromium/media/capture/video/chromeos/local_gpu_memory_buffer_manager.cc b/chromium/media/capture/video/chromeos/local_gpu_memory_buffer_manager.cc
index ef82d6926f1..0f13455f31a 100644
--- a/chromium/media/capture/video/chromeos/local_gpu_memory_buffer_manager.cc
+++ b/chromium/media/capture/video/chromeos/local_gpu_memory_buffer_manager.cc
@@ -10,6 +10,8 @@
#include <stdint.h>
#include <xf86drm.h>
+#include <vector>
+
#include "base/logging.h"
#include "base/numerics/safe_conversions.h"
#include "base/trace_event/memory_allocator_dump_guid.h"
@@ -74,6 +76,19 @@ uint32_t GetDrmFormat(gfx::BufferFormat gfx_format) {
}
}
+uint32_t GetGbmUsage(gfx::BufferUsage usage) {
+ switch (usage) {
+ case gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE:
+ case gfx::BufferUsage::CAMERA_AND_CPU_READ_WRITE:
+ return GBM_BO_USE_LINEAR | GBM_BO_USE_CAMERA_READ |
+ GBM_BO_USE_CAMERA_WRITE;
+ case gfx::BufferUsage::SCANOUT_CPU_READ_WRITE:
+ return GBM_BO_USE_LINEAR;
+ default:
+ return 0;
+ }
+}
+
class GpuMemoryBufferImplGbm : public gfx::GpuMemoryBuffer {
public:
GpuMemoryBufferImplGbm(gfx::BufferFormat format, gbm_bo* buffer_object)
@@ -223,35 +238,33 @@ LocalGpuMemoryBufferManager::CreateGpuMemoryBuffer(
gfx::BufferFormat format,
gfx::BufferUsage usage,
gpu::SurfaceHandle surface_handle) {
- if (usage != gfx::BufferUsage::SCANOUT_CAMERA_READ_WRITE &&
- usage != gfx::BufferUsage::CAMERA_AND_CPU_READ_WRITE) {
- LOG(ERROR) << "Unsupported usage " << gfx::BufferUsageToString(usage);
- return std::unique_ptr<gfx::GpuMemoryBuffer>();
- }
if (!gbm_device_) {
LOG(ERROR) << "Invalid GBM device";
- return std::unique_ptr<gfx::GpuMemoryBuffer>();
+ return nullptr;
}
- uint32_t drm_format = GetDrmFormat(format);
- uint32_t camera_gbm_usage =
- GBM_BO_USE_LINEAR | GBM_BO_USE_CAMERA_READ | GBM_BO_USE_CAMERA_WRITE;
+ const uint32_t drm_format = GetDrmFormat(format);
if (!drm_format) {
LOG(ERROR) << "Unable to convert gfx::BufferFormat "
<< static_cast<int>(format) << " to DRM format";
- return std::unique_ptr<gfx::GpuMemoryBuffer>();
+ return nullptr;
}
- if (!gbm_device_is_format_supported(gbm_device_, drm_format,
- camera_gbm_usage)) {
- return std::unique_ptr<gfx::GpuMemoryBuffer>();
+ const uint32_t gbm_usage = GetGbmUsage(usage);
+ if (gbm_usage == 0) {
+ LOG(ERROR) << "Unsupported usage " << gfx::BufferUsageToString(usage);
+ return nullptr;
+ }
+
+ if (!gbm_device_is_format_supported(gbm_device_, drm_format, gbm_usage)) {
+ return nullptr;
}
- gbm_bo* buffer_object = gbm_bo_create(
- gbm_device_, size.width(), size.height(), drm_format, camera_gbm_usage);
+ gbm_bo* buffer_object = gbm_bo_create(gbm_device_, size.width(),
+ size.height(), drm_format, gbm_usage);
if (!buffer_object) {
LOG(ERROR) << "Failed to create GBM buffer object";
- return std::unique_ptr<gfx::GpuMemoryBuffer>();
+ return nullptr;
}
return std::make_unique<GpuMemoryBufferImplGbm>(format, buffer_object);
@@ -305,4 +318,16 @@ std::unique_ptr<gfx::GpuMemoryBuffer> LocalGpuMemoryBufferManager::ImportDmaBuf(
return std::make_unique<GpuMemoryBufferImplGbm>(format, buffer_object);
}
+bool LocalGpuMemoryBufferManager::IsFormatAndUsageSupported(
+ gfx::BufferFormat format,
+ gfx::BufferUsage usage) {
+ const uint32_t drm_format = GetDrmFormat(format);
+ if (!drm_format)
+ return false;
+ const uint32_t gbm_usage = GetGbmUsage(usage);
+ if (gbm_usage == 0)
+ return false;
+ return gbm_device_is_format_supported(gbm_device_, drm_format, gbm_usage);
+}
+
} // namespace media
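The local_gpu_memory_buffer_manager.cc changes above factor the usage-to-GBM-flags mapping into GetGbmUsage() and expose IsFormatAndUsageSupported() so callers can reject unsupported format/usage combinations before attempting an allocation. A minimal sketch of that shape, assuming hypothetical enum values and flag constants rather than the real gfx::BufferUsage and minigbm flags:

#include <cstdint>
#include <iostream>

enum class BufferUsage {
  kScanoutCameraReadWrite,
  kCameraAndCpuReadWrite,
  kScanoutCpuReadWrite,
  kGpuOnly,
};

constexpr uint32_t kUseLinear = 1u << 0;
constexpr uint32_t kUseCameraRead = 1u << 1;
constexpr uint32_t kUseCameraWrite = 1u << 2;

// Returns 0 for combinations the camera buffer path does not support.
uint32_t UsageToFlags(BufferUsage usage) {
  switch (usage) {
    case BufferUsage::kScanoutCameraReadWrite:
    case BufferUsage::kCameraAndCpuReadWrite:
      return kUseLinear | kUseCameraRead | kUseCameraWrite;
    case BufferUsage::kScanoutCpuReadWrite:
      return kUseLinear;
    default:
      return 0;
  }
}

bool IsUsageSupported(BufferUsage usage) {
  return UsageToFlags(usage) != 0;
}

int main() {
  std::cout << IsUsageSupported(BufferUsage::kCameraAndCpuReadWrite) << "\n";  // 1
  std::cout << IsUsageSupported(BufferUsage::kGpuOnly) << "\n";                // 0
}

Returning 0 for unsupported usages lets the same mapping double as the validity check, which is what the early "gbm_usage == 0" return in the patch relies on.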
diff --git a/chromium/media/capture/video/chromeos/local_gpu_memory_buffer_manager.h b/chromium/media/capture/video/chromeos/local_gpu_memory_buffer_manager.h
index b7a1552b058..93cf0f133a3 100644
--- a/chromium/media/capture/video/chromeos/local_gpu_memory_buffer_manager.h
+++ b/chromium/media/capture/video/chromeos/local_gpu_memory_buffer_manager.h
@@ -51,6 +51,11 @@ class CAPTURE_EXPORT LocalGpuMemoryBufferManager
const gfx::Size& size,
gfx::BufferFormat format);
+ // Returns true if the combination of |format| and |usage| is supported by
+ // CreateGpuMemoryBuffer().
+ bool IsFormatAndUsageSupported(gfx::BufferFormat format,
+ gfx::BufferUsage usage);
+
private:
gbm_device* gbm_device_;
diff --git a/chromium/media/capture/video/chromeos/mock_camera_module.h b/chromium/media/capture/video/chromeos/mock_camera_module.h
index f10d2b75cab..d8f3d47c80c 100644
--- a/chromium/media/capture/video/chromeos/mock_camera_module.h
+++ b/chromium/media/capture/video/chromeos/mock_camera_module.h
@@ -9,8 +9,8 @@
#include <stdint.h>
#include "base/threading/thread.h"
-#include "media/capture/video/chromeos/mojo/camera3.mojom.h"
-#include "media/capture/video/chromeos/mojo/camera_common.mojom.h"
+#include "media/capture/video/chromeos/mojom/camera3.mojom.h"
+#include "media/capture/video/chromeos/mojom/camera_common.mojom.h"
#include "mojo/public/cpp/bindings/binding.h"
#include "testing/gmock/include/gmock/gmock.h"
diff --git a/chromium/media/capture/video/chromeos/mock_vendor_tag_ops.h b/chromium/media/capture/video/chromeos/mock_vendor_tag_ops.h
index b39a9384a8c..1c45c3272aa 100644
--- a/chromium/media/capture/video/chromeos/mock_vendor_tag_ops.h
+++ b/chromium/media/capture/video/chromeos/mock_vendor_tag_ops.h
@@ -10,7 +10,7 @@
#include <vector>
#include "base/threading/thread.h"
-#include "media/capture/video/chromeos/mojo/camera_common.mojom.h"
+#include "media/capture/video/chromeos/mojom/camera_common.mojom.h"
#include "mojo/public/cpp/bindings/binding.h"
#include "testing/gmock/include/gmock/gmock.h"
diff --git a/chromium/media/capture/video/chromeos/mojo/cros_image_capture.mojom b/chromium/media/capture/video/chromeos/mojo/cros_image_capture.mojom
deleted file mode 100644
index 36796475804..00000000000
--- a/chromium/media/capture/video/chromeos/mojo/cros_image_capture.mojom
+++ /dev/null
@@ -1,64 +0,0 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-module cros.mojom;
-
-import "media/capture/mojom/image_capture.mojom";
-import "media/capture/video/chromeos/mojo/camera_common.mojom";
-
-// Effect that recognized by Chrome OS.
-enum Effect {
- NO_EFFECT = 0,
- PORTRAIT_MODE = 1,
-};
-
-// Interface for Chrome OS specific Image Capture API which supports reprocess
-// mechanism. The |source_id| parameter in following methods might not be the
-// actual device id if it is called by renderer. It needs to be
-// translated to the actual video device id to be used in CrosImageCapture
-// implementation.
-interface CrosImageCapture {
- // Gets camera information |camera_info| which includes camera facing,
- // characteristics, orientation, etc. The |source_id| might need translation
- // to be actual video device id. For invalid |source_id|, the returned
- // |camera_info| would be empty.
- GetCameraInfo(string source_id) => (CameraInfo? camera_info);
-
- // Sets reprocess option to bind with the coming take photo request. When this
- // method is called, the reprocess option will be queued. All reprocess
- // options in the queue will be consumed when TakePhoto() method in Image
- // Capture API is triggered and all the queued reprocess options will be bound
- // to that take photo request. The |source_id| might need translation to be
- // actual video device id.
- // The result |status| would be set to 0 for success and the corresponding
- // result will be put in |blob|. If it fails, the |status| indicates the error
- // type and |blob| might be empty. For invalid |source_id|, it returns
- // -EINVAL.
- SetReprocessOption(string source_id, Effect effect)
- => (int32 status, media.mojom.Blob? blob);
-
- // Sets the fps range for upcoming configured camera stream.
- // The |source_id| might need translation to be actual video device id.
- // The |stream_width| and |stream_height| are the target stream resolution
- // that the caller sets the fps range for.
- // The |min_fps| and |max_fps| represent the target fps range.
- // If the given fps range is valid and set successfully, |is_success| returns
- // true. If the given fps range is invalid, the fps range which is cached
- // previously will be cleared and |is_success| will return false.
- SetFpsRange(string source_id, uint32 stream_width, uint32 stream_height,
- int32 min_fps, int32 max_fps)
- => (bool is_success);
-
- // Invoked when the intent is fulfilled or is failed. For the intent which
- // expects to have result, it is fulfilled when the captured is done and is
- // failed if the session ends without finishing the capture. For the intent
- // which don't expect any result, it is fulfilled when the camera app is
- // successfully launched and is failed when the camera fails to launch.
- // |intent_id| should be the same id that was specified in the query when
- // launching the camera app. |is_success| indicates the result status of the
- // intent. The |captured_data| will be delivered to the handler as a byte
- // array.
- OnIntentHandled(uint32 intent_id, bool is_success,
- array<uint8> captured_data);
-}; \ No newline at end of file
diff --git a/chromium/media/capture/video/chromeos/mojo/BUILD.gn b/chromium/media/capture/video/chromeos/mojom/BUILD.gn
index 13052810b29..f98d89e6e0d 100644
--- a/chromium/media/capture/video/chromeos/mojo/BUILD.gn
+++ b/chromium/media/capture/video/chromeos/mojom/BUILD.gn
@@ -7,19 +7,18 @@ import("//mojo/public/tools/bindings/mojom.gni")
mojom("cros_camera") {
sources = [
"camera3.mojom",
+ "camera_app.mojom",
"camera_common.mojom",
"camera_metadata.mojom",
"camera_metadata_tags.mojom",
"cros_camera_service.mojom",
- "cros_image_capture.mojom",
]
deps = [
"//components/chromeos_camera/common",
"//media/capture/mojom:image_capture",
- "//media/mojo/interfaces",
+ "//media/mojo/mojom",
+ "//ui/gfx/geometry/mojom",
+ "//ui/gfx/range/mojom",
]
-
- # TODO(https://crbug.com/968369): Change to use new names.
- use_old_js_lite_bindings_names = true
}
diff --git a/chromium/media/capture/video/chromeos/mojo/OWNERS b/chromium/media/capture/video/chromeos/mojom/OWNERS
index 08850f42120..08850f42120 100644
--- a/chromium/media/capture/video/chromeos/mojo/OWNERS
+++ b/chromium/media/capture/video/chromeos/mojom/OWNERS
diff --git a/chromium/media/capture/video/chromeos/mojo/camera3.mojom b/chromium/media/capture/video/chromeos/mojom/camera3.mojom
index 3d103348f11..5092b16ae0f 100644
--- a/chromium/media/capture/video/chromeos/mojo/camera3.mojom
+++ b/chromium/media/capture/video/chromeos/mojom/camera3.mojom
@@ -6,7 +6,7 @@
module cros.mojom;
-import "media/capture/video/chromeos/mojo/camera_metadata.mojom";
+import "media/capture/video/chromeos/mojom/camera_metadata.mojom";
// These usages flags are defined in gralloc.h. They determine the nature of
// the buffers allocated by gralloc. Read more on:
diff --git a/chromium/media/capture/video/chromeos/mojom/camera_app.mojom b/chromium/media/capture/video/chromeos/mojom/camera_app.mojom
new file mode 100644
index 00000000000..214d73c14e5
--- /dev/null
+++ b/chromium/media/capture/video/chromeos/mojom/camera_app.mojom
@@ -0,0 +1,135 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+module cros.mojom;
+
+import "media/capture/mojom/image_capture.mojom";
+import "media/capture/video/chromeos/mojom/camera_common.mojom";
+import "media/capture/video/chromeos/mojom/camera_metadata.mojom";
+import "ui/gfx/geometry/mojom/geometry.mojom";
+import "ui/gfx/range/mojom/range.mojom";
+
+// Effects recognized by Chrome OS.
+enum Effect {
+ NO_EFFECT = 0,
+ PORTRAIT_MODE = 1,
+};
+
+// Stream types that can be observed by camera app device.
+// This is a mirror of the StreamType in camera_device_delegate.h
+enum StreamType {
+ PREVIEW_OUTPUT = 0,
+ JPEG_OUTPUT = 1,
+ YUV_INPUT = 2,
+ YUV_OUTPUT = 3,
+ UNKNOWN = 4,
+};
+
+// Status code for getting camera app device.
+enum GetCameraAppDeviceStatus {
+ SUCCESS = 0,
+ ERROR_INVALID_ID = 1,
+};
+
+// The purpose of this capture is to help the camera device decide optimal
+// configurations.
+enum CaptureIntent {
+ DEFAULT = 0,
+ VIDEO_RECORD = 1,
+ STILL_CAPTURE = 2,
+};
+
+// Interface to let Chrome Camera App (Remote) get specific CameraAppDevice from
+// Chrome (Receiver).
+interface CameraAppDeviceProvider {
+ // Gets the interface to communicate with specific camera device given by
+ // |source_id|. If the |status| is not success, the |device| would be null.
+ GetCameraAppDevice(string source_id)
+ => (GetCameraAppDeviceStatus status,
+ pending_remote<CameraAppDevice>? device);
+
+ // Checks if the device supports direct communication between camera devices
+ // and camera app. Currently only devices running camera HAL v3 support this
+ // feature.
+ IsSupported() => (bool is_supported);
+};
+
+// Interface for communication between Chrome Camera App (Remote) and Chrome
+// (Receiver).
+interface CameraAppHelper {
+ // Invoked when an Android intent from ARC++ is fulfilled or fails. An
+ // intent that expects a result is fulfilled when the capture is done and
+ // fails if the session ends without finishing the capture. An intent that
+ // does not expect any result is fulfilled when the camera app launches
+ // successfully and fails when the camera fails to launch. |intent_id|
+ // should be the same id that was specified in the query when launching the
+ // camera app. |is_success| indicates the result status of the intent. The
+ // |captured_data| is delivered to the handler as a byte array.
+ OnIntentHandled(uint32 intent_id, bool is_success,
+ array<uint8> captured_data);
+};
+
+// Inner interface used to communicate between the browser process (Remote) and
+// the Video Capture service (Receiver).
+interface CameraAppDeviceBridge {
+ // Gets the interface to communicate with specific camera device given by
+ // |device_id|. If the |status| is not success, the |device| would be null.
+ GetCameraAppDevice(string device_id)
+ => (GetCameraAppDeviceStatus status,
+ pending_remote<CameraAppDevice>? device);
+
+ // Checks if the device supports direct communication between camera devices
+ // and camera app. Currently only devices running camera HAL v3 support this
+ // feature.
+ IsSupported() => (bool is_supported);
+};
+
+// Interface for communication between Chrome Camera App (Remote) and camera
+// device (Receiver).
+interface CameraAppDevice {
+ // Gets camera information |camera_info| which includes camera facing,
+ // characteristics, orientation, etc.
+ GetCameraInfo() => (CameraInfo camera_info);
+
+ // Sets a reprocess option to bind to the coming take-photo request. When
+ // this method is called, the reprocess option is queued. All reprocess
+ // options in the queue are consumed when ImageCapture::TakePhoto() is
+ // triggered and are bound to that take-photo request.
+ SetReprocessOption(Effect effect)
+ => (int32 status, media.mojom.Blob? blob);
+
+ // Sets the fps range for upcoming configured camera stream.
+ // The caller sets the |fps_range| for target |resolution|.
+ // If the given fps range is valid and set successfully, |is_success| returns
+ // true. If the given fps range is invalid, the fps range which is cached
+ // previously will be cleared and |is_success| will return false.
+ SetFpsRange(gfx.mojom.Size resolution, gfx.mojom.Range fps_range)
+ => (bool is_success);
+
+ // Sets the intent for the upcoming capture session. The underlying video
+ // capture device should configure the streams accordingly. Returns an empty
+ // response after the intent is set, which could be used to sequence the
+ // other calls such as getUserMedia().
+ SetCaptureIntent(CaptureIntent intent) => ();
+
+ // Adds the remote of a ResultMetadataObserver to CameraAppDevice.
+ // The |observer| will be invoked remotely by the camera device.
+ AddResultMetadataObserver(pending_remote<ResultMetadataObserver> observer,
+ StreamType stream_type)
+ => (uint32 id);
+
+ // Removes a ResultMetadataObserver by supplying the id returned by
+ // AddResultMetadataObserver.
+ // If the ResultMetadataObserver is found, |is_success| returns true.
+ RemoveResultMetadataObserver(uint32 id) => (bool is_success);
+};
+
+// Interface for camera device to send camera metadata to Chrome Camera App.
+interface ResultMetadataObserver {
+ // Invoked remotely by the camera device whenever the |camera_metadata| of a
+ // frame is produced.
+ OnMetadataAvailable(CameraMetadata camera_metadata);
+};
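The SetReprocessOption() comment above describes queue-then-drain semantics: options accumulate per device until a take-photo request binds and consumes the whole queue at once. A minimal sketch of that behaviour, assuming a hypothetical ReprocessQueue class rather than the mojom-generated bindings or CameraAppDeviceImpl:

#include <iostream>
#include <queue>

enum class Effect { kNoEffect, kPortraitMode };

class ReprocessQueue {
 public:
  // Called once per SetReprocessOption(); the option just waits in line.
  void Push(Effect effect) { pending_.push(effect); }

  // Called when TakePhoto() fires; every queued option is bound to this
  // single capture and the queue is left empty.
  std::queue<Effect> ConsumeAll() {
    std::queue<Effect> out;
    out.swap(pending_);
    return out;
  }

 private:
  std::queue<Effect> pending_;
};

int main() {
  ReprocessQueue queue;
  queue.Push(Effect::kPortraitMode);
  queue.Push(Effect::kNoEffect);
  auto bound = queue.ConsumeAll();
  std::cout << "options bound to this photo: " << bound.size() << "\n";      // 2
  std::cout << "options left queued: " << queue.ConsumeAll().size() << "\n";  // 0
}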
diff --git a/chromium/media/capture/video/chromeos/mojo/camera_common.mojom b/chromium/media/capture/video/chromeos/mojom/camera_common.mojom
index 65689d61c0f..fadf3937e31 100644
--- a/chromium/media/capture/video/chromeos/mojo/camera_common.mojom
+++ b/chromium/media/capture/video/chromeos/mojom/camera_common.mojom
@@ -6,8 +6,8 @@
module cros.mojom;
-import "media/capture/video/chromeos/mojo/camera3.mojom";
-import "media/capture/video/chromeos/mojo/camera_metadata.mojom";
+import "media/capture/video/chromeos/mojom/camera3.mojom";
+import "media/capture/video/chromeos/mojom/camera_metadata.mojom";
enum CameraFacing {
CAMERA_FACING_BACK = 0,
diff --git a/chromium/media/capture/video/chromeos/mojo/camera_metadata.mojom b/chromium/media/capture/video/chromeos/mojom/camera_metadata.mojom
index 976976065e1..269dcc9d2c5 100644
--- a/chromium/media/capture/video/chromeos/mojo/camera_metadata.mojom
+++ b/chromium/media/capture/video/chromeos/mojom/camera_metadata.mojom
@@ -4,7 +4,7 @@
module cros.mojom;
-import "media/capture/video/chromeos/mojo/camera_metadata_tags.mojom";
+import "media/capture/video/chromeos/mojom/camera_metadata_tags.mojom";
enum EntryType {
TYPE_BYTE = 0,
diff --git a/chromium/media/capture/video/chromeos/mojo/camera_metadata_tags.mojom b/chromium/media/capture/video/chromeos/mojom/camera_metadata_tags.mojom
index 82e1790123a..82e1790123a 100644
--- a/chromium/media/capture/video/chromeos/mojo/camera_metadata_tags.mojom
+++ b/chromium/media/capture/video/chromeos/mojom/camera_metadata_tags.mojom
diff --git a/chromium/media/capture/video/chromeos/mojo/cros_camera_service.mojom b/chromium/media/capture/video/chromeos/mojom/cros_camera_service.mojom
index 1bcd8c97dde..be4da551bfb 100644
--- a/chromium/media/capture/video/chromeos/mojo/cros_camera_service.mojom
+++ b/chromium/media/capture/video/chromeos/mojom/cros_camera_service.mojom
@@ -8,7 +8,7 @@ module cros.mojom;
import "components/chromeos_camera/common/jpeg_encode_accelerator.mojom";
import "components/chromeos_camera/common/mjpeg_decode_accelerator.mojom";
-import "media/capture/video/chromeos/mojo/camera_common.mojom";
+import "media/capture/video/chromeos/mojom/camera_common.mojom";
// The CrOS camera HAL v3 Mojo dispatcher. The dispatcher acts as a proxy and
// waits for the server and the clients to register. There can only be one
diff --git a/chromium/media/capture/video/chromeos/pixel_format_utils.h b/chromium/media/capture/video/chromeos/pixel_format_utils.h
index 0a40d2afa57..be43491a9b4 100644
--- a/chromium/media/capture/video/chromeos/pixel_format_utils.h
+++ b/chromium/media/capture/video/chromeos/pixel_format_utils.h
@@ -8,7 +8,7 @@
#include <vector>
#include "base/optional.h"
-#include "media/capture/video/chromeos/mojo/camera3.mojom.h"
+#include "media/capture/video/chromeos/mojom/camera3.mojom.h"
#include "media/capture/video_capture_types.h"
#include "ui/gfx/buffer_types.h"
diff --git a/chromium/media/capture/video/chromeos/renderer_facing_cros_image_capture.cc b/chromium/media/capture/video/chromeos/renderer_facing_cros_image_capture.cc
deleted file mode 100644
index db146bfb7cd..00000000000
--- a/chromium/media/capture/video/chromeos/renderer_facing_cros_image_capture.cc
+++ /dev/null
@@ -1,111 +0,0 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/capture/video/chromeos/renderer_facing_cros_image_capture.h"
-
-#include <errno.h>
-
-#include <memory>
-#include <string>
-#include <utility>
-#include <vector>
-
-#include "base/task/post_task.h"
-#include "media/base/bind_to_current_loop.h"
-#include "media/capture/mojom/image_capture.mojom.h"
-#include "media/capture/video/chromeos/mojo/camera_common.mojom.h"
-
-namespace media {
-
-RendererFacingCrosImageCapture::RendererFacingCrosImageCapture(
- cros::mojom::CrosImageCapturePtr api_ptr,
- DeviceIdMappingCallback mapping_callback,
- IntentCallback intent_callback)
- : cros_image_capture_(std::move(api_ptr)),
- mapping_callback_(std::move(mapping_callback)),
- intent_callback_(std::move(intent_callback)),
- weak_ptr_factory_(this) {}
-
-RendererFacingCrosImageCapture::~RendererFacingCrosImageCapture() = default;
-
-void RendererFacingCrosImageCapture::GetCameraInfoWithRealId(
- GetCameraInfoCallback callback,
- const base::Optional<std::string>& device_id) {
- if (!device_id.has_value()) {
- std::move(callback).Run({});
- return;
- }
- cros_image_capture_->GetCameraInfo(*device_id, std::move(callback));
-}
-
-void RendererFacingCrosImageCapture::SetReprocessOptionWithRealId(
- cros::mojom::Effect effect,
- SetReprocessOptionCallback callback,
- const base::Optional<std::string>& device_id) {
- if (!device_id.has_value()) {
- std::move(callback).Run(-EINVAL, {});
- return;
- }
- cros_image_capture_->SetReprocessOption(*device_id, effect,
- std::move(callback));
-}
-
-void RendererFacingCrosImageCapture::SetFpsRangeWithRealId(
- const uint32_t stream_width,
- const uint32_t stream_height,
- const int32_t min_frame_rate,
- const int32_t max_frame_rate,
- SetFpsRangeCallback callback,
- const base::Optional<std::string>& device_id) {
- if (!device_id.has_value()) {
- std::move(callback).Run(false);
- return;
- }
- cros_image_capture_->SetFpsRange(*device_id, stream_width, stream_height,
- min_frame_rate, max_frame_rate,
- std::move(callback));
-}
-
-void RendererFacingCrosImageCapture::GetCameraInfo(
- const std::string& source_id,
- GetCameraInfoCallback callback) {
- mapping_callback_.Run(
- source_id, media::BindToCurrentLoop(base::BindOnce(
- &RendererFacingCrosImageCapture::GetCameraInfoWithRealId,
- weak_ptr_factory_.GetWeakPtr(), std::move(callback))));
-}
-
-void RendererFacingCrosImageCapture::SetReprocessOption(
- const std::string& source_id,
- cros::mojom::Effect effect,
- SetReprocessOptionCallback callback) {
- mapping_callback_.Run(
- source_id,
- media::BindToCurrentLoop(base::BindOnce(
- &RendererFacingCrosImageCapture::SetReprocessOptionWithRealId,
- weak_ptr_factory_.GetWeakPtr(), effect, std::move(callback))));
-}
-
-void RendererFacingCrosImageCapture::SetFpsRange(const std::string& source_id,
- const uint32_t stream_width,
- const uint32_t stream_height,
- const int32_t min_frame_rate,
- const int32_t max_frame_rate,
- SetFpsRangeCallback callback) {
- mapping_callback_.Run(
- source_id,
- media::BindToCurrentLoop(base::BindOnce(
- &RendererFacingCrosImageCapture::SetFpsRangeWithRealId,
- weak_ptr_factory_.GetWeakPtr(), stream_width, stream_height,
- min_frame_rate, max_frame_rate, std::move(callback))));
-}
-
-void RendererFacingCrosImageCapture::OnIntentHandled(
- uint32_t intent_id,
- bool is_success,
- const std::vector<uint8_t>& captured_data) {
- intent_callback_.Run(intent_id, is_success, captured_data);
-}
-
-} // namespace media \ No newline at end of file
diff --git a/chromium/media/capture/video/chromeos/renderer_facing_cros_image_capture.h b/chromium/media/capture/video/chromeos/renderer_facing_cros_image_capture.h
deleted file mode 100644
index 21f8e1019ce..00000000000
--- a/chromium/media/capture/video/chromeos/renderer_facing_cros_image_capture.h
+++ /dev/null
@@ -1,85 +0,0 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_RENDERER_FACING_CROS_IMAGE_CAPTURE_H_
-#define MEDIA_CAPTURE_VIDEO_CHROMEOS_RENDERER_FACING_CROS_IMAGE_CAPTURE_H_
-
-#include <string>
-
-#include "media/capture/capture_export.h"
-#include "media/capture/video/chromeos/mojo/cros_image_capture.mojom.h"
-#include "mojo/public/cpp/bindings/binding_set.h"
-
-namespace media {
-
-// Intermediate layer for communicating from renderer to CrosImageCapture
-// implementation. It will map the source id recognized by renderer to the
-// actual video device id.
-class CAPTURE_EXPORT RendererFacingCrosImageCapture
- : public cros::mojom::CrosImageCapture {
- public:
- using WithRealIdCallback =
- base::OnceCallback<void(const base::Optional<std::string>&)>;
- using DeviceIdMappingCallback =
- base::RepeatingCallback<void(const std::string&, WithRealIdCallback)>;
- using IntentCallback = base::RepeatingCallback<
- void(uint32_t, bool, const std::vector<uint8_t>&)>;
-
- // Create an intermediate layer between renderer to the actual
- // CrosImageCapture implementation. This class should use |api_ptr| to
- // communicate with the actual CrosImageCapture implementation and use
- // |mapping_callback| to map the device id for every calls that inputs device
- // id.
- RendererFacingCrosImageCapture(cros::mojom::CrosImageCapturePtr api_ptr,
- DeviceIdMappingCallback mapping_callback,
- IntentCallback intent_callback);
- ~RendererFacingCrosImageCapture() override;
-
- void GetCameraInfoWithRealId(GetCameraInfoCallback callback,
- const base::Optional<std::string>& device_id);
-
- void SetReprocessOptionWithRealId(
- cros::mojom::Effect effect,
- SetReprocessOptionCallback callback,
- const base::Optional<std::string>& device_id);
-
- void SetFpsRangeWithRealId(const uint32_t stream_width,
- const uint32_t stream_height,
- const int32_t min_frame_rate,
- const int32_t max_frame_rate,
- SetFpsRangeCallback callback,
- const base::Optional<std::string>& device_id);
-
- // cros::mojom::CrosImageCapture implementations.
- void GetCameraInfo(const std::string& source_id,
- GetCameraInfoCallback callback) override;
- void SetReprocessOption(const std::string& source_id,
- cros::mojom::Effect effect,
- SetReprocessOptionCallback callback) override;
- void SetFpsRange(const std::string& source_id,
- const uint32_t stream_width,
- const uint32_t stream_height,
- const int32_t min_frame_rate,
- const int32_t max_frame_rate,
- SetFpsRangeCallback callback) override;
-
- void OnIntentHandled(uint32_t intent_id,
- bool is_success,
- const std::vector<uint8_t>& captured_data) override;
-
- private:
- cros::mojom::CrosImageCapturePtr cros_image_capture_;
-
- DeviceIdMappingCallback mapping_callback_;
-
- IntentCallback intent_callback_;
-
- base::WeakPtrFactory<RendererFacingCrosImageCapture> weak_ptr_factory_;
-
- DISALLOW_COPY_AND_ASSIGN(RendererFacingCrosImageCapture);
-};
-
-} // namespace media
-
-#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_RENDERER_FACING_CROS_IMAGE_CAPTURE_H_ \ No newline at end of file
diff --git a/chromium/media/capture/video/chromeos/reprocess_manager.cc b/chromium/media/capture/video/chromeos/reprocess_manager.cc
deleted file mode 100644
index d2c97981317..00000000000
--- a/chromium/media/capture/video/chromeos/reprocess_manager.cc
+++ /dev/null
@@ -1,260 +0,0 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/capture/video/chromeos/reprocess_manager.h"
-
-#include <functional>
-#include <utility>
-
-#include "media/capture/video/chromeos/camera_metadata_utils.h"
-
-namespace media {
-
-namespace {
-
-void OnStillCaptureDone(media::mojom::ImageCapture::TakePhotoCallback callback,
- int status,
- mojom::BlobPtr blob) {
- std::move(callback).Run(std::move(blob));
-}
-
-} // namespace
-
-ReprocessTask::ReprocessTask() = default;
-
-ReprocessTask::ReprocessTask(ReprocessTask&& other)
- : effect(other.effect),
- callback(std::move(other.callback)),
- extra_metadata(std::move(other.extra_metadata)) {}
-
-ReprocessTask::~ReprocessTask() = default;
-
-// static
-int ReprocessManager::GetReprocessReturnCode(
- cros::mojom::Effect effect,
- const cros::mojom::CameraMetadataPtr* metadata) {
- if (effect == cros::mojom::Effect::PORTRAIT_MODE) {
- auto* portrait_mode_segmentation_result = GetMetadataEntry(
- *metadata, static_cast<cros::mojom::CameraMetadataTag>(
- kPortraitModeSegmentationResultVendorKey));
- CHECK(portrait_mode_segmentation_result);
- return static_cast<int>((*portrait_mode_segmentation_result)->data[0]);
- }
- return kReprocessSuccess;
-}
-
-ReprocessManager::ReprocessManager(CameraInfoGetter get_camera_info)
- : sequenced_task_runner_(base::CreateSequencedTaskRunnerWithTraits(
- {base::TaskPriority::USER_VISIBLE})),
- impl(std::make_unique<ReprocessManager::ReprocessManagerImpl>(
- std::move(get_camera_info))) {}
-
-ReprocessManager::~ReprocessManager() {
- sequenced_task_runner_->DeleteSoon(FROM_HERE, std::move(impl));
-}
-
-void ReprocessManager::SetReprocessOption(
- const std::string& device_id,
- cros::mojom::Effect effect,
- cros::mojom::CrosImageCapture::SetReprocessOptionCallback
- reprocess_result_callback) {
- sequenced_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(
- &ReprocessManager::ReprocessManagerImpl::SetReprocessOption,
- base::Unretained(impl.get()), device_id, effect,
- std::move(reprocess_result_callback)));
-}
-
-void ReprocessManager::ConsumeReprocessOptions(
- const std::string& device_id,
- media::mojom::ImageCapture::TakePhotoCallback take_photo_callback,
- base::OnceCallback<void(ReprocessTaskQueue)> consumption_callback) {
- sequenced_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(
- &ReprocessManager::ReprocessManagerImpl::ConsumeReprocessOptions,
- base::Unretained(impl.get()), device_id,
- std::move(take_photo_callback), std::move(consumption_callback)));
-}
-
-void ReprocessManager::Flush(const std::string& device_id) {
- sequenced_task_runner_->PostTask(
- FROM_HERE, base::BindOnce(&ReprocessManager::ReprocessManagerImpl::Flush,
- base::Unretained(impl.get()), device_id));
-}
-
-void ReprocessManager::GetCameraInfo(const std::string& device_id,
- GetCameraInfoCallback callback) {
- sequenced_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&ReprocessManager::ReprocessManagerImpl::GetCameraInfo,
- base::Unretained(impl.get()), device_id,
- std::move(callback)));
-}
-
-void ReprocessManager::SetFpsRange(
- const std::string& device_id,
- const uint32_t stream_width,
- const uint32_t stream_height,
- const int32_t min_fps,
- const int32_t max_fps,
- cros::mojom::CrosImageCapture::SetFpsRangeCallback callback) {
- sequenced_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&ReprocessManager::ReprocessManagerImpl::SetFpsRange,
- base::Unretained(impl.get()), device_id, stream_width,
- stream_height, min_fps, max_fps, std::move(callback)));
-}
-
-void ReprocessManager::GetFpsRange(const std::string& device_id,
- const uint32_t stream_width,
- const uint32_t stream_height,
- GetFpsRangeCallback callback) {
- sequenced_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&ReprocessManager::ReprocessManagerImpl::GetFpsRange,
- base::Unretained(impl.get()), device_id, stream_width,
- stream_height, std::move(callback)));
-}
-
-ReprocessManager::ReprocessManagerImpl::ReprocessManagerImpl(
- CameraInfoGetter get_camera_info)
- : get_camera_info_(std::move(get_camera_info)) {}
-
-ReprocessManager::ReprocessManagerImpl::~ReprocessManagerImpl() = default;
-
-void ReprocessManager::ReprocessManagerImpl::SetReprocessOption(
- const std::string& device_id,
- cros::mojom::Effect effect,
- cros::mojom::CrosImageCapture::SetReprocessOptionCallback
- reprocess_result_callback) {
- ReprocessTask task;
- task.effect = effect;
- task.callback = std::move(reprocess_result_callback);
-
- if (effect == cros::mojom::Effect::PORTRAIT_MODE) {
- std::vector<uint8_t> portrait_mode_value(sizeof(int32_t));
- *reinterpret_cast<int32_t*>(portrait_mode_value.data()) = 1;
- cros::mojom::CameraMetadataEntryPtr e =
- cros::mojom::CameraMetadataEntry::New();
- e->tag =
- static_cast<cros::mojom::CameraMetadataTag>(kPortraitModeVendorKey);
- e->type = cros::mojom::EntryType::TYPE_INT32;
- e->count = 1;
- e->data = std::move(portrait_mode_value);
- task.extra_metadata.push_back(std::move(e));
- }
-
- reprocess_task_queue_map_[device_id].push(std::move(task));
-}
-
-void ReprocessManager::ReprocessManagerImpl::ConsumeReprocessOptions(
- const std::string& device_id,
- media::mojom::ImageCapture::TakePhotoCallback take_photo_callback,
- base::OnceCallback<void(ReprocessTaskQueue)> consumption_callback) {
- ReprocessTaskQueue result_task_queue;
-
- ReprocessTask still_capture_task;
- still_capture_task.effect = cros::mojom::Effect::NO_EFFECT;
- still_capture_task.callback =
- base::BindOnce(&OnStillCaptureDone, std::move(take_photo_callback));
- result_task_queue.push(std::move(still_capture_task));
-
- auto& task_queue = reprocess_task_queue_map_[device_id];
- while (!task_queue.empty()) {
- result_task_queue.push(std::move(task_queue.front()));
- task_queue.pop();
- }
- std::move(consumption_callback).Run(std::move(result_task_queue));
-}
-
-void ReprocessManager::ReprocessManagerImpl::Flush(
- const std::string& device_id) {
- auto empty_queue = ReprocessTaskQueue();
- reprocess_task_queue_map_[device_id].swap(empty_queue);
-
- auto empty_map = ResolutionFpsRangeMap();
- resolution_fps_range_map_[device_id].swap(empty_map);
-}
-
-void ReprocessManager::ReprocessManagerImpl::GetCameraInfo(
- const std::string& device_id,
- GetCameraInfoCallback callback) {
- std::move(callback).Run(get_camera_info_.Run(device_id));
-}
-
-void ReprocessManager::ReprocessManagerImpl::SetFpsRange(
- const std::string& device_id,
- const uint32_t stream_width,
- const uint32_t stream_height,
- const int32_t min_fps,
- const int32_t max_fps,
- cros::mojom::CrosImageCapture::SetFpsRangeCallback callback) {
- const int entry_length = 2;
-
- auto camera_info = get_camera_info_.Run(device_id);
- auto& static_metadata = camera_info->static_camera_characteristics;
- auto available_fps_range_entries = GetMetadataEntryAsSpan<int32_t>(
- static_metadata, cros::mojom::CameraMetadataTag::
- ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
- CHECK(available_fps_range_entries.size() % entry_length == 0);
-
- bool is_valid = false;
- for (size_t i = 0; i < available_fps_range_entries.size();
- i += entry_length) {
- if (available_fps_range_entries[i] == min_fps &&
- available_fps_range_entries[i + 1] == max_fps) {
- is_valid = true;
- break;
- }
- }
-
- auto resolution = gfx::Size(stream_width, stream_height);
- auto& fps_map = resolution_fps_range_map_[device_id];
- if (!is_valid) {
- // If the input range is invalid, we should still clear the cache range so
- // that it will fallback to use default fps range rather than the cache one.
- auto it = fps_map.find(resolution);
- if (it != fps_map.end()) {
- fps_map.erase(it);
- }
- std::move(callback).Run(false);
- return;
- }
-
- auto fps_range = gfx::Range(min_fps, max_fps);
- fps_map[resolution] = fps_range;
- std::move(callback).Run(true);
-}
-
-void ReprocessManager::ReprocessManagerImpl::GetFpsRange(
- const std::string& device_id,
- const uint32_t stream_width,
- const uint32_t stream_height,
- GetFpsRangeCallback callback) {
- if (resolution_fps_range_map_.find(device_id) ==
- resolution_fps_range_map_.end()) {
- std::move(callback).Run({});
- return;
- }
-
- auto resolution = gfx::Size(stream_width, stream_height);
- auto& fps_map = resolution_fps_range_map_[device_id];
- if (fps_map.find(resolution) == fps_map.end()) {
- std::move(callback).Run({});
- return;
- }
-
- std::move(callback).Run(fps_map[resolution]);
-}
-
-bool ReprocessManager::ReprocessManagerImpl::SizeComparator::operator()(
- const gfx::Size size_1,
- const gfx::Size size_2) const {
- return size_1.width() < size_2.width() || (size_1.width() == size_2.width() &&
- size_1.height() < size_2.height());
-}
-
-} // namespace media
diff --git a/chromium/media/capture/video/chromeos/reprocess_manager.h b/chromium/media/capture/video/chromeos/reprocess_manager.h
deleted file mode 100644
index ac2add5c23e..00000000000
--- a/chromium/media/capture/video/chromeos/reprocess_manager.h
+++ /dev/null
@@ -1,162 +0,0 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_CAPTURE_VIDEO_CHROMEOS_REPROCESS_MANAGER_H_
-#define MEDIA_CAPTURE_VIDEO_CHROMEOS_REPROCESS_MANAGER_H_
-
-#include <queue>
-#include <set>
-#include <string>
-#include <vector>
-
-#include "base/containers/flat_set.h"
-#include "base/sequenced_task_runner.h"
-#include "base/task/post_task.h"
-#include "media/capture/capture_export.h"
-#include "media/capture/mojom/image_capture.mojom.h"
-#include "media/capture/video/chromeos/mojo/camera3.mojom.h"
-#include "media/capture/video/chromeos/mojo/camera_common.mojom.h"
-#include "media/capture/video/chromeos/mojo/cros_image_capture.mojom.h"
-#include "mojo/public/cpp/bindings/binding.h"
-#include "ui/gfx/geometry/size.h"
-#include "ui/gfx/range/range.h"
-
-namespace media {
-
-struct ReprocessTask {
- public:
- ReprocessTask();
- ReprocessTask(ReprocessTask&& other);
- ~ReprocessTask();
- cros::mojom::Effect effect;
- cros::mojom::CrosImageCapture::SetReprocessOptionCallback callback;
- std::vector<cros::mojom::CameraMetadataEntryPtr> extra_metadata;
-};
-
-using ReprocessTaskQueue = base::queue<ReprocessTask>;
-
-// TODO(shik): Get the keys from VendorTagOps by names instead (b/130774415).
-constexpr uint32_t kPortraitModeVendorKey = 0x80000000;
-constexpr uint32_t kPortraitModeSegmentationResultVendorKey = 0x80000001;
-constexpr int32_t kReprocessSuccess = 0;
-
-// ReprocessManager is used to communicate between the reprocess requester and
-// the consumer. When reprocess is requested, the reprocess information will be
-// wrapped as a ReprocessTask and stored in the queue. When consumption, all
-// ReprocessTask in the queue will be dumped and a default NO_EFFECT task will
-// be added on the top of the result queue. Note that all calls will be
-// sequentialize to a single sequence.
-class CAPTURE_EXPORT ReprocessManager {
- public:
- using CameraInfoGetter = base::RepeatingCallback<cros::mojom::CameraInfoPtr(
- const std::string& device_id)>;
- using GetCameraInfoCallback =
- base::OnceCallback<void(cros::mojom::CameraInfoPtr camera_info)>;
- using GetFpsRangeCallback =
- base::OnceCallback<void(base::Optional<gfx::Range>)>;
-
- class ReprocessManagerImpl {
- public:
- struct SizeComparator {
- bool operator()(const gfx::Size size_1, const gfx::Size size_2) const;
- };
-
- using ResolutionFpsRangeMap =
- base::flat_map<gfx::Size, gfx::Range, SizeComparator>;
-
- ReprocessManagerImpl(CameraInfoGetter get_camera_info);
- ~ReprocessManagerImpl();
-
- void SetReprocessOption(
- const std::string& device_id,
- cros::mojom::Effect effect,
- cros::mojom::CrosImageCapture::SetReprocessOptionCallback
- reprocess_result_callback);
-
- void ConsumeReprocessOptions(
- const std::string& device_id,
- media::mojom::ImageCapture::TakePhotoCallback take_photo_callback,
- base::OnceCallback<void(ReprocessTaskQueue)> consumption_callback);
-
- void Flush(const std::string& device_id);
-
- void GetCameraInfo(const std::string& device_id,
- GetCameraInfoCallback callback);
-
- void SetFpsRange(
- const std::string& device_id,
- const uint32_t stream_width,
- const uint32_t stream_height,
- const int32_t min_fps,
- const int32_t max_fps,
- cros::mojom::CrosImageCapture::SetFpsRangeCallback callback);
-
- void GetFpsRange(const std::string& device_id,
- const uint32_t stream_width,
- const uint32_t stream_height,
- GetFpsRangeCallback callback);
-
- private:
- base::flat_map<std::string, base::queue<ReprocessTask>>
- reprocess_task_queue_map_;
- base::flat_map<std::string, ResolutionFpsRangeMap>
- resolution_fps_range_map_;
-
- CameraInfoGetter get_camera_info_;
-
- DISALLOW_COPY_AND_ASSIGN(ReprocessManagerImpl);
- };
-
- static int GetReprocessReturnCode(
- cros::mojom::Effect effect,
- const cros::mojom::CameraMetadataPtr* metadata);
- ReprocessManager(CameraInfoGetter callback);
- ~ReprocessManager();
-
- // Sets the reprocess option for given device id and effect. Each reprocess
- // option has a corressponding callback.
- void SetReprocessOption(
- const std::string& device_id,
- cros::mojom::Effect effect,
- cros::mojom::CrosImageCapture::SetReprocessOptionCallback
- reprocess_result_callback);
-
- // Consumes all ReprocessTasks in the queue. A default NO_EFFECT task will be
- // added on the top of the result queue.
- void ConsumeReprocessOptions(
- const std::string& device_id,
- media::mojom::ImageCapture::TakePhotoCallback take_photo_callback,
- base::OnceCallback<void(ReprocessTaskQueue)> consumption_callback);
-
- // Clears all temporary queues and maps that is used for given device id.
- void Flush(const std::string& device_id);
-
- // Gets camera information for current active device.
- void GetCameraInfo(const std::string& device_id,
- GetCameraInfoCallback callback);
-
- // Sets fps range for given device.
- void SetFpsRange(const std::string& device_id,
- const uint32_t stream_width,
- const uint32_t stream_height,
- const int32_t min_fps,
- const int32_t max_fps,
- cros::mojom::CrosImageCapture::SetFpsRangeCallback callback);
-
- // Gets fps range for given device and resolution.
- void GetFpsRange(const std::string& device_id,
- const uint32_t stream_width,
- const uint32_t stream_height,
- GetFpsRangeCallback callback);
-
- private:
- scoped_refptr<base::SequencedTaskRunner> sequenced_task_runner_;
- std::unique_ptr<ReprocessManagerImpl> impl;
-
- DISALLOW_COPY_AND_ASSIGN(ReprocessManager);
-};
-
-} // namespace media
-
-#endif // MEDIA_CAPTURE_VIDEO_CHROMEOS_REPROCESS_MANAGER_H_
diff --git a/chromium/media/capture/video/chromeos/request_builder.h b/chromium/media/capture/video/chromeos/request_builder.h
index 2127adba95b..a23ef6a1a05 100644
--- a/chromium/media/capture/video/chromeos/request_builder.h
+++ b/chromium/media/capture/video/chromeos/request_builder.h
@@ -11,7 +11,7 @@
#include "base/optional.h"
#include "media/capture/video/chromeos/camera_device_delegate.h"
-#include "media/capture/video/chromeos/mojo/camera3.mojom.h"
+#include "media/capture/video/chromeos/mojom/camera3.mojom.h"
#include "media/capture/video_capture_types.h"
#include "mojo/public/cpp/bindings/binding.h"
diff --git a/chromium/media/capture/video/chromeos/request_manager.cc b/chromium/media/capture/video/chromeos/request_manager.cc
index d0fe84fe877..8579e40f083 100644
--- a/chromium/media/capture/video/chromeos/request_manager.cc
+++ b/chromium/media/capture/video/chromeos/request_manager.cc
@@ -20,7 +20,6 @@
#include "media/capture/video/chromeos/camera_buffer_factory.h"
#include "media/capture/video/chromeos/camera_device_context.h"
#include "media/capture/video/chromeos/camera_metadata_utils.h"
-#include "media/capture/video/chromeos/mojo/cros_image_capture.mojom.h"
#include "mojo/public/cpp/platform/platform_handle.h"
#include "mojo/public/cpp/system/platform_handle.h"
@@ -41,7 +40,8 @@ RequestManager::RequestManager(
VideoCaptureBufferType buffer_type,
std::unique_ptr<CameraBufferFactory> camera_buffer_factory,
BlobifyCallback blobify_callback,
- scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner)
+ scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner,
+ CameraAppDeviceImpl* camera_app_device)
: callback_ops_(this, std::move(callback_ops_request)),
capture_interface_(std::move(capture_interface)),
device_context_(device_context),
@@ -56,7 +56,7 @@ RequestManager::RequestManager(
capturing_(false),
partial_result_count_(1),
first_frame_shutter_time_(base::TimeTicks()),
- weak_ptr_factory_(this) {
+ camera_app_device_(std::move(camera_app_device)) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
DCHECK(callback_ops_.is_bound());
DCHECK(device_context_);
@@ -89,6 +89,13 @@ void RequestManager::SetUpStreamsAndBuffers(
*reinterpret_cast<int32_t*>((*partial_count)->data.data());
}
+ auto pipeline_depth = GetMetadataEntryAsSpan<uint8_t>(
+ static_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_REQUEST_PIPELINE_MAX_DEPTH);
+ CHECK_EQ(pipeline_depth.size(), 1u);
+ pipeline_depth_ = pipeline_depth[0];
+ preview_buffers_queued_ = 0;
+
// Set the last received frame number for each stream types to be undefined.
for (const auto& stream : streams) {
StreamType stream_type = StreamIdToStreamType(stream->id);
@@ -96,7 +103,7 @@ void RequestManager::SetUpStreamsAndBuffers(
}
stream_buffer_manager_->SetUpStreamsAndBuffers(
- capture_format, std::move(static_metadata), std::move(streams));
+ capture_format, static_metadata, std::move(streams));
}
cros::mojom::Camera3StreamPtr RequestManager::GetStreamConfiguration(
@@ -294,11 +301,18 @@ void RequestManager::PrepareCaptureRequest() {
}
if (!is_reprocess_request && !is_oneshot_request && !is_preview_request) {
+ // We have to keep the pipeline full.
+ if (preview_buffers_queued_ < pipeline_depth_) {
+ ipc_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&RequestManager::PrepareCaptureRequest, GetWeakPtr()));
+ }
return;
}
auto capture_request = request_builder_->BuildRequest(
std::move(stream_types), std::move(settings), input_buffer_id);
+ CHECK_GT(capture_request->output_buffers.size(), 0u);
CaptureResult& pending_result =
pending_results_[capture_request->frame_number];
@@ -318,6 +332,10 @@ void RequestManager::PrepareCaptureRequest() {
pending_reprocess_tasks_queue_.pop();
}
+ if (is_preview_request) {
+ ++preview_buffers_queued_;
+ }
+
UpdateCaptureSettings(&capture_request->settings);
capture_interface_->ProcessCaptureRequest(
std::move(capture_request),
@@ -375,9 +393,19 @@ bool RequestManager::TryPrepareReprocessRequest(
bool RequestManager::TryPreparePreviewRequest(
std::set<StreamType>* stream_types,
cros::mojom::CameraMetadataPtr* settings) {
- if (!stream_buffer_manager_->HasFreeBuffers({StreamType::kPreviewOutput})) {
+ if (preview_buffers_queued_ == pipeline_depth_) {
return false;
}
+ if (!stream_buffer_manager_->HasFreeBuffers({StreamType::kPreviewOutput})) {
+ // Try our best to reserve a usable buffer. If the reservation still
+ // fails, then we'd have to drop the camera frame.
+ DLOG(WARNING) << "Late request for reserving preview buffer";
+ stream_buffer_manager_->ReserveBuffer(StreamType::kPreviewOutput);
+ if (!stream_buffer_manager_->HasFreeBuffers({StreamType::kPreviewOutput})) {
+ DLOG(WARNING) << "No free buffer for preview stream";
+ return false;
+ }
+ }
stream_types->insert({StreamType::kPreviewOutput});
*settings = repeating_request_settings_.Clone();
@@ -721,6 +749,12 @@ void RequestManager::SubmitCaptureResult(
observer->OnResultMetadataAvailable(pending_result.metadata);
}
+ if (camera_app_device_) {
+ camera_app_device_->OnResultMetadataAvailable(
+ pending_result.metadata,
+ static_cast<cros::mojom::StreamType>(stream_type));
+ }
+
// Wait on release fence before delivering the result buffer to client.
if (stream_buffer->release_fence.is_valid()) {
const int kSyncWaitTimeoutMs = 1000;
@@ -767,6 +801,11 @@ void RequestManager::SubmitCaptureResult(
stream_buffer_manager_->ReleaseBufferFromCaptureResult(stream_type,
buffer_ipc_id);
}
+
+ if (stream_type == StreamType::kPreviewOutput) {
+ --preview_buffers_queued_;
+ }
+
pending_result.unsubmitted_buffer_count--;
if (pending_result.unsubmitted_buffer_count == 0) {
@@ -844,7 +883,7 @@ void RequestManager::SubmitCapturedJpegBuffer(uint32_t frame_number,
if (blob) {
int task_status = kReprocessSuccess;
if (stream_buffer_manager_->IsReprocessSupported()) {
- task_status = ReprocessManager::GetReprocessReturnCode(
+ task_status = CameraAppDeviceImpl::GetReprocessReturnCode(
pending_result.reprocess_effect, &pending_result.metadata);
}
std::move(pending_result.still_capture_callback)
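
The preview bookkeeping added above is easier to follow in isolation. Below is a minimal standalone sketch of the counter logic (PreviewPipeline and its methods are illustrative names, not the Chromium RequestManager API): at most the reported pipeline depth of preview buffers is queued, the count rises when a preview request is built and falls when its result buffer is submitted, and a free slot is the cue to prepare another request so the pipeline stays full.

// Standalone sketch of the preview pipeline bookkeeping; PreviewPipeline and
// its methods are illustrative, not the Chromium RequestManager API.
#include <cstddef>
#include <iostream>

class PreviewPipeline {
 public:
  explicit PreviewPipeline(std::size_t pipeline_depth)
      : pipeline_depth_(pipeline_depth) {}

  // Mirrors TryPreparePreviewRequest(): never queue more preview buffers than
  // the HAL's reported ANDROID_REQUEST_PIPELINE_MAX_DEPTH.
  bool TryQueuePreview() {
    if (preview_buffers_queued_ == pipeline_depth_)
      return false;
    ++preview_buffers_queued_;
    return true;
  }

  // Mirrors SubmitCaptureResult() for a preview buffer: a slot frees up, so
  // the caller should immediately try to prepare another request.
  void OnPreviewResult() {
    if (preview_buffers_queued_ > 0)
      --preview_buffers_queued_;
  }

  std::size_t queued() const { return preview_buffers_queued_; }

 private:
  const std::size_t pipeline_depth_;
  std::size_t preview_buffers_queued_ = 0;
};

int main() {
  PreviewPipeline pipeline(/*pipeline_depth=*/4);
  while (pipeline.TryQueuePreview()) {}              // fill the pipeline
  std::cout << pipeline.queued() << "\n";            // prints 4
  pipeline.OnPreviewResult();                        // one result returned
  std::cout << pipeline.TryQueuePreview() << "\n";   // prints 1: refill worked
}
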
diff --git a/chromium/media/capture/video/chromeos/request_manager.h b/chromium/media/capture/video/chromeos/request_manager.h
index 7a14a20427a..6758e4735ba 100644
--- a/chromium/media/capture/video/chromeos/request_manager.h
+++ b/chromium/media/capture/video/chromeos/request_manager.h
@@ -15,9 +15,10 @@
#include "base/memory/weak_ptr.h"
#include "base/optional.h"
#include "media/capture/mojom/image_capture.mojom.h"
+#include "media/capture/video/chromeos/camera_app_device_impl.h"
#include "media/capture/video/chromeos/camera_device_delegate.h"
-#include "media/capture/video/chromeos/mojo/camera3.mojom.h"
-#include "media/capture/video/chromeos/reprocess_manager.h"
+#include "media/capture/video/chromeos/mojom/camera3.mojom.h"
+#include "media/capture/video/chromeos/mojom/camera_app.mojom.h"
#include "media/capture/video/chromeos/request_builder.h"
#include "media/capture/video/chromeos/stream_buffer_manager.h"
#include "media/capture/video_capture_types.h"
@@ -125,7 +126,8 @@ class CAPTURE_EXPORT RequestManager final
VideoCaptureBufferType buffer_type,
std::unique_ptr<CameraBufferFactory> camera_buffer_factory,
BlobifyCallback blobify_callback,
- scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner);
+ scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner,
+ CameraAppDeviceImpl* camera_app_device);
~RequestManager() override;
// Sets up the stream context and allocate buffers according to the
@@ -302,6 +304,15 @@ class CAPTURE_EXPORT RequestManager final
// shot.
uint32_t partial_result_count_;
+ // The pipeline depth reported in the ANDROID_REQUEST_PIPELINE_MAX_DEPTH
+ // metadata.
+ size_t pipeline_depth_;
+
+ // The number of preview buffers currently queued to the camera service. The
+ // request manager tries to keep |pipeline_depth_| preview buffers queued to
+ // avoid camera frame drops.
+ size_t preview_buffers_queued_;
+
// The shutter time of the first frame. We derive the |timestamp| of a
// frame using the difference between the frame's shutter time and
// |first_frame_shutter_time_|.
@@ -360,7 +371,9 @@ class CAPTURE_EXPORT RequestManager final
// duplicate or out of order of frames.
std::map<StreamType, uint32_t> last_received_frame_number_map_;
- base::WeakPtrFactory<RequestManager> weak_ptr_factory_;
+ CameraAppDeviceImpl* camera_app_device_; // Weak.
+
+ base::WeakPtrFactory<RequestManager> weak_ptr_factory_{this};
DISALLOW_IMPLICIT_CONSTRUCTORS(RequestManager);
};
diff --git a/chromium/media/capture/video/chromeos/request_manager_unittest.cc b/chromium/media/capture/video/chromeos/request_manager_unittest.cc
index f70dbd7feee..cfa1f85be2a 100644
--- a/chromium/media/capture/video/chromeos/request_manager_unittest.cc
+++ b/chromium/media/capture/video/chromeos/request_manager_unittest.cc
@@ -11,7 +11,7 @@
#include "base/bind.h"
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/thread.h"
#include "base/threading/thread_task_runner_handle.h"
#include "media/capture/video/blob_utils.h"
@@ -19,7 +19,6 @@
#include "media/capture/video/chromeos/camera_device_context.h"
#include "media/capture/video/chromeos/camera_device_delegate.h"
#include "media/capture/video/chromeos/mock_video_capture_client.h"
-#include "media/capture/video/chromeos/reprocess_manager.h"
#include "media/capture/video/chromeos/stream_buffer_manager.h"
#include "media/capture/video/mock_gpu_memory_buffer_manager.h"
#include "testing/gmock/include/gmock/gmock.h"
@@ -99,7 +98,7 @@ class RequestManagerTest : public ::testing::Test {
[](const uint8_t* buffer, const uint32_t bytesused,
const VideoCaptureFormat& capture_format,
const int rotation) { return mojom::Blob::New(); }),
- base::ThreadTaskRunnerHandle::Get());
+ base::ThreadTaskRunnerHandle::Get(), nullptr);
}
void TearDown() override { request_manager_.reset(); }
@@ -146,6 +145,17 @@ class RequestManagerTest : public ::testing::Test {
entry->data.assign(as_int8, as_int8 + entry->count * sizeof(int32_t));
static_metadata->entries->push_back(std::move(entry));
+ entry = cros::mojom::CameraMetadataEntry::New();
+ entry->index = 2;
+ entry->tag =
+ cros::mojom::CameraMetadataTag::ANDROID_REQUEST_PIPELINE_MAX_DEPTH;
+ entry->type = cros::mojom::EntryType::TYPE_BYTE;
+ entry->count = 1;
+ uint8_t pipeline_max_depth = 1;
+ entry->data.assign(&pipeline_max_depth,
+ &pipeline_max_depth + entry->count * sizeof(uint8_t));
+ static_metadata->entries->push_back(std::move(entry));
+
return static_metadata;
}
@@ -270,7 +280,7 @@ class RequestManagerTest : public ::testing::Test {
private:
std::unique_ptr<base::RunLoop> run_loop_;
bool quit_;
- base::test::ScopedTaskEnvironment scoped_test_environment_;
+ base::test::TaskEnvironment scoped_test_environment_;
};
// A basic sanity test to capture one frame with the capture loop.
diff --git a/chromium/media/capture/video/chromeos/stream_buffer_manager.cc b/chromium/media/capture/video/chromeos/stream_buffer_manager.cc
index f6dfdec8b03..261c335797c 100644
--- a/chromium/media/capture/video/chromeos/stream_buffer_manager.cc
+++ b/chromium/media/capture/video/chromeos/stream_buffer_manager.cc
@@ -29,8 +29,7 @@ StreamBufferManager::StreamBufferManager(
std::unique_ptr<CameraBufferFactory> camera_buffer_factory)
: device_context_(device_context),
video_capture_use_gmb_(video_capture_use_gmb),
- camera_buffer_factory_(std::move(camera_buffer_factory)),
- weak_ptr_factory_(this) {
+ camera_buffer_factory_(std::move(camera_buffer_factory)) {
if (video_capture_use_gmb_) {
gmb_support_ = std::make_unique<gpu::GpuMemoryBufferSupport>();
}
@@ -175,6 +174,8 @@ void StreamBufferManager::SetUpStreamsAndBuffers(
++j) {
ReserveBuffer(stream_type);
}
+ CHECK_EQ(stream_context_[stream_type]->free_buffers.size(),
+ stream_context_[stream_type]->stream->max_buffers);
DVLOG(2) << "Allocated "
<< stream_context_[stream_type]->stream->max_buffers << " buffers";
}
@@ -317,10 +318,7 @@ void StreamBufferManager::ReserveBufferFromPool(StreamType stream_type) {
if (!device_context_->ReserveVideoCaptureBufferFromPool(
stream_context->buffer_dimension,
stream_context->capture_format.pixel_format, &vcd_buffer)) {
- device_context_->SetErrorState(
- media::VideoCaptureError::
- kCrosHalV3BufferManagerFailedToCreateGpuMemoryBuffer,
- FROM_HERE, "Failed to reserve video capture buffer");
+ DLOG(WARNING) << "Failed to reserve video capture buffer";
return;
}
auto gmb = gmb_support_->CreateGpuMemoryBufferImplFromHandle(
diff --git a/chromium/media/capture/video/chromeos/stream_buffer_manager.h b/chromium/media/capture/video/chromeos/stream_buffer_manager.h
index 5aea0563840..c940509006f 100644
--- a/chromium/media/capture/video/chromeos/stream_buffer_manager.h
+++ b/chromium/media/capture/video/chromeos/stream_buffer_manager.h
@@ -19,7 +19,7 @@
#include "base/optional.h"
#include "base/single_thread_task_runner.h"
#include "media/capture/video/chromeos/camera_device_delegate.h"
-#include "media/capture/video/chromeos/mojo/camera3.mojom.h"
+#include "media/capture/video/chromeos/mojom/camera3.mojom.h"
#include "media/capture/video_capture_types.h"
#include "mojo/public/cpp/bindings/binding.h"
@@ -152,7 +152,7 @@ class CAPTURE_EXPORT StreamBufferManager final {
std::unique_ptr<CameraBufferFactory> camera_buffer_factory_;
- base::WeakPtrFactory<StreamBufferManager> weak_ptr_factory_;
+ base::WeakPtrFactory<StreamBufferManager> weak_ptr_factory_{this};
DISALLOW_IMPLICIT_CONSTRUCTORS(StreamBufferManager);
};
diff --git a/chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.h b/chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.h
index c343b6c656c..3920435bfdd 100644
--- a/chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.h
+++ b/chromium/media/capture/video/chromeos/vendor_tag_ops_delegate.h
@@ -9,7 +9,7 @@
#include <string>
#include <vector>
-#include "media/capture/video/chromeos/mojo/camera_common.mojom.h"
+#include "media/capture/video/chromeos/mojom/camera_common.mojom.h"
namespace media {
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc
index 12ba25ddbe3..70ae8f6f13a 100644
--- a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc
+++ b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc
@@ -19,7 +19,6 @@
#include "media/capture/video/chromeos/camera_device_context.h"
#include "media/capture/video/chromeos/camera_device_delegate.h"
#include "media/capture/video/chromeos/camera_hal_delegate.h"
-#include "media/capture/video/chromeos/reprocess_manager.h"
#include "ui/display/display.h"
#include "ui/display/display_observer.h"
#include "ui/display/screen.h"
@@ -105,7 +104,8 @@ VideoCaptureDeviceChromeOSHalv3::VideoCaptureDeviceChromeOSHalv3(
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
const VideoCaptureDeviceDescriptor& device_descriptor,
scoped_refptr<CameraHalDelegate> camera_hal_delegate,
- ReprocessManager* reprocess_manager)
+ CameraAppDeviceImpl* camera_app_device,
+ base::OnceClosure cleanup_callback)
: device_descriptor_(device_descriptor),
camera_hal_delegate_(std::move(camera_hal_delegate)),
capture_task_runner_(base::ThreadTaskRunnerHandle::Get()),
@@ -119,10 +119,10 @@ VideoCaptureDeviceChromeOSHalv3::VideoCaptureDeviceChromeOSHalv3(
rotates_with_device_(lens_facing_ !=
VideoFacingMode::MEDIA_VIDEO_FACING_NONE),
rotation_(0),
- reprocess_manager_(reprocess_manager),
+ camera_app_device_(camera_app_device),
+ cleanup_callback_(std::move(cleanup_callback)),
power_manager_client_proxy_(
- base::MakeRefCounted<PowerManagerClientProxy>()),
- weak_ptr_factory_(this) {
+ base::MakeRefCounted<PowerManagerClientProxy>()) {
power_manager_client_proxy_->Init(weak_ptr_factory_.GetWeakPtr(),
capture_task_runner_,
std::move(ui_task_runner));
@@ -133,6 +133,7 @@ VideoCaptureDeviceChromeOSHalv3::~VideoCaptureDeviceChromeOSHalv3() {
DCHECK(!camera_device_ipc_thread_.IsRunning());
screen_observer_delegate_->RemoveObserver();
power_manager_client_proxy_->Shutdown();
+ std::move(cleanup_callback_).Run();
}
// VideoCaptureDevice implementation.
@@ -152,9 +153,10 @@ void VideoCaptureDeviceChromeOSHalv3::AllocateAndStart(
}
capture_params_ = params;
device_context_ = std::make_unique<CameraDeviceContext>(std::move(client));
+
camera_device_delegate_ = std::make_unique<CameraDeviceDelegate>(
device_descriptor_, camera_hal_delegate_,
- camera_device_ipc_thread_.task_runner(), reprocess_manager_);
+ camera_device_ipc_thread_.task_runner(), camera_app_device_);
OpenDevice();
}
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h
index e0214aac0cc..2e9fea397ac 100644
--- a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h
+++ b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h
@@ -24,10 +24,10 @@ class Display;
namespace media {
+class CameraAppDeviceImpl;
class CameraHalDelegate;
class CameraDeviceContext;
class CameraDeviceDelegate;
-class ReprocessManager;
// Implementation of VideoCaptureDevice for ChromeOS with CrOS camera HALv3.
class CAPTURE_EXPORT VideoCaptureDeviceChromeOSHalv3 final
@@ -38,7 +38,8 @@ class CAPTURE_EXPORT VideoCaptureDeviceChromeOSHalv3 final
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
const VideoCaptureDeviceDescriptor& device_descriptor,
scoped_refptr<CameraHalDelegate> camera_hal_delegate,
- ReprocessManager* reprocess_manager);
+ CameraAppDeviceImpl* camera_app_device,
+ base::OnceClosure cleanup_callback);
~VideoCaptureDeviceChromeOSHalv3() final;
@@ -95,11 +96,13 @@ class CAPTURE_EXPORT VideoCaptureDeviceChromeOSHalv3 final
const bool rotates_with_device_;
int rotation_;
- ReprocessManager* reprocess_manager_; // weak
+ CameraAppDeviceImpl* camera_app_device_; // Weak.
+
+ base::OnceClosure cleanup_callback_;
scoped_refptr<PowerManagerClientProxy> power_manager_client_proxy_;
- base::WeakPtrFactory<VideoCaptureDeviceChromeOSHalv3> weak_ptr_factory_;
+ base::WeakPtrFactory<VideoCaptureDeviceChromeOSHalv3> weak_ptr_factory_{this};
DISALLOW_IMPLICIT_CONSTRUCTORS(VideoCaptureDeviceChromeOSHalv3);
};
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.cc b/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.cc
index 6a4fc145c51..da4a3c5914c 100644
--- a/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.cc
+++ b/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.cc
@@ -8,9 +8,8 @@
#include "base/memory/ptr_util.h"
#include "media/base/bind_to_current_loop.h"
+#include "media/capture/video/chromeos/camera_app_device_bridge_impl.h"
#include "media/capture/video/chromeos/camera_hal_dispatcher_impl.h"
-#include "media/capture/video/chromeos/cros_image_capture_impl.h"
-#include "media/capture/video/chromeos/reprocess_manager.h"
#include "mojo/public/cpp/bindings/strong_binding.h"
namespace media {
@@ -22,19 +21,17 @@ gpu::GpuMemoryBufferManager* g_gpu_buffer_manager = nullptr;
} // namespace
VideoCaptureDeviceFactoryChromeOS::VideoCaptureDeviceFactoryChromeOS(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner_for_screen_observer)
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner_for_screen_observer,
+ CameraAppDeviceBridgeImpl* camera_app_device_bridge)
: task_runner_for_screen_observer_(task_runner_for_screen_observer),
camera_hal_ipc_thread_("CameraHalIpcThread"),
- initialized_(Init()),
- weak_ptr_factory_(this) {
- auto get_camera_info =
- base::BindRepeating(&VideoCaptureDeviceFactoryChromeOS::GetCameraInfo,
- base::Unretained(this));
- reprocess_manager_ =
- std::make_unique<ReprocessManager>(std::move(get_camera_info));
-}
+ camera_app_device_bridge_(camera_app_device_bridge),
+ initialized_(Init()) {}
VideoCaptureDeviceFactoryChromeOS::~VideoCaptureDeviceFactoryChromeOS() {
+ if (camera_app_device_bridge_) {
+ camera_app_device_bridge_->UnsetCameraInfoGetter();
+ }
camera_hal_delegate_->Reset();
camera_hal_ipc_thread_.Stop();
}
@@ -48,7 +45,7 @@ VideoCaptureDeviceFactoryChromeOS::CreateDevice(
}
return camera_hal_delegate_->CreateDevice(task_runner_for_screen_observer_,
device_descriptor,
- reprocess_manager_.get());
+ camera_app_device_bridge_);
}
void VideoCaptureDeviceFactoryChromeOS::GetSupportedFormats(
@@ -94,22 +91,20 @@ bool VideoCaptureDeviceFactoryChromeOS::Init() {
camera_hal_delegate_ =
new CameraHalDelegate(camera_hal_ipc_thread_.task_runner());
camera_hal_delegate_->RegisterCameraClient();
- return true;
-}
-cros::mojom::CameraInfoPtr VideoCaptureDeviceFactoryChromeOS::GetCameraInfo(
- const std::string& device_id) {
- if (!initialized_) {
- return {};
+ // Since |camera_hal_delegate_| is initialized in the constructor of this
+ // object and is destroyed only after |camera_app_device_bridge_| unsets its
+ // reference to it, it is safe to use base::Unretained() here.
+ if (camera_app_device_bridge_) {
+ camera_app_device_bridge_->SetCameraInfoGetter(
+ base::BindRepeating(&CameraHalDelegate::GetCameraInfoFromDeviceId,
+ base::Unretained(camera_hal_delegate_.get())));
}
- return camera_hal_delegate_->GetCameraInfoFromDeviceId(device_id);
+ return true;
}
-void VideoCaptureDeviceFactoryChromeOS::BindCrosImageCaptureRequest(
- cros::mojom::CrosImageCaptureRequest request) {
- mojo::MakeStrongBinding(
- std::make_unique<CrosImageCaptureImpl>(reprocess_manager_.get()),
- std::move(request));
+bool VideoCaptureDeviceFactoryChromeOS::IsSupportedCameraAppDeviceBridge() {
+ return true;
}
} // namespace media
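
The Unretained() justification above boils down to an ordering guarantee: the getter is installed only after |camera_hal_delegate_| exists and is removed in the factory destructor before the delegate is reset. A generic sketch of that register-then-unregister lifetime pattern, using hypothetical Bridge/Owner types rather than the Chromium classes:

// Generic sketch of the register/unregister lifetime pattern; Bridge and
// Owner are hypothetical stand-ins, not the Chromium classes.
#include <functional>
#include <string>
#include <utility>

class Bridge {
 public:
  using InfoGetter = std::function<std::string(const std::string&)>;
  void SetCameraInfoGetter(InfoGetter getter) { getter_ = std::move(getter); }
  void UnsetCameraInfoGetter() { getter_ = nullptr; }

 private:
  InfoGetter getter_;
};

class Owner {
 public:
  explicit Owner(Bridge* bridge) : bridge_(bridge) {
    // Capturing |this| without a weak pointer is safe only because the getter
    // is unset in ~Owner(), before this object goes away.
    if (bridge_) {
      bridge_->SetCameraInfoGetter(
          [this](const std::string& id) { return GetInfo(id); });
    }
  }
  ~Owner() {
    if (bridge_)
      bridge_->UnsetCameraInfoGetter();
  }

 private:
  std::string GetInfo(const std::string& id) { return "info:" + id; }
  Bridge* bridge_;
};
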
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h b/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h
index cee840f8fdf..62fa4b5599e 100644
--- a/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h
+++ b/chromium/media/capture/video/chromeos/video_capture_device_factory_chromeos.h
@@ -11,22 +11,22 @@
#include "base/single_thread_task_runner.h"
#include "components/chromeos_camera/common/mjpeg_decode_accelerator.mojom.h"
#include "media/capture/video/chromeos/camera_hal_delegate.h"
-#include "media/capture/video/chromeos/mojo/cros_image_capture.mojom.h"
#include "media/capture/video/video_capture_device_factory.h"
namespace media {
+class CameraAppDeviceBridgeImpl;
+
using MojoMjpegDecodeAcceleratorFactoryCB = base::RepeatingCallback<void(
chromeos_camera::mojom::MjpegDecodeAcceleratorRequest)>;
-class ReprocessManager;
-
class CAPTURE_EXPORT VideoCaptureDeviceFactoryChromeOS final
: public VideoCaptureDeviceFactory {
public:
explicit VideoCaptureDeviceFactoryChromeOS(
scoped_refptr<base::SingleThreadTaskRunner>
- task_runner_for_screen_observer);
+ task_runner_for_screen_observer,
+ CameraAppDeviceBridgeImpl* camera_app_device_bridge);
~VideoCaptureDeviceFactoryChromeOS() override;
@@ -39,21 +39,16 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryChromeOS final
void GetDeviceDescriptors(
VideoCaptureDeviceDescriptors* device_descriptors) final;
+ bool IsSupportedCameraAppDeviceBridge() override;
+
static gpu::GpuMemoryBufferManager* GetBufferManager();
static void SetGpuBufferManager(gpu::GpuMemoryBufferManager* buffer_manager);
- void BindCrosImageCaptureRequest(
- cros::mojom::CrosImageCaptureRequest request);
-
private:
// Initializes the factory. The factory is functional only after this call
// succeeds.
bool Init();
- // Gets camera info for the given |device_id|. Returns null CameraInfoPtr on
- // error.
- cros::mojom::CameraInfoPtr GetCameraInfo(const std::string& device_id);
-
const scoped_refptr<base::SingleThreadTaskRunner>
task_runner_for_screen_observer_;
@@ -67,11 +62,12 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryChromeOS final
// |camera_hal_ipc_thread_|.
scoped_refptr<CameraHalDelegate> camera_hal_delegate_;
- std::unique_ptr<ReprocessManager> reprocess_manager_;
+ CameraAppDeviceBridgeImpl* camera_app_device_bridge_; // Weak.
bool initialized_;
- base::WeakPtrFactory<VideoCaptureDeviceFactoryChromeOS> weak_ptr_factory_;
+ base::WeakPtrFactory<VideoCaptureDeviceFactoryChromeOS> weak_ptr_factory_{
+ this};
DISALLOW_COPY_AND_ASSIGN(VideoCaptureDeviceFactoryChromeOS);
};
diff --git a/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.cc b/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.cc
index 91f3d014750..798cccf1270 100644
--- a/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.cc
+++ b/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.cc
@@ -5,6 +5,7 @@
#include "media/capture/video/chromeos/video_capture_jpeg_decoder_impl.h"
#include "base/bind.h"
+#include "base/bind_helpers.h"
#include "base/metrics/histogram_macros.h"
#include "components/chromeos_camera/mojo_mjpeg_decode_accelerator.h"
#include "media/base/media_switches.h"
@@ -21,11 +22,9 @@ VideoCaptureJpegDecoderImpl::VideoCaptureJpegDecoderImpl(
decode_done_cb_(std::move(decode_done_cb)),
send_log_message_cb_(std::move(send_log_message_cb)),
has_received_decoded_frame_(false),
- next_bitstream_buffer_id_(0),
- in_buffer_id_(
- chromeos_camera::MjpegDecodeAccelerator::kInvalidBitstreamBufferId),
- decoder_status_(INIT_PENDING),
- weak_ptr_factory_(this) {}
+ next_task_id_(0),
+ task_id_(chromeos_camera::MjpegDecodeAccelerator::kInvalidTaskId),
+ decoder_status_(INIT_PENDING) {}
VideoCaptureJpegDecoderImpl::~VideoCaptureJpegDecoderImpl() {
DCHECK(decoder_task_runner_->RunsTasksInCurrentSequence());
@@ -59,7 +58,7 @@ void VideoCaptureJpegDecoderImpl::DecodeCapturedData(
DCHECK(decoder_);
TRACE_EVENT_ASYNC_BEGIN0("jpeg", "VideoCaptureJpegDecoderImpl decoding",
- next_bitstream_buffer_id_);
+ next_task_id_);
TRACE_EVENT0("jpeg", "VideoCaptureJpegDecoderImpl::DecodeCapturedData");
// TODO(kcwu): enqueue decode requests in case decoding is not fast enough
@@ -73,55 +72,61 @@ void VideoCaptureJpegDecoderImpl::DecodeCapturedData(
}
// Enlarge input buffer if necessary.
- if (!in_shared_memory_.get() ||
- in_buffer_size > in_shared_memory_->mapped_size()) {
+ if (!in_shared_region_.IsValid() || !in_shared_mapping_.IsValid() ||
+ in_buffer_size > in_shared_mapping_.size()) {
// Reserve 2x space to avoid frequent reallocations for initial frames.
const size_t reserved_size = 2 * in_buffer_size;
- in_shared_memory_.reset(new base::SharedMemory);
- if (!in_shared_memory_->CreateAndMapAnonymous(reserved_size)) {
+ in_shared_region_ = base::UnsafeSharedMemoryRegion::Create(reserved_size);
+ if (!in_shared_region_.IsValid()) {
base::AutoLock lock(lock_);
decoder_status_ = FAILED;
- LOG(WARNING) << "CreateAndMapAnonymous failed, size=" << reserved_size;
+ LOG(WARNING) << "UnsafeSharedMemoryRegion::Create failed, size="
+ << reserved_size;
+ return;
+ }
+ in_shared_mapping_ = in_shared_region_.Map();
+ if (!in_shared_mapping_.IsValid()) {
+ base::AutoLock lock(lock_);
+ decoder_status_ = FAILED;
+ LOG(WARNING) << "UnsafeSharedMemoryRegion::Map failed, size="
+ << reserved_size;
return;
}
}
- memcpy(in_shared_memory_->memory(), data, in_buffer_size);
+ memcpy(in_shared_mapping_.memory(), data, in_buffer_size);
- // No need to lock for |in_buffer_id_| since IsDecoding_Locked() is false.
- in_buffer_id_ = next_bitstream_buffer_id_;
- media::BitstreamBuffer in_buffer(in_buffer_id_, in_shared_memory_->handle(),
- false /* read_only */, in_buffer_size);
+ // No need to lock for |task_id_| since IsDecoding_Locked() is false.
+ task_id_ = next_task_id_;
+ media::BitstreamBuffer in_buffer(task_id_, in_shared_region_.Duplicate(),
+ in_buffer_size);
// Mask against 30 bits, to avoid (undefined) wraparound on signed integer.
- next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & 0x3FFFFFFF;
+ next_task_id_ = (next_task_id_ + 1) & 0x3FFFFFFF;
// The API of |decoder_| requires us to wrap the |out_buffer| in a VideoFrame.
const gfx::Size dimensions = frame_format.frame_size;
- std::unique_ptr<media::VideoCaptureBufferHandle> out_buffer_access =
- out_buffer.handle_provider->GetHandleForInProcessAccess();
- base::SharedMemoryHandle out_handle =
- out_buffer.handle_provider->GetNonOwnedSharedMemoryHandleForLegacyIPC();
+ base::UnsafeSharedMemoryRegion out_region =
+ out_buffer.handle_provider->DuplicateAsUnsafeRegion();
+ DCHECK(out_region.IsValid());
+ base::WritableSharedMemoryMapping out_mapping = out_region.Map();
+ DCHECK(out_mapping.IsValid());
scoped_refptr<media::VideoFrame> out_frame =
- media::VideoFrame::WrapExternalSharedMemory(
- media::PIXEL_FORMAT_I420, // format
- dimensions, // coded_size
- gfx::Rect(dimensions), // visible_rect
- dimensions, // natural_size
- out_buffer_access->data(), // data
- out_buffer_access->mapped_size(), // data_size
- out_handle, // handle
- 0, // shared_memory_offset
- timestamp); // timestamp
+ media::VideoFrame::WrapExternalData(
+ media::PIXEL_FORMAT_I420, // format
+ dimensions, // coded_size
+ gfx::Rect(dimensions), // visible_rect
+ dimensions, // natural_size
+ out_mapping.GetMemoryAsSpan<uint8_t>().data(), // data
+ out_mapping.size(), // data_size
+ timestamp); // timestamp
if (!out_frame) {
base::AutoLock lock(lock_);
decoder_status_ = FAILED;
LOG(ERROR) << "DecodeCapturedData: WrapExternalSharedMemory failed";
return;
}
- // Hold onto the buffer access handle for the lifetime of the VideoFrame, to
- // ensure the data pointers remain valid.
- out_frame->AddDestructionObserver(base::BindOnce(
- [](std::unique_ptr<media::VideoCaptureBufferHandle> handle) {},
- std::move(out_buffer_access)));
+ out_frame->BackWithOwnedSharedMemory(std::move(out_region),
+ std::move(out_mapping));
+
out_frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE,
frame_format.frame_rate);
@@ -149,12 +154,16 @@ void VideoCaptureJpegDecoderImpl::DecodeCapturedData(
// |decoder_task_runner_|.
decoder_task_runner_->PostTask(
FROM_HERE,
- base::BindOnce(&chromeos_camera::MjpegDecodeAccelerator::Decode,
- base::Unretained(decoder_.get()), std::move(in_buffer),
- std::move(out_frame)));
+ base::BindOnce(
+ [](chromeos_camera::MjpegDecodeAccelerator* decoder,
+ BitstreamBuffer in_buffer, scoped_refptr<VideoFrame> out_frame) {
+ decoder->Decode(std::move(in_buffer), std::move(out_frame));
+ },
+ base::Unretained(decoder_.get()), std::move(in_buffer),
+ std::move(out_frame)));
}
-void VideoCaptureJpegDecoderImpl::VideoFrameReady(int32_t bitstream_buffer_id) {
+void VideoCaptureJpegDecoderImpl::VideoFrameReady(int32_t task_id) {
DCHECK(decoder_task_runner_->RunsTasksInCurrentSequence());
TRACE_EVENT0("jpeg", "VideoCaptureJpegDecoderImpl::VideoFrameReady");
if (!has_received_decoded_frame_) {
@@ -168,26 +177,23 @@ void VideoCaptureJpegDecoderImpl::VideoFrameReady(int32_t bitstream_buffer_id) {
return;
}
- if (bitstream_buffer_id != in_buffer_id_) {
- LOG(ERROR) << "Unexpected bitstream_buffer_id " << bitstream_buffer_id
- << ", expected " << in_buffer_id_;
+ if (task_id != task_id_) {
+ LOG(ERROR) << "Unexpected task_id " << task_id << ", expected " << task_id_;
return;
}
- in_buffer_id_ =
- chromeos_camera::MjpegDecodeAccelerator::kInvalidBitstreamBufferId;
+ task_id_ = chromeos_camera::MjpegDecodeAccelerator::kInvalidTaskId;
std::move(decode_done_closure_).Run();
TRACE_EVENT_ASYNC_END0("jpeg", "VideoCaptureJpegDecoderImpl decoding",
- bitstream_buffer_id);
+ task_id);
}
void VideoCaptureJpegDecoderImpl::NotifyError(
- int32_t bitstream_buffer_id,
+ int32_t task_id,
chromeos_camera::MjpegDecodeAccelerator::Error error) {
DCHECK(decoder_task_runner_->RunsTasksInCurrentSequence());
- LOG(ERROR) << "Decode error, bitstream_buffer_id=" << bitstream_buffer_id
- << ", error=" << error;
+ LOG(ERROR) << "Decode error, task_id=" << task_id << ", error=" << error;
send_log_message_cb_.Run("Gpu Jpeg decoder failed");
base::AutoLock lock(lock_);
decode_done_closure_.Reset();
@@ -235,11 +241,4 @@ void VideoCaptureJpegDecoderImpl::RecordInitDecodeUMA_Locked() {
decoder_status_ == INIT_PASSED);
}
-void VideoCaptureJpegDecoderImpl::DestroyDecoderOnIOThread(
- base::WaitableEvent* event) {
- DCHECK(decoder_task_runner_->RunsTasksInCurrentSequence());
- decoder_.reset();
- event->Signal();
-}
-
} // namespace media
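
The input-buffer rework above is the standard migration off base::SharedMemory: create a base::UnsafeSharedMemoryRegion once, keep a writable mapping for local copies, and hand out Duplicate()d region handles for IPC. A condensed sketch of that pattern, assuming the Chromium //base library and with error paths and locking trimmed:

// Condensed sketch of the region/mapping pattern used above; assumes the
// Chromium //base library. Error handling and locking are trimmed.
#include <cstddef>
#include <cstdint>
#include <cstring>

#include "base/memory/unsafe_shared_memory_region.h"

bool CopyIntoSharedRegion(const uint8_t* data,
                          size_t size,
                          base::UnsafeSharedMemoryRegion* region,
                          base::WritableSharedMemoryMapping* mapping) {
  if (!region->IsValid() || !mapping->IsValid() || size > mapping->size()) {
    // Reserve 2x space to avoid frequent reallocations, as the decoder does.
    *region = base::UnsafeSharedMemoryRegion::Create(2 * size);
    if (!region->IsValid())
      return false;
    *mapping = region->Map();
    if (!mapping->IsValid())
      return false;
  }
  memcpy(mapping->memory(), data, size);
  // region->Duplicate() can now be handed across IPC (e.g. wrapped in a
  // media::BitstreamBuffer) while |mapping| stays valid locally.
  return true;
}
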
diff --git a/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.h b/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.h
index 86b14dfb293..7bb0296b25a 100644
--- a/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.h
+++ b/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.h
@@ -13,6 +13,7 @@
#include "base/callback.h"
#include "base/macros.h"
+#include "base/memory/unsafe_shared_memory_region.h"
#include "base/memory/weak_ptr.h"
#include "base/sequence_checker.h"
#include "components/chromeos_camera/mojo_mjpeg_decode_accelerator.h"
@@ -26,7 +27,7 @@ namespace media {
// Implementation of media::VideoCaptureJpegDecoder that delegates to a
// chromeos_camera::mojom::MjpegDecodeAccelerator. When a frame is received in
-// DecodeCapturedData(), it is copied to |in_shared_memory| for IPC transport
+// DecodeCapturedData(), it is copied to |in_shared_region_| for IPC transport
// to |decoder_|. When the decoder is finished with the frame, |decode_done_cb_|
// is invoked. Until |decode_done_cb_| is invoked, subsequent calls to
// DecodeCapturedData() are ignored.
@@ -93,20 +94,21 @@ class CAPTURE_EXPORT VideoCaptureJpegDecoderImpl
base::OnceClosure decode_done_closure_;
// Next id for input BitstreamBuffer.
- int32_t next_bitstream_buffer_id_;
+ int32_t next_task_id_;
// The id for current input BitstreamBuffer being decoded.
- int32_t in_buffer_id_;
+ int32_t task_id_;
// Shared memory to store JPEG stream buffer. The input BitstreamBuffer is
// backed by this.
- std::unique_ptr<base::SharedMemory> in_shared_memory_;
+ base::UnsafeSharedMemoryRegion in_shared_region_;
+ base::WritableSharedMemoryMapping in_shared_mapping_;
STATUS decoder_status_;
SEQUENCE_CHECKER(sequence_checker_);
- base::WeakPtrFactory<VideoCaptureJpegDecoderImpl> weak_ptr_factory_;
+ base::WeakPtrFactory<VideoCaptureJpegDecoderImpl> weak_ptr_factory_{this};
DISALLOW_COPY_AND_ASSIGN(VideoCaptureJpegDecoderImpl);
};
diff --git a/chromium/media/capture/video/create_video_capture_device_factory.cc b/chromium/media/capture/video/create_video_capture_device_factory.cc
index 9e4bcfcae03..18f519a521d 100644
--- a/chromium/media/capture/video/create_video_capture_device_factory.cc
+++ b/chromium/media/capture/video/create_video_capture_device_factory.cc
@@ -13,6 +13,7 @@
#if defined(OS_LINUX) && !defined(OS_CHROMEOS)
#include "media/capture/video/linux/video_capture_device_factory_linux.h"
#elif defined(OS_CHROMEOS)
+#include "media/capture/video/chromeos/camera_app_device_bridge_impl.h"
#include "media/capture/video/chromeos/public/cros_features.h"
#include "media/capture/video/chromeos/video_capture_device_factory_chromeos.h"
#include "media/capture/video/linux/video_capture_device_factory_linux.h"
@@ -30,12 +31,36 @@ namespace media {
namespace {
+// Returns nullptr if the fake-device command line switch is not present.
std::unique_ptr<VideoCaptureDeviceFactory>
-CreatePlatformSpecificVideoCaptureDeviceFactory(
- scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner) {
-#if defined(OS_LINUX) && !defined(OS_CHROMEOS)
- return std::make_unique<VideoCaptureDeviceFactoryLinux>(ui_task_runner);
-#elif defined(OS_CHROMEOS)
+CreateFakeVideoCaptureDeviceFactory() {
+ const base::CommandLine* command_line =
+ base::CommandLine::ForCurrentProcess();
+ // Use a Fake or File Video Device Factory if the command line flags are
+ // present, otherwise use the normal, platform-dependent, device factory.
+ if (command_line->HasSwitch(switches::kUseFakeDeviceForMediaStream)) {
+ if (command_line->HasSwitch(switches::kUseFileForFakeVideoCapture)) {
+ return std::make_unique<FileVideoCaptureDeviceFactory>();
+ } else {
+ std::vector<FakeVideoCaptureDeviceSettings> config;
+ FakeVideoCaptureDeviceFactory::ParseFakeDevicesConfigFromOptionsString(
+ command_line->GetSwitchValueASCII(
+ switches::kUseFakeDeviceForMediaStream),
+ &config);
+ auto result = std::make_unique<FakeVideoCaptureDeviceFactory>();
+ result->SetToCustomDevicesConfig(config);
+ return std::move(result);
+ }
+ } else {
+ return nullptr;
+ }
+}
+
+#if defined(OS_CHROMEOS)
+std::unique_ptr<VideoCaptureDeviceFactory>
+CreateChromeOSVideoCaptureDeviceFactory(
+ scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
+ media::CameraAppDeviceBridgeImpl* camera_app_device_bridge) {
// On Chrome OS we have to support two use cases:
//
// 1. For devices that have the camera HAL v3 service running on Chrome OS,
@@ -46,10 +71,21 @@ CreatePlatformSpecificVideoCaptureDeviceFactory(
// some special devices that may never be able to implement a camera HAL
// v3.
if (ShouldUseCrosCameraService()) {
- return std::make_unique<VideoCaptureDeviceFactoryChromeOS>(ui_task_runner);
+ return std::make_unique<VideoCaptureDeviceFactoryChromeOS>(
+ ui_task_runner, camera_app_device_bridge);
} else {
return std::make_unique<VideoCaptureDeviceFactoryLinux>(ui_task_runner);
}
+}
+#endif // defined(OS_CHROMEOS)
+
+std::unique_ptr<VideoCaptureDeviceFactory>
+CreatePlatformSpecificVideoCaptureDeviceFactory(
+ scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner) {
+#if defined(OS_LINUX) && !defined(OS_CHROMEOS)
+ return std::make_unique<VideoCaptureDeviceFactoryLinux>(ui_task_runner);
+#elif defined(OS_CHROMEOS)
+ return CreateChromeOSVideoCaptureDeviceFactory(ui_task_runner, {});
#elif defined(OS_WIN)
return std::make_unique<VideoCaptureDeviceFactoryWin>();
#elif defined(OS_MACOSX)
@@ -68,23 +104,9 @@ CreatePlatformSpecificVideoCaptureDeviceFactory(
std::unique_ptr<VideoCaptureDeviceFactory> CreateVideoCaptureDeviceFactory(
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner) {
- const base::CommandLine* command_line =
- base::CommandLine::ForCurrentProcess();
- // Use a Fake or File Video Device Factory if the command line flags are
- // present, otherwise use the normal, platform-dependent, device factory.
- if (command_line->HasSwitch(switches::kUseFakeDeviceForMediaStream)) {
- if (command_line->HasSwitch(switches::kUseFileForFakeVideoCapture)) {
- return std::make_unique<FileVideoCaptureDeviceFactory>();
- } else {
- std::vector<FakeVideoCaptureDeviceSettings> config;
- FakeVideoCaptureDeviceFactory::ParseFakeDevicesConfigFromOptionsString(
- command_line->GetSwitchValueASCII(
- switches::kUseFakeDeviceForMediaStream),
- &config);
- auto result = std::make_unique<FakeVideoCaptureDeviceFactory>();
- result->SetToCustomDevicesConfig(config);
- return std::move(result);
- }
+ auto fake_device_factory = CreateFakeVideoCaptureDeviceFactory();
+ if (fake_device_factory) {
+ return fake_device_factory;
} else {
// |ui_task_runner| is needed for the Linux ChromeOS factory to retrieve
// screen rotations.
@@ -92,4 +114,20 @@ std::unique_ptr<VideoCaptureDeviceFactory> CreateVideoCaptureDeviceFactory(
}
}
+#if defined(OS_CHROMEOS)
+std::unique_ptr<VideoCaptureDeviceFactory> CreateVideoCaptureDeviceFactory(
+ scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
+ media::CameraAppDeviceBridgeImpl* camera_app_device_bridge) {
+ auto fake_device_factory = CreateFakeVideoCaptureDeviceFactory();
+ if (fake_device_factory) {
+ return fake_device_factory;
+ } else {
+ // |ui_task_runner| is needed for the Linux ChromeOS factory to retrieve
+ // screen rotations.
+ return CreateChromeOSVideoCaptureDeviceFactory(ui_task_runner,
+ camera_app_device_bridge);
+ }
+}
+#endif // defined(OS_CHROMEOS)
+
} // namespace media
diff --git a/chromium/media/capture/video/create_video_capture_device_factory.h b/chromium/media/capture/video/create_video_capture_device_factory.h
index 7199bf800e3..b2450facdf5 100644
--- a/chromium/media/capture/video/create_video_capture_device_factory.h
+++ b/chromium/media/capture/video/create_video_capture_device_factory.h
@@ -13,10 +13,19 @@
namespace media {
+class CameraAppDeviceBridgeImpl;
+
std::unique_ptr<VideoCaptureDeviceFactory> CAPTURE_EXPORT
CreateVideoCaptureDeviceFactory(
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner);
+#if defined(OS_CHROMEOS)
+std::unique_ptr<VideoCaptureDeviceFactory> CAPTURE_EXPORT
+CreateVideoCaptureDeviceFactory(
+ scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
+ media::CameraAppDeviceBridgeImpl* camera_app_device_bridge);
+#endif // defined(OS_CHROMEOS)
+
} // namespace media
#endif // MEDIA_CAPTURE_VIDEO_CREATE_VIDEO_CAPTURE_DEVICE_FACTORY_H_
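
A hypothetical call site for the new Chrome OS overload (MakeCaptureFactory is not a Chromium function, and the bridge is assumed to be owned elsewhere and to outlive the returned factory):

// Hypothetical call site for the new overload; MakeCaptureFactory is not a
// Chromium function, and the bridge is assumed to be owned elsewhere and to
// outlive the returned factory.
#include <memory>
#include <utility>

#include "base/single_thread_task_runner.h"
#include "build/build_config.h"
#include "media/capture/video/create_video_capture_device_factory.h"

std::unique_ptr<media::VideoCaptureDeviceFactory> MakeCaptureFactory(
    scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
    media::CameraAppDeviceBridgeImpl* bridge) {
#if defined(OS_CHROMEOS)
  // The bridge is what lets RequestManager surface result metadata and
  // reprocessing hooks to the camera app.
  return media::CreateVideoCaptureDeviceFactory(std::move(ui_task_runner),
                                                bridge);
#else
  return media::CreateVideoCaptureDeviceFactory(std::move(ui_task_runner));
#endif
}
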
diff --git a/chromium/media/capture/video/fake_video_capture_device.cc b/chromium/media/capture/video/fake_video_capture_device.cc
index 228c445b583..f8c3f37bfe4 100644
--- a/chromium/media/capture/video/fake_video_capture_device.cc
+++ b/chromium/media/capture/video/fake_video_capture_device.cc
@@ -39,6 +39,14 @@ static const int kBeepInterval = 500;
// Gradient travels from bottom to top in 5 seconds.
static const float kGradientFrequency = 1.f / 5;
+static const double kMinPan = 100.0;
+static const double kMaxPan = 400.0;
+static const double kPanStep = 1.0;
+
+static const double kMinTilt = 100.0;
+static const double kMaxTilt = 400.0;
+static const double kTiltStep = 1.0;
+
static const double kMinZoom = 100.0;
static const double kMaxZoom = 400.0;
static const double kZoomStep = 1.0;
@@ -120,7 +128,6 @@ gfx::ColorSpace GetDefaultColorSpace(VideoPixelFormat format) {
case PIXEL_FORMAT_I444:
case PIXEL_FORMAT_NV12:
case PIXEL_FORMAT_NV21:
- case PIXEL_FORMAT_MT21:
case PIXEL_FORMAT_YUV420P9:
case PIXEL_FORMAT_YUV420P10:
case PIXEL_FORMAT_YUV422P9:
@@ -541,6 +548,18 @@ void FakePhotoDevice::GetPhotoState(
photo_state->focus_distance->min = kMinFocusDistance;
photo_state->focus_distance->step = kFocusDistanceStep;
+ photo_state->pan = mojom::Range::New();
+ photo_state->pan->current = fake_device_state_->pan;
+ photo_state->pan->max = kMaxPan;
+ photo_state->pan->min = kMinPan;
+ photo_state->pan->step = kPanStep;
+
+ photo_state->tilt = mojom::Range::New();
+ photo_state->tilt->current = fake_device_state_->tilt;
+ photo_state->tilt->max = kMaxTilt;
+ photo_state->tilt->min = kMinTilt;
+ photo_state->tilt->step = kTiltStep;
+
photo_state->zoom = mojom::Range::New();
photo_state->zoom->current = fake_device_state_->zoom;
photo_state->zoom->max = kMaxZoom;
@@ -579,6 +598,14 @@ void FakePhotoDevice::SetPhotoOptions(
if (config_.should_fail_set_photo_options)
return;
+ if (settings->has_pan) {
+ device_state_write_access->pan =
+ std::max(kMinPan, std::min(settings->pan, kMaxPan));
+ }
+ if (settings->has_tilt) {
+ device_state_write_access->tilt =
+ std::max(kMinTilt, std::min(settings->tilt, kMaxTilt));
+ }
if (settings->has_zoom) {
device_state_write_access->zoom =
std::max(kMinZoom, std::min(settings->zoom, kMaxZoom));
diff --git a/chromium/media/capture/video/fake_video_capture_device.h b/chromium/media/capture/video/fake_video_capture_device.h
index 80e11fb56f9..e5ceb5603d2 100644
--- a/chromium/media/capture/video/fake_video_capture_device.h
+++ b/chromium/media/capture/video/fake_video_capture_device.h
@@ -102,12 +102,16 @@ class FakeVideoCaptureDevice : public VideoCaptureDevice {
// This is a separate struct because read-access to it is shared with several
// collaborating classes.
struct FakeDeviceState {
- FakeDeviceState(double zoom,
+ FakeDeviceState(double pan,
+ double tilt,
+ double zoom,
double exposure_time,
double focus_distance,
float frame_rate,
VideoPixelFormat pixel_format)
- : zoom(zoom),
+ : pan(pan),
+ tilt(tilt),
+ zoom(zoom),
exposure_time(exposure_time),
focus_distance(focus_distance),
format(gfx::Size(), frame_rate, pixel_format) {
@@ -117,6 +121,8 @@ struct FakeDeviceState {
: mojom::MeteringMode::CONTINUOUS;
}
+ double pan;
+ double tilt;
double zoom;
double exposure_time;
mojom::MeteringMode exposure_mode;
diff --git a/chromium/media/capture/video/fake_video_capture_device_factory.cc b/chromium/media/capture/video/fake_video_capture_device_factory.cc
index b4fada4de86..8af8625e74f 100644
--- a/chromium/media/capture/video/fake_video_capture_device_factory.cc
+++ b/chromium/media/capture/video/fake_video_capture_device_factory.cc
@@ -33,6 +33,8 @@ static constexpr std::array<gfx::Size, 5> kDefaultResolutions{
gfx::Size(1280, 720), gfx::Size(1920, 1080)}};
static constexpr std::array<float, 1> kDefaultFrameRates{{20.0f}};
+static const double kInitialPan = 100.0;
+static const double kInitialTilt = 100.0;
static const double kInitialZoom = 100.0;
static const double kInitialExposureTime = 50.0;
static const double kInitialFocusDistance = 50.0;
@@ -133,8 +135,9 @@ FakeVideoCaptureDeviceFactory::CreateDeviceWithSettings(
const VideoCaptureFormat& initial_format = settings.supported_formats.front();
auto device_state = std::make_unique<FakeDeviceState>(
- kInitialZoom, kInitialExposureTime, kInitialFocusDistance,
- initial_format.frame_rate, initial_format.pixel_format);
+ kInitialPan, kInitialTilt, kInitialZoom, kInitialExposureTime,
+ kInitialFocusDistance, initial_format.frame_rate,
+ initial_format.pixel_format);
auto photo_frame_painter = std::make_unique<PacmanFramePainter>(
PacmanFramePainter::Format::SK_N32, device_state.get());
diff --git a/chromium/media/capture/video/fake_video_capture_device_unittest.cc b/chromium/media/capture/video/fake_video_capture_device_unittest.cc
index 47dc96d69cc..f83fe157d5b 100644
--- a/chromium/media/capture/video/fake_video_capture_device_unittest.cc
+++ b/chromium/media/capture/video/fake_video_capture_device_unittest.cc
@@ -13,7 +13,7 @@
#include "base/bind.h"
#include "base/command_line.h"
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/test/test_timeouts.h"
#include "base/threading/thread.h"
#include "build/build_config.h"
@@ -58,16 +58,14 @@ class StubBufferHandleProvider
~StubBufferHandleProvider() override = default;
- mojo::ScopedSharedBufferHandle GetHandleForInterProcessTransit(
- bool read_only) override {
+ base::UnsafeSharedMemoryRegion DuplicateAsUnsafeRegion() override {
NOTREACHED();
- return mojo::ScopedSharedBufferHandle();
+ return {};
}
- base::SharedMemoryHandle GetNonOwnedSharedMemoryHandleForLegacyIPC()
- override {
+ mojo::ScopedSharedBufferHandle DuplicateAsMojoBuffer() override {
NOTREACHED();
- return base::SharedMemoryHandle();
+ return mojo::ScopedSharedBufferHandle();
}
std::unique_ptr<VideoCaptureBufferHandle> GetHandleForInProcessAccess()
@@ -75,11 +73,9 @@ class StubBufferHandleProvider
return std::make_unique<StubBufferHandle>(mapped_size_, data_);
}
-#if defined(OS_CHROMEOS)
gfx::GpuMemoryBufferHandle GetGpuMemoryBufferHandle() override {
return gfx::GpuMemoryBufferHandle();
}
-#endif
private:
const size_t mapped_size_;
@@ -204,7 +200,7 @@ class FakeVideoCaptureDeviceTestBase : public ::testing::Test {
const VideoCaptureFormat& last_format() const { return last_format_; }
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
std::unique_ptr<VideoCaptureDeviceDescriptors> descriptors_;
std::unique_ptr<base::RunLoop> run_loop_;
std::unique_ptr<MockVideoCaptureDeviceClient> client_;
@@ -410,6 +406,17 @@ TEST_F(FakeVideoCaptureDeviceTest, GetAndSetCapabilities) {
EXPECT_EQ(96, state->width->min);
EXPECT_EQ(1920, state->width->max);
EXPECT_EQ(1, state->width->step);
+
+ EXPECT_EQ(100, state->pan->min);
+ EXPECT_EQ(400, state->pan->max);
+ EXPECT_EQ(1, state->pan->step);
+ EXPECT_GE(state->pan->current, state->pan->min);
+ EXPECT_GE(state->pan->max, state->pan->current);
+ EXPECT_EQ(100, state->tilt->min);
+ EXPECT_EQ(400, state->tilt->max);
+ EXPECT_EQ(1, state->tilt->step);
+ EXPECT_GE(state->tilt->current, state->tilt->min);
+ EXPECT_GE(state->tilt->max, state->tilt->current);
EXPECT_EQ(100, state->zoom->min);
EXPECT_EQ(400, state->zoom->max);
EXPECT_EQ(1, state->zoom->step);
diff --git a/chromium/media/capture/video/file_video_capture_device.cc b/chromium/media/capture/video/file_video_capture_device.cc
index 6797f83ed20..97356103731 100644
--- a/chromium/media/capture/video/file_video_capture_device.cc
+++ b/chromium/media/capture/video/file_video_capture_device.cc
@@ -370,8 +370,8 @@ void FileVideoCaptureDevice::SetPhotoOptions(mojom::PhotoSettingsPtr settings,
settings->has_color_temperature || settings->has_iso ||
settings->has_brightness || settings->has_contrast ||
settings->has_saturation || settings->has_sharpness ||
- settings->has_focus_distance || settings->has_zoom ||
- settings->has_fill_light_mode) {
+ settings->has_focus_distance || settings->has_pan || settings->has_tilt ||
+ settings->has_zoom || settings->has_fill_light_mode) {
return;
}
diff --git a/chromium/media/capture/video/file_video_capture_device_unittest.cc b/chromium/media/capture/video/file_video_capture_device_unittest.cc
index c32d147251f..117dbeea0d0 100644
--- a/chromium/media/capture/video/file_video_capture_device_unittest.cc
+++ b/chromium/media/capture/video/file_video_capture_device_unittest.cc
@@ -9,7 +9,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/test_data_util.h"
#include "media/capture/video/file_video_capture_device.h"
@@ -67,7 +67,7 @@ class FileVideoCaptureDeviceTest : public ::testing::Test {
MockImageCaptureClient image_capture_client_;
std::unique_ptr<VideoCaptureDevice> device_;
VideoCaptureFormat last_format_;
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
};
TEST_F(FileVideoCaptureDeviceTest, GetPhotoState) {
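
The ScopedTaskEnvironment to base::test::TaskEnvironment rename seen throughout these tests is mechanical. A minimal fixture using the renamed helper, as an illustrative example rather than a real Chromium test:

// Minimal illustration of the renamed helper; SampleTaskTest is not a real
// Chromium test.
#include "base/bind.h"
#include "base/location.h"
#include "base/run_loop.h"
#include "base/test/task_environment.h"
#include "base/threading/thread_task_runner_handle.h"
#include "testing/gtest/include/gtest/gtest.h"

class SampleTaskTest : public ::testing::Test {
 protected:
  // Replaces the former base::test::ScopedTaskEnvironment member.
  base::test::TaskEnvironment task_environment_;
};

TEST_F(SampleTaskTest, RunsPostedTask) {
  bool ran = false;
  base::ThreadTaskRunnerHandle::Get()->PostTask(
      FROM_HERE, base::BindOnce([](bool* flag) { *flag = true; }, &ran));
  base::RunLoop().RunUntilIdle();
  EXPECT_TRUE(ran);
}
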
diff --git a/chromium/media/capture/video/linux/v4l2_capture_delegate.cc b/chromium/media/capture/video/linux/v4l2_capture_delegate.cc
index 5a8bd1f18b3..fd8bcb536b4 100644
--- a/chromium/media/capture/video/linux/v4l2_capture_delegate.cc
+++ b/chromium/media/capture/video/linux/v4l2_capture_delegate.cc
@@ -352,30 +352,10 @@ void V4L2CaptureDelegate::AllocateAndStart(
capture_format_.frame_rate = frame_rate;
capture_format_.pixel_format = pixel_format;
- v4l2_requestbuffers r_buffer;
- FillV4L2RequestBuffer(&r_buffer, kNumVideoBuffers);
- if (DoIoctl(VIDIOC_REQBUFS, &r_buffer) < 0) {
- SetErrorState(VideoCaptureError::kV4L2ErrorRequestingMmapBuffers, FROM_HERE,
- "Error requesting MMAP buffers from V4L2");
- return;
- }
- for (unsigned int i = 0; i < r_buffer.count; ++i) {
- if (!MapAndQueueBuffer(i)) {
- SetErrorState(VideoCaptureError::kV4L2AllocateBufferFailed, FROM_HERE,
- "Allocate buffer failed");
- return;
- }
- }
-
- v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- if (DoIoctl(VIDIOC_STREAMON, &capture_type) < 0) {
- SetErrorState(VideoCaptureError::kV4L2VidiocStreamonFailed, FROM_HERE,
- "VIDIOC_STREAMON failed");
+ if (!StartStream())
return;
- }
client_->OnStarted();
- is_capturing_ = true;
// Post task to start fetching frames from v4l2.
v4l2_task_runner_->PostTask(
@@ -384,28 +364,10 @@ void V4L2CaptureDelegate::AllocateAndStart(
void V4L2CaptureDelegate::StopAndDeAllocate() {
DCHECK(v4l2_task_runner_->BelongsToCurrentThread());
- // The order is important: stop streaming, clear |buffer_pool_|,
- // thus munmap()ing the v4l2_buffers, and then return them to the OS.
- v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- if (DoIoctl(VIDIOC_STREAMOFF, &capture_type) < 0) {
- SetErrorState(VideoCaptureError::kV4L2VidiocStreamoffFailed, FROM_HERE,
- "VIDIOC_STREAMOFF failed");
- return;
- }
-
- buffer_tracker_pool_.clear();
-
- v4l2_requestbuffers r_buffer;
- FillV4L2RequestBuffer(&r_buffer, 0);
- if (DoIoctl(VIDIOC_REQBUFS, &r_buffer) < 0) {
- SetErrorState(VideoCaptureError::kV4L2FailedToVidiocReqbufsWithCount0,
- FROM_HERE, "Failed to VIDIOC_REQBUFS with count = 0");
- }
-
+ StopStream();
// At this point we can close the device.
// This is also needed for correctly changing settings later via VIDIOC_S_FMT.
device_fd_.reset();
- is_capturing_ = false;
client_.reset();
}
@@ -423,6 +385,8 @@ void V4L2CaptureDelegate::GetPhotoState(
mojom::PhotoStatePtr photo_capabilities = mojo::CreateEmptyPhotoState();
+ photo_capabilities->pan = RetrieveUserControlRange(V4L2_CID_PAN_ABSOLUTE);
+ photo_capabilities->tilt = RetrieveUserControlRange(V4L2_CID_TILT_ABSOLUTE);
photo_capabilities->zoom = RetrieveUserControlRange(V4L2_CID_ZOOM_ABSOLUTE);
v4l2_queryctrl manual_focus_ctrl = {};
@@ -529,6 +493,22 @@ void V4L2CaptureDelegate::SetPhotoOptions(
if (!device_fd_.is_valid() || !is_capturing_)
return;
+ if (settings->has_pan) {
+ v4l2_control pan_current = {};
+ pan_current.id = V4L2_CID_PAN_ABSOLUTE;
+ pan_current.value = settings->pan;
+ if (DoIoctl(VIDIOC_S_CTRL, &pan_current) < 0)
+ DPLOG(ERROR) << "setting pan to " << settings->pan;
+ }
+
+ if (settings->has_tilt) {
+ v4l2_control tilt_current = {};
+ tilt_current.id = V4L2_CID_TILT_ABSOLUTE;
+ tilt_current.value = settings->tilt;
+ if (DoIoctl(VIDIOC_S_CTRL, &tilt_current) < 0)
+ DPLOG(ERROR) << "setting tilt to " << settings->tilt;
+ }
+
if (settings->has_zoom) {
v4l2_control zoom_current = {};
zoom_current.id = V4L2_CID_ZOOM_ABSOLUTE;
@@ -814,6 +794,34 @@ bool V4L2CaptureDelegate::MapAndQueueBuffer(int index) {
return true;
}
+bool V4L2CaptureDelegate::StartStream() {
+ DCHECK(v4l2_task_runner_->BelongsToCurrentThread());
+ DCHECK(!is_capturing_);
+
+ v4l2_requestbuffers r_buffer;
+ FillV4L2RequestBuffer(&r_buffer, kNumVideoBuffers);
+ if (DoIoctl(VIDIOC_REQBUFS, &r_buffer) < 0) {
+ SetErrorState(VideoCaptureError::kV4L2ErrorRequestingMmapBuffers, FROM_HERE,
+ "Error requesting MMAP buffers from V4L2");
+ return false;
+ }
+ for (unsigned int i = 0; i < r_buffer.count; ++i) {
+ if (!MapAndQueueBuffer(i)) {
+ SetErrorState(VideoCaptureError::kV4L2AllocateBufferFailed, FROM_HERE,
+ "Allocate buffer failed");
+ return false;
+ }
+ }
+ v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (DoIoctl(VIDIOC_STREAMON, &capture_type) < 0) {
+ SetErrorState(VideoCaptureError::kV4L2VidiocStreamonFailed, FROM_HERE,
+ "VIDIOC_STREAMON failed");
+ return false;
+ }
+ is_capturing_ = true;
+ return true;
+}
+
void V4L2CaptureDelegate::DoCapture() {
DCHECK(v4l2_task_runner_->BelongsToCurrentThread());
if (!is_capturing_)
@@ -834,7 +842,21 @@ void V4L2CaptureDelegate::DoCapture() {
// throw an error if it times out too many times.
if (result == 0) {
timeout_count_++;
- if (timeout_count_ >= kContinuousTimeoutLimit) {
+ if (timeout_count_ == 1) {
+ // TODO(crbug.com/1010557): this is an unfortunate workaround for an issue
+ // with the Huddly GO camera where the device seems to get into a deadlock
+ // state. As best as we can tell for now, there is a synchronization issue
+ // in older kernels, and stopping and starting the stream gets the camera
+ // out of this bad state. Upgrading the kernel is difficult so this is our
+ // way out for now.
+ DLOG(WARNING) << "Restarting camera stream";
+ if (!StopStream() || !StartStream())
+ return;
+ v4l2_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&V4L2CaptureDelegate::DoCapture, GetWeakPtr()));
+ return;
+ } else if (timeout_count_ >= kContinuousTimeoutLimit) {
SetErrorState(
VideoCaptureError::kV4L2MultipleContinuousTimeoutsWhileReadPolling,
FROM_HERE, "Multiple continuous timeouts while read-polling.");
@@ -925,6 +947,33 @@ void V4L2CaptureDelegate::DoCapture() {
FROM_HERE, base::BindOnce(&V4L2CaptureDelegate::DoCapture, GetWeakPtr()));
}
+bool V4L2CaptureDelegate::StopStream() {
+ DCHECK(v4l2_task_runner_->BelongsToCurrentThread());
+ DCHECK(is_capturing_);
+ is_capturing_ = false;
+
+ // The order is important: stop streaming, clear |buffer_pool_|,
+ // thus munmap()ing the v4l2_buffers, and then return them to the OS.
+ v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (DoIoctl(VIDIOC_STREAMOFF, &capture_type) < 0) {
+ SetErrorState(VideoCaptureError::kV4L2VidiocStreamoffFailed, FROM_HERE,
+ "VIDIOC_STREAMOFF failed");
+ return false;
+ }
+
+ buffer_tracker_pool_.clear();
+
+ v4l2_requestbuffers r_buffer;
+ FillV4L2RequestBuffer(&r_buffer, 0);
+ if (DoIoctl(VIDIOC_REQBUFS, &r_buffer) < 0) {
+ SetErrorState(VideoCaptureError::kV4L2FailedToVidiocReqbufsWithCount0,
+ FROM_HERE, "Failed to VIDIOC_REQBUFS with count = 0");
+ return false;
+ }
+
+ return true;
+}
+
void V4L2CaptureDelegate::SetErrorState(VideoCaptureError error,
const base::Location& from_here,
const std::string& reason) {
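
The pan/tilt plumbing above bottoms out in ordinary V4L2 user controls. As a standalone sketch against the kernel API (not Chromium code), setting absolute pan on an already-open, streaming device descriptor looks like:

// Standalone sketch of the underlying V4L2 control; not Chromium code. |fd|
// is assumed to be an open, streaming V4L2 capture device.
#include <linux/videodev2.h>
#include <sys/ioctl.h>

#include <cerrno>
#include <cstdio>
#include <cstring>

bool SetAbsolutePan(int fd, int pan_value) {
  v4l2_control ctrl = {};
  ctrl.id = V4L2_CID_PAN_ABSOLUTE;
  ctrl.value = pan_value;
  if (ioctl(fd, VIDIOC_S_CTRL, &ctrl) < 0) {
    std::fprintf(stderr, "setting pan to %d failed: %s\n", pan_value,
                 std::strerror(errno));
    return false;
  }
  return true;
}
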
diff --git a/chromium/media/capture/video/linux/v4l2_capture_delegate.h b/chromium/media/capture/video/linux/v4l2_capture_delegate.h
index b8fbad8b115..8d8c262d9f8 100644
--- a/chromium/media/capture/video/linux/v4l2_capture_delegate.h
+++ b/chromium/media/capture/video/linux/v4l2_capture_delegate.h
@@ -100,7 +100,9 @@ class CAPTURE_EXPORT V4L2CaptureDelegate final {
// enqueues it (VIDIOC_QBUF) back into V4L2.
bool MapAndQueueBuffer(int index);
+ bool StartStream();
void DoCapture();
+ bool StopStream();
void SetErrorState(VideoCaptureError error,
const base::Location& from_here,
diff --git a/chromium/media/capture/video/linux/v4l2_capture_delegate_unittest.cc b/chromium/media/capture/video/linux/v4l2_capture_delegate_unittest.cc
index 23ad2c06e26..86b13589deb 100644
--- a/chromium/media/capture/video/linux/v4l2_capture_delegate_unittest.cc
+++ b/chromium/media/capture/video/linux/v4l2_capture_delegate_unittest.cc
@@ -7,7 +7,7 @@
#include "base/files/file_enumerator.h"
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/thread_task_runner_handle.h"
#include "build/build_config.h"
#include "media/capture/video/linux/v4l2_capture_delegate.h"
@@ -186,7 +186,7 @@ class V4L2CaptureDelegateTest : public ::testing::Test {
0)) {}
~V4L2CaptureDelegateTest() override = default;
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::SingleThreadTaskEnvironment task_environment_;
VideoCaptureDeviceDescriptor device_descriptor_;
scoped_refptr<V4L2CaptureDevice> v4l2_;
std::unique_ptr<V4L2CaptureDelegate> delegate_;
diff --git a/chromium/media/capture/video/linux/video_capture_device_factory_linux_unittest.cc b/chromium/media/capture/video/linux/video_capture_device_factory_linux_unittest.cc
index eecb165c154..88334bb44d5 100644
--- a/chromium/media/capture/video/linux/video_capture_device_factory_linux_unittest.cc
+++ b/chromium/media/capture/video/linux/video_capture_device_factory_linux_unittest.cc
@@ -3,8 +3,9 @@
// found in the LICENSE file.
#include "media/capture/video/linux/video_capture_device_factory_linux.h"
+
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/capture/video/linux/fake_v4l2_impl.h"
#include "media/capture/video/mock_video_capture_device_client.h"
#include "testing/gmock/include/gmock/gmock.h"
@@ -82,7 +83,7 @@ class VideoCaptureDeviceFactoryLinuxTest : public ::testing::Test {
std::move(fake_device_provider));
}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
FakeV4L2Impl* fake_v4l2_;
DescriptorDeviceProvider* fake_device_provider_;
std::unique_ptr<VideoCaptureDeviceFactoryLinux> factory_;
diff --git a/chromium/media/capture/video/mac/video_capture_device_factory_mac_unittest.mm b/chromium/media/capture/video/mac/video_capture_device_factory_mac_unittest.mm
index ca95f533cb8..8329266f5e7 100644
--- a/chromium/media/capture/video/mac/video_capture_device_factory_mac_unittest.mm
+++ b/chromium/media/capture/video/mac/video_capture_device_factory_mac_unittest.mm
@@ -2,9 +2,10 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/capture/video/mac/video_capture_device_factory_mac.h"
+
#include "base/bind.h"
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/capture/video/mac/video_capture_device_mac.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -17,8 +18,8 @@ namespace media {
// required, because on MacOS the capture code must run on a CFRunLoop
// enabled message loop.
void RunTestCase(base::OnceClosure test_case) {
- base::test::ScopedTaskEnvironment scoped_task_environment(
- base::test::ScopedTaskEnvironment::MainThreadType::UI);
+ base::test::TaskEnvironment task_environment(
+ base::test::TaskEnvironment::MainThreadType::UI);
base::RunLoop run_loop;
base::ThreadTaskRunnerHandle::Get()->PostTask(
FROM_HERE, base::BindOnce(
diff --git a/chromium/media/capture/video/shared_memory_buffer_tracker.cc b/chromium/media/capture/video/shared_memory_buffer_tracker.cc
index 6143fd8db7f..6cd0b698736 100644
--- a/chromium/media/capture/video/shared_memory_buffer_tracker.cc
+++ b/chromium/media/capture/video/shared_memory_buffer_tracker.cc
@@ -5,10 +5,31 @@
#include "media/capture/video/shared_memory_buffer_tracker.h"
#include "base/logging.h"
+#include "media/base/video_frame.h"
+#include "mojo/public/cpp/system/platform_handle.h"
#include "ui/gfx/geometry/size.h"
namespace {
+// A local VideoCaptureBufferHandle implementation used with
+// GetHandleForInProcessAccess. This does not own the mapping, so the tracker
+// that generates this must outlive it.
+class SharedMemoryBufferTrackerHandle : public media::VideoCaptureBufferHandle {
+ public:
+ explicit SharedMemoryBufferTrackerHandle(
+ const base::WritableSharedMemoryMapping& mapping)
+ : mapped_size_(mapping.size()),
+ data_(mapping.GetMemoryAsSpan<uint8_t>().data()) {}
+
+ size_t mapped_size() const final { return mapped_size_; }
+ uint8_t* data() const final { return data_; }
+ const uint8_t* const_data() const final { return data_; }
+
+ private:
+ const size_t mapped_size_;
+ uint8_t* data_;
+};
+
size_t CalculateRequiredBufferSize(
const gfx::Size& dimensions,
media::VideoPixelFormat format,
@@ -39,9 +60,12 @@ SharedMemoryBufferTracker::~SharedMemoryBufferTracker() = default;
bool SharedMemoryBufferTracker::Init(const gfx::Size& dimensions,
VideoPixelFormat format,
const mojom::PlaneStridesPtr& strides) {
+ DCHECK(!region_.IsValid());
const size_t buffer_size =
CalculateRequiredBufferSize(dimensions, format, strides);
- return provider_.InitForSize(buffer_size);
+ region_ = base::UnsafeSharedMemoryRegion::Create(buffer_size);
+ mapping_ = {};
+ return region_.IsValid();
}
bool SharedMemoryBufferTracker::IsReusableForFormat(
@@ -54,28 +78,35 @@ bool SharedMemoryBufferTracker::IsReusableForFormat(
std::unique_ptr<VideoCaptureBufferHandle>
SharedMemoryBufferTracker::GetMemoryMappedAccess() {
- return provider_.GetHandleForInProcessAccess();
+ DCHECK(region_.IsValid());
+ if (!mapping_.IsValid()) {
+ mapping_ = region_.Map();
+ }
+ DCHECK(mapping_.IsValid());
+ return std::make_unique<SharedMemoryBufferTrackerHandle>(mapping_);
}
-mojo::ScopedSharedBufferHandle SharedMemoryBufferTracker::GetHandleForTransit(
- bool read_only) {
- return provider_.GetHandleForInterProcessTransit(read_only);
+base::UnsafeSharedMemoryRegion
+SharedMemoryBufferTracker::DuplicateAsUnsafeRegion() {
+ DCHECK(region_.IsValid());
+ return region_.Duplicate();
}
-base::SharedMemoryHandle
-SharedMemoryBufferTracker::GetNonOwnedSharedMemoryHandleForLegacyIPC() {
- return provider_.GetNonOwnedSharedMemoryHandleForLegacyIPC();
+mojo::ScopedSharedBufferHandle
+SharedMemoryBufferTracker::DuplicateAsMojoBuffer() {
+ DCHECK(region_.IsValid());
+ return mojo::WrapUnsafeSharedMemoryRegion(region_.Duplicate());
}
-#if defined(OS_CHROMEOS)
gfx::GpuMemoryBufferHandle
SharedMemoryBufferTracker::GetGpuMemoryBufferHandle() {
- return provider_.GetGpuMemoryBufferHandle();
+ NOTREACHED() << "Unsupported operation";
+ return gfx::GpuMemoryBufferHandle();
}
-#endif
uint32_t SharedMemoryBufferTracker::GetMemorySizeInBytes() {
- return provider_.GetMemorySizeInBytes();
+ DCHECK(region_.IsValid());
+ return region_.GetSize();
}
} // namespace media
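The tracker now owns a base::UnsafeSharedMemoryRegion plus a lazily created base::WritableSharedMemoryMapping instead of a SharedMemoryHandleProvider. A condensed sketch of that lifecycle, using the same base/mojo calls as above (the free function itself is hypothetical):

#include <stddef.h>
#include <stdint.h>

#include "base/memory/shared_memory_mapping.h"
#include "base/memory/unsafe_shared_memory_region.h"
#include "mojo/public/cpp/system/platform_handle.h"

// Hypothetical helper showing the region/mapping pair that now backs the
// tracker.
bool ExampleTrackerLifecycle(size_t buffer_size) {
  // Init(): allocate a writable (unsafe) region instead of base::SharedMemory.
  base::UnsafeSharedMemoryRegion region =
      base::UnsafeSharedMemoryRegion::Create(buffer_size);
  if (!region.IsValid())
    return false;

  // GetMemoryMappedAccess(): map lazily; the mapping is what in-process
  // handles point into, so it must outlive them.
  base::WritableSharedMemoryMapping mapping = region.Map();
  uint8_t* data = mapping.GetMemoryAsSpan<uint8_t>().data();
  (void)data;  // Would be wrapped in a VideoCaptureBufferHandle.

  // DuplicateAsUnsafeRegion() / DuplicateAsMojoBuffer(): duplicating does not
  // release the memory held by the original region.
  base::UnsafeSharedMemoryRegion dup = region.Duplicate();
  mojo::ScopedSharedBufferHandle mojo_buffer =
      mojo::WrapUnsafeSharedMemoryRegion(region.Duplicate());
  return dup.IsValid() && mojo_buffer.is_valid();
}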
diff --git a/chromium/media/capture/video/shared_memory_buffer_tracker.h b/chromium/media/capture/video/shared_memory_buffer_tracker.h
index ecefde55648..8eeda002bf4 100644
--- a/chromium/media/capture/video/shared_memory_buffer_tracker.h
+++ b/chromium/media/capture/video/shared_memory_buffer_tracker.h
@@ -5,7 +5,6 @@
#ifndef MEDIA_CAPTURE_VIDEO_SHARED_MEMORY_BUFFER_TRACKER_H_
#define MEDIA_CAPTURE_VIDEO_SHARED_MEMORY_BUFFER_TRACKER_H_
-#include "media/capture/video/shared_memory_handle_provider.h"
#include "media/capture/video/video_capture_buffer_handle.h"
#include "media/capture/video/video_capture_buffer_tracker.h"
@@ -15,7 +14,9 @@ class Size;
namespace media {
-// Tracker specifics for SharedMemory.
+// A tracker backed by unsafe shared memory. An unsafe region is necessary
+// because a buffer may be reused across multiple output media::VideoFrames
+// that are sent cross-process to a decoder, which writes into them.
class SharedMemoryBufferTracker final : public VideoCaptureBufferTracker {
public:
SharedMemoryBufferTracker();
@@ -29,16 +30,15 @@ class SharedMemoryBufferTracker final : public VideoCaptureBufferTracker {
VideoPixelFormat format,
const mojom::PlaneStridesPtr& strides) override;
+ base::UnsafeSharedMemoryRegion DuplicateAsUnsafeRegion() override;
+ mojo::ScopedSharedBufferHandle DuplicateAsMojoBuffer() override;
std::unique_ptr<VideoCaptureBufferHandle> GetMemoryMappedAccess() override;
- mojo::ScopedSharedBufferHandle GetHandleForTransit(bool read_only) override;
- base::SharedMemoryHandle GetNonOwnedSharedMemoryHandleForLegacyIPC() override;
-#if defined(OS_CHROMEOS)
gfx::GpuMemoryBufferHandle GetGpuMemoryBufferHandle() override;
-#endif
uint32_t GetMemorySizeInBytes() override;
private:
- SharedMemoryHandleProvider provider_;
+ base::UnsafeSharedMemoryRegion region_;
+ base::WritableSharedMemoryMapping mapping_;
DISALLOW_COPY_AND_ASSIGN(SharedMemoryBufferTracker);
};
diff --git a/chromium/media/capture/video/shared_memory_handle_provider.cc b/chromium/media/capture/video/shared_memory_handle_provider.cc
deleted file mode 100644
index cda831501fb..00000000000
--- a/chromium/media/capture/video/shared_memory_handle_provider.cc
+++ /dev/null
@@ -1,169 +0,0 @@
-// Copyright 2017 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/capture/video/shared_memory_handle_provider.h"
-
-namespace media {
-
-SharedMemoryHandleProvider::SharedMemoryHandleProvider() {
-#if DCHECK_IS_ON()
- map_ref_count_ = 0;
-#endif
-}
-
-SharedMemoryHandleProvider::~SharedMemoryHandleProvider() {
- base::AutoLock lock(mapping_lock_);
-
- // If the tracker is being destroyed, there must be no outstanding
- // Handles. If this DCHECK() triggers, it means that either there is a logic
- // flaw in VideoCaptureBufferPoolImpl, or a client did not delete all of its
- // owned VideoCaptureBufferHandles before calling Pool::ReliquishXYZ().
-#if DCHECK_IS_ON()
- DCHECK_EQ(map_ref_count_, 0);
-#endif
-
- if (shared_memory_ && shared_memory_->memory()) {
- DVLOG(3) << __func__ << ": Unmapping memory for in-process access @"
- << shared_memory_->memory() << '.';
- CHECK(shared_memory_->Unmap());
- }
-}
-
-bool SharedMemoryHandleProvider::InitForSize(size_t size) {
-#if DCHECK_IS_ON()
- DCHECK_EQ(map_ref_count_, 0);
-#endif
- DCHECK(!shared_memory_);
- shared_memory_.emplace();
- if (shared_memory_->CreateAnonymous(size)) {
- mapped_size_ = size;
- read_only_flag_ = false;
- return true;
- }
- return false;
-}
-
-bool SharedMemoryHandleProvider::InitFromMojoHandle(
- mojo::ScopedSharedBufferHandle buffer_handle) {
-#if DCHECK_IS_ON()
- DCHECK_EQ(map_ref_count_, 0);
-#endif
- DCHECK(!shared_memory_);
-
- base::SharedMemoryHandle memory_handle;
- mojo::UnwrappedSharedMemoryHandleProtection protection;
- const MojoResult result = mojo::UnwrapSharedMemoryHandle(
- std::move(buffer_handle), &memory_handle, &mapped_size_, &protection);
- if (result != MOJO_RESULT_OK)
- return false;
- read_only_flag_ =
- protection == mojo::UnwrappedSharedMemoryHandleProtection::kReadOnly;
- shared_memory_.emplace(memory_handle, read_only_flag_);
- return true;
-}
-
-#if defined(OS_LINUX)
-bool SharedMemoryHandleProvider::InitAsReadOnlyFromRawFileDescriptor(
- mojo::ScopedHandle fd_handle,
- uint32_t memory_size_in_bytes) {
- base::PlatformFile platform_file;
- const MojoResult result =
- mojo::UnwrapPlatformFile(std::move(fd_handle), &platform_file);
- if (result != MOJO_RESULT_OK)
- return false;
- base::UnguessableToken guid = base::UnguessableToken::Create();
- base::SharedMemoryHandle memory_handle(
- base::FileDescriptor(platform_file, true), 0u, guid);
- mapped_size_ = memory_size_in_bytes;
- read_only_flag_ = true;
- shared_memory_.emplace(memory_handle, read_only_flag_);
- return true;
-}
-#endif // defined(OS_LINUX)
-
-mojo::ScopedSharedBufferHandle
-SharedMemoryHandleProvider::GetHandleForInterProcessTransit(bool read_only) {
- if (read_only_flag_ && !read_only) {
- // Wanted read-write access, but read-only access is all that is available.
- NOTREACHED();
- return mojo::ScopedSharedBufferHandle();
- }
- // TODO(https://crbug.com/803136): This does not actually obey |read_only| in
- // any capacity because it uses DuplicateHandle. In order to properly obey
- // |read_only| (when true), we need to use |SharedMemory::GetReadOnlyHandle()|
- // but that is not possible. With the base::SharedMemory API and this
- // SharedMemoryHandleProvider API as they are today, it isn't possible to know
- // whether |shared_memory_| even supports read-only duplication. Note that
- // changing |kReadWrite| to |kReadOnly| does NOT affect the ability to map
- // the handle read-write.
- return mojo::WrapSharedMemoryHandle(
- base::SharedMemory::DuplicateHandle(shared_memory_->handle()),
- mapped_size_, mojo::UnwrappedSharedMemoryHandleProtection::kReadWrite);
-}
-
-base::SharedMemoryHandle
-SharedMemoryHandleProvider::GetNonOwnedSharedMemoryHandleForLegacyIPC() {
- return shared_memory_->handle();
-}
-
-uint32_t SharedMemoryHandleProvider::GetMemorySizeInBytes() {
- return static_cast<uint32_t>(mapped_size_);
-}
-
-std::unique_ptr<VideoCaptureBufferHandle>
-SharedMemoryHandleProvider::GetHandleForInProcessAccess() {
- {
- base::AutoLock lock(mapping_lock_);
-#if DCHECK_IS_ON()
- DCHECK_GE(map_ref_count_, 0);
- ++map_ref_count_;
-#endif
- if (!shared_memory_->memory()) {
- CHECK(shared_memory_->Map(mapped_size_));
- DVLOG(3) << __func__ << ": Mapped memory for in-process access @"
- << shared_memory_->memory() << '.';
- }
- }
-
- return std::make_unique<Handle>(this);
-}
-
-#if defined(OS_CHROMEOS)
-gfx::GpuMemoryBufferHandle
-SharedMemoryHandleProvider::GetGpuMemoryBufferHandle() {
- NOTREACHED() << "Unsupported operation";
- return gfx::GpuMemoryBufferHandle();
-}
-#endif
-
-#if DCHECK_IS_ON()
-void SharedMemoryHandleProvider::OnHandleDestroyed() {
- base::AutoLock lock(mapping_lock_);
- DCHECK_GT(map_ref_count_, 0);
- --map_ref_count_;
-}
-#endif
-
-SharedMemoryHandleProvider::Handle::Handle(SharedMemoryHandleProvider* owner)
- : owner_(owner) {}
-
-SharedMemoryHandleProvider::Handle::~Handle() {
-#if DCHECK_IS_ON()
- owner_->OnHandleDestroyed();
-#endif
-}
-
-size_t SharedMemoryHandleProvider::Handle::mapped_size() const {
- return owner_->mapped_size_;
-}
-
-uint8_t* SharedMemoryHandleProvider::Handle::data() const {
- return static_cast<uint8_t*>(owner_->shared_memory_->memory());
-}
-
-const uint8_t* SharedMemoryHandleProvider::Handle::const_data() const {
- return static_cast<const uint8_t*>(owner_->shared_memory_->memory());
-}
-
-} // namespace media
diff --git a/chromium/media/capture/video/shared_memory_handle_provider.h b/chromium/media/capture/video/shared_memory_handle_provider.h
deleted file mode 100644
index 8e2f3c125f8..00000000000
--- a/chromium/media/capture/video/shared_memory_handle_provider.h
+++ /dev/null
@@ -1,104 +0,0 @@
-// Copyright 2017 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_CAPTURE_VIDEO_SHARED_MEMORY_HANDLE_PROVIDER_H_
-#define MEDIA_CAPTURE_VIDEO_SHARED_MEMORY_HANDLE_PROVIDER_H_
-
-#include <memory>
-
-#include "base/logging.h"
-#include "base/memory/shared_memory.h"
-#include "base/optional.h"
-#include "build/build_config.h"
-#include "media/capture/capture_export.h"
-#include "media/capture/video/video_capture_buffer_handle.h"
-#include "media/capture/video/video_capture_device.h"
-#include "mojo/public/cpp/system/platform_handle.h"
-
-namespace media {
-
-// Provides handles from a single, owned base::SharedMemory instance.
-class CAPTURE_EXPORT SharedMemoryHandleProvider
- : public VideoCaptureDevice::Client::Buffer::HandleProvider {
- public:
- // Note: One of the two InitXYZ() methods must be called before using any of
- // the HandleProvider methods.
- SharedMemoryHandleProvider();
-
- ~SharedMemoryHandleProvider() override;
-
- // Initialize by creating anonymous shared memory of the given |size|. Returns
- // false if the operation failed.
- bool InitForSize(size_t size);
-
- // Initialize by duplicating an existing mojo |buffer_handle|. Returns false
- // if the operation failed.
- bool InitFromMojoHandle(mojo::ScopedSharedBufferHandle buffer_handle);
-
-// This requires platforms where base::SharedMemoryHandle is backed by a
-// file descriptor.
-#if defined(OS_LINUX)
- bool InitAsReadOnlyFromRawFileDescriptor(mojo::ScopedHandle fd_handle,
- uint32_t memory_size_in_bytes);
-#endif // defined(OS_LINUX)
-
- uint32_t GetMemorySizeInBytes();
-
- // Implementation of Buffer::HandleProvider:
- mojo::ScopedSharedBufferHandle GetHandleForInterProcessTransit(
- bool read_only) override;
- base::SharedMemoryHandle GetNonOwnedSharedMemoryHandleForLegacyIPC() override;
- std::unique_ptr<VideoCaptureBufferHandle> GetHandleForInProcessAccess()
- override;
-#if defined(OS_CHROMEOS)
- gfx::GpuMemoryBufferHandle GetGpuMemoryBufferHandle() override;
-#endif
-
- private:
- // Accessor to mapped memory. When the first of these is created, the shared
- // memory is mapped. The unmapping, however, does not occur until the
- // SharedMemoryHandleProvider is destroyed. Therefore, the provider must
- // outlive all of its Handles.
- class Handle : public VideoCaptureBufferHandle {
- public:
- explicit Handle(SharedMemoryHandleProvider* owner);
- ~Handle() final;
-
- size_t mapped_size() const final;
- uint8_t* data() const final;
- const uint8_t* const_data() const final;
-
- private:
- SharedMemoryHandleProvider* const owner_;
- };
-
-#if DCHECK_IS_ON()
- // Called by Handle to decrement |map_ref_count_|. This is thread-safe.
- void OnHandleDestroyed();
-#endif
-
- // These are set by one of the InitXYZ() methods.
- base::Optional<base::SharedMemory> shared_memory_;
- size_t mapped_size_;
- bool read_only_flag_;
-
- // Synchronizes changes to |map_ref_count_| and Map() and Unmap() operations
- // on |shared_memory_|. This is because the thread that calls
- // GetHandleForInProcessAccess() may pass ownership of the returned Handle to
- // code that runs on a diffrent thread.
- base::Lock mapping_lock_;
-
-#if DCHECK_IS_ON()
- // The number of Handle instances that are referencing the mapped memory. This
- // is only used while DCHECKs are turned on, as a sanity-check that the object
- // graph/lifetimes have not changed in a bad way.
- int map_ref_count_;
-#endif
-
- DISALLOW_COPY_AND_ASSIGN(SharedMemoryHandleProvider);
-};
-
-} // namespace media
-
-#endif // MEDIA_CAPTURE_VIDEO_SHARED_MEMORY_HANDLE_PROVIDER_H_
diff --git a/chromium/media/capture/video/shared_memory_handle_provider_unittest.cc b/chromium/media/capture/video/shared_memory_handle_provider_unittest.cc
deleted file mode 100644
index ac24e22fe1e..00000000000
--- a/chromium/media/capture/video/shared_memory_handle_provider_unittest.cc
+++ /dev/null
@@ -1,78 +0,0 @@
-// Copyright (c) 2017 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/capture/video/shared_memory_handle_provider.h"
-
-#include <stddef.h>
-#include <stdint.h>
-
-#include <memory>
-#include <utility>
-
-#include "base/memory/shared_memory.h"
-#include "mojo/public/cpp/system/buffer.h"
-#include "mojo/public/cpp/system/platform_handle.h"
-#include "testing/gtest/include/gtest/gtest.h"
-
-namespace media {
-
-namespace {
-
-const size_t kMemorySize = 1024;
-
-} // anonymous namespace
-
-class SharedMemoryHandleProviderTest : public ::testing::Test {
- public:
- SharedMemoryHandleProviderTest() = default;
- ~SharedMemoryHandleProviderTest() override = default;
-
- void UnwrapAndVerifyMojoHandle(
- mojo::ScopedSharedBufferHandle buffer_handle,
- size_t expected_size,
- mojo::UnwrappedSharedMemoryHandleProtection expected_protection) {
- base::SharedMemoryHandle memory_handle;
- size_t memory_size = 0;
- mojo::UnwrappedSharedMemoryHandleProtection protection;
- const MojoResult result = mojo::UnwrapSharedMemoryHandle(
- std::move(buffer_handle), &memory_handle, &memory_size, &protection);
- EXPECT_EQ(MOJO_RESULT_OK, result);
- EXPECT_EQ(expected_size, memory_size);
- EXPECT_EQ(expected_protection, protection);
- }
-
- protected:
- SharedMemoryHandleProvider handle_provider_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(SharedMemoryHandleProviderTest);
-};
-
-TEST_F(SharedMemoryHandleProviderTest,
- VerifyInterProcessTransitHandleForReadOnly) {
- handle_provider_.InitForSize(kMemorySize);
-
- auto mojo_handle =
- handle_provider_.GetHandleForInterProcessTransit(true /* read_only */);
-
- // TODO(https://crbug.com/803136): See comment within
- // GetHandleForInterProcessTransit() for an explanation of why this
- // intentionally read-write even though it ought to be read-only.
- UnwrapAndVerifyMojoHandle(
- std::move(mojo_handle), kMemorySize,
- mojo::UnwrappedSharedMemoryHandleProtection::kReadWrite);
-}
-
-TEST_F(SharedMemoryHandleProviderTest,
- VerifyInterProcessTransitHandleForReadWrite) {
- handle_provider_.InitForSize(kMemorySize);
-
- auto mojo_handle =
- handle_provider_.GetHandleForInterProcessTransit(false /* read_only */);
- UnwrapAndVerifyMojoHandle(
- std::move(mojo_handle), kMemorySize,
- mojo::UnwrappedSharedMemoryHandleProtection::kReadWrite);
-}
-
-} // namespace media
diff --git a/chromium/media/capture/video/video_capture_buffer_handle.cc b/chromium/media/capture/video/video_capture_buffer_handle.cc
index 935af7718e7..693c3d18fd2 100644
--- a/chromium/media/capture/video/video_capture_buffer_handle.cc
+++ b/chromium/media/capture/video/video_capture_buffer_handle.cc
@@ -7,7 +7,6 @@
#include "base/logging.h"
namespace media {
-#if defined(OS_CHROMEOS)
NullHandle::NullHandle() = default;
@@ -28,5 +27,4 @@ const uint8_t* NullHandle::const_data() const {
return nullptr;
}
-#endif
} // namespace media
diff --git a/chromium/media/capture/video/video_capture_buffer_handle.h b/chromium/media/capture/video/video_capture_buffer_handle.h
index 63bfe016ebd..617135931fc 100644
--- a/chromium/media/capture/video/video_capture_buffer_handle.h
+++ b/chromium/media/capture/video/video_capture_buffer_handle.h
@@ -21,7 +21,6 @@ class CAPTURE_EXPORT VideoCaptureBufferHandle {
virtual const uint8_t* const_data() const = 0;
};
-#if defined(OS_CHROMEOS)
// This is a null implementation used by GpuMemoryBufferTracker, as
// GpuMemoryBuffer should not be accessed on the CPU outside of the Chrome OS
// VideoCaptureDevice implementation.
@@ -33,7 +32,6 @@ class CAPTURE_EXPORT NullHandle final : public VideoCaptureBufferHandle {
uint8_t* data() const override;
const uint8_t* const_data() const override;
};
-#endif
} // namespace media
diff --git a/chromium/media/capture/video/video_capture_buffer_pool.h b/chromium/media/capture/video/video_capture_buffer_pool.h
index 6b8e6eabdef..c6e9f36edff 100644
--- a/chromium/media/capture/video/video_capture_buffer_pool.h
+++ b/chromium/media/capture/video/video_capture_buffer_pool.h
@@ -43,13 +43,13 @@ class CAPTURE_EXPORT VideoCaptureBufferPool
public:
static constexpr int kInvalidId = -1;
- // Provides a duplicate handle to the buffer. Destruction of this scoped Mojo
- // handle does not result in releasing the shared memory held by the pool.
- virtual mojo::ScopedSharedBufferHandle GetHandleForInterProcessTransit(
- int buffer_id,
- bool read_only) = 0;
-
- virtual base::SharedMemoryHandle GetNonOwnedSharedMemoryHandleForLegacyIPC(
+ // Provides a duplicate region referring to the buffer. Destruction of this
+ // duplicate does not result in releasing the shared memory held by the
+ // pool. The buffer will be writable. This may be called as necessary to
+ // create regions.
+ virtual base::UnsafeSharedMemoryRegion DuplicateAsUnsafeRegion(
+ int buffer_id) = 0;
+ virtual mojo::ScopedSharedBufferHandle DuplicateAsMojoBuffer(
int buffer_id) = 0;
virtual mojom::SharedMemoryViaRawFileDescriptorPtr
@@ -59,10 +59,8 @@ class CAPTURE_EXPORT VideoCaptureBufferPool
virtual std::unique_ptr<VideoCaptureBufferHandle> GetHandleForInProcessAccess(
int buffer_id) = 0;
-#if defined(OS_CHROMEOS)
virtual gfx::GpuMemoryBufferHandle GetGpuMemoryBufferHandle(
int buffer_id) = 0;
-#endif
// Reserve or allocate a buffer to support a packed frame of |dimensions| of
// pixel |format| and return its id. If the pool is already at maximum
diff --git a/chromium/media/capture/video/video_capture_buffer_pool_impl.cc b/chromium/media/capture/video/video_capture_buffer_pool_impl.cc
index b919c7f64a7..2ffaea80b55 100644
--- a/chromium/media/capture/video/video_capture_buffer_pool_impl.cc
+++ b/chromium/media/capture/video/video_capture_buffer_pool_impl.cc
@@ -7,6 +7,7 @@
#include <memory>
#include "base/logging.h"
+#include "base/memory/platform_shared_memory_region.h"
#include "base/memory/ptr_util.h"
#include "build/build_config.h"
#include "media/capture/video/video_capture_buffer_handle.h"
@@ -27,30 +28,28 @@ VideoCaptureBufferPoolImpl::VideoCaptureBufferPoolImpl(
VideoCaptureBufferPoolImpl::~VideoCaptureBufferPoolImpl() = default;
-mojo::ScopedSharedBufferHandle
-VideoCaptureBufferPoolImpl::GetHandleForInterProcessTransit(int buffer_id,
- bool read_only) {
+base::UnsafeSharedMemoryRegion
+VideoCaptureBufferPoolImpl::DuplicateAsUnsafeRegion(int buffer_id) {
base::AutoLock lock(lock_);
VideoCaptureBufferTracker* tracker = GetTracker(buffer_id);
if (!tracker) {
NOTREACHED() << "Invalid buffer_id.";
- return mojo::ScopedSharedBufferHandle();
+ return {};
}
- return tracker->GetHandleForTransit(read_only);
+ return tracker->DuplicateAsUnsafeRegion();
}
-base::SharedMemoryHandle
-VideoCaptureBufferPoolImpl::GetNonOwnedSharedMemoryHandleForLegacyIPC(
- int buffer_id) {
+mojo::ScopedSharedBufferHandle
+VideoCaptureBufferPoolImpl::DuplicateAsMojoBuffer(int buffer_id) {
base::AutoLock lock(lock_);
VideoCaptureBufferTracker* tracker = GetTracker(buffer_id);
if (!tracker) {
NOTREACHED() << "Invalid buffer_id.";
- return base::SharedMemoryHandle();
+ return mojo::ScopedSharedBufferHandle();
}
- return tracker->GetNonOwnedSharedMemoryHandleForLegacyIPC();
+ return tracker->DuplicateAsMojoBuffer();
}
mojom::SharedMemoryViaRawFileDescriptorPtr
@@ -67,11 +66,17 @@ VideoCaptureBufferPoolImpl::CreateSharedMemoryViaRawFileDescriptorStruct(
return 0u;
}
+ // Convert the mojo::ScopedSharedBufferHandle to a PlatformSharedMemoryRegion
+ // in order to extract the platform file descriptor.
+ base::subtle::PlatformSharedMemoryRegion platform_region =
+ mojo::UnwrapPlatformSharedMemoryRegion(tracker->DuplicateAsMojoBuffer());
+ if (!platform_region.IsValid()) {
+ NOTREACHED();
+ return 0u;
+ }
+ base::subtle::ScopedFDPair fds = platform_region.PassPlatformHandle();
auto result = mojom::SharedMemoryViaRawFileDescriptor::New();
- result->file_descriptor_handle = mojo::WrapPlatformFile(
- base::SharedMemory::DuplicateHandle(
- tracker->GetNonOwnedSharedMemoryHandleForLegacyIPC())
- .GetHandle());
+ result->file_descriptor_handle = mojo::WrapPlatformFile(fds.fd.release());
result->shared_memory_size_in_bytes = tracker->GetMemorySizeInBytes();
return result;
#else
@@ -93,7 +98,6 @@ VideoCaptureBufferPoolImpl::GetHandleForInProcessAccess(int buffer_id) {
return tracker->GetMemoryMappedAccess();
}
-#if defined(OS_CHROMEOS)
gfx::GpuMemoryBufferHandle VideoCaptureBufferPoolImpl::GetGpuMemoryBufferHandle(
int buffer_id) {
base::AutoLock lock(lock_);
@@ -106,7 +110,6 @@ gfx::GpuMemoryBufferHandle VideoCaptureBufferPoolImpl::GetGpuMemoryBufferHandle(
return tracker->GetGpuMemoryBufferHandle();
}
-#endif
VideoCaptureDevice::Client::ReserveResult
VideoCaptureBufferPoolImpl::ReserveForProducer(
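CreateSharedMemoryViaRawFileDescriptorStruct() now reaches the platform file descriptor by round-tripping through a mojo buffer and base::subtle::PlatformSharedMemoryRegion. A standalone, POSIX-only sketch of that conversion (the helper name is hypothetical; the base/mojo calls match the ones used above):

#include <utility>

#include "base/memory/platform_shared_memory_region.h"
#include "base/memory/unsafe_shared_memory_region.h"
#include "mojo/public/cpp/system/platform_handle.h"

// Hypothetical POSIX-only helper: returns the writable fd behind |region|,
// or -1 on failure.
int ExampleExtractFd(base::UnsafeSharedMemoryRegion region) {
  // Wrap a duplicate into a mojo buffer, as DuplicateAsMojoBuffer() does...
  mojo::ScopedSharedBufferHandle buffer =
      mojo::WrapUnsafeSharedMemoryRegion(region.Duplicate());
  // ...then unwrap it back into a platform region to reach the handle.
  base::subtle::PlatformSharedMemoryRegion platform_region =
      mojo::UnwrapPlatformSharedMemoryRegion(std::move(buffer));
  if (!platform_region.IsValid())
    return -1;
  // On POSIX, PassPlatformHandle() yields a ScopedFDPair; |fd| is the
  // read/write descriptor that the raw-file-descriptor struct carries.
  base::subtle::ScopedFDPair fds = platform_region.PassPlatformHandle();
  return fds.fd.release();
}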
diff --git a/chromium/media/capture/video/video_capture_buffer_pool_impl.h b/chromium/media/capture/video/video_capture_buffer_pool_impl.h
index 492fa090ec8..e56f86ce579 100644
--- a/chromium/media/capture/video/video_capture_buffer_pool_impl.h
+++ b/chromium/media/capture/video/video_capture_buffer_pool_impl.h
@@ -35,18 +35,14 @@ class CAPTURE_EXPORT VideoCaptureBufferPoolImpl
int count);
// VideoCaptureBufferPool implementation.
- mojo::ScopedSharedBufferHandle GetHandleForInterProcessTransit(
- int buffer_id,
- bool read_only) override;
- base::SharedMemoryHandle GetNonOwnedSharedMemoryHandleForLegacyIPC(
+ base::UnsafeSharedMemoryRegion DuplicateAsUnsafeRegion(
int buffer_id) override;
+ mojo::ScopedSharedBufferHandle DuplicateAsMojoBuffer(int buffer_id) override;
mojom::SharedMemoryViaRawFileDescriptorPtr
CreateSharedMemoryViaRawFileDescriptorStruct(int buffer_id) override;
std::unique_ptr<VideoCaptureBufferHandle> GetHandleForInProcessAccess(
int buffer_id) override;
-#if defined(OS_CHROMEOS)
gfx::GpuMemoryBufferHandle GetGpuMemoryBufferHandle(int buffer_id) override;
-#endif
VideoCaptureDevice::Client::ReserveResult ReserveForProducer(
const gfx::Size& dimensions,
VideoPixelFormat format,
diff --git a/chromium/media/capture/video/video_capture_buffer_tracker.h b/chromium/media/capture/video/video_capture_buffer_tracker.h
index 3e7ee880908..19811b064b5 100644
--- a/chromium/media/capture/video/video_capture_buffer_tracker.h
+++ b/chromium/media/capture/video/video_capture_buffer_tracker.h
@@ -7,17 +7,16 @@
#include <memory>
+#include "base/memory/unsafe_shared_memory_region.h"
#include "base/synchronization/lock.h"
#include "media/capture/mojom/video_capture_types.mojom.h"
#include "media/capture/video/video_capture_buffer_handle.h"
#include "media/capture/video_capture_types.h"
#include "mojo/public/cpp/system/buffer.h"
-#if defined(OS_CHROMEOS)
namespace gfx {
struct GpuMemoryBufferHandle;
}
-#endif
namespace media {
@@ -47,13 +46,10 @@ class CAPTURE_EXPORT VideoCaptureBufferTracker {
virtual uint32_t GetMemorySizeInBytes() = 0;
virtual std::unique_ptr<VideoCaptureBufferHandle> GetMemoryMappedAccess() = 0;
- virtual mojo::ScopedSharedBufferHandle GetHandleForTransit(
- bool read_only) = 0;
- virtual base::SharedMemoryHandle
- GetNonOwnedSharedMemoryHandleForLegacyIPC() = 0;
-#if defined(OS_CHROMEOS)
+
+ virtual base::UnsafeSharedMemoryRegion DuplicateAsUnsafeRegion() = 0;
+ virtual mojo::ScopedSharedBufferHandle DuplicateAsMojoBuffer() = 0;
virtual gfx::GpuMemoryBufferHandle GetGpuMemoryBufferHandle() = 0;
-#endif
private:
// Indicates whether this VideoCaptureBufferTracker is currently referenced by
diff --git a/chromium/media/capture/video/video_capture_device.h b/chromium/media/capture/video/video_capture_device.h
index e481fa64a91..2f0540bbd73 100644
--- a/chromium/media/capture/video/video_capture_device.h
+++ b/chromium/media/capture/video/video_capture_device.h
@@ -23,6 +23,7 @@
#include "base/files/file.h"
#include "base/logging.h"
#include "base/memory/ref_counted.h"
+#include "base/memory/unsafe_shared_memory_region.h"
#include "base/single_thread_task_runner.h"
#include "base/time/time.h"
#include "build/build_config.h"
@@ -95,15 +96,19 @@ class CAPTURE_EXPORT VideoCaptureDevice
class CAPTURE_EXPORT HandleProvider {
public:
virtual ~HandleProvider() {}
- virtual mojo::ScopedSharedBufferHandle GetHandleForInterProcessTransit(
- bool read_only) = 0;
- virtual base::SharedMemoryHandle
- GetNonOwnedSharedMemoryHandleForLegacyIPC() = 0;
+
+  // Duplicate as a writable (unsafe) shared memory region.
+ virtual base::UnsafeSharedMemoryRegion DuplicateAsUnsafeRegion() = 0;
+
+ // Duplicate as a writable (unsafe) mojo buffer.
+ virtual mojo::ScopedSharedBufferHandle DuplicateAsMojoBuffer() = 0;
+
+ // Access a |VideoCaptureBufferHandle| for local, writable memory.
virtual std::unique_ptr<VideoCaptureBufferHandle>
GetHandleForInProcessAccess() = 0;
-#if defined(OS_CHROMEOS)
+
+ // Clone a |GpuMemoryBufferHandle| for IPC.
virtual gfx::GpuMemoryBufferHandle GetGpuMemoryBufferHandle() = 0;
-#endif
};
Buffer();
diff --git a/chromium/media/capture/video/video_capture_device_client.cc b/chromium/media/capture/video/video_capture_device_client.cc
index 6f0013f9beb..1f056943965 100644
--- a/chromium/media/capture/video/video_capture_device_client.cc
+++ b/chromium/media/capture/video/video_capture_device_client.cc
@@ -31,11 +31,9 @@ namespace {
bool IsFormatSupported(media::VideoPixelFormat pixel_format) {
return (pixel_format == media::PIXEL_FORMAT_I420 ||
-#if defined(OS_CHROMEOS)
- // Used by GpuMemoryBuffer on Chrome OS.
+ // NV12 and MJPEG are used by GpuMemoryBuffer on Chrome OS.
pixel_format == media::PIXEL_FORMAT_NV12 ||
pixel_format == media::PIXEL_FORMAT_MJPEG ||
-#endif
pixel_format == media::PIXEL_FORMAT_Y16);
}
@@ -128,24 +126,19 @@ class BufferPoolBufferHandleProvider
int buffer_id)
: buffer_pool_(std::move(buffer_pool)), buffer_id_(buffer_id) {}
- // Implementation of HandleProvider:
- mojo::ScopedSharedBufferHandle GetHandleForInterProcessTransit(
- bool read_only) override {
- return buffer_pool_->GetHandleForInterProcessTransit(buffer_id_, read_only);
+ base::UnsafeSharedMemoryRegion DuplicateAsUnsafeRegion() override {
+ return buffer_pool_->DuplicateAsUnsafeRegion(buffer_id_);
}
- base::SharedMemoryHandle GetNonOwnedSharedMemoryHandleForLegacyIPC()
- override {
- return buffer_pool_->GetNonOwnedSharedMemoryHandleForLegacyIPC(buffer_id_);
+ mojo::ScopedSharedBufferHandle DuplicateAsMojoBuffer() override {
+ return buffer_pool_->DuplicateAsMojoBuffer(buffer_id_);
+ }
+ gfx::GpuMemoryBufferHandle GetGpuMemoryBufferHandle() override {
+ return buffer_pool_->GetGpuMemoryBufferHandle(buffer_id_);
}
std::unique_ptr<VideoCaptureBufferHandle> GetHandleForInProcessAccess()
override {
return buffer_pool_->GetHandleForInProcessAccess(buffer_id_);
}
-#if defined(OS_CHROMEOS)
- gfx::GpuMemoryBufferHandle GetGpuMemoryBufferHandle() override {
- return buffer_pool_->GetGpuMemoryBufferHandle(buffer_id_);
- }
-#endif
private:
const scoped_refptr<VideoCaptureBufferPool> buffer_pool_;
@@ -490,8 +483,7 @@ VideoCaptureDeviceClient::ReserveOutputBuffer(const gfx::Size& frame_size,
switch (target_buffer_type_) {
case VideoCaptureBufferType::kSharedMemory:
buffer_handle->set_shared_buffer_handle(
- buffer_pool_->GetHandleForInterProcessTransit(buffer_id,
- true /*read_only*/));
+ buffer_pool_->DuplicateAsMojoBuffer(buffer_id));
break;
case VideoCaptureBufferType::kSharedMemoryViaRawFileDescriptor:
buffer_handle->set_shared_memory_via_raw_file_descriptor(
@@ -501,12 +493,10 @@ VideoCaptureDeviceClient::ReserveOutputBuffer(const gfx::Size& frame_size,
case VideoCaptureBufferType::kMailboxHolder:
NOTREACHED();
break;
-#if defined(OS_CHROMEOS)
case VideoCaptureBufferType::kGpuMemoryBuffer:
buffer_handle->set_gpu_memory_buffer_handle(
buffer_pool_->GetGpuMemoryBufferHandle(buffer_id));
break;
-#endif
}
receiver_->OnNewBuffer(buffer_id, std::move(buffer_handle));
buffer_ids_known_by_receiver_.push_back(buffer_id);
diff --git a/chromium/media/capture/video/video_capture_device_factory.cc b/chromium/media/capture/video/video_capture_device_factory.cc
index 4d903b6a714..0170051832d 100644
--- a/chromium/media/capture/video/video_capture_device_factory.cc
+++ b/chromium/media/capture/video/video_capture_device_factory.cc
@@ -26,4 +26,10 @@ void VideoCaptureDeviceFactory::GetCameraLocationsAsync(
NOTIMPLEMENTED();
}
+#if defined(OS_CHROMEOS)
+bool VideoCaptureDeviceFactory::IsSupportedCameraAppDeviceBridge() {
+ return false;
+}
+#endif // defined(OS_CHROMEOS)
+
} // namespace media
diff --git a/chromium/media/capture/video/video_capture_device_factory.h b/chromium/media/capture/video/video_capture_device_factory.h
index 57e8c87cb87..c1e17c7bc90 100644
--- a/chromium/media/capture/video/video_capture_device_factory.h
+++ b/chromium/media/capture/video/video_capture_device_factory.h
@@ -59,6 +59,10 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactory {
std::unique_ptr<VideoCaptureDeviceDescriptors> device_descriptors,
DeviceDescriptorsCallback result_callback);
+#if defined(OS_CHROMEOS)
+ virtual bool IsSupportedCameraAppDeviceBridge();
+#endif // defined(OS_CHROMEOS)
+
protected:
base::ThreadChecker thread_checker_;
diff --git a/chromium/media/capture/video/video_capture_device_unittest.cc b/chromium/media/capture/video/video_capture_device_unittest.cc
index 72a28aa974e..a84bb11fe66 100644
--- a/chromium/media/capture/video/video_capture_device_unittest.cc
+++ b/chromium/media/capture/video/video_capture_device_unittest.cc
@@ -15,7 +15,7 @@
#include "base/memory/ref_counted.h"
#include "base/run_loop.h"
#include "base/single_thread_task_runner.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/test/test_timeouts.h"
#include "base/threading/thread.h"
#include "base/threading/thread_task_runner_handle.h"
@@ -250,8 +250,7 @@ class VideoCaptureDeviceTest
#if defined(OS_MACOSX)
// Video capture code on MacOSX must run on a CFRunLoop enabled thread
// for interaction with AVFoundation.
- scoped_task_environment_(
- base::test::ScopedTaskEnvironment::MainThreadType::UI),
+ task_environment_(base::test::TaskEnvironment::MainThreadType::UI),
#endif
device_descriptors_(new VideoCaptureDeviceDescriptors()),
main_thread_task_runner_(base::ThreadTaskRunnerHandle::Get()),
@@ -462,7 +461,7 @@ class VideoCaptureDeviceTest
#if defined(OS_WIN)
base::win::ScopedCOMInitializer initialize_com_;
#endif
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
std::unique_ptr<VideoCaptureDeviceDescriptors> device_descriptors_;
std::unique_ptr<base::RunLoop> run_loop_;
scoped_refptr<base::TaskRunner> main_thread_task_runner_;
diff --git a/chromium/media/capture/video/video_capture_system.h b/chromium/media/capture/video/video_capture_system.h
index c4cdaaf3c5b..016b8e2e517 100644
--- a/chromium/media/capture/video/video_capture_system.h
+++ b/chromium/media/capture/video/video_capture_system.h
@@ -8,10 +8,6 @@
#include "media/capture/video/video_capture_device_factory.h"
#include "media/capture/video/video_capture_device_info.h"
-#if defined(OS_CHROMEOS)
-#include "media/capture/video/chromeos/mojo/cros_image_capture.mojom.h"
-#endif // defined(OS_CHROMEOS)
-
namespace media {
// GetDeviceInfosAsync() should be called at least once before calling
@@ -33,12 +29,6 @@ class CAPTURE_EXPORT VideoCaptureSystem {
// wrong.
virtual std::unique_ptr<VideoCaptureDevice> CreateDevice(
const std::string& device_id) = 0;
-
-#if defined(OS_CHROMEOS)
- // Pass the mojo request to bind with DeviceFactory for Chrome OS.
- virtual void BindCrosImageCaptureRequest(
- cros::mojom::CrosImageCaptureRequest request) = 0;
-#endif // defined(OS_CHROMEOS)
};
} // namespace media
diff --git a/chromium/media/capture/video/video_capture_system_impl.cc b/chromium/media/capture/video/video_capture_system_impl.cc
index a0e69a9b7ed..aef3390a767 100644
--- a/chromium/media/capture/video/video_capture_system_impl.cc
+++ b/chromium/media/capture/video/video_capture_system_impl.cc
@@ -11,13 +11,6 @@
#include "build/build_config.h"
#include "media/base/bind_to_current_loop.h"
-#if defined(OS_CHROMEOS)
-#include "base/command_line.h"
-#include "media/base/media_switches.h"
-#include "media/capture/video/chromeos/public/cros_features.h"
-#include "media/capture/video/chromeos/video_capture_device_factory_chromeos.h"
-#endif // defined(OS_CHROMEOS)
-
namespace {
// Compares two VideoCaptureFormat by checking smallest frame_size area, then
@@ -165,18 +158,4 @@ void VideoCaptureSystemImpl::DeviceInfosReady(
ProcessDeviceInfoRequest();
}
-#if defined(OS_CHROMEOS)
-void VideoCaptureSystemImpl::BindCrosImageCaptureRequest(
- cros::mojom::CrosImageCaptureRequest request) {
- CHECK(factory_);
-
- if (media::ShouldUseCrosCameraService() &&
- !base::CommandLine::ForCurrentProcess()->HasSwitch(
- switches::kUseFakeDeviceForMediaStream)) {
- static_cast<VideoCaptureDeviceFactoryChromeOS*>(factory_.get())
- ->BindCrosImageCaptureRequest(std::move(request));
- }
-}
-#endif // defined(OS_CHROMEOS)
-
} // namespace media
diff --git a/chromium/media/capture/video/video_capture_system_impl.h b/chromium/media/capture/video/video_capture_system_impl.h
index 0cbd5c88030..581979ae5a6 100644
--- a/chromium/media/capture/video/video_capture_system_impl.h
+++ b/chromium/media/capture/video/video_capture_system_impl.h
@@ -7,10 +7,6 @@
#include "media/capture/video/video_capture_system.h"
-#if defined(OS_CHROMEOS)
-#include "media/capture/video/chromeos/mojo/cros_image_capture.mojom.h"
-#endif // defined(OS_CHROMEOS)
-
namespace media {
// Layer on top of VideoCaptureDeviceFactory that translates device descriptors
@@ -26,11 +22,6 @@ class CAPTURE_EXPORT VideoCaptureSystemImpl : public VideoCaptureSystem {
std::unique_ptr<VideoCaptureDevice> CreateDevice(
const std::string& device_id) override;
-#if defined(OS_CHROMEOS)
- void BindCrosImageCaptureRequest(
- cros::mojom::CrosImageCaptureRequest request) override;
-#endif // defined(OS_CHROMEOS)
-
private:
using DeviceEnumQueue = std::list<DeviceInfoCallback>;
diff --git a/chromium/media/capture/video/win/video_capture_device_factory_win.cc b/chromium/media/capture/video/win/video_capture_device_factory_win.cc
index ec43c94c317..e843e15f074 100644
--- a/chromium/media/capture/video/win/video_capture_device_factory_win.cc
+++ b/chromium/media/capture/video/win/video_capture_device_factory_win.cc
@@ -92,7 +92,7 @@ const char* const kModelIdsBlacklistedForMediaFoundation[] = {
// also https://crbug.com/924528
"04ca:7047", "04ca:7048",
// HP Elitebook 840 G1
- "04f2:b3ed", "04f2:b3ca", "05c8:035d",
+ "04f2:b3ed", "04f2:b3ca", "05c8:035d", "05c8:0369",
// RBG/IR camera for Windows Hello Face Auth. See https://crbug.com/984864.
"13d3:5257"};
@@ -360,8 +360,7 @@ bool VideoCaptureDeviceFactoryWin::PlatformSupportsMediaFoundation() {
VideoCaptureDeviceFactoryWin::VideoCaptureDeviceFactoryWin()
: use_media_foundation_(
base::FeatureList::IsEnabled(media::kMediaFoundationVideoCapture)),
- com_thread_("Windows Video Capture COM Thread"),
- weak_ptr_factory_(this) {
+ com_thread_("Windows Video Capture COM Thread") {
mf_enum_device_sources_func_ =
PlatformSupportsMediaFoundation() ? MFEnumDeviceSources : nullptr;
direct_show_enum_devices_func_ =
diff --git a/chromium/media/capture/video/win/video_capture_device_factory_win.h b/chromium/media/capture/video/win/video_capture_device_factory_win.h
index 3248a0cf37d..5d2cf9cf571 100644
--- a/chromium/media/capture/video/win/video_capture_device_factory_win.h
+++ b/chromium/media/capture/video/win/video_capture_device_factory_win.h
@@ -107,7 +107,7 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactoryWin
base::Thread com_thread_;
scoped_refptr<base::SingleThreadTaskRunner> origin_task_runner_;
std::unordered_set<IAsyncOperation<DeviceInformationCollection*>*> async_ops_;
- base::WeakPtrFactory<VideoCaptureDeviceFactoryWin> weak_ptr_factory_;
+ base::WeakPtrFactory<VideoCaptureDeviceFactoryWin> weak_ptr_factory_{this};
DISALLOW_COPY_AND_ASSIGN(VideoCaptureDeviceFactoryWin);
};
diff --git a/chromium/media/capture/video_capture_types.h b/chromium/media/capture/video_capture_types.h
index 7610db4f52b..98fca4a9bd0 100644
--- a/chromium/media/capture/video_capture_types.h
+++ b/chromium/media/capture/video_capture_types.h
@@ -9,6 +9,7 @@
#include <vector>
+#include "base/unguessable_token.h"
#include "build/build_config.h"
#include "media/base/video_types.h"
#include "media/capture/capture_export.h"
@@ -18,7 +19,7 @@ namespace media {
// TODO(wjia): this type should be defined in a common place and
// shared with device manager.
-typedef int VideoCaptureSessionId;
+using VideoCaptureSessionId = base::UnguessableToken;
// Policies for capture devices that have source content that varies in size.
// It is up to the implementation how the captured content will be transformed
@@ -58,9 +59,7 @@ enum class VideoCaptureBufferType {
kSharedMemory,
kSharedMemoryViaRawFileDescriptor,
kMailboxHolder,
-#if defined(OS_CHROMEOS)
kGpuMemoryBuffer
-#endif
};
// WARNING: Do not change the values assigned to the entries. They are used for
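With VideoCaptureSessionId now an alias for base::UnguessableToken, call sites compare and log tokens instead of plain ints. A small sketch of how the new type behaves; the function is illustrative only, and the idea that an empty token stands in for "no session" is an assumption, not stated by this patch:

#include <string>

#include "base/unguessable_token.h"

using VideoCaptureSessionId = base::UnguessableToken;

// Illustrative only; not part of this patch.
void ExampleSessionIdUsage() {
  // A default-constructed token is empty; real ids come from Create().
  VideoCaptureSessionId no_session;
  VideoCaptureSessionId session = base::UnguessableToken::Create();
  if (!session.is_empty() && session != no_session) {
    // ToString() is the supported way to render a token for logging.
    const std::string for_logging = session.ToString();
    (void)for_logging;
  }
}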
diff --git a/chromium/media/cast/cast_config.h b/chromium/media/cast/cast_config.h
index 0e32a8f8e72..f01e9a1d2b6 100644
--- a/chromium/media/cast/cast_config.h
+++ b/chromium/media/cast/cast_config.h
@@ -12,7 +12,7 @@
#include "base/callback.h"
#include "base/memory/ref_counted.h"
-#include "base/memory/shared_memory.h"
+#include "base/memory/unsafe_shared_memory_region.h"
#include "base/single_thread_task_runner.h"
#include "base/time/time.h"
@@ -251,7 +251,7 @@ typedef base::Callback<void(scoped_refptr<base::SingleThreadTaskRunner>,
ReceiveVideoEncodeAcceleratorCallback;
typedef base::Callback<void(const ReceiveVideoEncodeAcceleratorCallback&)>
CreateVideoEncodeAcceleratorCallback;
-typedef base::Callback<void(std::unique_ptr<base::SharedMemory>)>
+typedef base::Callback<void(base::UnsafeSharedMemoryRegion)>
ReceiveVideoEncodeMemoryCallback;
typedef base::Callback<void(size_t size,
const ReceiveVideoEncodeMemoryCallback&)>
diff --git a/chromium/media/cast/net/udp_packet_pipe_unittest.cc b/chromium/media/cast/net/udp_packet_pipe_unittest.cc
index 1bdae8a6632..b5a209a4030 100644
--- a/chromium/media/cast/net/udp_packet_pipe_unittest.cc
+++ b/chromium/media/cast/net/udp_packet_pipe_unittest.cc
@@ -12,7 +12,7 @@
#include "base/containers/circular_deque.h"
#include "base/macros.h"
#include "base/test/mock_callback.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace media {
@@ -39,7 +39,7 @@ class UdpPacketPipeTest : public ::testing::Test {
}
protected:
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
std::unique_ptr<UdpPacketPipeWriter> writer_;
std::unique_ptr<UdpPacketPipeReader> reader_;
base::circular_deque<std::unique_ptr<Packet>> packets_read_;
@@ -59,7 +59,7 @@ TEST_F(UdpPacketPipeTest, Normal) {
EXPECT_CALL(done_callback, Run()).Times(1);
writer_->Write(new base::RefCountedData<Packet>(packet1),
done_callback.Get());
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
// |packet2| can not be completely written in the data pipe due to capacity
// limit.
@@ -67,7 +67,7 @@ TEST_F(UdpPacketPipeTest, Normal) {
EXPECT_CALL(done_callback2, Run()).Times(0);
writer_->Write(new base::RefCountedData<Packet>(packet2),
done_callback2.Get());
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
testing::Mock::VerifyAndClearExpectations(&done_callback2);
EXPECT_TRUE(packets_read_.empty());
@@ -75,7 +75,7 @@ TEST_F(UdpPacketPipeTest, Normal) {
EXPECT_CALL(done_callback2, Run()).Times(1);
reader_->Read(
base::BindOnce(&UdpPacketPipeTest::OnPacketRead, base::Unretained(this)));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_EQ(1u, packets_read_.size());
EXPECT_EQ(0, std::memcmp(packet1.data(), packets_read_.front()->data(),
packet1.size()));
@@ -84,7 +84,7 @@ TEST_F(UdpPacketPipeTest, Normal) {
// Reads |packet2| from the pipe.
reader_->Read(
base::BindOnce(&UdpPacketPipeTest::OnPacketRead, base::Unretained(this)));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_EQ(1u, packets_read_.size());
EXPECT_EQ(0, std::memcmp(packet2.data(), packets_read_.front()->data(),
packet2.size()));
diff --git a/chromium/media/cast/net/udp_transport_unittest.cc b/chromium/media/cast/net/udp_transport_unittest.cc
index 50775a44aef..54c2a98b1d6 100644
--- a/chromium/media/cast/net/udp_transport_unittest.cc
+++ b/chromium/media/cast/net/udp_transport_unittest.cc
@@ -13,7 +13,7 @@
#include "base/macros.h"
#include "base/run_loop.h"
#include "base/test/mock_callback.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/cast/net/cast_transport_config.h"
#include "media/cast/net/udp_packet_pipe.h"
#include "media/cast/test/utility/net_utility.h"
@@ -71,18 +71,17 @@ static void UpdateCastTransportStatus(CastTransportStatus status) {
class UdpTransportImplTest : public ::testing::Test {
public:
UdpTransportImplTest()
- : scoped_task_environment_(
- base::test::ScopedTaskEnvironment::MainThreadType::IO) {
+ : task_environment_(base::test::TaskEnvironment::MainThreadType::IO) {
net::IPEndPoint free_local_port1 = test::GetFreeLocalPort();
net::IPEndPoint free_local_port2 = test::GetFreeLocalPort();
send_transport_ = std::make_unique<UdpTransportImpl>(
- scoped_task_environment_.GetMainThreadTaskRunner(), free_local_port1,
+ task_environment_.GetMainThreadTaskRunner(), free_local_port1,
free_local_port2, base::BindRepeating(&UpdateCastTransportStatus));
send_transport_->SetSendBufferSize(65536);
recv_transport_ = std::make_unique<UdpTransportImpl>(
- scoped_task_environment_.GetMainThreadTaskRunner(), free_local_port2,
+ task_environment_.GetMainThreadTaskRunner(), free_local_port2,
free_local_port1, base::BindRepeating(&UpdateCastTransportStatus));
recv_transport_->SetSendBufferSize(65536);
}
@@ -90,7 +89,7 @@ class UdpTransportImplTest : public ::testing::Test {
~UdpTransportImplTest() override = default;
protected:
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
std::unique_ptr<UdpTransportImpl> send_transport_;
diff --git a/chromium/media/cast/sender/external_video_encoder.cc b/chromium/media/cast/sender/external_video_encoder.cc
index 3c72dd5f3c8..d3f644e142d 100644
--- a/chromium/media/cast/sender/external_video_encoder.cc
+++ b/chromium/media/cast/sender/external_video_encoder.cc
@@ -10,7 +10,8 @@
#include "base/bind.h"
#include "base/command_line.h"
#include "base/logging.h"
-#include "base/memory/shared_memory.h"
+#include "base/memory/shared_memory_mapping.h"
+#include "base/memory/unsafe_shared_memory_region.h"
#include "base/metrics/histogram_macros.h"
#include "base/stl_util.h"
#include "base/strings/string_number_conversions.h"
@@ -191,32 +192,39 @@ class ExternalVideoEncoder::VEAClientImpl
// Copy the |video_frame| into the input buffer provided by the VEA
// implementation, and with the exact row stride required. Note that, even
// if |video_frame|'s stride matches VEA's requirement, |video_frame|'s
- // memory backing (heap, base::ReadOnlySharedMemoryRegion, etc.) could be
- // something VEA can't handle (as of this writing, it uses the legacy shmem
- // API). http://crbug.com/888153
+ // memory backing (heap, base::UnsafeSharedMemoryRegion, etc.) could be
+ // something VEA can't handle (as of this writing, it expects an unsafe
+ // region).
//
// TODO(crbug.com/888829): Revisit whether we can remove this memcpy, if VEA
// can accept other "memory backing" methods.
- const int index = free_input_buffer_index_.back();
- base::SharedMemory* const input_buffer = input_buffers_[index].get();
- scoped_refptr<media::VideoFrame> frame =
- VideoFrame::WrapExternalSharedMemory(
- video_frame->format(), frame_coded_size_,
- video_frame->visible_rect(), video_frame->visible_rect().size(),
- static_cast<uint8_t*>(input_buffer->memory()),
- input_buffer->mapped_size(), input_buffer->handle(), 0,
- video_frame->timestamp());
- if (!frame || !media::I420CopyWithPadding(*video_frame, frame.get())) {
- LOG(DFATAL) << "Error: ExternalVideoEncoder: copy failed.";
- AbortLatestEncodeAttemptDueToErrors();
- return;
- }
-
- frame->AddDestructionObserver(media::BindToCurrentLoop(base::Bind(
- &ExternalVideoEncoder::VEAClientImpl::ReturnInputBufferToPool, this,
- index)));
- free_input_buffer_index_.pop_back();
+ scoped_refptr<media::VideoFrame> frame = video_frame;
+ if (video_frame->coded_size() != frame_coded_size_ ||
+ video_frame->storage_type() !=
+ media::VideoFrame::StorageType::STORAGE_SHMEM) {
+ const int index = free_input_buffer_index_.back();
+ std::pair<base::UnsafeSharedMemoryRegion,
+ base::WritableSharedMemoryMapping>* input_buffer =
+ input_buffers_[index].get();
+ DCHECK(input_buffer->first.IsValid());
+ DCHECK(input_buffer->second.IsValid());
+ frame = VideoFrame::WrapExternalData(
+ video_frame->format(), frame_coded_size_, video_frame->visible_rect(),
+ video_frame->visible_rect().size(),
+ input_buffer->second.GetMemoryAsSpan<uint8_t>().data(),
+ input_buffer->second.size(), video_frame->timestamp());
+ if (!frame || !media::I420CopyWithPadding(*video_frame, frame.get())) {
+ LOG(DFATAL) << "Error: ExternalVideoEncoder: copy failed.";
+ AbortLatestEncodeAttemptDueToErrors();
+ return;
+ }
+ frame->BackWithSharedMemory(&input_buffer->first);
+ frame->AddDestructionObserver(media::BindToCurrentLoop(base::Bind(
+ &ExternalVideoEncoder::VEAClientImpl::ReturnInputBufferToPool, this,
+ index)));
+ free_input_buffer_index_.pop_back();
+ }
// BitstreamBufferReady will be called once the encoder is done.
video_encode_accelerator_->Encode(std::move(frame), key_frame_requested);
}
@@ -270,9 +278,11 @@ class ExternalVideoEncoder::VEAClientImpl
NotifyError(media::VideoEncodeAccelerator::kPlatformFailureError);
return;
}
- base::SharedMemory* output_buffer =
- output_buffers_[bitstream_buffer_id].get();
- if (metadata.payload_size_bytes > output_buffer->mapped_size()) {
+ const char* output_buffer_memory = output_buffers_[bitstream_buffer_id]
+ .second.GetMemoryAsSpan<char>()
+ .data();
+ if (metadata.payload_size_bytes >
+ output_buffers_[bitstream_buffer_id].second.size()) {
NOTREACHED();
VLOG(1) << "BitstreamBufferReady(): invalid payload_size = "
<< metadata.payload_size_bytes;
@@ -288,8 +298,7 @@ class ExternalVideoEncoder::VEAClientImpl
//
// TODO(miu): Should |stream_header_| be an std::ostringstream for
// performance reasons?
- stream_header_.append(static_cast<const char*>(output_buffer->memory()),
- metadata.payload_size_bytes);
+ stream_header_.append(output_buffer_memory, metadata.payload_size_bytes);
} else if (!in_progress_frame_encodes_.empty()) {
const InProgressExternalVideoFrameEncode& request =
in_progress_frame_encodes_.front();
@@ -310,9 +319,8 @@ class ExternalVideoEncoder::VEAClientImpl
encoded_frame->data = stream_header_;
stream_header_.clear();
}
- encoded_frame->data.append(
- static_cast<const char*>(output_buffer->memory()),
- metadata.payload_size_bytes);
+ encoded_frame->data.append(output_buffer_memory,
+ metadata.payload_size_bytes);
DCHECK(!encoded_frame->data.empty()) << "BUG: Encoder must provide data.";
// If FRAME_DURATION metadata was provided in the source VideoFrame,
@@ -408,9 +416,8 @@ class ExternalVideoEncoder::VEAClientImpl
video_encode_accelerator_->UseOutputBitstreamBuffer(
media::BitstreamBuffer(
bitstream_buffer_id,
- output_buffers_[bitstream_buffer_id]->handle(),
- false /* read_only */,
- output_buffers_[bitstream_buffer_id]->mapped_size()));
+ output_buffers_[bitstream_buffer_id].first.Duplicate(),
+ output_buffers_[bitstream_buffer_id].first.GetSize()));
}
}
@@ -430,22 +437,25 @@ class ExternalVideoEncoder::VEAClientImpl
}
// Note: This method can be called on any thread.
- void OnCreateSharedMemory(std::unique_ptr<base::SharedMemory> memory) {
+ void OnCreateSharedMemory(base::UnsafeSharedMemoryRegion memory) {
task_runner_->PostTask(
FROM_HERE, base::BindOnce(&VEAClientImpl::OnReceivedSharedMemory, this,
std::move(memory)));
}
- void OnCreateInputSharedMemory(std::unique_ptr<base::SharedMemory> memory) {
+ void OnCreateInputSharedMemory(base::UnsafeSharedMemoryRegion memory) {
task_runner_->PostTask(
FROM_HERE, base::BindOnce(&VEAClientImpl::OnReceivedInputSharedMemory,
this, std::move(memory)));
}
- void OnReceivedSharedMemory(std::unique_ptr<base::SharedMemory> memory) {
+ void OnReceivedSharedMemory(base::UnsafeSharedMemoryRegion memory) {
DCHECK(task_runner_->RunsTasksInCurrentSequence());
- output_buffers_.push_back(std::move(memory));
+ base::WritableSharedMemoryMapping mapping = memory.Map();
+ DCHECK(mapping.IsValid());
+ output_buffers_.push_back(
+ std::make_pair(std::move(memory), std::move(mapping)));
// Wait until all requested buffers are received.
if (output_buffers_.size() < kOutputBufferCount)
@@ -454,17 +464,22 @@ class ExternalVideoEncoder::VEAClientImpl
// Immediately provide all output buffers to the VEA.
for (size_t i = 0; i < output_buffers_.size(); ++i) {
video_encode_accelerator_->UseOutputBitstreamBuffer(
- media::BitstreamBuffer(
- static_cast<int32_t>(i), output_buffers_[i]->handle(),
- false /* read_only */, output_buffers_[i]->mapped_size()));
+ media::BitstreamBuffer(static_cast<int32_t>(i),
+ output_buffers_[i].first.Duplicate(),
+ output_buffers_[i].first.GetSize()));
}
}
- void OnReceivedInputSharedMemory(std::unique_ptr<base::SharedMemory> memory) {
+ void OnReceivedInputSharedMemory(base::UnsafeSharedMemoryRegion region) {
DCHECK(task_runner_->RunsTasksInCurrentSequence());
- if (memory.get()) {
- input_buffers_.push_back(std::move(memory));
+ if (region.IsValid()) {
+ base::WritableSharedMemoryMapping mapping = region.Map();
+ DCHECK(mapping.IsValid());
+ input_buffers_.push_back(
+ std::make_unique<std::pair<base::UnsafeSharedMemoryRegion,
+ base::WritableSharedMemoryMapping>>(
+ std::move(region), std::move(mapping)));
free_input_buffer_index_.push_back(input_buffers_.size() - 1);
}
allocate_input_buffer_in_progress_ = false;
@@ -559,13 +574,19 @@ class ExternalVideoEncoder::VEAClientImpl
H264Parser h264_parser_;
// Shared memory buffers for output with the VideoAccelerator.
- std::vector<std::unique_ptr<base::SharedMemory>> output_buffers_;
+ std::vector<std::pair<base::UnsafeSharedMemoryRegion,
+ base::WritableSharedMemoryMapping>>
+ output_buffers_;
// Shared memory buffers for input video frames with the VideoAccelerator.
// These buffers will be allocated only when copy is needed to match the
// required coded size for encoder. They are allocated on-demand, up to
- // |max_allowed_input_buffers_|.
- std::vector<std::unique_ptr<base::SharedMemory>> input_buffers_;
+ // |max_allowed_input_buffers_|. A VideoFrame wrapping the region will point
+ // to it, so std::unique_ptr is used to ensure the region has a stable address
+ // even if the vector grows or shrinks.
+ std::vector<std::unique_ptr<std::pair<base::UnsafeSharedMemoryRegion,
+ base::WritableSharedMemoryMapping>>>
+ input_buffers_;
// Available input buffer index. These buffers are used in FILO order.
std::vector<int> free_input_buffer_index_;
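The copy path above pairs VideoFrame::WrapExternalData() with BackWithSharedMemory() on an unsafe region/mapping pair. A condensed sketch of that pattern under the same assumptions (the helper name is hypothetical; the pair must outlive the returned frame):

#include <stdint.h>

#include <utility>

#include "base/memory/shared_memory_mapping.h"
#include "base/memory/unsafe_shared_memory_region.h"
#include "base/time/time.h"
#include "media/base/video_frame.h"
#include "ui/gfx/geometry/rect.h"
#include "ui/gfx/geometry/size.h"

// Hypothetical helper: wrap a pre-mapped unsafe region as a writable I420
// frame, the same shape as the copy path in VEAClientImpl above.
scoped_refptr<media::VideoFrame> ExampleWrapInputBuffer(
    std::pair<base::UnsafeSharedMemoryRegion,
              base::WritableSharedMemoryMapping>* input_buffer,
    const gfx::Size& coded_size,
    base::TimeDelta timestamp) {
  scoped_refptr<media::VideoFrame> frame = media::VideoFrame::WrapExternalData(
      media::PIXEL_FORMAT_I420, coded_size, gfx::Rect(coded_size), coded_size,
      input_buffer->second.GetMemoryAsSpan<uint8_t>().data(),
      input_buffer->second.size(), timestamp);
  if (!frame)
    return nullptr;
  // Mark the frame as shared-memory backed so the encoder can treat the region
  // as the frame's storage; the region must keep a stable address for the
  // frame's lifetime, which is why the pair is held behind a std::unique_ptr.
  frame->BackWithSharedMemory(&input_buffer->first);
  return frame;
}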
diff --git a/chromium/media/cast/sender/fake_video_encode_accelerator_factory.cc b/chromium/media/cast/sender/fake_video_encode_accelerator_factory.cc
index c766742f81f..ed8a56f2d3e 100644
--- a/chromium/media/cast/sender/fake_video_encode_accelerator_factory.cc
+++ b/chromium/media/cast/sender/fake_video_encode_accelerator_factory.cc
@@ -17,9 +17,7 @@ FakeVideoEncodeAcceleratorFactory::FakeVideoEncodeAcceleratorFactory(
will_init_succeed_(true),
auto_respond_(false),
vea_response_count_(0),
- shm_response_count_(0),
- last_response_vea_(nullptr),
- last_response_shm_(nullptr) {}
+ shm_response_count_(0) {}
FakeVideoEncodeAcceleratorFactory::~FakeVideoEncodeAcceleratorFactory() =
default;
@@ -56,10 +54,9 @@ void FakeVideoEncodeAcceleratorFactory::CreateVideoEncodeAccelerator(
void FakeVideoEncodeAcceleratorFactory::CreateSharedMemory(
size_t size, const ReceiveVideoEncodeMemoryCallback& callback) {
DCHECK(!callback.is_null());
- DCHECK(!next_response_shm_);
+ DCHECK(!next_response_shm_.IsValid());
- next_response_shm_.reset(new base::SharedMemory());
- CHECK(next_response_shm_->CreateAndMapAnonymous(size));
+ next_response_shm_ = base::UnsafeSharedMemoryRegion::Create(size);
shm_response_callback_ = callback;
if (auto_respond_)
RespondWithSharedMemory();
@@ -67,15 +64,13 @@ void FakeVideoEncodeAcceleratorFactory::CreateSharedMemory(
void FakeVideoEncodeAcceleratorFactory::RespondWithVideoEncodeAccelerator() {
DCHECK(next_response_vea_.get());
- last_response_vea_ = next_response_vea_.get();
++vea_response_count_;
std::move(vea_response_callback_)
.Run(task_runner_, std::move(next_response_vea_));
}
void FakeVideoEncodeAcceleratorFactory::RespondWithSharedMemory() {
- DCHECK(next_response_shm_.get());
- last_response_shm_ = next_response_shm_.get();
+ DCHECK(next_response_shm_.IsValid());
++shm_response_count_;
std::move(shm_response_callback_).Run(std::move(next_response_shm_));
}
diff --git a/chromium/media/cast/sender/fake_video_encode_accelerator_factory.h b/chromium/media/cast/sender/fake_video_encode_accelerator_factory.h
index f7eee4976cf..238ec163beb 100644
--- a/chromium/media/cast/sender/fake_video_encode_accelerator_factory.h
+++ b/chromium/media/cast/sender/fake_video_encode_accelerator_factory.h
@@ -12,7 +12,7 @@
#include "base/callback.h"
#include "base/macros.h"
#include "base/memory/ref_counted.h"
-#include "base/memory/shared_memory.h"
+#include "base/memory/unsafe_shared_memory_region.h"
#include "base/single_thread_task_runner.h"
#include "media/cast/cast_config.h"
#include "media/video/fake_video_encode_accelerator.h"
@@ -35,16 +35,6 @@ class FakeVideoEncodeAcceleratorFactory {
return shm_response_count_;
}
- // These return the instance last responded. It is up to the caller to
- // determine whether the pointer is still valid, since this factory does not
- // own these objects anymore.
- media::FakeVideoEncodeAccelerator* last_response_vea() const {
- return static_cast<media::FakeVideoEncodeAccelerator*>(last_response_vea_);
- }
- base::SharedMemory* last_response_shm() const {
- return last_response_shm_;
- }
-
// Set whether the next created media::FakeVideoEncodeAccelerator will
// initialize successfully.
void SetInitializationWillSucceed(bool will_init_succeed);
@@ -69,7 +59,7 @@ class FakeVideoEncodeAcceleratorFactory {
void RespondWithVideoEncodeAccelerator();
// Runs the |callback| provided to the last call to
- // CreateSharedMemory() with the new base::SharedMemory instance.
+ // CreateSharedMemory() with the new base::UnsafeSharedMemoryRegion instance.
void RespondWithSharedMemory();
private:
@@ -78,12 +68,10 @@ class FakeVideoEncodeAcceleratorFactory {
bool auto_respond_;
std::unique_ptr<media::VideoEncodeAccelerator> next_response_vea_;
ReceiveVideoEncodeAcceleratorCallback vea_response_callback_;
- std::unique_ptr<base::SharedMemory> next_response_shm_;
+ base::UnsafeSharedMemoryRegion next_response_shm_;
ReceiveVideoEncodeMemoryCallback shm_response_callback_;
int vea_response_count_;
int shm_response_count_;
- media::VideoEncodeAccelerator* last_response_vea_;
- base::SharedMemory* last_response_shm_;
DISALLOW_COPY_AND_ASSIGN(FakeVideoEncodeAcceleratorFactory);
};
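
The two files above migrate the fake encode-accelerator factory from base::SharedMemory to base::UnsafeSharedMemoryRegion, which splits the transferable region from its process-local mapping. A minimal sketch of that allocation pattern, assuming only the base/memory API already named in the diff; the helper name, |buffer_size| and the callback type are illustrative rather than taken from the factory:

#include <cstring>
#include <utility>

#include "base/callback.h"
#include "base/logging.h"
#include "base/memory/unsafe_shared_memory_region.h"

// Hypothetical counterpart of CreateSharedMemory()/RespondWithSharedMemory():
// allocate a region, zero it through a local mapping, then move the region to
// the consumer. The region object itself is what crosses the API boundary.
void AllocateAndRespond(
    size_t buffer_size,
    base::OnceCallback<void(base::UnsafeSharedMemoryRegion)> callback) {
  base::UnsafeSharedMemoryRegion region =
      base::UnsafeSharedMemoryRegion::Create(buffer_size);
  CHECK(region.IsValid());  // Create() returns an invalid region on failure.

  // Map() yields a WritableSharedMemoryMapping; memory()/size() give the
  // writable process-local view. The mapping may be dropped while the region
  // stays alive, or kept alongside it as the vector of pairs above does.
  base::WritableSharedMemoryMapping mapping = region.Map();
  CHECK(mapping.IsValid());
  memset(mapping.memory(), 0, mapping.size());

  std::move(callback).Run(std::move(region));
}
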
diff --git a/chromium/media/cast/sender/h264_vt_encoder_unittest.cc b/chromium/media/cast/sender/h264_vt_encoder_unittest.cc
index db064becda9..f93238fb391 100644
--- a/chromium/media/cast/sender/h264_vt_encoder_unittest.cc
+++ b/chromium/media/cast/sender/h264_vt_encoder_unittest.cc
@@ -13,8 +13,8 @@
#include "base/run_loop.h"
#include "base/test/launcher/unit_test_launcher.h"
#include "base/test/power_monitor_test_base.h"
-#include "base/test/scoped_task_environment.h"
#include "base/test/simple_test_tick_clock.h"
+#include "base/test/task_environment.h"
#include "base/test/test_suite.h"
#include "media/base/cdm_context.h"
#include "media/base/decoder_buffer.h"
@@ -215,9 +215,9 @@ class H264VideoToolboxEncoderTest : public ::testing::Test {
std::unique_ptr<TestPowerSource>(power_source_));
cast_environment_ = new CastEnvironment(
- &clock_, scoped_task_environment_.GetMainThreadTaskRunner(),
- scoped_task_environment_.GetMainThreadTaskRunner(),
- scoped_task_environment_.GetMainThreadTaskRunner());
+ &clock_, task_environment_.GetMainThreadTaskRunner(),
+ task_environment_.GetMainThreadTaskRunner(),
+ task_environment_.GetMainThreadTaskRunner());
encoder_.reset(new H264VideoToolboxEncoder(
cast_environment_, video_sender_config_,
base::Bind(&SaveOperationalStatus, &operational_status_)));
@@ -253,7 +253,7 @@ class H264VideoToolboxEncoderTest : public ::testing::Test {
static FrameSenderConfig video_sender_config_;
base::SimpleTestTickClock clock_;
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
scoped_refptr<CastEnvironment> cast_environment_;
std::unique_ptr<VideoEncoder> encoder_;
OperationalStatus operational_status_;
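
This test, like the unittest diffs that follow, swaps base::test::ScopedTaskEnvironment for its renamed successor base::test::TaskEnvironment; the member is renamed and the accessors stay the same. A minimal fixture showing how the renamed type is typically exercised (the test and flag names are made up; only the TaskEnvironment calls mirror the diff):

#include "base/bind.h"
#include "base/location.h"
#include "base/test/task_environment.h"
#include "testing/gtest/include/gtest/gtest.h"

class RenamedEnvironmentTest : public testing::Test {
 protected:
  // Usually the first member, so it outlives everything that posts tasks.
  base::test::TaskEnvironment task_environment_;
};

TEST_F(RenamedEnvironmentTest, RunsPostedTasks) {
  bool ran = false;
  task_environment_.GetMainThreadTaskRunner()->PostTask(
      FROM_HERE, base::BindOnce([](bool* flag) { *flag = true; }, &ran));
  // Drains the main-thread queue, matching the RunUntilIdle() calls above.
  task_environment_.RunUntilIdle();
  EXPECT_TRUE(ran);
}
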
diff --git a/chromium/media/cdm/BUILD.gn b/chromium/media/cdm/BUILD.gn
index 0eceda9cc87..f1467a48339 100644
--- a/chromium/media/cdm/BUILD.gn
+++ b/chromium/media/cdm/BUILD.gn
@@ -104,13 +104,6 @@ source_set("cdm") {
]
}
}
-
- if (is_fuchsia) {
- sources += [
- "fuchsia/fuchsia_cdm_factory.cc",
- "fuchsia/fuchsia_cdm_factory.h",
- ]
- }
}
static_library("cdm_paths") {
@@ -120,6 +113,7 @@ static_library("cdm_paths") {
]
deps = [
"//base",
+ "//media:media_buildflags",
]
}
@@ -155,6 +149,7 @@ source_set("unit_tests") {
if (enable_library_cdms) {
sources += [
"cdm_adapter_unittest.cc",
+ "cdm_paths_unittest.cc",
"external_clear_key_test_helper.cc",
"external_clear_key_test_helper.h",
"mock_helpers.cc",
diff --git a/chromium/media/cdm/aes_decryptor_unittest.cc b/chromium/media/cdm/aes_decryptor_unittest.cc
index 0664e40a707..304baa91f4e 100644
--- a/chromium/media/cdm/aes_decryptor_unittest.cc
+++ b/chromium/media/cdm/aes_decryptor_unittest.cc
@@ -16,7 +16,7 @@
#include "base/run_loop.h"
#include "base/stl_util.h"
#include "base/test/scoped_feature_list.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/values.h"
#include "media/base/cdm_callback_promise.h"
#include "media/base/cdm_config.h"
@@ -493,7 +493,7 @@ class AesDecryptorTest : public testing::TestWithParam<TestType> {
}
// Must be the first member to be initialized first and destroyed last.
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
StrictMock<MockCdmClient> cdm_client_;
scoped_refptr<ContentDecryptionModule> cdm_;
diff --git a/chromium/media/cdm/cdm_adapter_unittest.cc b/chromium/media/cdm/cdm_adapter_unittest.cc
index 39f4b7e039b..037e776ec5e 100644
--- a/chromium/media/cdm/cdm_adapter_unittest.cc
+++ b/chromium/media/cdm/cdm_adapter_unittest.cc
@@ -13,7 +13,7 @@
#include "base/run_loop.h"
#include "base/stl_util.h"
#include "base/strings/string_number_conversions.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/base/cdm_callback_promise.h"
#include "media/base/cdm_key_information.h"
#include "media/base/content_decryption_module.h"
@@ -177,7 +177,7 @@ class CdmAdapterTestBase : public testing::Test,
}
}
- void RunUntilIdle() { scoped_task_environment_.RunUntilIdle(); }
+ void RunUntilIdle() { task_environment_.RunUntilIdle(); }
StrictMock<MockCdmClient> cdm_client_;
StrictMock<MockCdmAuxiliaryHelper>* cdm_helper_ = nullptr;
@@ -185,7 +185,7 @@ class CdmAdapterTestBase : public testing::Test,
// Keep track of the loaded CDM.
scoped_refptr<ContentDecryptionModule> cdm_;
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
private:
DISALLOW_COPY_AND_ASSIGN(CdmAdapterTestBase);
diff --git a/chromium/media/cdm/cdm_paths.cc b/chromium/media/cdm/cdm_paths.cc
index d68501255f3..7414d71fca6 100644
--- a/chromium/media/cdm/cdm_paths.cc
+++ b/chromium/media/cdm/cdm_paths.cc
@@ -6,7 +6,7 @@
#include <string>
-#include "build/build_config.h"
+#include "media/media_buildflags.h"
namespace media {
@@ -24,60 +24,16 @@ const base::Token kClearKeyCdmDifferentGuid{0xc3914773474bdb02ull,
// this ID is based on the pepper plugin MIME type.
const char kClearKeyCdmFileSystemId[] = "application_x-ppapi-clearkey-cdm";
-// Note: This file must be in sync with cdm_paths.gni.
-// TODO(xhwang): Improve how we enable platform specific path. See
-// http://crbug.com/468584
-#if (defined(OS_MACOSX) || defined(OS_WIN)) && \
- (defined(ARCH_CPU_X86) || defined(ARCH_CPU_X86_64))
-#define CDM_USE_PLATFORM_SPECIFIC_PATH
-#endif
-
-#if defined(CDM_USE_PLATFORM_SPECIFIC_PATH)
-
-// Special path used in chrome components.
-const char kPlatformSpecific[] = "_platform_specific";
-
-// Name of the component platform in the manifest.
-const char kComponentPlatform[] =
-#if defined(OS_MACOSX)
- "mac";
-#elif defined(OS_WIN)
- "win";
-#elif defined(OS_CHROMEOS)
- "cros";
-#elif defined(OS_LINUX)
- "linux";
-#else
- "unsupported_platform";
-#endif
-
-// Name of the component architecture in the manifest.
-const char kComponentArch[] =
-#if defined(ARCH_CPU_X86)
- "x86";
-#elif defined(ARCH_CPU_X86_64)
- "x64";
-#elif defined(ARCH_CPU_ARMEL)
- "arm";
-#else
- "unsupported_arch";
-#endif
-
base::FilePath GetPlatformSpecificDirectory(const std::string& cdm_base_path) {
- base::FilePath path;
- const std::string kPlatformArch =
- std::string(kComponentPlatform) + "_" + kComponentArch;
- return path.AppendASCII(cdm_base_path)
- .AppendASCII(kPlatformSpecific)
- .AppendASCII(kPlatformArch);
-}
-
-#else // defined(CDM_USE_PLATFORM_SPECIFIC_PATH)
+ // CDM_PLATFORM_SPECIFIC_PATH is specified in cdm_paths.gni.
+ const std::string kPlatformSpecific = BUILDFLAG(CDM_PLATFORM_SPECIFIC_PATH);
+ if (kPlatformSpecific.empty())
+ return base::FilePath();
-base::FilePath GetPlatformSpecificDirectory(const std::string& cdm_base_path) {
- return base::FilePath();
+ return base::FilePath()
+ .AppendASCII(cdm_base_path)
+ .AppendASCII(kPlatformSpecific)
+ .NormalizePathSeparators();
}
-#endif // defined(CDM_USE_PLATFORM_SPECIFIC_PATH)
-
} // namespace media
diff --git a/chromium/media/cdm/cdm_paths_unittest.cc b/chromium/media/cdm/cdm_paths_unittest.cc
new file mode 100644
index 00000000000..707993c2748
--- /dev/null
+++ b/chromium/media/cdm/cdm_paths_unittest.cc
@@ -0,0 +1,110 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/cdm/cdm_paths.h"
+
+#include "base/strings/string_split.h"
+#include "base/strings/string_util.h"
+#include "base/strings/utf_string_conversions.h"
+#include "build/build_config.h"
+#include "media/media_buildflags.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+// Only verify platform specific path on some platforms.
+// TODO(crbug.com/971433). Move the CDMs out of the install directory on
+// ChromeOS.
+#if (defined(OS_MACOSX) || defined(OS_WIN) || \
+ (defined(OS_LINUX) && !defined(OS_CHROMEOS))) && \
+ (defined(ARCH_CPU_X86) || defined(ARCH_CPU_X86_64))
+#define CDM_USE_PLATFORM_SPECIFIC_PATH
+#endif
+
+namespace media {
+
+namespace {
+
+#if defined(CDM_USE_PLATFORM_SPECIFIC_PATH)
+
+// Special path used in chrome components.
+const char kPlatformSpecific[] = "_platform_specific";
+
+// Name of the component platform.
+const char kComponentPlatform[] =
+#if defined(OS_MACOSX)
+ "mac";
+#elif defined(OS_WIN)
+ "win";
+#elif defined(OS_CHROMEOS)
+ "cros";
+#elif defined(OS_LINUX)
+ "linux";
+#else
+ "unsupported_platform";
+#endif
+
+// Name of the component architecture.
+const char kComponentArch[] =
+#if defined(ARCH_CPU_X86)
+ "x86";
+#elif defined(ARCH_CPU_X86_64)
+ "x64";
+#elif defined(ARCH_CPU_ARMEL)
+ "arm";
+#else
+ "unsupported_arch";
+#endif
+
+base::FilePath GetExpectedPlatformSpecificDirectory(
+ const std::string& base_path) {
+ base::FilePath path;
+ const std::string kPlatformArch =
+ std::string(kComponentPlatform) + "_" + kComponentArch;
+ return path.AppendASCII(base_path)
+ .AppendASCII(kPlatformSpecific)
+ .AppendASCII(kPlatformArch);
+}
+
+#else
+
+// If the CDM is not a component, it has no platform specific path.
+base::FilePath GetExpectedPlatformSpecificDirectory(
+ const std::string& base_path) {
+ return base::FilePath();
+}
+
+#endif // defined(CDM_USE_PLATFORM_SPECIFIC_PATH)
+
+std::string GetFlag() {
+ return BUILDFLAG(CDM_PLATFORM_SPECIFIC_PATH);
+}
+
+} // namespace
+
+TEST(CdmPathsTest, FlagSpecified) {
+#if defined(CDM_USE_PLATFORM_SPECIFIC_PATH)
+ EXPECT_FALSE(GetFlag().empty());
+#else
+ EXPECT_TRUE(GetFlag().empty());
+#endif
+}
+
+TEST(CdmPathsTest, Prefix) {
+ const char kPrefix[] = "prefix";
+ auto path = GetPlatformSpecificDirectory(kPrefix);
+
+#if defined(CDM_USE_PLATFORM_SPECIFIC_PATH)
+ EXPECT_TRUE(base::StartsWith(path.MaybeAsASCII(), kPrefix,
+ base::CompareCase::SENSITIVE));
+#else
+ EXPECT_TRUE(path.MaybeAsASCII().empty());
+#endif
+}
+
+TEST(CdmPathsTest, Expected) {
+ const char kPrefix[] = "cdm";
+ EXPECT_EQ(GetExpectedPlatformSpecificDirectory(kPrefix),
+ GetPlatformSpecificDirectory(kPrefix));
+}
+
+} // namespace media
diff --git a/chromium/media/cdm/external_clear_key_test_helper.cc b/chromium/media/cdm/external_clear_key_test_helper.cc
index b34ef3e4b87..d0a495cc387 100644
--- a/chromium/media/cdm/external_clear_key_test_helper.cc
+++ b/chromium/media/cdm/external_clear_key_test_helper.cc
@@ -9,6 +9,7 @@
#include "base/files/file_util.h"
#include "base/native_library.h"
#include "base/path_service.h"
+#include "build/build_config.h"
#include "media/cdm/api/content_decryption_module.h"
#include "media/cdm/cdm_paths.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -30,6 +31,10 @@ ExternalClearKeyTestHelper::~ExternalClearKeyTestHelper() {
}
void ExternalClearKeyTestHelper::LoadLibrary() {
+#if defined(OS_FUCHSIA)
+ library_path_ =
+ base::FilePath(base::GetLoadableModuleName(kClearKeyCdmLibraryName));
+#else // defined(OS_FUCHSIA)
// Determine the location of the CDM. It is expected to be in the same
// directory as the current module.
base::FilePath cdm_base_path;
@@ -39,6 +44,7 @@ void ExternalClearKeyTestHelper::LoadLibrary() {
library_path_ = cdm_base_path.AppendASCII(
base::GetLoadableModuleName(kClearKeyCdmLibraryName));
ASSERT_TRUE(base::PathExists(library_path_)) << library_path_.value();
+#endif // defined(OS_FUCHSIA)
// Now load the CDM library.
library_ = base::ScopedNativeLibrary(library_path_);
diff --git a/chromium/media/cdm/library_cdm/cdm_paths.gni b/chromium/media/cdm/library_cdm/cdm_paths.gni
index f8f94b7a785..59781c3288f 100644
--- a/chromium/media/cdm/library_cdm/cdm_paths.gni
+++ b/chromium/media/cdm/library_cdm/cdm_paths.gni
@@ -7,8 +7,6 @@
# Naming and folder structure below are following the recommendation for Chrome
# components. Component-updated CDMs must follow the same recommendation.
-# Note: This file must be in sync with cdm_paths.cc
-
# OS name for components is close to "target_os" but has some differences.
# Explicitly define what we use to avoid confusion.
if (is_chromeos) {
@@ -29,18 +27,24 @@ if (current_cpu == "x86" || current_cpu == "x64" || current_cpu == "arm") {
component_arch = "unsupported_arch"
}
-# Only enable platform specific path for Win and Mac, where CDMs are Chrome
-# components.
-# TODO(xhwang): Improve how we enable platform specific path. See
-# http://crbug.com/468584
-if ((is_win || is_mac) && (current_cpu == "x86" || current_cpu == "x64")) {
- _platform_specific_path =
+# Enable platform specific paths. This is required when the CDMs are Chrome
+# components, but is optional for other platforms.
+# Note: |cdm_platform_specific_path| is exported as a BUILDFLAG to
+# cdm_paths.cc.
+if ((is_win || is_mac || is_desktop_linux) &&
+ (current_cpu == "x86" || current_cpu == "x64")) {
+ cdm_platform_specific_path =
"_platform_specific/$component_os" + "_" + "$component_arch"
# Path of Clear Key and Widevine CDMs relative to the output dir.
- clearkey_cdm_path = "ClearKeyCdm/$_platform_specific_path"
- widevine_cdm_path = "WidevineCdm/$_platform_specific_path"
+ clearkey_cdm_path = "ClearKeyCdm/$cdm_platform_specific_path"
+ widevine_cdm_path = "WidevineCdm/$cdm_platform_specific_path"
+} else if (is_fuchsia) {
+ cdm_platform_specific_path = ""
+ clearkey_cdm_path = "lib"
+ widevine_cdm_path = "lib"
} else {
+ cdm_platform_specific_path = ""
clearkey_cdm_path = "."
widevine_cdm_path = "."
}
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_file_io_test.cc b/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_file_io_test.cc
index 477dc31c022..cdb67d9165e 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_file_io_test.cc
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_file_io_test.cc
@@ -10,6 +10,7 @@
#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/stl_util.h"
+#include "base/strings/string_number_conversions.h"
namespace media {
@@ -47,28 +48,32 @@ const uint32_t kLargeDataSize = 20 * 1024 + 7;
} while(0);
#define CREATE_FILE_IO \
- ADD_TEST_STEP(ACTION_CREATE, kSuccess, NULL, 0)
+ ADD_TEST_STEP(ACTION_CREATE, kSuccess, nullptr, 0)
#define OPEN_FILE \
- ADD_TEST_STEP(ACTION_OPEN, kSuccess, NULL, 0)
+ ADD_TEST_STEP(ACTION_OPEN, kSuccess, nullptr, 0)
#define EXPECT_FILE_OPENED(status) \
- ADD_TEST_STEP(RESULT_OPEN, status, NULL, 0)
+ ADD_TEST_STEP(RESULT_OPEN, status, nullptr, 0)
#define READ_FILE \
- ADD_TEST_STEP(ACTION_READ, kSuccess, NULL, 0)
+ ADD_TEST_STEP(ACTION_READ, kSuccess, nullptr, 0)
#define EXPECT_FILE_READ(status, data, data_size) \
ADD_TEST_STEP(RESULT_READ, status, data, data_size)
+#define EXPECT_FILE_READ_EITHER(status, data, data_size, data2, data2_size) \
+ test_case->AddResultReadEither(cdm::FileIOClient::Status::status, (data), \
+ (data_size), (data2), (data2_size));
+
#define WRITE_FILE(data, data_size) \
ADD_TEST_STEP(ACTION_WRITE, kSuccess, data, data_size)
#define EXPECT_FILE_WRITTEN(status) \
- ADD_TEST_STEP(RESULT_WRITE, status, NULL, 0)
+ ADD_TEST_STEP(RESULT_WRITE, status, nullptr, 0)
#define CLOSE_FILE \
- ADD_TEST_STEP(ACTION_CLOSE, kSuccess, NULL, 0)
+ ADD_TEST_STEP(ACTION_CLOSE, kSuccess, nullptr, 0)
// FileIOTestRunner implementation.
@@ -150,7 +155,7 @@ void FileIOTestRunner::AddTests() {
START_TEST_CASE("ReadBeforeOpeningFile")
READ_FILE
- EXPECT_FILE_READ(kError, NULL, 0)
+ EXPECT_FILE_READ(kError, nullptr, 0)
END_TEST_CASE
START_TEST_CASE("WriteBeforeOpeningFile")
@@ -162,7 +167,7 @@ void FileIOTestRunner::AddTests() {
OPEN_FILE
READ_FILE
EXPECT_FILE_OPENED(kSuccess)
- EXPECT_FILE_READ(kError, NULL, 0)
+ EXPECT_FILE_READ(kError, nullptr, 0)
// After file opened, we can still do normal operations.
WRITE_FILE(kData, kDataSize)
EXPECT_FILE_WRITTEN(kSuccess)
@@ -189,7 +194,7 @@ void FileIOTestRunner::AddTests() {
EXPECT_FILE_WRITTEN(kSuccess)
READ_FILE
READ_FILE
- EXPECT_FILE_READ(kInUse, NULL, 0)
+ EXPECT_FILE_READ(kInUse, nullptr, 0)
EXPECT_FILE_READ(kSuccess, kData, kDataSize)
// Read again.
READ_FILE
@@ -201,7 +206,7 @@ void FileIOTestRunner::AddTests() {
EXPECT_FILE_OPENED(kSuccess)
WRITE_FILE(kData, kDataSize)
READ_FILE
- EXPECT_FILE_READ(kInUse, NULL, 0)
+ EXPECT_FILE_READ(kInUse, nullptr, 0)
EXPECT_FILE_WRITTEN(kSuccess)
// Read again.
READ_FILE
@@ -214,7 +219,7 @@ void FileIOTestRunner::AddTests() {
READ_FILE
WRITE_FILE(kData, kDataSize)
EXPECT_FILE_WRITTEN(kInUse)
- EXPECT_FILE_READ(kSuccess, NULL, 0)
+ EXPECT_FILE_READ(kSuccess, nullptr, 0)
// We can still do normal operations.
WRITE_FILE(kData, kDataSize)
EXPECT_FILE_WRITTEN(kSuccess)
@@ -238,7 +243,7 @@ void FileIOTestRunner::AddTests() {
OPEN_FILE
EXPECT_FILE_OPENED(kSuccess)
READ_FILE
- EXPECT_FILE_READ(kSuccess, NULL, 0)
+ EXPECT_FILE_READ(kSuccess, nullptr, 0)
END_TEST_CASE
START_TEST_CASE("WriteAndRead")
@@ -253,10 +258,10 @@ void FileIOTestRunner::AddTests() {
START_TEST_CASE("WriteAndReadEmptyFile")
OPEN_FILE
EXPECT_FILE_OPENED(kSuccess)
- WRITE_FILE(NULL, 0)
+ WRITE_FILE(nullptr, 0)
EXPECT_FILE_WRITTEN(kSuccess)
READ_FILE
- EXPECT_FILE_READ(kSuccess, NULL, 0)
+ EXPECT_FILE_READ(kSuccess, nullptr, 0)
END_TEST_CASE
START_TEST_CASE("WriteAndReadLargeData")
@@ -275,10 +280,10 @@ void FileIOTestRunner::AddTests() {
EXPECT_FILE_WRITTEN(kSuccess)
READ_FILE
EXPECT_FILE_READ(kSuccess, kData, kDataSize)
- WRITE_FILE(NULL, 0)
+ WRITE_FILE(nullptr, 0)
EXPECT_FILE_WRITTEN(kSuccess)
READ_FILE
- EXPECT_FILE_READ(kSuccess, NULL, 0)
+ EXPECT_FILE_READ(kSuccess, nullptr, 0)
END_TEST_CASE
START_TEST_CASE("OverwriteWithSmallerData")
@@ -353,7 +358,7 @@ void FileIOTestRunner::AddTests() {
EXPECT_FILE_OPENED(kSuccess)
// Read file which doesn't exist.
READ_FILE
- EXPECT_FILE_READ(kSuccess, NULL, 0)
+ EXPECT_FILE_READ(kSuccess, nullptr, 0)
// Write kData to file.
WRITE_FILE(kData, kDataSize)
EXPECT_FILE_WRITTEN(kSuccess)
@@ -376,11 +381,11 @@ void FileIOTestRunner::AddTests() {
READ_FILE
EXPECT_FILE_READ(kSuccess, kData, kDataSize)
// Overwrite file with zero bytes.
- WRITE_FILE(NULL, 0)
+ WRITE_FILE(nullptr, 0)
EXPECT_FILE_WRITTEN(kSuccess)
// Read file.
READ_FILE
- EXPECT_FILE_READ(kSuccess, NULL, 0)
+ EXPECT_FILE_READ(kSuccess, nullptr, 0)
END_TEST_CASE
START_TEST_CASE("OpenAfterOpen")
@@ -430,12 +435,17 @@ void FileIOTestRunner::AddTests() {
EXPECT_FILE_WRITTEN(kSuccess)
WRITE_FILE(kBigData, kBigDataSize)
CLOSE_FILE
- // Write() didn't finish and the content of the file is not modified.
+ // Write() is async, so it may or may not modify the content of the file.
CREATE_FILE_IO
OPEN_FILE
EXPECT_FILE_OPENED(kSuccess)
READ_FILE
- EXPECT_FILE_READ(kSuccess, kData, kDataSize)
+ // As Write() is async, it is possible that the second write above
+ // succeeds before the file is closed. So check that the contents
+ // match either data set.
+ EXPECT_FILE_READ_EITHER(kSuccess,
+ kData, kDataSize,
+ kBigData, kBigDataSize)
END_TEST_CASE
START_TEST_CASE("CloseDuringPendingOverwriteWithSmallerData")
@@ -445,12 +455,17 @@ void FileIOTestRunner::AddTests() {
EXPECT_FILE_WRITTEN(kSuccess)
WRITE_FILE(kData, kDataSize)
CLOSE_FILE
- // Write() didn't finish and the content of the file is not modified.
+ // Write() is async, so it may or may not modify the content of the file.
CREATE_FILE_IO
OPEN_FILE
EXPECT_FILE_OPENED(kSuccess)
READ_FILE
- EXPECT_FILE_READ(kSuccess, kBigData, kBigDataSize)
+ // As Write() is async, it is possible that the second write above
+ // succeeds before the file is closed. So check that the contents
+ // match either data set.
+ EXPECT_FILE_READ_EITHER(kSuccess,
+ kBigData, kBigDataSize,
+ kData, kDataSize)
END_TEST_CASE
START_TEST_CASE("CloseDuringPendingRead")
@@ -482,7 +497,12 @@ void FileIOTestRunner::AddTests() {
OPEN_FILE
EXPECT_FILE_OPENED(kSuccess)
READ_FILE
- EXPECT_FILE_READ(kSuccess, kData, kDataSize)
+ // As Write() is async, it is possible that the second write above
+ // succeeds before the file is closed. So check that the contents
+ // match either data set.
+ EXPECT_FILE_READ_EITHER(kSuccess,
+ kData, kDataSize,
+ kBigData, kBigDataSize)
CLOSE_FILE
}
END_TEST_CASE
@@ -530,6 +550,16 @@ void FileIOTest::AddTestStep(StepType type,
test_steps_.push_back(TestStep(type, status, data, data_size));
}
+void FileIOTest::AddResultReadEither(Status status,
+ const uint8_t* data,
+ uint32_t data_size,
+ const uint8_t* data2,
+ uint32_t data2_size) {
+ DCHECK_NE(data_size, data2_size);
+ test_steps_.push_back(TestStep(FileIOTest::RESULT_READ, status, data,
+ data_size, data2, data2_size));
+}
+
void FileIOTest::Run(const CompletionCB& completion_cb) {
FILE_IO_DVLOG(3) << "Run " << test_name_;
completion_cb_ = completion_cb;
@@ -538,7 +568,7 @@ void FileIOTest::Run(const CompletionCB& completion_cb) {
}
void FileIOTest::OnOpenComplete(Status status) {
- OnResult(TestStep(RESULT_OPEN, status, NULL, 0));
+ OnResult(TestStep(RESULT_OPEN, status));
}
void FileIOTest::OnReadComplete(Status status,
@@ -548,7 +578,7 @@ void FileIOTest::OnReadComplete(Status status,
}
void FileIOTest::OnWriteComplete(Status status) {
- OnResult(TestStep(RESULT_WRITE, status, NULL, 0));
+ OnResult(TestStep(RESULT_WRITE, status));
}
bool FileIOTest::IsResult(const TestStep& test_step) {
@@ -570,12 +600,19 @@ bool FileIOTest::IsResult(const TestStep& test_step) {
bool FileIOTest::MatchesResult(const TestStep& a, const TestStep& b) {
DCHECK(IsResult(a) && IsResult(b));
+ DCHECK(!b.data2);
+
if (a.type != b.type || a.status != b.status)
return false;
if (a.type != RESULT_READ || a.status != cdm::FileIOClient::Status::kSuccess)
return true;
+ // If |a| specifies |data2|, check it first: if its size matches, compare
+ // the contents.
+ if (a.data2 && b.data_size == a.data2_size)
+ return std::equal(a.data2, a.data2 + a.data2_size, b.data);
+
return (a.data_size == b.data_size &&
std::equal(a.data, a.data + a.data_size, b.data));
}
@@ -590,13 +627,14 @@ void FileIOTest::RunNextStep() {
TestStep test_step = test_steps_.front();
test_steps_.pop_front();
- cdm::FileIO* file_io = file_io_stack_.empty() ? NULL : file_io_stack_.top();
+ cdm::FileIO* file_io =
+ file_io_stack_.empty() ? nullptr : file_io_stack_.top();
switch (test_step.type) {
case ACTION_CREATE:
file_io = create_file_io_cb_.Run(this);
if (!file_io) {
- FILE_IO_DVLOG(3) << "Cannot create FileIO object.";
+ LOG(WARNING) << test_name_ << " cannot create FileIO object.";
OnTestComplete(false);
return;
}
@@ -627,6 +665,19 @@ void FileIOTest::RunNextStep() {
void FileIOTest::OnResult(const TestStep& result) {
DCHECK(IsResult(result));
if (!CheckResult(result)) {
+ LOG(WARNING) << test_name_ << " got unexpected result. type=" << result.type
+ << ", status=" << (uint32_t)result.status
+ << ", data_size=" << result.data_size << ", received data="
+ << (result.data
+ ? base::HexEncode(result.data, result.data_size)
+ : "<null>");
+ for (const auto& step : test_steps_) {
+ if (IsResult(step)) {
+ LOG(WARNING) << test_name_ << " expected type=" << step.type
+ << ", status=" << (uint32_t)step.status
+ << ", data_size=" << step.data_size;
+ }
+ }
OnTestComplete(false);
return;
}
@@ -660,7 +711,7 @@ void FileIOTest::OnTestComplete(bool success) {
file_io_stack_.pop();
}
FILE_IO_DVLOG(3) << test_name_ << (success ? " PASSED" : " FAILED");
- DLOG_IF(WARNING, !success) << test_name_ << " FAILED";
+ LOG_IF(WARNING, !success) << test_name_ << " FAILED";
std::move(completion_cb_).Run(success);
}
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_file_io_test.h b/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_file_io_test.h
index 4eb4b3ef025..a07fcc1f316 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_file_io_test.h
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_file_io_test.h
@@ -75,19 +75,34 @@ class FileIOTest : public cdm::FileIOClient {
Status status,
const uint8_t* data,
uint32_t data_size);
+ // Adds a test step in this test that expects a successful read of either
+ // |data| or |data2|. |this| object doesn't take the ownership of |data| or
+ // |data2|, which should be valid throughout the lifetime of |this| object.
+ void AddResultReadEither(Status status,
+ const uint8_t* data,
+ uint32_t data_size,
+ const uint8_t* data2,
+ uint32_t data2_size);
// Runs this test case and returns the test result through |completion_cb|.
void Run(const CompletionCB& completion_cb);
private:
struct TestStep {
- // |this| object doesn't take the ownership of |data|, which should be valid
- // throughout the lifetime of |this| object.
+ // |this| object doesn't take the ownership of |data| or |data2|, which
+ // should be valid throughout the lifetime of |this| object.
TestStep(StepType type,
Status status,
- const uint8_t* data,
- uint32_t data_size)
- : type(type), status(status), data(data), data_size(data_size) {}
+ const uint8_t* data = nullptr,
+ uint32_t data_size = 0,
+ const uint8_t* data2 = nullptr,
+ uint32_t data2_size = 0)
+ : type(type),
+ status(status),
+ data(data),
+ data_size(data_size),
+ data2(data2),
+ data2_size(data2_size) {}
StepType type;
@@ -97,6 +112,10 @@ class FileIOTest : public cdm::FileIOClient {
// Data to write in ACTION_WRITE, or read data in RESULT_READ.
const uint8_t* data;
uint32_t data_size;
+
+ // Alternate read data in RESULT_READ, if |data2| != nullptr.
+ const uint8_t* data2;
+ uint32_t data2_size;
};
// Returns whether |test_step| is a RESULT_* step.
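
The new |data2|/|data2_size| members let a RESULT_READ step accept either of two payloads, since an asynchronous Write() racing with Close() can leave the file holding either the old or the new contents. A standalone sketch of that comparison, with illustrative names rather than the harness's TestStep/MatchesResult:

#include <algorithm>
#include <cstdint>

// Returns true if |actual| byte-matches either expected payload. Mirrors the
// "compare |data2| first, then fall back to |data|" logic added above.
bool ReadMatchesEither(const uint8_t* actual, uint32_t actual_size,
                       const uint8_t* expected1, uint32_t size1,
                       const uint8_t* expected2, uint32_t size2) {
  if (expected2 && actual_size == size2 &&
      std::equal(expected2, expected2 + size2, actual)) {
    return true;
  }
  return actual_size == size1 &&
         std::equal(expected1, expected1 + size1, actual);
}
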
diff --git a/chromium/media/device_monitors/device_monitor_udev.cc b/chromium/media/device_monitors/device_monitor_udev.cc
index cd8ae1cffb6..0c73ddf3deb 100644
--- a/chromium/media/device_monitors/device_monitor_udev.cc
+++ b/chromium/media/device_monitors/device_monitor_udev.cc
@@ -114,8 +114,9 @@ void DeviceMonitorLinux::BlockingTaskRunnerHelper::OnDevicesChanged(
}
DeviceMonitorLinux::DeviceMonitorLinux()
- : blocking_task_runner_(base::CreateSequencedTaskRunnerWithTraits(
- {base::MayBlock(), base::TaskPriority::USER_VISIBLE,
+ : blocking_task_runner_(base::CreateSequencedTaskRunner(
+ {base::ThreadPool(), base::MayBlock(),
+ base::TaskPriority::USER_VISIBLE,
base::TaskShutdownBehavior::CONTINUE_ON_SHUTDOWN})),
blocking_task_helper_(new BlockingTaskRunnerHelper,
base::OnTaskRunnerDeleter(blocking_task_runner_)) {
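
The deprecated base::CreateSequencedTaskRunnerWithTraits() is replaced here (and in the ffmpeg_demuxer.cc hunk further down) by base::CreateSequencedTaskRunner() with an explicit base::ThreadPool() trait. A minimal sketch of the new spelling, assuming only the base/task API used in the diff; the function name is illustrative:

#include "base/memory/scoped_refptr.h"
#include "base/sequenced_task_runner.h"
#include "base/task/post_task.h"
#include "base/task/task_traits.h"

scoped_refptr<base::SequencedTaskRunner> CreateBlockingRunner() {
  // The thread-pool destination is now an explicit base::ThreadPool() trait
  // rather than being implied by the *WithTraits suffix.
  return base::CreateSequencedTaskRunner(
      {base::ThreadPool(), base::MayBlock(),
       base::TaskPriority::USER_VISIBLE,
       base::TaskShutdownBehavior::CONTINUE_ON_SHUTDOWN});
}
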
diff --git a/chromium/media/device_monitors/system_message_window_win_unittest.cc b/chromium/media/device_monitors/system_message_window_win_unittest.cc
index a9eee371d9f..b0cd4af42cb 100644
--- a/chromium/media/device_monitors/system_message_window_win_unittest.cc
+++ b/chromium/media/device_monitors/system_message_window_win_unittest.cc
@@ -13,7 +13,7 @@
#include "base/run_loop.h"
#include "base/system/system_monitor.h"
#include "base/test/mock_devices_changed_observer.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -29,8 +29,8 @@ class SystemMessageWindowWinTest : public testing::Test {
}
// Run single threaded to not require explicit COM initialization
- base::test::ScopedTaskEnvironment scoped_task_environment_{
- base::test::ScopedTaskEnvironment::ThreadingMode::MAIN_THREAD_ONLY};
+ base::test::TaskEnvironment task_environment_{
+ base::test::TaskEnvironment::ThreadingMode::MAIN_THREAD_ONLY};
base::SystemMonitor system_monitor_;
base::MockDevicesChangedObserver observer_;
SystemMessageWindowWin window_;
diff --git a/chromium/media/filters/BUILD.gn b/chromium/media/filters/BUILD.gn
index f33ca5866f9..c66febdbf69 100644
--- a/chromium/media/filters/BUILD.gn
+++ b/chromium/media/filters/BUILD.gn
@@ -209,8 +209,14 @@ jumbo_source_set("filters") {
"fuchsia/fuchsia_video_decoder.h",
]
deps += [
+ "//gpu/command_buffer/client",
+ "//gpu/command_buffer/common",
+ "//gpu/ipc/common",
"//third_party/fuchsia-sdk/sdk:media",
"//third_party/fuchsia-sdk/sdk:mediacodec",
+ "//third_party/fuchsia-sdk/sdk:sys_cpp",
+ "//third_party/fuchsia-sdk/sdk:sysmem",
+ "//ui/ozone",
]
}
}
@@ -311,6 +317,12 @@ source_set("unit_tests") {
if (is_fuchsia) {
sources += [ "fuchsia/fuchsia_video_decoder_unittest.cc" ]
+ deps += [
+ "//components/viz/test:test_support",
+ "//gpu/command_buffer/client",
+ "//third_party/fuchsia-sdk/sdk:sys_cpp",
+ "//third_party/fuchsia-sdk/sdk:sysmem",
+ ]
}
# libvpx for running vpx test on chromecast doesn't support high bit depth.
diff --git a/chromium/media/filters/android/media_codec_audio_decoder.cc b/chromium/media/filters/android/media_codec_audio_decoder.cc
index 9040ff98048..d1a3414f572 100644
--- a/chromium/media/filters/android/media_codec_audio_decoder.cc
+++ b/chromium/media/filters/android/media_codec_audio_decoder.cc
@@ -34,8 +34,7 @@ MediaCodecAudioDecoder::MediaCodecAudioDecoder(
sample_rate_(0),
media_crypto_context_(nullptr),
cdm_registration_id_(0),
- pool_(new AudioBufferMemoryPool()),
- weak_factory_(this) {
+ pool_(new AudioBufferMemoryPool()) {
DVLOG(1) << __func__;
}
diff --git a/chromium/media/filters/android/media_codec_audio_decoder.h b/chromium/media/filters/android/media_codec_audio_decoder.h
index c735f5f3869..fffdd41bab7 100644
--- a/chromium/media/filters/android/media_codec_audio_decoder.h
+++ b/chromium/media/filters/android/media_codec_audio_decoder.h
@@ -210,7 +210,7 @@ class MEDIA_EXPORT MediaCodecAudioDecoder : public AudioDecoder,
// an encrypted stream.
JavaObjectPtr media_crypto_;
- base::WeakPtrFactory<MediaCodecAudioDecoder> weak_factory_;
+ base::WeakPtrFactory<MediaCodecAudioDecoder> weak_factory_{this};
DISALLOW_COPY_AND_ASSIGN(MediaCodecAudioDecoder);
};
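
Here and in video_frame_extractor below, weak_factory_(this) moves out of the constructor initializer list into an in-class initializer. A minimal sketch of the pattern, with a made-up class name:

#include "base/memory/weak_ptr.h"

class Widget {
 public:
  Widget() = default;  // No weak_factory_(this) in the init list any more.

  base::WeakPtr<Widget> GetWeakPtr() { return weak_factory_.GetWeakPtr(); }

 private:
  // Kept as the last member so outstanding weak pointers are invalidated
  // before the rest of the object is destroyed.
  base::WeakPtrFactory<Widget> weak_factory_{this};
};
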
diff --git a/chromium/media/filters/android/video_frame_extractor.cc b/chromium/media/filters/android/video_frame_extractor.cc
index dc1c299fac1..e50633bdc61 100644
--- a/chromium/media/filters/android/video_frame_extractor.cc
+++ b/chromium/media/filters/android/video_frame_extractor.cc
@@ -29,7 +29,7 @@
namespace media {
VideoFrameExtractor::VideoFrameExtractor(DataSource* data_source)
- : data_source_(data_source), video_stream_index_(-1), weak_factory_(this) {}
+ : data_source_(data_source), video_stream_index_(-1) {}
VideoFrameExtractor::~VideoFrameExtractor() = default;
diff --git a/chromium/media/filters/android/video_frame_extractor.h b/chromium/media/filters/android/video_frame_extractor.h
index 9117d35acd6..53257da31af 100644
--- a/chromium/media/filters/android/video_frame_extractor.h
+++ b/chromium/media/filters/android/video_frame_extractor.h
@@ -75,7 +75,7 @@ class MEDIA_EXPORT VideoFrameExtractor {
VideoFrameCallback video_frame_callback_;
- base::WeakPtrFactory<VideoFrameExtractor> weak_factory_;
+ base::WeakPtrFactory<VideoFrameExtractor> weak_factory_{this};
DISALLOW_COPY_AND_ASSIGN(VideoFrameExtractor);
};
diff --git a/chromium/media/filters/android/video_frame_extractor_unittest.cc b/chromium/media/filters/android/video_frame_extractor_unittest.cc
index a0a3e72d44a..f659425c009 100644
--- a/chromium/media/filters/android/video_frame_extractor_unittest.cc
+++ b/chromium/media/filters/android/video_frame_extractor_unittest.cc
@@ -10,7 +10,7 @@
#include "base/files/file_util.h"
#include "base/files/scoped_temp_dir.h"
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/test/test_mock_time_task_runner.h"
#include "media/base/test_data_util.h"
#include "media/filters/file_data_source.h"
@@ -65,7 +65,7 @@ class VideoFrameExtractorTest : public testing::Test {
const base::FilePath& temp_dir() const { return temp_dir_.GetPath(); }
private:
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
base::ScopedTempDir temp_dir_;
std::unique_ptr<FileDataSource> data_source_;
std::unique_ptr<VideoFrameExtractor> extractor_;
diff --git a/chromium/media/filters/aom_video_decoder_unittest.cc b/chromium/media/filters/aom_video_decoder_unittest.cc
index 27498ac641f..e1a929243e7 100644
--- a/chromium/media/filters/aom_video_decoder_unittest.cc
+++ b/chromium/media/filters/aom_video_decoder_unittest.cc
@@ -8,7 +8,7 @@
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "build/build_config.h"
#include "media/base/decoder_buffer.h"
#include "media/base/limits.h"
@@ -164,7 +164,7 @@ class AomVideoDecoderTest : public testing::Test {
testing::StrictMock<MockMediaLog> media_log_;
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
std::unique_ptr<AomVideoDecoder> decoder_;
scoped_refptr<DecoderBuffer> i_frame_buffer_;
diff --git a/chromium/media/filters/audio_decoder_stream_unittest.cc b/chromium/media/filters/audio_decoder_stream_unittest.cc
index d64bb1d083d..98b9181a5f1 100644
--- a/chromium/media/filters/audio_decoder_stream_unittest.cc
+++ b/chromium/media/filters/audio_decoder_stream_unittest.cc
@@ -9,7 +9,7 @@
#include "base/bind_helpers.h"
#include "base/run_loop.h"
#include "base/test/gmock_callback_support.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/sequenced_task_runner_handle.h"
#include "media/base/media_util.h"
#include "media/base/mock_filters.h"
@@ -113,7 +113,7 @@ class AudioDecoderStreamTest : public testing::Test {
std::move(closure).Run();
}
- base::test::ScopedTaskEnvironment task_environment_;
+ base::test::TaskEnvironment task_environment_;
NullMediaLog media_log_;
testing::NiceMock<MockDemuxerStream> demuxer_stream_{DemuxerStream::AUDIO};
AudioDecoderStream audio_decoder_stream_;
diff --git a/chromium/media/filters/audio_decoder_unittest.cc b/chromium/media/filters/audio_decoder_unittest.cc
index b5435e7abf3..188edd4d8de 100644
--- a/chromium/media/filters/audio_decoder_unittest.cc
+++ b/chromium/media/filters/audio_decoder_unittest.cc
@@ -16,7 +16,7 @@
#include "base/run_loop.h"
#include "base/strings/stringprintf.h"
#include "base/sys_byteorder.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/platform_thread.h"
#include "build/build_config.h"
#include "media/base/audio_buffer.h"
@@ -134,12 +134,12 @@ class AudioDecoderTest
switch (decoder_type_) {
case FFMPEG:
decoder_.reset(new FFmpegAudioDecoder(
- scoped_task_environment_.GetMainThreadTaskRunner(), &media_log_));
+ task_environment_.GetMainThreadTaskRunner(), &media_log_));
break;
#if defined(OS_ANDROID)
case MEDIA_CODEC:
decoder_.reset(new MediaCodecAudioDecoder(
- scoped_task_environment_.GetMainThreadTaskRunner()));
+ task_environment_.GetMainThreadTaskRunner()));
break;
#endif
}
@@ -393,7 +393,7 @@ class AudioDecoderTest
// that the decoder can be reinitialized with different parameters.
TestParams params_;
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
NullMediaLog media_log_;
scoped_refptr<DecoderBuffer> data_;
diff --git a/chromium/media/filters/chunk_demuxer_unittest.cc b/chromium/media/filters/chunk_demuxer_unittest.cc
index 9f36319c138..a52051fe19f 100644
--- a/chromium/media/filters/chunk_demuxer_unittest.cc
+++ b/chromium/media/filters/chunk_demuxer_unittest.cc
@@ -20,7 +20,7 @@
#include "base/strings/string_split.h"
#include "base/strings/string_util.h"
#include "base/synchronization/waitable_event.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/base/audio_decoder_config.h"
#include "media/base/decoder_buffer.h"
#include "media/base/decrypt_config.h"
@@ -1247,7 +1247,7 @@ class ChunkDemuxerTest : public ::testing::Test {
return true;
}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
StrictMock<MockMediaLog> media_log_;
@@ -4415,7 +4415,7 @@ void QuitLoop(base::Closure quit_closure,
void DisableAndEnableDemuxerTracks(
ChunkDemuxer* demuxer,
- base::test::ScopedTaskEnvironment* scoped_task_environment) {
+ base::test::TaskEnvironment* task_environment) {
base::WaitableEvent event(base::WaitableEvent::ResetPolicy::AUTOMATIC,
base::WaitableEvent::InitialState::NOT_SIGNALED);
std::vector<MediaTrack::Id> audio_tracks;
@@ -4447,7 +4447,7 @@ void DisableAndEnableDemuxerTracks(
base::BindOnce(QuitLoop, base::Passed(enable_audio.QuitClosure())));
enable_audio.Run();
- scoped_task_environment->RunUntilIdle();
+ task_environment->RunUntilIdle();
}
}
@@ -4461,16 +4461,16 @@ TEST_F(ChunkDemuxerTest, StreamStatusNotifications) {
EXPECT_NE(nullptr, video_stream);
// Verify stream status changes without pending read.
- DisableAndEnableDemuxerTracks(demuxer_.get(), &scoped_task_environment_);
+ DisableAndEnableDemuxerTracks(demuxer_.get(), &task_environment_);
// Verify stream status changes with pending read.
bool read_done = false;
audio_stream->Read(base::Bind(&OnReadDone_EOSExpected, &read_done));
- DisableAndEnableDemuxerTracks(demuxer_.get(), &scoped_task_environment_);
+ DisableAndEnableDemuxerTracks(demuxer_.get(), &task_environment_);
EXPECT_TRUE(read_done);
read_done = false;
video_stream->Read(base::Bind(&OnReadDone_EOSExpected, &read_done));
- DisableAndEnableDemuxerTracks(demuxer_.get(), &scoped_task_environment_);
+ DisableAndEnableDemuxerTracks(demuxer_.get(), &task_environment_);
EXPECT_TRUE(read_done);
}
diff --git a/chromium/media/filters/dav1d_video_decoder_unittest.cc b/chromium/media/filters/dav1d_video_decoder_unittest.cc
index 27eb02a449b..ef342c7e52a 100644
--- a/chromium/media/filters/dav1d_video_decoder_unittest.cc
+++ b/chromium/media/filters/dav1d_video_decoder_unittest.cc
@@ -10,7 +10,7 @@
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "build/build_config.h"
#include "media/base/decoder_buffer.h"
#include "media/base/limits.h"
@@ -172,7 +172,7 @@ class Dav1dVideoDecoderTest : public testing::Test {
testing::StrictMock<MockMediaLog> media_log_;
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
std::unique_ptr<Dav1dVideoDecoder> decoder_;
scoped_refptr<DecoderBuffer> i_frame_buffer_;
diff --git a/chromium/media/filters/decoder_selector_unittest.cc b/chromium/media/filters/decoder_selector_unittest.cc
index a0ae2692959..eefa460e54d 100644
--- a/chromium/media/filters/decoder_selector_unittest.cc
+++ b/chromium/media/filters/decoder_selector_unittest.cc
@@ -10,7 +10,7 @@
#include "base/macros.h"
#include "base/memory/scoped_refptr.h"
#include "base/test/gmock_callback_support.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "build/build_config.h"
#include "media/base/demuxer_stream.h"
#include "media/base/media_util.h"
@@ -201,7 +201,7 @@ class DecoderSelectorTest : public ::testing::Test {
if (use_decrypting_decoder_) {
decoders.push_back(
std::make_unique<typename TypeParam::DecryptingDecoder>(
- scoped_task_environment_.GetMainThreadTaskRunner(), &media_log_));
+ task_environment_.GetMainThreadTaskRunner(), &media_log_));
}
#endif // !defined(OS_ANDROID)
@@ -246,7 +246,7 @@ class DecoderSelectorTest : public ::testing::Test {
void CreateDecoderSelector() {
decoder_selector_ =
std::make_unique<DecoderSelector<TypeParam::kStreamType>>(
- scoped_task_environment_.GetMainThreadTaskRunner(),
+ task_environment_.GetMainThreadTaskRunner(),
base::BindRepeating(&Self::CreateDecoders, base::Unretained(this)),
&media_log_);
decoder_selector_->Initialize(
@@ -289,9 +289,9 @@ class DecoderSelectorTest : public ::testing::Test {
RunUntilIdle();
}
- void RunUntilIdle() { scoped_task_environment_.RunUntilIdle(); }
+ void RunUntilIdle() { task_environment_.RunUntilIdle(); }
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
NullMediaLog media_log_;
std::unique_ptr<StreamTraits> traits_;
diff --git a/chromium/media/filters/decrypting_audio_decoder_unittest.cc b/chromium/media/filters/decrypting_audio_decoder_unittest.cc
index e1d52e688b2..010d8277300 100644
--- a/chromium/media/filters/decrypting_audio_decoder_unittest.cc
+++ b/chromium/media/filters/decrypting_audio_decoder_unittest.cc
@@ -12,7 +12,7 @@
#include "base/run_loop.h"
#include "base/stl_util.h"
#include "base/test/gmock_callback_support.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/base/audio_buffer.h"
#include "media/base/decoder_buffer.h"
#include "media/base/decrypt_config.h"
@@ -57,7 +57,7 @@ class DecryptingAudioDecoderTest : public testing::Test {
public:
DecryptingAudioDecoderTest()
: decoder_(new DecryptingAudioDecoder(
- scoped_task_environment_.GetMainThreadTaskRunner(),
+ task_environment_.GetMainThreadTaskRunner(),
&media_log_)),
cdm_context_(new StrictMock<MockCdmContext>()),
decryptor_(new StrictMock<MockDecryptor>()),
@@ -244,7 +244,7 @@ class DecryptingAudioDecoderTest : public testing::Test {
MOCK_METHOD1(OnWaiting, void(WaitingReason));
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
NullMediaLog media_log_;
std::unique_ptr<DecryptingAudioDecoder> decoder_;
std::unique_ptr<StrictMock<MockCdmContext>> cdm_context_;
diff --git a/chromium/media/filters/decrypting_demuxer_stream.cc b/chromium/media/filters/decrypting_demuxer_stream.cc
index 54eea044b41..b27967cc960 100644
--- a/chromium/media/filters/decrypting_demuxer_stream.cc
+++ b/chromium/media/filters/decrypting_demuxer_stream.cc
@@ -31,11 +31,7 @@ DecryptingDemuxerStream::DecryptingDemuxerStream(
const WaitingCB& waiting_cb)
: task_runner_(task_runner),
media_log_(media_log),
- state_(kUninitialized),
- waiting_cb_(waiting_cb),
- demuxer_stream_(NULL),
- decryptor_(NULL),
- key_added_while_decrypt_pending_(false) {}
+ waiting_cb_(waiting_cb) {}
std::string DecryptingDemuxerStream::GetDisplayName() const {
return "DecryptingDemuxerStream";
@@ -84,8 +80,8 @@ void DecryptingDemuxerStream::Read(const ReadCB& read_cb) {
read_cb_ = BindToCurrentLoop(read_cb);
state_ = kPendingDemuxerRead;
- demuxer_stream_->Read(
- base::Bind(&DecryptingDemuxerStream::DecryptBuffer, weak_this_));
+ demuxer_stream_->Read(base::Bind(
+ &DecryptingDemuxerStream::OnBufferReadFromDemuxerStream, weak_this_));
}
bool DecryptingDemuxerStream::IsReadPending() const {
@@ -104,8 +100,8 @@ void DecryptingDemuxerStream::Reset(const base::Closure& closure) {
// Reset() cannot complete if the read callback is still pending.
// Defer the resetting process in this case. The |reset_cb_| will be fired
- // after the read callback is fired - see DoDecryptBuffer() and
- // DoDeliverBuffer().
+ // after the read callback is fired - see OnBufferReadFromDemuxerStream() and
+ // OnBufferDecrypted().
if (state_ == kPendingDemuxerRead || state_ == kPendingDecrypt) {
DCHECK(read_cb_);
return;
@@ -114,8 +110,8 @@ void DecryptingDemuxerStream::Reset(const base::Closure& closure) {
if (state_ == kWaitingForKey) {
CompleteWaitingForDecryptionKey();
DCHECK(read_cb_);
- pending_buffer_to_decrypt_ = NULL;
- std::move(read_cb_).Run(kAborted, NULL);
+ pending_buffer_to_decrypt_ = nullptr;
+ std::move(read_cb_).Run(kAborted, nullptr);
}
DCHECK(!read_cb_);
@@ -166,30 +162,30 @@ DecryptingDemuxerStream::~DecryptingDemuxerStream() {
if (decryptor_) {
decryptor_->CancelDecrypt(GetDecryptorStreamType());
- decryptor_ = NULL;
+ decryptor_ = nullptr;
}
if (init_cb_)
std::move(init_cb_).Run(PIPELINE_ERROR_ABORT);
if (read_cb_)
- std::move(read_cb_).Run(kAborted, NULL);
+ std::move(read_cb_).Run(kAborted, nullptr);
if (reset_cb_)
std::move(reset_cb_).Run();
- pending_buffer_to_decrypt_ = NULL;
+ pending_buffer_to_decrypt_ = nullptr;
}
-void DecryptingDemuxerStream::DecryptBuffer(
+void DecryptingDemuxerStream::OnBufferReadFromDemuxerStream(
DemuxerStream::Status status,
scoped_refptr<DecoderBuffer> buffer) {
DVLOG(3) << __func__ << ": status = " << status;
DCHECK(task_runner_->BelongsToCurrentThread());
DCHECK_EQ(state_, kPendingDemuxerRead) << state_;
DCHECK(read_cb_);
- DCHECK_EQ(buffer.get() != NULL, status == kOk) << status;
+ DCHECK_EQ(buffer.get() != nullptr, status == kOk) << status;
// Even when |reset_cb_|, we need to pass |kConfigChanged| back to
// the caller so that the downstream decoder can be properly reinitialized.
if (status == kConfigChanged) {
- DVLOG(2) << "DoDecryptBuffer() - kConfigChanged.";
+ DVLOG(2) << __func__ << ": config change";
DCHECK_EQ(demuxer_stream_->type() == AUDIO, audio_config_.IsValidConfig());
DCHECK_EQ(demuxer_stream_->type() == VIDEO, video_config_.IsValidConfig());
@@ -198,14 +194,14 @@ void DecryptingDemuxerStream::DecryptBuffer(
InitializeDecoderConfig();
state_ = kIdle;
- std::move(read_cb_).Run(kConfigChanged, NULL);
+ std::move(read_cb_).Run(kConfigChanged, nullptr);
if (reset_cb_)
DoReset();
return;
}
if (reset_cb_) {
- std::move(read_cb_).Run(kAborted, NULL);
+ std::move(read_cb_).Run(kAborted, nullptr);
DoReset();
return;
}
@@ -223,14 +219,14 @@ void DecryptingDemuxerStream::DecryptBuffer(
DCHECK_EQ(kOk, status);
if (buffer->end_of_stream()) {
- DVLOG(2) << "DoDecryptBuffer() - EOS buffer.";
+ DVLOG(2) << __func__ << ": EOS buffer";
state_ = kIdle;
std::move(read_cb_).Run(kOk, std::move(buffer));
return;
}
if (!buffer->decrypt_config()) {
- DVLOG(2) << "DoDecryptBuffer() - clear buffer.";
+ DVLOG(2) << __func__ << ": clear buffer";
state_ = kIdle;
std::move(read_cb_).Run(kOk, std::move(buffer));
return;
@@ -252,10 +248,10 @@ void DecryptingDemuxerStream::DecryptPendingBuffer() {
decryptor_->Decrypt(
GetDecryptorStreamType(), pending_buffer_to_decrypt_,
BindToCurrentLoop(
- base::Bind(&DecryptingDemuxerStream::DeliverBuffer, weak_this_)));
+ base::Bind(&DecryptingDemuxerStream::OnBufferDecrypted, weak_this_)));
}
-void DecryptingDemuxerStream::DeliverBuffer(
+void DecryptingDemuxerStream::OnBufferDecrypted(
Decryptor::Status status,
scoped_refptr<DecoderBuffer> decrypted_buffer) {
DVLOG(3) << __func__ << " - status: " << status;
@@ -269,19 +265,19 @@ void DecryptingDemuxerStream::DeliverBuffer(
key_added_while_decrypt_pending_ = false;
if (reset_cb_) {
- pending_buffer_to_decrypt_ = NULL;
- std::move(read_cb_).Run(kAborted, NULL);
+ pending_buffer_to_decrypt_ = nullptr;
+ std::move(read_cb_).Run(kAborted, nullptr);
DoReset();
return;
}
- DCHECK_EQ(status == Decryptor::kSuccess, decrypted_buffer.get() != NULL);
+ DCHECK_EQ(status == Decryptor::kSuccess, decrypted_buffer.get() != nullptr);
if (status == Decryptor::kError || status == Decryptor::kNeedMoreData) {
DVLOG(2) << __func__ << ": Error with status " << status;
MEDIA_LOG(ERROR, media_log_)
<< GetDisplayName() << ": decrypt error " << status;
- pending_buffer_to_decrypt_ = NULL;
+ pending_buffer_to_decrypt_ = nullptr;
state_ = kIdle;
std::move(read_cb_).Run(kError, nullptr);
return;
@@ -319,7 +315,7 @@ void DecryptingDemuxerStream::DeliverBuffer(
if (pending_buffer_to_decrypt_->is_key_frame())
decrypted_buffer->set_is_key_frame(true);
- pending_buffer_to_decrypt_ = NULL;
+ pending_buffer_to_decrypt_ = nullptr;
state_ = kIdle;
std::move(read_cb_).Run(kOk, std::move(decrypted_buffer));
}
@@ -332,13 +328,15 @@ void DecryptingDemuxerStream::OnKeyAdded() {
return;
}
- if (state_ == kWaitingForKey) {
- CompleteWaitingForDecryptionKey();
- MEDIA_LOG(INFO, media_log_)
- << GetDisplayName() << ": key was added, resuming decrypt";
- state_ = kPendingDecrypt;
- DecryptPendingBuffer();
- }
+ // Nothing to do.
+ if (state_ != kWaitingForKey)
+ return;
+
+ CompleteWaitingForDecryptionKey();
+ MEDIA_LOG(INFO, media_log_)
+ << GetDisplayName() << ": key was added, resuming decrypt";
+ state_ = kPendingDecrypt;
+ DecryptPendingBuffer();
}
void DecryptingDemuxerStream::DoReset() {
@@ -347,7 +345,6 @@ void DecryptingDemuxerStream::DoReset() {
DCHECK(!read_cb_);
state_ = kIdle;
-
std::move(reset_cb_).Run();
}
diff --git a/chromium/media/filters/decrypting_demuxer_stream.h b/chromium/media/filters/decrypting_demuxer_stream.h
index 3a13a8874b3..085995bd842 100644
--- a/chromium/media/filters/decrypting_demuxer_stream.h
+++ b/chromium/media/filters/decrypting_demuxer_stream.h
@@ -116,14 +116,14 @@ class MEDIA_EXPORT DecryptingDemuxerStream : public DemuxerStream {
};
// Callback for DemuxerStream::Read().
- void DecryptBuffer(DemuxerStream::Status status,
- scoped_refptr<DecoderBuffer> buffer);
+ void OnBufferReadFromDemuxerStream(DemuxerStream::Status status,
+ scoped_refptr<DecoderBuffer> buffer);
void DecryptPendingBuffer();
// Callback for Decryptor::Decrypt().
- void DeliverBuffer(Decryptor::Status status,
- scoped_refptr<DecoderBuffer> decrypted_buffer);
+ void OnBufferDecrypted(Decryptor::Status status,
+ scoped_refptr<DecoderBuffer> decrypted_buffer);
// Callback for the |decryptor_| to notify this object that a new key has been
// added.
@@ -144,23 +144,22 @@ class MEDIA_EXPORT DecryptingDemuxerStream : public DemuxerStream {
void CompleteWaitingForDecryptionKey();
scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
+ MediaLog* const media_log_;
+ WaitingCB waiting_cb_;
- MediaLog* media_log_;
-
- State state_;
+ State state_ = kUninitialized;
PipelineStatusCB init_cb_;
ReadCB read_cb_;
base::Closure reset_cb_;
- WaitingCB waiting_cb_;
// Pointer to the input demuxer stream that will feed us encrypted buffers.
- DemuxerStream* demuxer_stream_;
+ DemuxerStream* demuxer_stream_ = nullptr;
AudioDecoderConfig audio_config_;
VideoDecoderConfig video_config_;
- Decryptor* decryptor_;
+ Decryptor* decryptor_ = nullptr;
// The buffer returned by the demuxer that needs to be decrypted.
scoped_refptr<media::DecoderBuffer> pending_buffer_to_decrypt_;
@@ -169,7 +168,7 @@ class MEDIA_EXPORT DecryptingDemuxerStream : public DemuxerStream {
// (in other words, this variable can only be set in state kPendingDecrypt).
// If this variable is true and kNoKey is returned then we need to try
// decrypting again in case the newly added key is the correct decryption key.
- bool key_added_while_decrypt_pending_;
+ bool key_added_while_decrypt_pending_ = false;
base::WeakPtr<DecryptingDemuxerStream> weak_this_;
base::WeakPtrFactory<DecryptingDemuxerStream> weak_factory_{this};
diff --git a/chromium/media/filters/decrypting_demuxer_stream_unittest.cc b/chromium/media/filters/decrypting_demuxer_stream_unittest.cc
index ad1468ea58b..148786fdd31 100644
--- a/chromium/media/filters/decrypting_demuxer_stream_unittest.cc
+++ b/chromium/media/filters/decrypting_demuxer_stream_unittest.cc
@@ -12,7 +12,7 @@
#include "base/run_loop.h"
#include "base/stl_util.h"
#include "base/test/gmock_callback_support.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/base/decoder_buffer.h"
#include "media/base/decrypt_config.h"
#include "media/base/media_util.h"
@@ -69,7 +69,7 @@ class DecryptingDemuxerStreamTest : public testing::Test {
public:
DecryptingDemuxerStreamTest()
: demuxer_stream_(new DecryptingDemuxerStream(
- scoped_task_environment_.GetMainThreadTaskRunner(),
+ task_environment_.GetMainThreadTaskRunner(),
&media_log_,
base::Bind(&DecryptingDemuxerStreamTest::OnWaiting,
base::Unretained(this)))),
@@ -264,7 +264,7 @@ class DecryptingDemuxerStreamTest : public testing::Test {
void(DemuxerStream::Status, scoped_refptr<DecoderBuffer>));
MOCK_METHOD1(OnWaiting, void(WaitingReason));
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
StrictMock<MockMediaLog> media_log_;
std::unique_ptr<DecryptingDemuxerStream> demuxer_stream_;
std::unique_ptr<StrictMock<MockCdmContext>> cdm_context_;
diff --git a/chromium/media/filters/decrypting_media_resource_unittest.cc b/chromium/media/filters/decrypting_media_resource_unittest.cc
index 78231155546..5d20ddcb4be 100644
--- a/chromium/media/filters/decrypting_media_resource_unittest.cc
+++ b/chromium/media/filters/decrypting_media_resource_unittest.cc
@@ -10,7 +10,7 @@
#include "base/bind_helpers.h"
#include "base/test/gmock_callback_support.h"
#include "base/test/mock_callback.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/base/decoder_buffer.h"
#include "media/base/decrypt_config.h"
#include "media/base/decryptor.h"
@@ -69,7 +69,7 @@ class DecryptingMediaResourceTest : public testing::Test {
decrypting_media_resource_ = std::make_unique<DecryptingMediaResource>(
&demuxer_, &cdm_context_, &null_media_log_,
- scoped_task_environment_.GetMainThreadTaskRunner());
+ task_environment_.GetMainThreadTaskRunner());
}
~DecryptingMediaResourceTest() {
@@ -108,7 +108,7 @@ class DecryptingMediaResourceTest : public testing::Test {
void(DemuxerStream::Status, scoped_refptr<DecoderBuffer>));
protected:
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
base::MockCallback<DecryptingMediaResource::InitCB>
decrypting_media_resource_init_cb_;
base::MockCallback<WaitingCB> waiting_cb_;
@@ -132,7 +132,7 @@ TEST_F(DecryptingMediaResourceTest, ClearStreams) {
decrypting_media_resource_->Initialize(
decrypting_media_resource_init_cb_.Get(), waiting_cb_.Get());
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_EQ(
decrypting_media_resource_->DecryptingDemuxerStreamCountForTesting(), 2);
@@ -147,7 +147,7 @@ TEST_F(DecryptingMediaResourceTest, EncryptedStreams) {
decrypting_media_resource_->Initialize(
decrypting_media_resource_init_cb_.Get(), waiting_cb_.Get());
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
// When using an AesDecryptor we preemptively wrap our streams with a
// DecryptingDemuxerStream, regardless of encryption. With this in mind, we
@@ -168,7 +168,7 @@ TEST_F(DecryptingMediaResourceTest, MixedStreams) {
decrypting_media_resource_->Initialize(
decrypting_media_resource_init_cb_.Get(), waiting_cb_.Get());
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_EQ(
decrypting_media_resource_->DecryptingDemuxerStreamCountForTesting(), 2);
@@ -190,7 +190,7 @@ TEST_F(DecryptingMediaResourceTest,
decrypting_media_resource_->Initialize(
decrypting_media_resource_init_cb_.Get(), waiting_cb_.Get());
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
TEST_F(DecryptingMediaResourceTest,
@@ -205,7 +205,7 @@ TEST_F(DecryptingMediaResourceTest,
decrypting_media_resource_->Initialize(
decrypting_media_resource_init_cb_.Get(), waiting_cb_.Get());
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
TEST_F(DecryptingMediaResourceTest, WaitingCallback) {
@@ -223,7 +223,7 @@ TEST_F(DecryptingMediaResourceTest, WaitingCallback) {
decrypting_media_resource_init_cb_.Get(), waiting_cb_.Get());
decrypting_media_resource_->GetAllStreams().front()->Read(base::BindRepeating(
&DecryptingMediaResourceTest::BufferReady, base::Unretained(this)));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
} // namespace media
diff --git a/chromium/media/filters/decrypting_video_decoder_unittest.cc b/chromium/media/filters/decrypting_video_decoder_unittest.cc
index bebba2a41c3..f32101a8619 100644
--- a/chromium/media/filters/decrypting_video_decoder_unittest.cc
+++ b/chromium/media/filters/decrypting_video_decoder_unittest.cc
@@ -12,7 +12,7 @@
#include "base/run_loop.h"
#include "base/stl_util.h"
#include "base/test/gmock_callback_support.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/base/decoder_buffer.h"
#include "media/base/decrypt_config.h"
#include "media/base/media_util.h"
@@ -51,7 +51,7 @@ class DecryptingVideoDecoderTest : public testing::Test {
public:
DecryptingVideoDecoderTest()
: decoder_(new DecryptingVideoDecoder(
- scoped_task_environment_.GetMainThreadTaskRunner(),
+ task_environment_.GetMainThreadTaskRunner(),
&media_log_)),
cdm_context_(new StrictMock<MockCdmContext>()),
decryptor_(new StrictMock<MockDecryptor>()),
@@ -221,7 +221,7 @@ class DecryptingVideoDecoderTest : public testing::Test {
MOCK_METHOD1(OnWaiting, void(WaitingReason));
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
NullMediaLog media_log_;
std::unique_ptr<DecryptingVideoDecoder> decoder_;
std::unique_ptr<StrictMock<MockCdmContext>> cdm_context_;
diff --git a/chromium/media/filters/demuxer_perftest.cc b/chromium/media/filters/demuxer_perftest.cc
index e5eb6df7da0..01ca651fa1e 100644
--- a/chromium/media/filters/demuxer_perftest.cc
+++ b/chromium/media/filters/demuxer_perftest.cc
@@ -11,7 +11,7 @@
#include "base/macros.h"
#include "base/run_loop.h"
#include "base/strings/string_number_conversions.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/thread_task_runner_handle.h"
#include "base/time/time.h"
#include "build/build_config.h"
@@ -176,7 +176,7 @@ static void RunDemuxerBenchmark(const std::string& filename) {
NullMediaLog media_log_;
for (int i = 0; i < kBenchmarkIterations; ++i) {
// Setup.
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
DemuxerHostImpl demuxer_host;
FileDataSource data_source;
ASSERT_TRUE(data_source.Initialize(file_path));
diff --git a/chromium/media/filters/fake_video_decoder_unittest.cc b/chromium/media/filters/fake_video_decoder_unittest.cc
index a1a729d2e03..df8435f4c06 100644
--- a/chromium/media/filters/fake_video_decoder_unittest.cc
+++ b/chromium/media/filters/fake_video_decoder_unittest.cc
@@ -3,11 +3,12 @@
// found in the LICENSE file.
#include "media/filters/fake_video_decoder.h"
+
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/macros.h"
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/base/decoder_buffer.h"
#include "media/base/mock_filters.h"
#include "media/base/test_helpers.h"
@@ -221,7 +222,7 @@ class FakeVideoDecoderTest
DCHECK(!is_reset_pending_);
}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
VideoDecoderConfig current_config_;
std::unique_ptr<FakeVideoDecoder> decoder_;
diff --git a/chromium/media/filters/ffmpeg_demuxer.cc b/chromium/media/filters/ffmpeg_demuxer.cc
index 7033eb4dd68..049b0185f56 100644
--- a/chromium/media/filters/ffmpeg_demuxer.cc
+++ b/chromium/media/filters/ffmpeg_demuxer.cc
@@ -913,8 +913,9 @@ FFmpegDemuxer::FFmpegDemuxer(
// FFmpeg has no asynchronous API, so we use base::WaitableEvents inside
// the BlockingUrlProtocol to handle hops to the render thread for network
// reads and seeks.
- blocking_task_runner_(base::CreateSequencedTaskRunnerWithTraits(
- {base::MayBlock(), base::TaskPriority::USER_BLOCKING})),
+ blocking_task_runner_(
+ base::CreateSequencedTaskRunner({base::ThreadPool(), base::MayBlock(),
+ base::TaskPriority::USER_BLOCKING})),
stopped_(false),
pending_read_(false),
data_source_(data_source),
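
As a quick illustration of the task-posting change above, a hedged sketch of the new call shape: the thread-pool destination is now expressed with the base::ThreadPool() trait passed to base::CreateSequencedTaskRunner(). The helper function and the posted work are illustrative only, assuming the Chromium 78-era base/task API.

#include "base/bind.h"
#include "base/location.h"
#include "base/sequenced_task_runner.h"
#include "base/task/post_task.h"
#include "base/task/task_traits.h"

// Illustrative helper: create a sequenced runner on the thread pool that may
// block, then post work to it.
void PostBlockingWorkToThreadPool() {
  scoped_refptr<base::SequencedTaskRunner> runner =
      base::CreateSequencedTaskRunner({base::ThreadPool(), base::MayBlock(),
                                       base::TaskPriority::USER_BLOCKING});
  runner->PostTask(FROM_HERE, base::BindOnce([] {
                     // Blocking work (e.g. file or network reads) would run here.
                   }));
}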
diff --git a/chromium/media/filters/ffmpeg_demuxer_unittest.cc b/chromium/media/filters/ffmpeg_demuxer_unittest.cc
index b0033da5208..edfedb4e8c4 100644
--- a/chromium/media/filters/ffmpeg_demuxer_unittest.cc
+++ b/chromium/media/filters/ffmpeg_demuxer_unittest.cc
@@ -17,7 +17,7 @@
#include "base/single_thread_task_runner.h"
#include "base/stl_util.h"
#include "base/test/mock_callback.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/thread.h"
#include "base/threading/thread_task_runner_handle.h"
#include "build/build_config.h"
@@ -117,7 +117,7 @@ class FFmpegDemuxerTest : public testing::Test {
if (demuxer_)
demuxer_->Stop();
demuxer_.reset();
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
data_source_.reset();
}
@@ -268,7 +268,7 @@ class FFmpegDemuxerTest : public testing::Test {
// Fixture members.
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
// TODO(wolenetz): Consider expanding MediaLog verification coverage here
// using StrictMock<MockMediaLog> for all FFmpegDemuxerTests. See
@@ -487,12 +487,12 @@ TEST_F(FFmpegDemuxerTest, AbortPendingReads) {
audio->Read(NewReadCB(FROM_HERE, 29, 0, true, DemuxerStream::kAborted));
demuxer_->AbortPendingReads();
base::RunLoop().Run();
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
// Additional reads should also be aborted (until a Seek()).
audio->Read(NewReadCB(FROM_HERE, 29, 0, true, DemuxerStream::kAborted));
base::RunLoop().Run();
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
// Ensure blocking thread has completed outstanding work.
demuxer_->Stop();
@@ -516,7 +516,7 @@ TEST_F(FFmpegDemuxerTest, Read_Audio) {
audio->Read(NewReadCB(FROM_HERE, 27, 3000, true));
base::RunLoop().Run();
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_EQ(166866, demuxer_->GetMemoryUsage());
}
@@ -534,7 +534,7 @@ TEST_F(FFmpegDemuxerTest, Read_Video) {
video->Read(NewReadCB(FROM_HERE, 1057, 33000, false));
base::RunLoop().Run();
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_EQ(148778, demuxer_->GetMemoryUsage());
}
@@ -1025,7 +1025,8 @@ TEST_F(FFmpegDemuxerTest, Seek) {
base::RunLoop().Run();
}
-TEST_F(FFmpegDemuxerTest, CancelledSeek) {
+// TODO(crbug.com/996040): Flaky.
+TEST_F(FFmpegDemuxerTest, DISABLED_CancelledSeek) {
CreateDemuxer("bear-320x240.webm");
InitializeDemuxer();
@@ -1066,7 +1067,7 @@ TEST_F(FFmpegDemuxerTest, Stop) {
// Attempt the read...
audio->Read(callback.Get());
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
;
// Don't let the test call Stop() again.
@@ -1696,7 +1697,7 @@ void QuitLoop(base::Closure quit_closure,
void DisableAndEnableDemuxerTracks(
FFmpegDemuxer* demuxer,
- base::test::ScopedTaskEnvironment* scoped_task_environment) {
+ base::test::TaskEnvironment* task_environment) {
base::WaitableEvent event(base::WaitableEvent::ResetPolicy::AUTOMATIC,
base::WaitableEvent::InitialState::NOT_SIGNALED);
std::vector<MediaTrack::Id> audio_tracks;
@@ -1728,7 +1729,7 @@ void DisableAndEnableDemuxerTracks(
base::BindOnce(QuitLoop, base::Passed(enable_audio.QuitClosure())));
enable_audio.Run();
- scoped_task_environment->RunUntilIdle();
+ task_environment->RunUntilIdle();
}
void OnReadDoneExpectEos(DemuxerStream::Status status,
@@ -1749,7 +1750,7 @@ TEST_F(FFmpegDemuxerTest, StreamStatusNotifications) {
EXPECT_NE(nullptr, video_stream);
// Verify stream status notifications delivery without pending read first.
- DisableAndEnableDemuxerTracks(demuxer_.get(), &scoped_task_environment_);
+ DisableAndEnableDemuxerTracks(demuxer_.get(), &task_environment_);
// Verify that stream notifications are delivered properly when stream status
// changes with a pending read. Call FlushBuffers before reading, to ensure
@@ -1761,7 +1762,7 @@ TEST_F(FFmpegDemuxerTest, StreamStatusNotifications) {
audio_stream->Read(base::Bind(&OnReadDoneExpectEos));
video_stream->Read(base::Bind(&OnReadDoneExpectEos));
- DisableAndEnableDemuxerTracks(demuxer_.get(), &scoped_task_environment_);
+ DisableAndEnableDemuxerTracks(demuxer_.get(), &task_environment_);
}
TEST_F(FFmpegDemuxerTest, MultitrackMemoryUsage) {
@@ -1776,7 +1777,7 @@ TEST_F(FFmpegDemuxerTest, MultitrackMemoryUsage) {
// shouldn't be too high.
audio->Read(NewReadCB(FROM_HERE, 304, 0, true));
base::RunLoop().Run();
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_EQ(22134, demuxer_->GetMemoryUsage());
// Now enable all demuxer streams in the file and perform another read, this
@@ -1788,7 +1789,7 @@ TEST_F(FFmpegDemuxerTest, MultitrackMemoryUsage) {
audio->Read(NewReadCB(FROM_HERE, 166, 21000, true));
base::RunLoop().Run();
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
// With newly enabled demuxer streams the amount of memory used by the demuxer
// is much higher.
EXPECT_EQ(156011, demuxer_->GetMemoryUsage());
diff --git a/chromium/media/filters/ffmpeg_video_decoder_unittest.cc b/chromium/media/filters/ffmpeg_video_decoder_unittest.cc
index f806b697564..d620f418bc6 100644
--- a/chromium/media/filters/ffmpeg_video_decoder_unittest.cc
+++ b/chromium/media/filters/ffmpeg_video_decoder_unittest.cc
@@ -17,7 +17,7 @@
#include "base/run_loop.h"
#include "base/strings/string_util.h"
#include "base/test/gmock_callback_support.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/base/decoder_buffer.h"
#include "media/base/limits.h"
#include "media/base/media_log.h"
@@ -200,7 +200,7 @@ class FFmpegVideoDecoderTest : public testing::Test {
StrictMock<MockMediaLog> media_log_;
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
std::unique_ptr<FFmpegVideoDecoder> decoder_;
// Various buffers for testing.
diff --git a/chromium/media/filters/frame_processor_unittest.cc b/chromium/media/filters/frame_processor_unittest.cc
index 3afe81a5fac..a18078b0f3a 100644
--- a/chromium/media/filters/frame_processor_unittest.cc
+++ b/chromium/media/filters/frame_processor_unittest.cc
@@ -16,7 +16,7 @@
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_split.h"
#include "base/strings/string_util.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/time/time.h"
#include "media/base/media_log.h"
#include "media/base/media_util.h"
@@ -298,7 +298,7 @@ class FrameProcessorTest : public ::testing::TestWithParam<bool> {
stream->StartReturningData();
}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
StrictMock<MockMediaLog> media_log_;
StrictMock<FrameProcessorTestCallbackHelper> callbacks_;
diff --git a/chromium/media/filters/fuchsia/OWNERS b/chromium/media/filters/fuchsia/OWNERS
index e7034eabb1e..c1b584511a6 100644
--- a/chromium/media/filters/fuchsia/OWNERS
+++ b/chromium/media/filters/fuchsia/OWNERS
@@ -1 +1,4 @@
file://build/fuchsia/OWNERS
+# COMPONENT: Fuchsia
+# OS: Fuchsia
+# TEAM: cr-fuchsia@chromium.org
diff --git a/chromium/media/filters/fuchsia/fuchsia_video_decoder.cc b/chromium/media/filters/fuchsia/fuchsia_video_decoder.cc
index 6e8f58fa90f..bd11ccb95f3 100644
--- a/chromium/media/filters/fuchsia/fuchsia_video_decoder.cc
+++ b/chromium/media/filters/fuchsia/fuchsia_video_decoder.cc
@@ -6,18 +6,26 @@
#include <fuchsia/media/cpp/fidl.h>
#include <fuchsia/mediacodec/cpp/fidl.h>
+#include <fuchsia/sysmem/cpp/fidl.h>
+#include <lib/sys/cpp/component_context.h>
#include <zircon/rights.h>
#include "base/bind.h"
+#include "base/bits.h"
#include "base/callback_helpers.h"
+#include "base/fuchsia/default_context.h"
#include "base/fuchsia/fuchsia_logging.h"
-#include "base/fuchsia/service_directory_client.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
+#include "base/process/process_metrics.h"
#include "base/threading/sequenced_task_runner_handle.h"
+#include "gpu/command_buffer/client/context_support.h"
+#include "gpu/command_buffer/client/shared_image_interface.h"
+#include "gpu/command_buffer/common/shared_image_usage.h"
+#include "gpu/ipc/common/gpu_memory_buffer_impl_native_pixmap.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/cdm_context.h"
#include "media/base/decryptor.h"
@@ -26,15 +34,14 @@
#include "media/base/video_frame.h"
#include "media/base/video_util.h"
#include "third_party/libyuv/include/libyuv/video_common.h"
+#include "ui/gfx/buffer_types.h"
+#include "ui/gfx/client_native_pixmap_factory.h"
+#include "ui/ozone/public/client_native_pixmap_factory_ozone.h"
namespace media {
namespace {
-const zx_rights_t kReadOnlyVmoRights =
- ZX_DEFAULT_VMO_RIGHTS &
- ~(ZX_RIGHT_WRITE | ZX_RIGHT_EXECUTE | ZX_RIGHT_SET_PROPERTY);
-
// Value passed to the codec as packet_count_for_client. It's number of output
// buffers that we expect to hold on to in the renderer.
//
@@ -43,30 +50,6 @@ const zx_rights_t kReadOnlyVmoRights =
// works properly when the client holds to more than that.
const uint32_t kMaxUsedOutputFrames = 8;
-zx::vmo CreateContiguousVmo(size_t size, const zx::handle& bti_handle) {
- zx::vmo vmo;
- zx_status_t status =
- zx_vmo_create_contiguous(bti_handle.get(), size, /*alignment_log2=*/0,
- vmo.reset_and_get_address());
- if (status != ZX_OK) {
- ZX_DLOG(ERROR, status) << "zx_vmo_create_contiguous";
- return zx::vmo();
- }
-
- return vmo;
-}
-
-zx::vmo CreateVmo(size_t size) {
- zx::vmo vmo;
- zx_status_t status = zx::vmo::create(size, 0, &vmo);
- if (status != ZX_OK) {
- ZX_DLOG(ERROR, status) << "zx_vmo_create";
- return zx::vmo();
- }
-
- return vmo;
-}
-
class PendingDecode {
public:
PendingDecode(scoped_refptr<DecoderBuffer> buffer,
@@ -101,76 +84,52 @@ class PendingDecode {
DISALLOW_COPY_AND_ASSIGN(PendingDecode);
};
-class CodecBuffer {
+class InputBuffer {
public:
- CodecBuffer() = default;
-
- bool Initialize(const fuchsia::media::StreamBufferConstraints& constraints) {
- if (!constraints.has_per_packet_buffer_bytes_recommended()) {
- return false;
+ InputBuffer() {}
+
+ ~InputBuffer() {
+ if (base_address_) {
+ size_t mapped_bytes =
+ base::bits::Align(offset_ + size_, base::GetPageSize());
+ zx_status_t status = zx::vmar::root_self()->unmap(
+ reinterpret_cast<uintptr_t>(base_address_), mapped_bytes);
+ ZX_DCHECK(status == ZX_OK, status) << "zx_vmar_unmap";
}
- size_ = constraints.per_packet_buffer_bytes_recommended();
-
- if (constraints.has_is_physically_contiguous_required() &&
- constraints.is_physically_contiguous_required()) {
- if (!constraints.has_very_temp_kludge_bti_handle()) {
- return false;
- }
- vmo_ =
- CreateContiguousVmo(size_, constraints.very_temp_kludge_bti_handle());
- } else {
- vmo_ = CreateVmo(size_);
- }
- return vmo_.is_valid();
+ CallDecodeCallbackIfAny(DecodeStatus::ABORTED);
}
- const zx::vmo& vmo() const { return vmo_; }
- size_t size() const { return size_; }
+ InputBuffer(InputBuffer&&) = default;
+ InputBuffer& operator=(InputBuffer&&) = default;
- bool ToFidlCodecBuffer(uint64_t buffer_lifetime_ordinal,
- uint32_t buffer_index,
- bool read_only,
- fuchsia::media::StreamBuffer* buffer) {
- zx::vmo vmo_dup;
- zx_status_t status = vmo_.duplicate(
- read_only ? kReadOnlyVmoRights : ZX_RIGHT_SAME_RIGHTS, &vmo_dup);
+ bool Initialize(zx::vmo vmo,
+ size_t offset,
+ size_t size,
+ fuchsia::sysmem::CoherencyDomain coherency_domain) {
+ DCHECK(!base_address_);
+ DCHECK(vmo);
+
+ // zx_vmo_write() doesn't work for sysmem-allocated VMOs (see ZX-4854), so
+ // the VMOs have to be mapped.
+ size_t bytes_to_map = base::bits::Align(offset + size, base::GetPageSize());
+ uintptr_t addr;
+ zx_status_t status = zx::vmar::root_self()->map(
+ /*vmar_offset=*/0, vmo, /*vmo_offset=*/0, bytes_to_map,
+ ZX_VM_PERM_READ | ZX_VM_PERM_WRITE, &addr);
if (status != ZX_OK) {
- ZX_DLOG(ERROR, status) << "zx_handle_duplicate";
+ ZX_DLOG(ERROR, status) << "zx_vmar_map";
return false;
}
- fuchsia::media::StreamBufferDataVmo buf_data;
- buf_data.set_vmo_handle(std::move(vmo_dup));
-
- buf_data.set_vmo_usable_start(0);
- buf_data.set_vmo_usable_size(size_);
-
- buffer->mutable_data()->set_vmo(std::move(buf_data));
- buffer->set_buffer_lifetime_ordinal(buffer_lifetime_ordinal);
- buffer->set_buffer_index(buffer_index);
+ base_address_ = reinterpret_cast<uint8_t*>(addr);
+ offset_ = offset;
+ size_ = size;
+ coherency_domain_ = coherency_domain;
return true;
}
- private:
- zx::vmo vmo_;
- size_t size_ = 0;
-
- DISALLOW_COPY_AND_ASSIGN(CodecBuffer);
-};
-
-class InputBuffer {
- public:
- InputBuffer() = default;
-
- ~InputBuffer() { CallDecodeCallbackIfAny(DecodeStatus::ABORTED); }
-
- bool Initialize(const fuchsia::media::StreamBufferConstraints& constraints) {
- return buffer_.Initialize(constraints);
- }
-
- CodecBuffer& buffer() { return buffer_; }
bool is_used() const { return is_used_; }
// Copies as much data as possible from |pending_decode| to this input buffer.
@@ -178,12 +137,15 @@ class InputBuffer {
DCHECK(!is_used_);
is_used_ = true;
- size_t bytes_to_fill =
- std::min(buffer_.size(), pending_decode->bytes_left());
+ size_t bytes_to_fill = std::min(size_, pending_decode->bytes_left());
+ memcpy(base_address_ + offset_, pending_decode->data(), bytes_to_fill);
- zx_status_t status =
- buffer_.vmo().write(pending_decode->data(), 0, bytes_to_fill);
- ZX_CHECK(status == ZX_OK, status) << "zx_vmo_write";
+ // Flush CPU cache if the codec reads from RAM.
+ if (coherency_domain_ == fuchsia::sysmem::CoherencyDomain::RAM) {
+ zx_status_t status = zx_cache_flush(base_address_ + offset_,
+ bytes_to_fill, ZX_CACHE_FLUSH_DATA);
+ ZX_DCHECK(status == ZX_OK, status) << "zx_cache_flush";
+ }
pending_decode->AdvanceCurrentPos(bytes_to_fill);
@@ -208,7 +170,12 @@ class InputBuffer {
}
private:
- CodecBuffer buffer_;
+ uint8_t* base_address_ = nullptr;
+
+ // Buffer settings provided by sysmem.
+ size_t offset_ = 0;
+ size_t size_ = 0;
+ fuchsia::sysmem::CoherencyDomain coherency_domain_;
// Set to true when this buffer is being used by the codec.
bool is_used_ = false;
@@ -216,67 +183,112 @@ class InputBuffer {
// Decode callback for the DecodeBuffer of which this InputBuffer is a part.
// This is only set on the final InputBuffer in each DecodeBuffer.
VideoDecoder::DecodeCB decode_cb_;
-
- DISALLOW_COPY_AND_ASSIGN(InputBuffer);
};
-// Output buffer used to pass decoded frames from the decoder. Ref-counted
-// to make it possible to share the buffers with VideoFrames, in case when a
-// frame outlives the decoder.
-class OutputBuffer : public base::RefCountedThreadSafe<OutputBuffer> {
+// Helper used to hold mailboxes for the output textures. OutputMailbox may
+// outlive FuchsiaVideoDecoder if it is referenced by a VideoFrame.
+class OutputMailbox {
public:
- OutputBuffer() = default;
+ OutputMailbox(gpu::SharedImageInterface* shared_image_interface,
+ gpu::ContextSupport* gpu_context_support,
+ std::unique_ptr<gfx::GpuMemoryBuffer> gmb)
+ : shared_image_interface_(shared_image_interface),
+ gpu_context_support_(gpu_context_support),
+ weak_factory_(this) {
+ uint32_t usage = gpu::SHARED_IMAGE_USAGE_RASTER |
+ gpu::SHARED_IMAGE_USAGE_OOP_RASTERIZATION |
+ gpu::SHARED_IMAGE_USAGE_DISPLAY |
+ gpu::SHARED_IMAGE_USAGE_SCANOUT;
+ mailbox_ = shared_image_interface_->CreateSharedImage(
+ gmb.get(), nullptr, gfx::ColorSpace(), usage);
+ }
+ ~OutputMailbox() {
+ shared_image_interface_->DestroySharedImage(sync_token_, mailbox_);
+ }
- bool Initialize(const fuchsia::media::StreamBufferConstraints& constraints) {
- if (!buffer_.Initialize(constraints)) {
- return false;
- }
+ const gpu::Mailbox& mailbox() { return mailbox_; }
- zx_status_t status = zx::vmar::root_self()->map(
- /*vmar_offset=*/0, buffer_.vmo(), 0, buffer_.size(),
- ZX_VM_REQUIRE_NON_RESIZABLE | ZX_VM_PERM_READ, &mapped_memory_);
+ // Create a new video frame that wraps the mailbox. |reuse_callback| will be
+ // called when the mailbox can be reused.
+ scoped_refptr<VideoFrame> CreateFrame(VideoPixelFormat pixel_format,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ base::TimeDelta timestamp,
+ base::OnceClosure reuse_callback) {
+ DCHECK(!is_used_);
+ is_used_ = true;
+ reuse_callback_ = std::move(reuse_callback);
- if (status != ZX_OK) {
- ZX_DLOG(ERROR, status) << "zx_vmar_map";
- mapped_memory_ = 0;
- return false;
- }
+ gpu::MailboxHolder mailboxes[VideoFrame::kMaxPlanes];
+ mailboxes[0].mailbox = mailbox_;
+ mailboxes[0].sync_token = shared_image_interface_->GenUnverifiedSyncToken();
- return true;
+ return VideoFrame::WrapNativeTextures(
+ pixel_format, mailboxes,
+ BindToCurrentLoop(base::BindOnce(&OutputMailbox::OnFrameDestroyed,
+ base::Unretained(this))),
+ coded_size, visible_rect, natural_size, timestamp);
}
- CodecBuffer& buffer() { return buffer_; }
-
- const uint8_t* mapped_memory() {
- DCHECK(mapped_memory_);
- return reinterpret_cast<uint8_t*>(mapped_memory_);
+ // Called by FuchsiaVideoDecoder when it no longer needs this mailbox.
+ void Release() {
+ if (is_used_) {
+ // The mailbox is referenced by a VideoFrame. It will be deleted as soon
+ // as the frame is destroyed.
+ DCHECK(reuse_callback_);
+ reuse_callback_ = base::Closure();
+ } else {
+ delete this;
+ }
}
private:
- friend class RefCountedThreadSafe<OutputBuffer>;
-
- ~OutputBuffer() {
- if (mapped_memory_) {
- zx_status_t status =
- zx::vmar::root_self()->unmap(mapped_memory_, buffer_.size());
- if (status != ZX_OK) {
- ZX_LOG(FATAL, status) << "zx_vmar_unmap";
- }
+ void OnFrameDestroyed(const gpu::SyncToken& sync_token) {
+ DCHECK(is_used_);
+ is_used_ = false;
+ sync_token_ = sync_token;
+
+ if (!reuse_callback_) {
+ // If the mailbox cannot be reused then we can just delete it.
+ delete this;
+ return;
}
+
+ gpu_context_support_->SignalSyncToken(
+ sync_token_,
+ BindToCurrentLoop(base::BindOnce(&OutputMailbox::OnSyncTokenSignaled,
+ weak_factory_.GetWeakPtr())));
}
- CodecBuffer buffer_;
+ void OnSyncTokenSignaled() {
+ sync_token_.Clear();
+ std::move(reuse_callback_).Run();
+ }
+
+ gpu::SharedImageInterface* const shared_image_interface_;
+ gpu::ContextSupport* const gpu_context_support_;
+
+ gpu::Mailbox mailbox_;
+ gpu::SyncToken sync_token_;
+
+ // Set to true when the mailbox is referenced by a video frame.
+ bool is_used_ = false;
+
+ base::OnceClosure reuse_callback_;
- uintptr_t mapped_memory_ = 0;
+ base::WeakPtrFactory<OutputMailbox> weak_factory_;
- DISALLOW_COPY_AND_ASSIGN(OutputBuffer);
+ DISALLOW_COPY_AND_ASSIGN(OutputMailbox);
};
} // namespace
class FuchsiaVideoDecoder : public VideoDecoder {
public:
- explicit FuchsiaVideoDecoder(bool enable_sw_decoding);
+ FuchsiaVideoDecoder(gpu::SharedImageInterface* shared_image_interface,
+ gpu::ContextSupport* gpu_context_support,
+ bool enable_sw_decoding);
~FuchsiaVideoDecoder() override;
// VideoDecoder implementation.
@@ -295,7 +307,8 @@ class FuchsiaVideoDecoder : public VideoDecoder {
private:
// Event handlers for |codec_|.
- void OnStreamFailed(uint64_t stream_lifetime_ordinal);
+ void OnStreamFailed(uint64_t stream_lifetime_ordinal,
+ fuchsia::media::StreamError error);
void OnInputConstraints(
fuchsia::media::StreamBufferConstraints input_constraints);
void OnFreeInputPacket(fuchsia::media::PacketHeader free_input_packet);
@@ -308,24 +321,38 @@ class FuchsiaVideoDecoder : public VideoDecoder {
void OnOutputEndOfStream(uint64_t stream_lifetime_ordinal,
bool error_detected_before);
+ // Called on errors to shutdown the decoder and notify the client.
void OnError();
// Called by OnInputConstraints() to initialize input buffers.
- bool InitializeInputBuffers(
- fuchsia::media::StreamBufferConstraints constraints);
+ void InitializeInputBufferCollection(
+ fuchsia::media::StreamBufferConstraints constraints,
+ fuchsia::sysmem::BufferCollectionTokenPtr sysmem_token);
+
+ // Callback for BufferCollection::WaitForBuffersAllocated() when initializing
+ // input buffer collection.
+ void OnInputBuffersAllocated(
+ zx_status_t status,
+ fuchsia::sysmem::BufferCollectionInfo_2 buffer_collection_info);
// Pumps |pending_decodes_| to the decoder.
void PumpInput();
- // Called by OnInputConstraints() to initialize input buffers.
- bool InitializeOutputBuffers(
- fuchsia::media::StreamBufferConstraints constraints);
+ // Called by OnOutputConstraints() to initialize output buffers.
+ void InitializeOutputBufferCollection(
+ fuchsia::media::StreamBufferConstraints constraints,
+ fuchsia::sysmem::BufferCollectionTokenPtr collection_token_for_codec,
+ fuchsia::sysmem::BufferCollectionTokenPtr collection_token_for_gpu);
+
+ // Called by OutputMailbox to signal that the mailbox and buffer can be
+ // reused.
+ void OnReuseMailbox(uint32_t buffer_index, uint32_t packet_index);
- // Destruction callback for the output VideoFrame instances.
- void OnFrameDestroyed(scoped_refptr<OutputBuffer> buffer,
- uint64_t buffer_lifetime_ordinal,
- uint32_t packet_index);
+ // Releases BufferCollection currently used for output buffers if any.
+ void ReleaseOutputBuffers();
+ gpu::SharedImageInterface* const shared_image_interface_;
+ gpu::ContextSupport* const gpu_context_support_;
const bool enable_sw_decoding_;
OutputCB output_cb_;
@@ -335,6 +362,8 @@ class FuchsiaVideoDecoder : public VideoDecoder {
float container_pixel_aspect_ratio_ = 1.0;
fuchsia::media::StreamProcessorPtr codec_;
+ fuchsia::sysmem::AllocatorPtr sysmem_allocator_;
+ std::unique_ptr<gfx::ClientNativePixmapFactory> client_native_pixmap_factory_;
uint64_t stream_lifetime_ordinal_ = 1;
@@ -342,14 +371,20 @@ class FuchsiaVideoDecoder : public VideoDecoder {
// stream_lifetime_ordinal_.
bool active_stream_ = false;
+ // Input buffers.
std::list<PendingDecode> pending_decodes_;
uint64_t input_buffer_lifetime_ordinal_ = 1;
+ fuchsia::sysmem::BufferCollectionPtr input_buffer_collection_;
std::vector<InputBuffer> input_buffers_;
int num_used_input_buffers_ = 0;
+ // Output buffers.
fuchsia::media::VideoUncompressedFormat output_format_;
uint64_t output_buffer_lifetime_ordinal_ = 1;
- std::vector<scoped_refptr<OutputBuffer>> output_buffers_;
+ fuchsia::sysmem::BufferCollectionPtr output_buffer_collection_;
+ gfx::SysmemBufferCollectionId output_buffer_collection_id_;
+ std::vector<OutputMailbox*> output_mailboxes_;
+
int num_used_output_buffers_ = 0;
int max_used_output_buffers_ = 0;
@@ -362,12 +397,22 @@ class FuchsiaVideoDecoder : public VideoDecoder {
DISALLOW_COPY_AND_ASSIGN(FuchsiaVideoDecoder);
};
-FuchsiaVideoDecoder::FuchsiaVideoDecoder(bool enable_sw_decoding)
- : enable_sw_decoding_(enable_sw_decoding), weak_factory_(this) {
+FuchsiaVideoDecoder::FuchsiaVideoDecoder(
+ gpu::SharedImageInterface* shared_image_interface,
+ gpu::ContextSupport* gpu_context_support,
+ bool enable_sw_decoding)
+ : shared_image_interface_(shared_image_interface),
+ gpu_context_support_(gpu_context_support),
+ enable_sw_decoding_(enable_sw_decoding),
+ client_native_pixmap_factory_(ui::CreateClientNativePixmapFactoryOzone()),
+ weak_factory_(this) {
+ DCHECK(shared_image_interface_);
weak_this_ = weak_factory_.GetWeakPtr();
}
-FuchsiaVideoDecoder::~FuchsiaVideoDecoder() = default;
+FuchsiaVideoDecoder::~FuchsiaVideoDecoder() {
+ ReleaseOutputBuffers();
+}
std::string FuchsiaVideoDecoder::GetDisplayName() const {
return "FuchsiaVideoDecoder";
@@ -403,6 +448,15 @@ void FuchsiaVideoDecoder::Initialize(const VideoDecoderConfig& config,
output_cb_ = output_cb;
container_pixel_aspect_ratio_ = config.GetPixelAspectRatio();
+ sysmem_allocator_ = base::fuchsia::ComponentContextForCurrentProcess()
+ ->svc()
+ ->Connect<fuchsia::sysmem::Allocator>();
+ sysmem_allocator_.set_error_handler([](zx_status_t status) {
+ // Just log a warning. We will handle the BufferCollection failure when
+ // trying to create a new BufferCollection.
+ ZX_DLOG(WARNING, status) << "fuchsia.sysmem.Allocator disconnected.";
+ });
+
fuchsia::mediacodec::CreateDecoder_Params codec_params;
codec_params.mutable_input_details()->set_format_details_version_ordinal(0);
@@ -431,17 +485,15 @@ void FuchsiaVideoDecoder::Initialize(const VideoDecoderConfig& config,
codec_params.set_promise_separate_access_units_on_input(true);
codec_params.set_require_hw(!enable_sw_decoding_);
- auto codec_factory =
- base::fuchsia::ServiceDirectoryClient::ForCurrentProcess()
- ->ConnectToService<fuchsia::mediacodec::CodecFactory>();
+ auto codec_factory = base::fuchsia::ComponentContextForCurrentProcess()
+ ->svc()
+ ->Connect<fuchsia::mediacodec::CodecFactory>();
codec_factory->CreateDecoder(std::move(codec_params), codec_.NewRequest());
- codec_.set_error_handler(
- [this](zx_status_t status) {
- ZX_LOG(ERROR, status)
- << "The fuchsia.mediacodec.Codec channel was terminated.";
- OnError();
- });
+ codec_.set_error_handler([this](zx_status_t status) {
+ ZX_LOG(ERROR, status) << "fuchsia.mediacodec.Codec disconnected.";
+ OnError();
+ });
codec_.events().OnStreamFailed =
fit::bind_member(this, &FuchsiaVideoDecoder::OnStreamFailed);
@@ -515,7 +567,8 @@ int FuchsiaVideoDecoder::GetMaxDecodeRequests() const {
return input_buffers_.size() + 1;
}
-void FuchsiaVideoDecoder::OnStreamFailed(uint64_t stream_lifetime_ordinal) {
+void FuchsiaVideoDecoder::OnStreamFailed(uint64_t stream_lifetime_ordinal,
+ fuchsia::media::StreamError error) {
if (stream_lifetime_ordinal_ != stream_lifetime_ordinal) {
return;
}
@@ -524,14 +577,49 @@ void FuchsiaVideoDecoder::OnStreamFailed(uint64_t stream_lifetime_ordinal) {
}
void FuchsiaVideoDecoder::OnInputConstraints(
- fuchsia::media::StreamBufferConstraints input_constraints) {
- if (!InitializeInputBuffers(std::move(input_constraints))) {
- DLOG(ERROR) << "Failed to initialize input buffers.";
+ fuchsia::media::StreamBufferConstraints constraints) {
+ // Buffer lifetime ordinal is an odd number incremented by 2 for each buffer
+ // generation as required by StreamProcessor.
+ input_buffer_lifetime_ordinal_ += 2;
+
+ if (!constraints.has_default_settings() ||
+ !constraints.default_settings().has_packet_count_for_server() ||
+ !constraints.default_settings().has_packet_count_for_client()) {
+ DLOG(ERROR)
+ << "Received OnInputConstraints() with missing required fields.";
OnError();
return;
}
- PumpInput();
+ input_buffer_collection_.Unbind();
+ input_buffers_.clear();
+
+ // Create a new sysmem buffer collection token for the input buffers.
+ fuchsia::sysmem::BufferCollectionTokenPtr collection_token;
+ sysmem_allocator_->AllocateSharedCollection(collection_token.NewRequest());
+
+ // Create collection token for the codec.
+ fuchsia::sysmem::BufferCollectionTokenPtr collection_token_for_codec;
+ collection_token->Duplicate(ZX_RIGHT_SAME_RIGHTS,
+ collection_token_for_codec.NewRequest());
+
+ // Convert the token to a BufferCollection connection.
+ sysmem_allocator_->BindSharedCollection(
+ std::move(collection_token), input_buffer_collection_.NewRequest());
+ input_buffer_collection_.set_error_handler([this](zx_status_t status) {
+ ZX_LOG(ERROR, status) << "fuchsia.sysmem.BufferCollection disconnected.";
+ OnError();
+ });
+
+ // BufferCollection needs to be synchronized to ensure that all token
+ // duplicate requests have been processed and sysmem knows about all clients
+ // that will be using this buffer collection.
+ input_buffer_collection_->Sync([this, constraints = std::move(constraints),
+ collection_token_for_codec = std::move(
+ collection_token_for_codec)]() mutable {
+ InitializeInputBufferCollection(std::move(constraints),
+ std::move(collection_token_for_codec));
+ });
}
void FuchsiaVideoDecoder::OnFreeInputPacket(
@@ -569,8 +657,8 @@ void FuchsiaVideoDecoder::OnFreeInputPacket(
void FuchsiaVideoDecoder::OnOutputConstraints(
fuchsia::media::StreamOutputConstraints output_constraints) {
if (!output_constraints.has_stream_lifetime_ordinal()) {
- DLOG(ERROR) << "Received OnOutputConstraints() with missing required "
- "fields.";
+ DLOG(ERROR)
+ << "Received OnOutputConstraints() with missing required fields.";
OnError();
return;
}
@@ -580,21 +668,72 @@ void FuchsiaVideoDecoder::OnOutputConstraints(
return;
}
- if (output_constraints.has_buffer_constraints_action_required() &&
- output_constraints.buffer_constraints_action_required()) {
- if (!output_constraints.has_buffer_constraints()) {
- DLOG(ERROR) << "Received OnOutputConstraints() which requires buffer "
- "constraints action, but without buffer constraints.";
- OnError();
- return;
- }
- if (!InitializeOutputBuffers(
- std::move(*output_constraints.mutable_buffer_constraints()))) {
- DLOG(ERROR) << "Failed to initialize output buffers.";
- OnError();
- return;
- }
+ if (!output_constraints.has_buffer_constraints_action_required() ||
+ !output_constraints.buffer_constraints_action_required()) {
+ return;
}
+
+ if (!output_constraints.has_buffer_constraints()) {
+ DLOG(ERROR) << "Received OnOutputConstraints() which requires buffer "
+ "constraints action, but without buffer constraints.";
+ OnError();
+ return;
+ }
+
+ const fuchsia::media::StreamBufferConstraints& buffer_constraints =
+ output_constraints.buffer_constraints();
+
+ if (!buffer_constraints.has_default_settings() ||
+ !buffer_constraints.has_packet_count_for_client_max() ||
+ !buffer_constraints.default_settings().has_packet_count_for_server() ||
+ !buffer_constraints.default_settings().has_packet_count_for_client()) {
+ DLOG(ERROR)
+ << "Received OnOutputConstraints() with missing required fields.";
+ OnError();
+ return;
+ }
+
+ ReleaseOutputBuffers();
+
+ // mediacodec API expects an odd buffer lifetime ordinal, which is incremented by
+ // 2 for each buffer generation.
+ output_buffer_lifetime_ordinal_ += 2;
+
+ max_used_output_buffers_ = std::min(
+ kMaxUsedOutputFrames, buffer_constraints.packet_count_for_client_max());
+
+ // Create a new sysmem buffer collection token for the output buffers.
+ fuchsia::sysmem::BufferCollectionTokenPtr collection_token;
+ sysmem_allocator_->AllocateSharedCollection(collection_token.NewRequest());
+
+ // Create sysmem tokens for the gpu process and the codec.
+ fuchsia::sysmem::BufferCollectionTokenPtr collection_token_for_codec;
+ collection_token->Duplicate(ZX_RIGHT_SAME_RIGHTS,
+ collection_token_for_codec.NewRequest());
+ fuchsia::sysmem::BufferCollectionTokenPtr collection_token_for_gpu;
+ collection_token->Duplicate(ZX_RIGHT_SAME_RIGHTS,
+ collection_token_for_gpu.NewRequest());
+
+ // Convert the token to a BufferCollection connection.
+ sysmem_allocator_->BindSharedCollection(
+ std::move(collection_token), output_buffer_collection_.NewRequest());
+ output_buffer_collection_.set_error_handler([this](zx_status_t status) {
+ ZX_LOG(ERROR, status) << "fuchsia.sysmem.BufferCollection disconnected.";
+ OnError();
+ });
+
+ // BufferCollection needs to be synchronized before we can use it.
+ output_buffer_collection_->Sync(
+ [this,
+ buffer_constraints =
+ std::move(*output_constraints.mutable_buffer_constraints()),
+ collection_token_for_codec = std::move(collection_token_for_codec),
+ collection_token_for_gpu =
+ std::move(collection_token_for_gpu)]() mutable {
+ InitializeOutputBufferCollection(std::move(buffer_constraints),
+ std::move(collection_token_for_codec),
+ std::move(collection_token_for_gpu));
+ });
}
void FuchsiaVideoDecoder::OnOutputFormat(
@@ -611,7 +750,6 @@ void FuchsiaVideoDecoder::OnOutputFormat(
}
auto* format = output_format.mutable_format_details();
-
if (!format->has_domain() || !format->domain().is_video() ||
!format->domain().video().is_uncompressed()) {
DLOG(ERROR) << "Received OnOutputFormat() with invalid format.";
@@ -639,72 +777,64 @@ void FuchsiaVideoDecoder::OnOutputPacket(fuchsia::media::Packet output_packet,
return;
}
- auto coded_size = gfx::Size(output_format_.primary_width_pixels,
- output_format_.primary_height_pixels);
+ fuchsia::sysmem::PixelFormatType sysmem_pixel_format =
+ output_format_.image_format.pixel_format.type;
- base::Optional<VideoFrameLayout> layout;
- switch (output_format_.fourcc) {
- case libyuv::FOURCC_NV12:
- layout = VideoFrameLayout::CreateWithPlanes(
- PIXEL_FORMAT_NV12, coded_size,
- std::vector<VideoFrameLayout::Plane>{
- VideoFrameLayout::Plane(output_format_.primary_line_stride_bytes,
- output_format_.primary_start_offset,
- output_format_.primary_line_stride_bytes *
- output_format_.primary_height_pixels),
- VideoFrameLayout::Plane(
- output_format_.secondary_line_stride_bytes,
- output_format_.secondary_start_offset,
- output_format_.secondary_line_stride_bytes *
- output_format_.secondary_height_pixels)});
- DCHECK(layout);
+ VideoPixelFormat pixel_format;
+ gfx::BufferFormat buffer_format;
+ switch (sysmem_pixel_format) {
+ case fuchsia::sysmem::PixelFormatType::NV12:
+ pixel_format = PIXEL_FORMAT_NV12;
+ buffer_format = gfx::BufferFormat::YUV_420_BIPLANAR;
break;
- case libyuv::FOURCC_YV12:
- layout = VideoFrameLayout::CreateWithPlanes(
- PIXEL_FORMAT_YV12, coded_size,
- std::vector<VideoFrameLayout::Plane>{
- VideoFrameLayout::Plane(output_format_.primary_line_stride_bytes,
- output_format_.primary_start_offset,
- output_format_.primary_line_stride_bytes *
- output_format_.primary_height_pixels),
- VideoFrameLayout::Plane(
- output_format_.secondary_line_stride_bytes,
- output_format_.secondary_start_offset,
- output_format_.secondary_line_stride_bytes *
- output_format_.secondary_height_pixels),
- VideoFrameLayout::Plane(
- output_format_.secondary_line_stride_bytes,
- output_format_.tertiary_start_offset,
- output_format_.secondary_line_stride_bytes *
- output_format_.secondary_height_pixels)});
- DCHECK(layout);
+ case fuchsia::sysmem::PixelFormatType::I420:
+ case fuchsia::sysmem::PixelFormatType::YV12:
+ pixel_format = PIXEL_FORMAT_I420;
+ buffer_format = gfx::BufferFormat::YVU_420;
break;
default:
- LOG(ERROR) << "unknown fourcc: "
- << std::string(reinterpret_cast<char*>(&output_format_.fourcc),
- 4);
+ DLOG(ERROR) << "Unsupported pixel format: "
+ << static_cast<int>(sysmem_pixel_format);
+ OnError();
+ return;
}
- if (!layout) {
- codec_->RecycleOutputPacket(fidl::Clone(output_packet.header()));
+ size_t buffer_index = output_packet.buffer_index();
+ if (buffer_index >= output_mailboxes_.size()) {
+ DLOG(ERROR)
+ << "mediacodec generated output packet with an invalid buffer_index="
+ << buffer_index << " for output buffer collection with only "
+ << output_mailboxes_.size() << " packets.";
+ OnError();
return;
}
- base::TimeDelta timestamp;
- if (output_packet.has_timestamp_ish()) {
- timestamp = base::TimeDelta::FromNanoseconds(output_packet.timestamp_ish());
- }
+ auto coded_size = gfx::Size(output_format_.primary_width_pixels,
+ output_format_.primary_height_pixels);
- auto packet_index = output_packet.header().packet_index();
- auto buffer_index = output_packet.buffer_index();
- auto& buffer = output_buffers_[buffer_index];
+ if (!output_mailboxes_[buffer_index]) {
+ gfx::GpuMemoryBufferHandle gmb_handle;
+ gmb_handle.type = gfx::NATIVE_PIXMAP;
+ gmb_handle.native_pixmap_handle.buffer_collection_id =
+ output_buffer_collection_id_;
+ gmb_handle.native_pixmap_handle.buffer_index = buffer_index;
- // We're not using single buffer mode, so packet count will be equal to buffer
- // count.
- DCHECK_LT(num_used_output_buffers_, static_cast<int>(output_buffers_.size()));
- num_used_output_buffers_++;
+ auto gmb = gpu::GpuMemoryBufferImplNativePixmap::CreateFromHandle(
+ client_native_pixmap_factory_.get(), std::move(gmb_handle), coded_size,
+ buffer_format, gfx::BufferUsage::GPU_READ,
+ gpu::GpuMemoryBufferImpl::DestructionCallback());
+
+ output_mailboxes_[buffer_index] = new OutputMailbox(
+ shared_image_interface_, gpu_context_support_, std::move(gmb));
+ } else {
+ shared_image_interface_->UpdateSharedImage(
+ gpu::SyncToken(), output_mailboxes_[buffer_index]->mailbox());
+ }
+
+ auto display_rect = gfx::Rect(output_format_.primary_display_width_pixels,
+ output_format_.primary_display_height_pixels);
float pixel_aspect_ratio;
if (output_format_.has_pixel_aspect_ratio) {
@@ -715,22 +845,19 @@ void FuchsiaVideoDecoder::OnOutputPacket(fuchsia::media::Packet output_packet,
pixel_aspect_ratio = container_pixel_aspect_ratio_;
}
- auto display_rect = gfx::Rect(output_format_.primary_display_width_pixels,
- output_format_.primary_display_height_pixels);
+ base::TimeDelta timestamp;
+ if (output_packet.has_timestamp_ish()) {
+ timestamp = base::TimeDelta::FromNanoseconds(output_packet.timestamp_ish());
+ }
- // TODO(sergeyu): Create ReadOnlySharedMemoryRegion for the VMO and pass
- // it to the frame.
- auto frame = VideoFrame::WrapExternalDataWithLayout(
- *layout, display_rect, GetNaturalSize(display_rect, pixel_aspect_ratio),
- const_cast<uint8_t*>(buffer->mapped_memory()) +
- output_format_.primary_start_offset,
- buffer->buffer().size() - output_format_.primary_start_offset, timestamp);
+ num_used_output_buffers_++;
- // Pass a reference to the buffer to the destruction callback to ensure it's
- // not destroyed while the frame is being used.
- frame->AddDestructionObserver(BindToCurrentLoop(
- base::BindOnce(&FuchsiaVideoDecoder::OnFrameDestroyed, weak_this_, buffer,
- output_buffer_lifetime_ordinal_, packet_index)));
+ auto frame = output_mailboxes_[buffer_index]->CreateFrame(
+ pixel_format, coded_size, display_rect,
+ GetNaturalSize(display_rect, pixel_aspect_ratio), timestamp,
+ base::BindOnce(&FuchsiaVideoDecoder::OnReuseMailbox,
+ base::Unretained(this), buffer_index,
+ output_packet.header().packet_index()));
output_cb_.Run(std::move(frame));
}
@@ -775,51 +902,76 @@ void FuchsiaVideoDecoder::OnError() {
pending_decodes_.clear();
+ input_buffer_collection_.Unbind();
num_used_input_buffers_ = 0;
input_buffers_.clear();
- num_used_output_buffers_ = 0;
- output_buffers_.clear();
+ ReleaseOutputBuffers();
}
-bool FuchsiaVideoDecoder::InitializeInputBuffers(
- fuchsia::media::StreamBufferConstraints constraints) {
- input_buffer_lifetime_ordinal_ += 2;
+void FuchsiaVideoDecoder::InitializeInputBufferCollection(
+ fuchsia::media::StreamBufferConstraints constraints,
+ fuchsia::sysmem::BufferCollectionTokenPtr sysmem_token) {
+ fuchsia::media::StreamBufferPartialSettings settings;
+ settings.set_buffer_lifetime_ordinal(input_buffer_lifetime_ordinal_);
+ settings.set_buffer_constraints_version_ordinal(
+ constraints.buffer_constraints_version_ordinal());
+ settings.set_single_buffer_mode(false);
+ settings.set_packet_count_for_server(
+ constraints.default_settings().packet_count_for_server());
+ settings.set_packet_count_for_client(
+ constraints.default_settings().packet_count_for_client());
+ settings.set_sysmem_token(std::move(sysmem_token));
+ codec_->SetInputBufferPartialSettings(std::move(settings));
+
+ fuchsia::sysmem::BufferCollectionConstraints buffer_constraints;
+
+ // Currently we have to map buffers VMOs to write to them (see ZX-4854) and
+ // memory cannot be mapped as write-only (see ZX-4872), so request RW access
+ // even though we will never need to read from these buffers.
+ buffer_constraints.usage.cpu =
+ fuchsia::sysmem::cpuUsageRead | fuchsia::sysmem::cpuUsageWrite;
+
+ buffer_constraints.min_buffer_count_for_camping =
+ settings.packet_count_for_client();
+ buffer_constraints.has_buffer_memory_constraints = true;
+ buffer_constraints.buffer_memory_constraints.min_size_bytes =
+ constraints.per_packet_buffer_bytes_recommended();
+ buffer_constraints.buffer_memory_constraints.ram_domain_supported = true;
+ buffer_constraints.buffer_memory_constraints.cpu_domain_supported = true;
+ input_buffer_collection_->SetConstraints(
+ /*has_constraints=*/true, std::move(buffer_constraints));
+
+ input_buffer_collection_->WaitForBuffersAllocated(
+ fit::bind_member(this, &FuchsiaVideoDecoder::OnInputBuffersAllocated));
+}
- if (!constraints.has_default_settings() ||
- !constraints.default_settings().has_packet_count_for_server() ||
- !constraints.default_settings().has_packet_count_for_client()) {
- DLOG(ERROR)
- << "Received InitializeInputBuffers() with missing required fields.";
+void FuchsiaVideoDecoder::OnInputBuffersAllocated(
+ zx_status_t status,
+ fuchsia::sysmem::BufferCollectionInfo_2 buffer_collection_info) {
+ if (status != ZX_OK) {
+ ZX_DLOG(ERROR, status) << "Failed to allocate buffer collection for input.";
OnError();
- return false;
+ return;
}
- auto settings = fidl::Clone(constraints.default_settings());
- settings.set_buffer_lifetime_ordinal(input_buffer_lifetime_ordinal_);
- codec_->SetInputBufferSettings(fidl::Clone(settings));
-
- int total_buffers =
- settings.packet_count_for_server() + settings.packet_count_for_client();
- std::vector<InputBuffer> new_buffers(total_buffers);
-
- for (int i = 0; i < total_buffers; ++i) {
- fuchsia::media::StreamBuffer codec_buffer;
-
- if (!new_buffers[i].Initialize(constraints) ||
- !new_buffers[i].buffer().ToFidlCodecBuffer(
- input_buffer_lifetime_ordinal_, i, /*read_only=*/true,
- &codec_buffer)) {
- return false;
+ std::vector<InputBuffer> new_buffers;
+ new_buffers.resize(buffer_collection_info.buffer_count);
+ fuchsia::sysmem::BufferMemorySettings& settings =
+ buffer_collection_info.settings.buffer_settings;
+ for (size_t i = 0; i < buffer_collection_info.buffer_count; ++i) {
+ fuchsia::sysmem::VmoBuffer& buffer = buffer_collection_info.buffers[i];
+ if (!new_buffers[i].Initialize(std::move(buffer.vmo),
+ buffer.vmo_usable_start, settings.size_bytes,
+ settings.coherency_domain)) {
+ OnError();
+ return;
}
-
- codec_->AddInputBuffer(std::move(codec_buffer));
}
-
num_used_input_buffers_ = 0;
input_buffers_ = std::move(new_buffers);
- return true;
+ PumpInput();
}
void FuchsiaVideoDecoder::PumpInput() {
@@ -877,77 +1029,92 @@ void FuchsiaVideoDecoder::PumpInput() {
}
}
-bool FuchsiaVideoDecoder::InitializeOutputBuffers(
- fuchsia::media::StreamBufferConstraints constraints) {
- if (!constraints.has_default_settings() ||
- !constraints.has_packet_count_for_client_max() ||
- !constraints.default_settings().has_packet_count_for_server() ||
- !constraints.default_settings().has_packet_count_for_client()) {
- DLOG(ERROR)
- << "Received InitializeOutputBuffers() with missing required fields.";
- OnError();
- return false;
- }
-
- // mediacodec API expects odd buffer lifetime ordinal, which is incremented by
- // 2 for each buffer generation.
- output_buffer_lifetime_ordinal_ += 2;
-
- auto settings = fidl::Clone(constraints.default_settings());
+void FuchsiaVideoDecoder::InitializeOutputBufferCollection(
+ fuchsia::media::StreamBufferConstraints constraints,
+ fuchsia::sysmem::BufferCollectionTokenPtr collection_token_for_codec,
+ fuchsia::sysmem::BufferCollectionTokenPtr collection_token_for_gpu) {
+ fuchsia::sysmem::BufferCollectionConstraints buffer_constraints;
+ buffer_constraints.usage.none = fuchsia::sysmem::noneUsage;
+ buffer_constraints.min_buffer_count_for_camping = max_used_output_buffers_;
+ output_buffer_collection_->SetConstraints(
+ /*has_constraints=*/true, std::move(buffer_constraints));
+
+ // Register the new collection with the GPU process.
+ DCHECK(!output_buffer_collection_id_);
+ output_buffer_collection_id_ = gfx::SysmemBufferCollectionId::Create();
+ shared_image_interface_->RegisterSysmemBufferCollection(
+ output_buffer_collection_id_,
+ collection_token_for_gpu.Unbind().TakeChannel());
+
+ // Pass new output buffer settings to the codec.
+ fuchsia::media::StreamBufferPartialSettings settings;
settings.set_buffer_lifetime_ordinal(output_buffer_lifetime_ordinal_);
-
- max_used_output_buffers_ =
- std::min(kMaxUsedOutputFrames, constraints.packet_count_for_client_max());
+ settings.set_buffer_constraints_version_ordinal(
+ constraints.buffer_constraints_version_ordinal());
settings.set_packet_count_for_client(max_used_output_buffers_);
+ settings.set_packet_count_for_server(
+ constraints.packet_count_for_server_recommended());
+ settings.set_sysmem_token(std::move(collection_token_for_codec));
+ codec_->SetOutputBufferPartialSettings(std::move(settings));
+ codec_->CompleteOutputBufferPartialSettings(output_buffer_lifetime_ordinal_);
+
+ DCHECK(output_mailboxes_.empty());
+ output_mailboxes_.resize(
+ max_used_output_buffers_ +
+ constraints.packet_count_for_server_recommended(),
+ nullptr);
+}
- codec_->SetOutputBufferSettings(fidl::Clone(settings));
-
- int total_buffers =
- settings.packet_count_for_server() + settings.packet_count_for_client();
- std::vector<scoped_refptr<OutputBuffer>> new_buffers(total_buffers);
-
- for (int i = 0; i < total_buffers; ++i) {
- fuchsia::media::StreamBuffer codec_buffer;
- new_buffers[i] = new OutputBuffer();
- if (!new_buffers[i]->Initialize(constraints) ||
- !new_buffers[i]->buffer().ToFidlCodecBuffer(
- output_buffer_lifetime_ordinal_, i, /*read_only=*/false,
- &codec_buffer)) {
- return false;
- }
-
- codec_->AddOutputBuffer(std::move(codec_buffer));
+void FuchsiaVideoDecoder::ReleaseOutputBuffers() {
+ // Release the buffer collection.
+ num_used_output_buffers_ = 0;
+ if (output_buffer_collection_) {
+ output_buffer_collection_->Close();
+ output_buffer_collection_.Unbind();
}
- num_used_output_buffers_ = 0;
- output_buffers_ = std::move(new_buffers);
+ // Release all output mailboxes.
+ for (OutputMailbox* mailbox : output_mailboxes_) {
+ if (mailbox)
+ mailbox->Release();
+ }
+ output_mailboxes_.clear();
- return true;
+ // Tell the GPU process to drop the buffer collection.
+ if (output_buffer_collection_id_) {
+ shared_image_interface_->ReleaseSysmemBufferCollection(
+ output_buffer_collection_id_);
+ output_buffer_collection_id_ = {};
+ }
}
-void FuchsiaVideoDecoder::OnFrameDestroyed(scoped_refptr<OutputBuffer> buffer,
- uint64_t buffer_lifetime_ordinal,
- uint32_t packet_index) {
- if (!codec_)
- return;
+void FuchsiaVideoDecoder::OnReuseMailbox(uint32_t buffer_index,
+ uint32_t packet_index) {
+ DCHECK(codec_);
- if (buffer_lifetime_ordinal == output_buffer_lifetime_ordinal_) {
- DCHECK_GT(num_used_output_buffers_, 0);
- num_used_output_buffers_--;
- fuchsia::media::PacketHeader header;
- header.set_buffer_lifetime_ordinal(buffer_lifetime_ordinal);
- header.set_packet_index(packet_index);
- codec_->RecycleOutputPacket(std::move(header));
- }
+ DCHECK_GT(num_used_output_buffers_, 0);
+ num_used_output_buffers_--;
+
+ fuchsia::media::PacketHeader header;
+ header.set_buffer_lifetime_ordinal(output_buffer_lifetime_ordinal_);
+ header.set_packet_index(packet_index);
+ codec_->RecycleOutputPacket(std::move(header));
}
-std::unique_ptr<VideoDecoder> CreateFuchsiaVideoDecoder() {
- return std::make_unique<FuchsiaVideoDecoder>(/*enable_sw_decoding=*/false);
+std::unique_ptr<VideoDecoder> CreateFuchsiaVideoDecoder(
+ gpu::SharedImageInterface* shared_image_interface,
+ gpu::ContextSupport* gpu_context_support) {
+ return std::make_unique<FuchsiaVideoDecoder>(shared_image_interface,
+ gpu_context_support,
+ /*enable_sw_decoding=*/false);
}
std::unique_ptr<VideoDecoder> CreateFuchsiaVideoDecoderForTests(
+ gpu::SharedImageInterface* shared_image_interface,
+ gpu::ContextSupport* gpu_context_support,
bool enable_sw_decoding) {
- return std::make_unique<FuchsiaVideoDecoder>(enable_sw_decoding);
+ return std::make_unique<FuchsiaVideoDecoder>(
+ shared_image_interface, gpu_context_support, enable_sw_decoding);
}
} // namespace media
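
For orientation, a compact sketch of the sysmem token handshake that both the input and output buffer-collection paths above follow: allocate a shared collection, duplicate one token per additional participant, bind a local BufferCollection connection, then Sync() before constraints are set. Everything apart from the FIDL calls themselves (function name, parameters) is illustrative.

#include <fuchsia/sysmem/cpp/fidl.h>
#include <lib/fit/function.h>
#include <lib/sys/cpp/component_context.h>
#include <zircon/rights.h>

#include <utility>

#include "base/fuchsia/default_context.h"

// Illustrative sketch of the allocate / duplicate / bind / Sync() sequence.
void SetUpBufferCollection(
    fuchsia::sysmem::AllocatorPtr* allocator,
    fuchsia::sysmem::BufferCollectionPtr* collection,
    fuchsia::sysmem::BufferCollectionTokenPtr* token_for_peer,
    fit::closure on_synced) {
  *allocator = base::fuchsia::ComponentContextForCurrentProcess()
                   ->svc()
                   ->Connect<fuchsia::sysmem::Allocator>();

  fuchsia::sysmem::BufferCollectionTokenPtr token;
  (*allocator)->AllocateSharedCollection(token.NewRequest());

  // One duplicated token per extra participant (the codec, the GPU process).
  token->Duplicate(ZX_RIGHT_SAME_RIGHTS, token_for_peer->NewRequest());

  // Keep a local connection so constraints can be set on this end.
  (*allocator)->BindSharedCollection(std::move(token), collection->NewRequest());

  // Sync() ensures sysmem has observed every duplicated token before the
  // peers start presenting theirs.
  (*collection)->Sync(std::move(on_synced));
}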
diff --git a/chromium/media/filters/fuchsia/fuchsia_video_decoder.h b/chromium/media/filters/fuchsia/fuchsia_video_decoder.h
index 52f164ee129..d5a59637580 100644
--- a/chromium/media/filters/fuchsia/fuchsia_video_decoder.h
+++ b/chromium/media/filters/fuchsia/fuchsia_video_decoder.h
@@ -9,18 +9,28 @@
#include "media/base/media_export.h"
+namespace gpu {
+class ContextSupport;
+class SharedImageInterface;
+} // namespace gpu
+
namespace media {
class VideoDecoder;
// Creates VideoDecoder that uses fuchsia.mediacodec API. The returned
// VideoDecoder instance will only try to use hardware video codecs.
-MEDIA_EXPORT std::unique_ptr<VideoDecoder> CreateFuchsiaVideoDecoder();
+// |shared_image_interface| and |gpu_context_support| must outlive the decoder.
+MEDIA_EXPORT std::unique_ptr<VideoDecoder> CreateFuchsiaVideoDecoder(
+ gpu::SharedImageInterface* shared_image_interface,
+ gpu::ContextSupport* gpu_context_support);
// Same as above, but also allows to enable software codecs. This is useful for
// FuchsiaVideoDecoder tests that run on systems that don't have hardware
// decoder support.
MEDIA_EXPORT std::unique_ptr<VideoDecoder> CreateFuchsiaVideoDecoderForTests(
+ gpu::SharedImageInterface* shared_image_interface,
+ gpu::ContextSupport* gpu_context_support,
bool enable_sw_decoding);
} // namespace media
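
A small hedged sketch of what the new factory signature asks of callers: both GPU-side interfaces are raw pointers that must outlive the decoder. The wrapper function is hypothetical; only CreateFuchsiaVideoDecoder() and its parameters come from the header above.

#include <memory>

#include "gpu/command_buffer/client/context_support.h"
#include "gpu/command_buffer/client/shared_image_interface.h"
#include "media/base/video_decoder.h"
#include "media/filters/fuchsia/fuchsia_video_decoder.h"

// Hypothetical caller; both interfaces must remain valid for the decoder's
// whole lifetime.
std::unique_ptr<media::VideoDecoder> MakeHardwareDecoder(
    gpu::SharedImageInterface* shared_image_interface,
    gpu::ContextSupport* gpu_context_support) {
  return media::CreateFuchsiaVideoDecoder(shared_image_interface,
                                          gpu_context_support);
}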
diff --git a/chromium/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc b/chromium/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc
index 14568a6640d..eb1dddfa716 100644
--- a/chromium/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc
+++ b/chromium/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc
@@ -4,21 +4,188 @@
#include "media/filters/fuchsia/fuchsia_video_decoder.h"
+#include <fuchsia/sysmem/cpp/fidl.h>
+#include <lib/sys/cpp/component_context.h>
+
#include "base/bind.h"
#include "base/bind_helpers.h"
-#include "base/message_loop/message_loop.h"
+#include "base/containers/flat_map.h"
+#include "base/containers/flat_set.h"
+#include "base/fuchsia/default_context.h"
+#include "base/fuchsia/fuchsia_logging.h"
+#include "base/test/task_environment.h"
+#include "components/viz/test/test_context_support.h"
+#include "gpu/command_buffer/client/shared_image_interface.h"
#include "media/base/test_data_util.h"
#include "media/base/test_helpers.h"
#include "media/base/video_decoder.h"
#include "media/base/video_frame.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "ui/gfx/gpu_memory_buffer.h"
namespace media {
+namespace {
+
+class TestBufferCollection {
+ public:
+ explicit TestBufferCollection(zx::channel collection_token) {
+ sysmem_allocator_ = base::fuchsia::ComponentContextForCurrentProcess()
+ ->svc()
+ ->Connect<fuchsia::sysmem::Allocator>();
+ sysmem_allocator_.set_error_handler([](zx_status_t status) {
+ ZX_LOG(FATAL, status)
+ << "The fuchsia.sysmem.Allocator channel was terminated.";
+ });
+
+ sysmem_allocator_->BindSharedCollection(
+ fidl::InterfaceHandle<fuchsia::sysmem::BufferCollectionToken>(
+ std::move(collection_token)),
+ buffers_collection_.NewRequest());
+
+ fuchsia::sysmem::BufferCollectionConstraints buffer_constraints;
+ buffer_constraints.usage.cpu = fuchsia::sysmem::cpuUsageRead;
+ zx_status_t status = buffers_collection_->SetConstraints(
+ /*has_constraints=*/true, std::move(buffer_constraints));
+ ZX_CHECK(status == ZX_OK, status) << "BufferCollection::SetConstraints()";
+ }
+
+ ~TestBufferCollection() { buffers_collection_->Close(); }
+
+ size_t GetNumBuffers() {
+ if (!buffer_collection_info_) {
+ zx_status_t wait_status;
+ fuchsia::sysmem::BufferCollectionInfo_2 info;
+ zx_status_t status =
+ buffers_collection_->WaitForBuffersAllocated(&wait_status, &info);
+ ZX_CHECK(status == ZX_OK, status)
+ << "BufferCollection::WaitForBuffersAllocated()";
+ ZX_CHECK(wait_status == ZX_OK, wait_status)
+ << "BufferCollection::WaitForBuffersAllocated()";
+ buffer_collection_info_ = std::move(info);
+ }
+ return buffer_collection_info_->buffer_count;
+ }
+
+ private:
+ fuchsia::sysmem::AllocatorPtr sysmem_allocator_;
+ fuchsia::sysmem::BufferCollectionSyncPtr buffers_collection_;
+
+ base::Optional<fuchsia::sysmem::BufferCollectionInfo_2>
+ buffer_collection_info_;
+
+ DISALLOW_COPY_AND_ASSIGN(TestBufferCollection);
+};
+
+class TestSharedImageInterface : public gpu::SharedImageInterface {
+ public:
+ TestSharedImageInterface() = default;
+ ~TestSharedImageInterface() override = default;
+
+ gpu::Mailbox CreateSharedImage(viz::ResourceFormat format,
+ const gfx::Size& size,
+ const gfx::ColorSpace& color_space,
+ uint32_t usage) override {
+ NOTREACHED();
+ return gpu::Mailbox();
+ }
+
+ gpu::Mailbox CreateSharedImage(
+ viz::ResourceFormat format,
+ const gfx::Size& size,
+ const gfx::ColorSpace& color_space,
+ uint32_t usage,
+ base::span<const uint8_t> pixel_data) override {
+ NOTREACHED();
+ return gpu::Mailbox();
+ }
+
+ gpu::Mailbox CreateSharedImage(
+ gfx::GpuMemoryBuffer* gpu_memory_buffer,
+ gpu::GpuMemoryBufferManager* gpu_memory_buffer_manager,
+ const gfx::ColorSpace& color_space,
+ uint32_t usage) override {
+ gfx::GpuMemoryBufferHandle handle = gpu_memory_buffer->CloneHandle();
+ CHECK_EQ(handle.type, gfx::GpuMemoryBufferType::NATIVE_PIXMAP);
+
+ auto collection_it = sysmem_buffer_collections_.find(
+ handle.native_pixmap_handle.buffer_collection_id);
+ CHECK(collection_it != sysmem_buffer_collections_.end());
+ CHECK_LT(handle.native_pixmap_handle.buffer_index,
+ collection_it->second->GetNumBuffers());
+
+ auto result = gpu::Mailbox::Generate();
+ mailboxes_.insert(result);
+ return result;
+ }
+
+ void UpdateSharedImage(const gpu::SyncToken& sync_token,
+ const gpu::Mailbox& mailbox) override {
+ NOTREACHED();
+ }
+ void UpdateSharedImage(const gpu::SyncToken& sync_token,
+ std::unique_ptr<gfx::GpuFence> acquire_fence,
+ const gpu::Mailbox& mailbox) override {
+ NOTREACHED();
+ }
+
+ void DestroySharedImage(const gpu::SyncToken& sync_token,
+ const gpu::Mailbox& mailbox) override {
+ CHECK_EQ(mailboxes_.erase(mailbox), 1U);
+ }
+
+ SwapChainMailboxes CreateSwapChain(viz::ResourceFormat format,
+ const gfx::Size& size,
+ const gfx::ColorSpace& color_space,
+ uint32_t usage) override {
+ NOTREACHED();
+ return SwapChainMailboxes();
+ }
+ void PresentSwapChain(const gpu::SyncToken& sync_token,
+ const gpu::Mailbox& mailbox) override {
+ NOTREACHED();
+ }
+
+ void RegisterSysmemBufferCollection(gfx::SysmemBufferCollectionId id,
+ zx::channel token) override {
+ std::unique_ptr<TestBufferCollection>& collection =
+ sysmem_buffer_collections_[id];
+ EXPECT_FALSE(collection);
+ collection = std::make_unique<TestBufferCollection>(std::move(token));
+ }
+ void ReleaseSysmemBufferCollection(
+ gfx::SysmemBufferCollectionId id) override {
+ EXPECT_EQ(sysmem_buffer_collections_.erase(id), 1U);
+ }
+
+ gpu::SyncToken GenVerifiedSyncToken() override {
+ NOTREACHED();
+ return gpu::SyncToken();
+ }
+ gpu::SyncToken GenUnverifiedSyncToken() override {
+ return gpu::SyncToken(gpu::CommandBufferNamespace::GPU_IO,
+ gpu::CommandBufferId(33), 1);
+ }
+
+ void Flush() override { NOTREACHED(); }
+
+ private:
+ base::flat_map<gfx::SysmemBufferCollectionId,
+ std::unique_ptr<TestBufferCollection>>
+ sysmem_buffer_collections_;
+
+  base::flat_set<gpu::Mailbox> mailboxes_;
+};
+
+} // namespace
+
class FuchsiaVideoDecoderTest : public testing::Test {
public:
FuchsiaVideoDecoderTest() {
- decoder_ = CreateFuchsiaVideoDecoderForTests(/*enable_sw_decoding=*/true);
+    decoder_ = CreateFuchsiaVideoDecoderForTests(&shared_image_interface_,
+                                                 &gpu_context_support_,
+                                                 /*enable_sw_decoding=*/true);
}
~FuchsiaVideoDecoderTest() override = default;
@@ -43,6 +210,7 @@ class FuchsiaVideoDecoderTest : public testing::Test {
void OnVideoFrame(scoped_refptr<VideoFrame> frame) {
num_output_frames_++;
+ CHECK(frame->HasTextures());
output_frames_.push_back(std::move(frame));
while (output_frames_.size() > frames_to_keep_) {
output_frames_.pop_front();
@@ -73,7 +241,13 @@ class FuchsiaVideoDecoderTest : public testing::Test {
}
protected:
- base::MessageLoopForIO message_loop_;
+ base::test::TaskEnvironment task_environment_{
+ base::test::TaskEnvironment::ThreadingMode::MAIN_THREAD_ONLY,
+ base::test::TaskEnvironment::MainThreadType::IO};
+
+ TestSharedImageInterface shared_image_interface_;
+ viz::TestContextSupport gpu_context_support_;
+
std::unique_ptr<VideoDecoder> decoder_;
std::list<scoped_refptr<VideoFrame>> output_frames_;
diff --git a/chromium/media/filters/offloading_video_decoder.cc b/chromium/media/filters/offloading_video_decoder.cc
index 93f62a98e55..1cf837d567e 100644
--- a/chromium/media/filters/offloading_video_decoder.cc
+++ b/chromium/media/filters/offloading_video_decoder.cc
@@ -137,8 +137,8 @@ void OffloadingVideoDecoder::Initialize(const VideoDecoderConfig& config,
}
if (!offload_task_runner_) {
- offload_task_runner_ = base::CreateSequencedTaskRunnerWithTraits(
- {base::TaskPriority::USER_BLOCKING});
+ offload_task_runner_ = base::CreateSequencedTaskRunner(
+ {base::ThreadPool(), base::TaskPriority::USER_BLOCKING});
}
offload_task_runner_->PostTask(
diff --git a/chromium/media/filters/offloading_video_decoder_unittest.cc b/chromium/media/filters/offloading_video_decoder_unittest.cc
index 86e0fe051bb..319fc99f5a7 100644
--- a/chromium/media/filters/offloading_video_decoder_unittest.cc
+++ b/chromium/media/filters/offloading_video_decoder_unittest.cc
@@ -8,7 +8,7 @@
#include "base/bind_helpers.h"
#include "base/run_loop.h"
#include "base/test/gmock_callback_support.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/base/decoder_buffer.h"
#include "media/base/mock_filters.h"
#include "media/base/test_data_util.h"
@@ -66,9 +66,9 @@ class MockOffloadableVideoDecoder : public OffloadableVideoDecoder {
class OffloadingVideoDecoderTest : public testing::Test {
public:
OffloadingVideoDecoderTest()
- : task_env_(base::test::ScopedTaskEnvironment::MainThreadType::DEFAULT,
- base::test::ScopedTaskEnvironment::ThreadPoolExecutionMode::
- QUEUED) {}
+ : task_env_(
+ base::test::TaskEnvironment::MainThreadType::DEFAULT,
+ base::test::TaskEnvironment::ThreadPoolExecutionMode::QUEUED) {}
void CreateWrapper(int offload_width, VideoCodec codec) {
decoder_ = new testing::StrictMock<MockOffloadableVideoDecoder>();
@@ -187,7 +187,7 @@ class OffloadingVideoDecoderTest : public testing::Test {
MOCK_METHOD1(DecodeDone, void(DecodeStatus));
MOCK_METHOD0(ResetDone, void(void));
- base::test::ScopedTaskEnvironment task_env_;
+ base::test::TaskEnvironment task_env_;
std::unique_ptr<OffloadingVideoDecoder> offloading_decoder_;
testing::StrictMock<MockOffloadableVideoDecoder>* decoder_ =
nullptr; // Owned by |offloading_decoder_|.
diff --git a/chromium/media/filters/pipeline_controller_unittest.cc b/chromium/media/filters/pipeline_controller_unittest.cc
index dcbb3aff8db..674a1cd1bda 100644
--- a/chromium/media/filters/pipeline_controller_unittest.cc
+++ b/chromium/media/filters/pipeline_controller_unittest.cc
@@ -13,7 +13,7 @@
#include "base/memory/ref_counted.h"
#include "base/run_loop.h"
#include "base/test/gmock_callback_support.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/time/time.h"
#include "media/base/mock_filters.h"
#include "media/base/pipeline.h"
@@ -158,7 +158,7 @@ class PipelineControllerTest : public ::testing::Test, public Pipeline::Client {
void OnAudioDecoderChange(const PipelineDecoderInfo& info) override {}
void OnVideoDecoderChange(const PipelineDecoderInfo& info) override {}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
NiceMock<MockDemuxer> demuxer_;
StrictMock<MockPipeline>* pipeline_;
diff --git a/chromium/media/filters/stream_parser_factory.cc b/chromium/media/filters/stream_parser_factory.cc
index 9e29421d90f..d35f3c04496 100644
--- a/chromium/media/filters/stream_parser_factory.cc
+++ b/chromium/media/filters/stream_parser_factory.cc
@@ -430,20 +430,22 @@ static bool VerifyCodec(const CodecInfo* codec_info,
}
// Checks to see if the specified |type| and |codecs| list are supported.
-//
-// Returns true if |type| and all codecs listed in |codecs| are supported.
-// |factory_function| contains a function that can build a StreamParser for this
-// type. Value may be nullptr, in which case it is not touched.
+// Returns IsNotSupported if |type| and |codecs| are definitively not supported.
+// The values of |factory_function|, |audio_codecs|, and |video_codecs| are
+// undefined in this case.
+// Returns IsSupported if |type| is supported, any non-empty codecs requirement
+// for |type| is met, and all codecs listed in |codecs| are supported for
+// |type|.
+// Returns MayBeSupported if |type| is supported, but requires a codecs
+// parameter that is missing.
+// For both IsSupported and MayBeSupported results, |factory_function| is
+// updated to be a function that can build a StreamParser for this type,
// |audio_codecs| is updated with the appropriate HistogramTags for matching
-// audio codecs specified in |codecs|. Value may be nullptr, in which case it is
-// not touched.
-// |video_codecs| is updated with the appropriate HistogramTags for matching
-// video codecs specified in |codecs|. Value may be nullptr, in which case it is
-// not touched.
-//
-// Returns false otherwise. The values of |factory_function|, |audio_codecs|,
-// and |video_codecs| are undefined.
-static bool CheckTypeAndCodecs(
+// audio codecs specified in |codecs|, and |video_codecs| is updated with the
+// appropriate HistogramTags for matching video codecs specified in |codecs|.
+// The value of each of |factory_function|, |audio_codecs| and |video_codecs| is
+// not updated if it was nullptr initially.
+static SupportsType CheckTypeAndCodecs(
const std::string& type,
const std::vector<std::string>& codecs,
MediaLog* media_log,
@@ -458,14 +460,20 @@ static bool CheckTypeAndCodecs(
const CodecInfo* codec_info = type_info.codecs[0];
if (codec_info && !codec_info->pattern &&
VerifyCodec(codec_info, audio_codecs, video_codecs)) {
+          // There was no codecs parameter, but the major/minor type is
+          // supported, specific, and requires no codecs parameter (e.g.
+          // audio/mpeg is specific to MP3). Populate the expected specific
+          // codec value and factory function and return definitive support.
if (factory_function)
*factory_function = type_info.factory_function;
- return true;
+ return IsSupported;
}
MEDIA_LOG(DEBUG, media_log)
- << "A codecs parameter must be provided for '" << type << "'";
- return false;
+ << "A codecs parameter must be provided for '" << type
+ << "' to determine definitive support proactively.";
+ return MayBeSupported;
}
// Make sure all the codecs specified in |codecs| are
@@ -488,23 +496,26 @@ static bool CheckTypeAndCodecs(
MEDIA_LOG(DEBUG, media_log)
<< "Codec '" << codec_id << "' is not supported for '" << type
<< "'";
- return false;
+ // Though the major/minor type is supported, a codecs parameter value
+ // was found to not be supported.
+ return IsNotSupported;
}
}
if (factory_function)
*factory_function = type_info.factory_function;
- // All codecs were supported by this |type|.
- return true;
+ // There was a non-empty |codecs| for this supported |type|, and all of
+ // |codecs| are supported for this |type|.
+ return IsSupported;
}
}
// |type| didn't match any of the supported types.
- return false;
+ return IsNotSupported;
}
-bool StreamParserFactory::IsTypeSupported(
+SupportsType StreamParserFactory::IsTypeSupported(
const std::string& type,
const std::vector<std::string>& codecs) {
// TODO(wolenetz): Questionable MediaLog usage, http://crbug.com/712310
@@ -522,13 +533,15 @@ std::unique_ptr<StreamParser> StreamParserFactory::Create(
std::vector<CodecInfo::HistogramTag> audio_codecs;
std::vector<CodecInfo::HistogramTag> video_codecs;
- if (CheckTypeAndCodecs(type, codecs, media_log, &factory_function,
- &audio_codecs, &video_codecs)) {
+ if (IsSupported == CheckTypeAndCodecs(type, codecs, media_log,
+ &factory_function, &audio_codecs,
+ &video_codecs)) {
// Log the expected codecs.
- // TODO(wolenetz): Relocate the logging to the parser configuration
- // callback. This creation method is called in AddId(), and also in
- // CanChangeType() and ChangeType(), so potentially overlogs codecs leading
- // to disproportion versus actually parsed codec configurations from
+ // TODO(wolenetz): Relax the requirement for specific codecs (allow
+ // MayBeSupported here), and relocate the logging to the parser
+ // configuration callback. This creation method is called in AddId(), and
+ // also in CanChangeType() and ChangeType(), so potentially overlogs codecs
+ // leading to disproportion versus actually parsed codec configurations from
// initialization segments. For this work and also recording when implicit
// codec switching occurs (without explicit ChangeType), see
// https://crbug.com/535738.
diff --git a/chromium/media/filters/stream_parser_factory.h b/chromium/media/filters/stream_parser_factory.h
index e879c708f2c..50544a5a93e 100644
--- a/chromium/media/filters/stream_parser_factory.h
+++ b/chromium/media/filters/stream_parser_factory.h
@@ -11,6 +11,7 @@
#include "media/base/media_export.h"
#include "media/base/media_log.h"
+#include "media/base/mime_util.h"
namespace media {
@@ -19,9 +20,15 @@ class StreamParser;
class MEDIA_EXPORT StreamParserFactory {
public:
// Checks to see if the specified |type| and |codecs| list are supported.
- // Returns true if |type| and all codecs listed in |codecs| are supported.
- static bool IsTypeSupported(
- const std::string& type, const std::vector<std::string>& codecs);
+ // Returns one of the following SupportsType values:
+ // IsNotSupported indicates definitive lack of support.
+ // IsSupported indicates the mime type is supported, any non-empty codecs
+ // requirement is met for the mime type, and all of the passed codecs are
+ // supported for the mime type.
+ // MayBeSupported indicates the mime type is supported, but the mime type
+ // requires a codecs parameter that is missing.
+ static SupportsType IsTypeSupported(const std::string& type,
+ const std::vector<std::string>& codecs);
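// A minimal sketch (not part of this change) of how a caller might consume the
// new three-way result; the wrapper function below is an assumption for
// illustration, not an API added by this patch.
#include <string>
#include <vector>

#include "media/base/mime_util.h"
#include "media/filters/stream_parser_factory.h"

bool MseTypeMightBePlayable(const std::string& mime_type,
                            const std::vector<std::string>& codecs) {
  media::SupportsType support =
      media::StreamParserFactory::IsTypeSupported(mime_type, codecs);
  // IsSupported: the type and every listed codec are supported.
  // MayBeSupported: the type is supported but the required codecs parameter is
  // missing, so support cannot be determined definitively.
  // IsNotSupported: definitive lack of support.
  return support != media::IsNotSupported;
}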
// Creates a new StreamParser object if the specified |type| and |codecs| list
// are supported. |media_log| can be used to report errors if there is
diff --git a/chromium/media/filters/video_decoder_stream_unittest.cc b/chromium/media/filters/video_decoder_stream_unittest.cc
index 1c4e99ccaeb..f250d264429 100644
--- a/chromium/media/filters/video_decoder_stream_unittest.cc
+++ b/chromium/media/filters/video_decoder_stream_unittest.cc
@@ -11,7 +11,7 @@
#include "base/run_loop.h"
#include "base/strings/string_number_conversions.h"
#include "base/test/gmock_callback_support.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "build/build_config.h"
#include "media/base/fake_demuxer_stream.h"
#include "media/base/mock_filters.h"
@@ -85,7 +85,7 @@ class VideoDecoderStreamTest
has_no_key_(false) {
video_decoder_stream_.reset(new VideoDecoderStream(
std::make_unique<VideoDecoderStream::StreamTraits>(&media_log_),
- scoped_task_environment_.GetMainThreadTaskRunner(),
+ task_environment_.GetMainThreadTaskRunner(),
base::BindRepeating(&VideoDecoderStreamTest::CreateVideoDecodersForTest,
base::Unretained(this)),
&media_log_));
@@ -139,7 +139,7 @@ class VideoDecoderStreamTest
void PrepareFrame(scoped_refptr<VideoFrame> frame,
VideoDecoderStream::OutputReadyCB output_ready_cb) {
// Simulate some delay in return of the output.
- scoped_task_environment_.GetMainThreadTaskRunner()->PostTask(
+ task_environment_.GetMainThreadTaskRunner()->PostTask(
FROM_HERE,
base::BindOnce(std::move(output_ready_cb), std::move(frame)));
}
@@ -167,7 +167,7 @@ class VideoDecoderStreamTest
// Note this is _not_ inserted into |decoders_| below, so we don't need to
// adjust the indices used below to compensate.
decoders.push_back(std::make_unique<DecryptingVideoDecoder>(
- scoped_task_environment_.GetMainThreadTaskRunner(), &media_log_));
+ task_environment_.GetMainThreadTaskRunner(), &media_log_));
#endif
for (int i = 0; i < 3; ++i) {
@@ -463,7 +463,7 @@ class VideoDecoderStreamTest
SatisfyPendingCallback(DECODER_REINIT);
}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
StrictMock<MockMediaLog> media_log_;
std::unique_ptr<VideoDecoderStream> video_decoder_stream_;
diff --git a/chromium/media/filters/vp9_parser.cc b/chromium/media/filters/vp9_parser.cc
index f975e7e84c6..2a904b64ceb 100644
--- a/chromium/media/filters/vp9_parser.cc
+++ b/chromium/media/filters/vp9_parser.cc
@@ -531,6 +531,12 @@ void Vp9Parser::SetStream(const uint8_t* stream,
stream_decrypt_config_ = std::move(stream_config);
}
+void Vp9Parser::SetStream(const uint8_t* stream,
+ off_t stream_size,
+ std::unique_ptr<DecryptConfig> stream_config) {
+ SetStream(stream, stream_size, {}, std::move(stream_config));
+}
+
void Vp9Parser::Reset() {
stream_ = nullptr;
bytes_left_ = 0;
diff --git a/chromium/media/filters/vp9_parser.h b/chromium/media/filters/vp9_parser.h
index 0aa1bb8b16e..359bafd835f 100644
--- a/chromium/media/filters/vp9_parser.h
+++ b/chromium/media/filters/vp9_parser.h
@@ -382,6 +382,10 @@ class MEDIA_EXPORT Vp9Parser {
const std::vector<uint32_t>& spatial_layer_frame_size,
std::unique_ptr<DecryptConfig> stream_config);
+ void SetStream(const uint8_t* stream,
+ off_t stream_size,
+ std::unique_ptr<DecryptConfig> stream_config);
+
// Parse the next frame in the current stream buffer, filling |fhdr| with
// the parsed frame header and updating current segmentation and loop filter
// state. The necessary frame size to decode |fhdr| fills in |allocate_size|.
diff --git a/chromium/media/filters/vp9_parser_encrypted_fuzzertest.cc b/chromium/media/filters/vp9_parser_encrypted_fuzzertest.cc
index 69e3820b4ee..ac102a6a274 100644
--- a/chromium/media/filters/vp9_parser_encrypted_fuzzertest.cc
+++ b/chromium/media/filters/vp9_parser_encrypted_fuzzertest.cc
@@ -5,6 +5,8 @@
#include <stddef.h>
#include <stdint.h>
+#include <fuzzer/FuzzedDataProvider.h>
+
#include "base/numerics/safe_conversions.h"
#include "media/base/decrypt_config.h"
@@ -12,8 +14,6 @@
#include "media/filters/ivf_parser.h"
#include "media/filters/vp9_parser.h"
-#include "third_party/libFuzzer/src/utils/FuzzedDataProvider.h"
-
struct Environment {
Environment() {
// Disable noisy logging as per "libFuzzer in Chrome" documentation:
@@ -57,7 +57,7 @@ extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
while (ivf_parser.ParseNextFrame(&ivf_frame_header, &ivf_payload)) {
media::Vp9FrameHeader vp9_frame_header;
vp9_parser.SetStream(
- ivf_payload, ivf_frame_header.frame_size, {},
+ ivf_payload, ivf_frame_header.frame_size,
media::DecryptConfig::CreateCencConfig(key_id, iv, subsamples));
// TODO(kcwu): further fuzzing the case of Vp9Parser::kAwaitingRefresh.
std::unique_ptr<media::DecryptConfig> null_config;
diff --git a/chromium/media/filters/vp9_parser_fuzzertest.cc b/chromium/media/filters/vp9_parser_fuzzertest.cc
index 9da885ba550..a4343d56e87 100644
--- a/chromium/media/filters/vp9_parser_fuzzertest.cc
+++ b/chromium/media/filters/vp9_parser_fuzzertest.cc
@@ -36,7 +36,7 @@ extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
// Parse until the end of stream/unsupported stream/error in stream is found.
while (ivf_parser.ParseNextFrame(&ivf_frame_header, &ivf_payload)) {
media::Vp9FrameHeader vp9_frame_header;
- vp9_parser.SetStream(ivf_payload, ivf_frame_header.frame_size, {}, nullptr);
+ vp9_parser.SetStream(ivf_payload, ivf_frame_header.frame_size, nullptr);
// TODO(kcwu): further fuzzing the case of Vp9Parser::kAwaitingRefresh.
std::unique_ptr<media::DecryptConfig> null_config;
gfx::Size allocate_size;
diff --git a/chromium/media/filters/vp9_parser_unittest.cc b/chromium/media/filters/vp9_parser_unittest.cc
index b435229f844..2381676710b 100644
--- a/chromium/media/filters/vp9_parser_unittest.cc
+++ b/chromium/media/filters/vp9_parser_unittest.cc
@@ -144,7 +144,7 @@ Vp9Parser::Result Vp9ParserTest::ParseNextFrame(Vp9FrameHeader* fhdr) {
if (!ivf_parser_.ParseNextFrame(&ivf_frame_header, &ivf_payload))
return Vp9Parser::kEOStream;
- vp9_parser_->SetStream(ivf_payload, ivf_frame_header.frame_size, {},
+ vp9_parser_->SetStream(ivf_payload, ivf_frame_header.frame_size,
nullptr);
continue;
}
@@ -158,7 +158,7 @@ void Vp9ParserTest::CheckSubsampleValues(
size_t framesize,
std::unique_ptr<DecryptConfig> config,
std::vector<std::unique_ptr<DecryptConfig>>& expected_split) {
- vp9_parser_->SetStream(superframe, framesize, {}, std::move(config));
+ vp9_parser_->SetStream(superframe, framesize, std::move(config));
for (auto& expected : expected_split) {
std::unique_ptr<DecryptConfig> actual =
vp9_parser_->NextFrameDecryptContextForTesting();
@@ -428,7 +428,7 @@ TEST_F(Vp9ParserTest, UnalignedInvalidSubsampleParsing) {
// marker again.
superframe_marker_byte};
- vp9_parser_->SetStream(kSuperframe, sizeof(kSuperframe), {},
+ vp9_parser_->SetStream(kSuperframe, sizeof(kSuperframe),
DecryptConfig::CreateCencConfig(
kKeyID, kInitialIV, {SubsampleEntry(16, 32)}));
@@ -459,7 +459,7 @@ TEST_F(Vp9ParserTest, CipherBytesCoverSuperframeMarkerSubsampleParsing) {
// marker again.
superframe_marker_byte};
- vp9_parser_->SetStream(kSuperframe, sizeof(kSuperframe), {},
+ vp9_parser_->SetStream(kSuperframe, sizeof(kSuperframe),
DecryptConfig::CreateCencConfig(
kKeyID, kInitialIV, {SubsampleEntry(0, 48)}));
@@ -494,7 +494,7 @@ TEST_F(Vp9ParserTest, ClearBytesCoverSuperframeMarkerSubsampleParsing) {
// marker again.
superframe_marker_byte};
- vp9_parser_->SetStream(kSuperframe, sizeof(kSuperframe), {},
+ vp9_parser_->SetStream(kSuperframe, sizeof(kSuperframe),
DecryptConfig::CreateCencConfig(
kKeyID, kInitialIV, {SubsampleEntry(48, 0)}));
@@ -530,7 +530,7 @@ TEST_F(Vp9ParserTest, SecondClearSubsampleSuperframeMarkerSubsampleParsing) {
superframe_marker_byte};
vp9_parser_->SetStream(
- kSuperframe, sizeof(kSuperframe), {},
+ kSuperframe, sizeof(kSuperframe),
DecryptConfig::CreateCencConfig(kKeyID, kInitialIV,
{
SubsampleEntry(16, 16),
diff --git a/chromium/media/filters/vpx_video_decoder_unittest.cc b/chromium/media/filters/vpx_video_decoder_unittest.cc
index befb24ebbda..e0a53024df6 100644
--- a/chromium/media/filters/vpx_video_decoder_unittest.cc
+++ b/chromium/media/filters/vpx_video_decoder_unittest.cc
@@ -8,7 +8,7 @@
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "build/build_config.h"
#include "media/base/decoder_buffer.h"
#include "media/base/limits.h"
@@ -162,7 +162,7 @@ class VpxVideoDecoderTest : public testing::Test {
MOCK_METHOD1(DecodeDone, void(DecodeStatus));
- base::test::ScopedTaskEnvironment task_env_;
+ base::test::TaskEnvironment task_env_;
std::unique_ptr<VideoDecoder> decoder_;
scoped_refptr<DecoderBuffer> i_frame_buffer_;
diff --git a/chromium/media/fuchsia/OWNERS b/chromium/media/fuchsia/OWNERS
new file mode 100644
index 00000000000..50612805a53
--- /dev/null
+++ b/chromium/media/fuchsia/OWNERS
@@ -0,0 +1,3 @@
+sergeyu@chromium.org
+yucliu@chromium.org
+wez@chromium.org
diff --git a/chromium/media/fuchsia/cdm/BUILD.gn b/chromium/media/fuchsia/cdm/BUILD.gn
new file mode 100644
index 00000000000..04defe57532
--- /dev/null
+++ b/chromium/media/fuchsia/cdm/BUILD.gn
@@ -0,0 +1,22 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_fuchsia)
+
+source_set("cdm") {
+ sources = [
+ "fuchsia_cdm.cc",
+ "fuchsia_cdm.h",
+ "fuchsia_cdm_factory.cc",
+ "fuchsia_cdm_factory.h",
+ ]
+
+ deps = [
+ "//fuchsia/base",
+ "//media",
+ "//media/fuchsia/mojom",
+ "//services/service_manager/public/cpp",
+ "//third_party/fuchsia-sdk/sdk:media_drm",
+ ]
+}
diff --git a/chromium/media/fuchsia/cdm/DEPS b/chromium/media/fuchsia/cdm/DEPS
new file mode 100644
index 00000000000..66b87e0e4e1
--- /dev/null
+++ b/chromium/media/fuchsia/cdm/DEPS
@@ -0,0 +1,4 @@
+include_rules = [
+ "+fuchsia/base",
+ "+services/service_manager/public",
+]
diff --git a/chromium/media/fuchsia/cdm/fuchsia_cdm.cc b/chromium/media/fuchsia/cdm/fuchsia_cdm.cc
new file mode 100644
index 00000000000..cbe083a91d6
--- /dev/null
+++ b/chromium/media/fuchsia/cdm/fuchsia_cdm.cc
@@ -0,0 +1,398 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/fuchsia/cdm/fuchsia_cdm.h"
+
+#include "base/fuchsia/fuchsia_logging.h"
+#include "base/logging.h"
+#include "base/optional.h"
+#include "fuchsia/base/mem_buffer_util.h"
+#include "media/base/callback_registry.h"
+#include "media/base/cdm_promise.h"
+
+namespace media {
+
+namespace {
+
+std::string GetInitDataTypeName(EmeInitDataType type) {
+ switch (type) {
+ case EmeInitDataType::WEBM:
+ return "webm";
+ case EmeInitDataType::CENC:
+ return "cenc";
+ case EmeInitDataType::KEYIDS:
+ return "keyids";
+ case EmeInitDataType::UNKNOWN:
+ return "unknown";
+ }
+}
+
+fuchsia::media::drm::LicenseInitData CreateLicenseInitData(
+ EmeInitDataType type,
+ const std::vector<uint8_t>& data) {
+ fuchsia::media::drm::LicenseInitData init_data;
+ init_data.type = GetInitDataTypeName(type);
+ init_data.data = data;
+ return init_data;
+}
+
+fuchsia::media::drm::LicenseServerMessage CreateLicenseServerMessage(
+ const std::vector<uint8_t>& response) {
+ fuchsia::media::drm::LicenseServerMessage message;
+ message.message = cr_fuchsia::MemBufferFromString(
+ base::StringPiece(reinterpret_cast<const char*>(response.data()),
+ response.size()),
+ "cr-drm-license-server-message");
+ return message;
+}
+
+CdmMessageType ToCdmMessageType(fuchsia::media::drm::LicenseMessageType type) {
+ switch (type) {
+ case fuchsia::media::drm::LicenseMessageType::REQUEST:
+ return CdmMessageType::LICENSE_REQUEST;
+ case fuchsia::media::drm::LicenseMessageType::RENEWAL:
+ return CdmMessageType::LICENSE_RENEWAL;
+ case fuchsia::media::drm::LicenseMessageType::RELEASE:
+ return CdmMessageType::LICENSE_RELEASE;
+ }
+}
+
+CdmKeyInformation::KeyStatus ToCdmKeyStatus(
+ fuchsia::media::drm::KeyStatus status) {
+ switch (status) {
+ case fuchsia::media::drm::KeyStatus::USABLE:
+ return CdmKeyInformation::USABLE;
+ case fuchsia::media::drm::KeyStatus::EXPIRED:
+ return CdmKeyInformation::EXPIRED;
+ case fuchsia::media::drm::KeyStatus::RELEASED:
+ return CdmKeyInformation::RELEASED;
+ case fuchsia::media::drm::KeyStatus::OUTPUT_RESTRICTED:
+ return CdmKeyInformation::OUTPUT_RESTRICTED;
+ case fuchsia::media::drm::KeyStatus::OUTPUT_DOWNSCALED:
+ return CdmKeyInformation::OUTPUT_DOWNSCALED;
+ case fuchsia::media::drm::KeyStatus::STATUS_PENDING:
+ return CdmKeyInformation::KEY_STATUS_PENDING;
+ case fuchsia::media::drm::KeyStatus::INTERNAL_ERROR:
+ return CdmKeyInformation::INTERNAL_ERROR;
+ }
+}
+
+CdmPromise::Exception ToCdmPromiseException(fuchsia::media::drm::Error error) {
+ switch (error) {
+ case fuchsia::media::drm::Error::TYPE:
+ return CdmPromise::Exception::TYPE_ERROR;
+ case fuchsia::media::drm::Error::NOT_SUPPORTED:
+ return CdmPromise::Exception::NOT_SUPPORTED_ERROR;
+ case fuchsia::media::drm::Error::INVALID_STATE:
+ return CdmPromise::Exception::INVALID_STATE_ERROR;
+ case fuchsia::media::drm::Error::QUOTA_EXCEEDED:
+ return CdmPromise::Exception::QUOTA_EXCEEDED_ERROR;
+ }
+}
+
+} // namespace
+
+class FuchsiaCdm::CdmSession {
+ public:
+ using ResultCB =
+ base::OnceCallback<void(base::Optional<CdmPromise::Exception>)>;
+
+ explicit CdmSession(const FuchsiaCdm::SessionCallbacks* callbacks)
+ : session_callbacks_(callbacks) {
+    // License session events, e.g. license request message, key status change.
+    // The Fuchsia CDM service guarantees that result callbacks of calls (e.g.
+    // GenerateLicenseRequest) are invoked before event callbacks, so it's safe
+    // to rely on that ordering to resolve the EME promises before sending
+    // session events to JS. EME requires that promises are resolved before the
+    // session message is dispatched.
+ session_.events().OnLicenseMessageGenerated =
+ fit::bind_member(this, &CdmSession::OnLicenseMessageGenerated);
+ session_.events().OnKeysChanged =
+ fit::bind_member(this, &CdmSession::OnKeysChanged);
+
+ session_.set_error_handler(
+ fit::bind_member(this, &CdmSession::OnSessionError));
+ }
+
+ ~CdmSession() {
+ if (!session_id_.empty())
+ session_callbacks_->closed_cb.Run(session_id_);
+ }
+
+ fidl::InterfaceRequest<fuchsia::media::drm::LicenseSession> NewRequest() {
+ return session_.NewRequest();
+ }
+
+ void GenerateLicenseRequest(EmeInitDataType init_data_type,
+ const std::vector<uint8_t>& init_data,
+ ResultCB generate_license_request_cb) {
+ DCHECK(!result_cb_);
+ result_cb_ = std::move(generate_license_request_cb);
+ session_->GenerateLicenseRequest(
+ CreateLicenseInitData(init_data_type, init_data),
+ [this](fuchsia::media::drm::LicenseSession_GenerateLicenseRequest_Result
+ result) { ProcessResult(result); });
+ }
+
+ void ProcessLicenseResponse(const std::vector<uint8_t>& response,
+ ResultCB process_license_response_cb) {
+ DCHECK(!result_cb_);
+ result_cb_ = std::move(process_license_response_cb);
+ session_->ProcessLicenseResponse(
+ CreateLicenseServerMessage(response),
+ [this](fuchsia::media::drm::LicenseSession_ProcessLicenseResponse_Result
+ result) { ProcessResult(result); });
+ }
+
+ void set_session_id(const std::string& session_id) {
+ session_id_ = session_id;
+ }
+ const std::string& session_id() const { return session_id_; }
+
+ private:
+ void OnLicenseMessageGenerated(fuchsia::media::drm::LicenseMessage message) {
+ DCHECK(!session_id_.empty());
+ std::string session_msg;
+ bool msg_available =
+ cr_fuchsia::StringFromMemBuffer(message.message, &session_msg);
+
+ if (!msg_available) {
+ LOG(ERROR) << "Failed to generate message for session " << session_id_;
+ return;
+ }
+
+ session_callbacks_->message_cb.Run(
+ session_id_, ToCdmMessageType(message.type),
+ std::vector<uint8_t>(session_msg.begin(), session_msg.end()));
+ }
+
+ void OnKeysChanged(std::vector<fuchsia::media::drm::KeyInfo> key_info) {
+ bool has_additional_usable_key = false;
+ CdmKeysInfo keys_info;
+ for (const auto& info : key_info) {
+ CdmKeyInformation::KeyStatus status = ToCdmKeyStatus(info.status);
+ has_additional_usable_key |= (status == CdmKeyInformation::USABLE);
+ keys_info.emplace_back(new CdmKeyInformation(
+ info.key_id.data.data(), info.key_id.data.size(), status, 0));
+ }
+
+ session_callbacks_->keys_change_cb.Run(
+ session_id_, has_additional_usable_key, std::move(keys_info));
+ }
+
+ void OnSessionError(zx_status_t status) {
+ ZX_LOG(ERROR, status) << "Session error.";
+ if (result_cb_)
+ std::move(result_cb_).Run(CdmPromise::Exception::TYPE_ERROR);
+ }
+
+ template <typename T>
+ void ProcessResult(const T& result) {
+ DCHECK(result_cb_);
+ std::move(result_cb_)
+ .Run(result.is_err()
+ ? base::make_optional(ToCdmPromiseException(result.err()))
+ : base::nullopt);
+ }
+
+ fuchsia::media::drm::LicenseSessionPtr session_;
+ std::string session_id_;
+
+ // Callback for license operation.
+ ResultCB result_cb_;
+
+ const SessionCallbacks* session_callbacks_;
+};
+
+FuchsiaCdm::SessionCallbacks::SessionCallbacks() = default;
+FuchsiaCdm::SessionCallbacks::SessionCallbacks(SessionCallbacks&&) = default;
+FuchsiaCdm::SessionCallbacks::~SessionCallbacks() = default;
+FuchsiaCdm::SessionCallbacks& FuchsiaCdm::SessionCallbacks::operator=(
+ SessionCallbacks&&) = default;
+
+FuchsiaCdm::FuchsiaCdm(fuchsia::media::drm::ContentDecryptionModulePtr cdm,
+ SessionCallbacks callbacks)
+ : cdm_(std::move(cdm)), session_callbacks_(std::move(callbacks)) {
+ DCHECK(cdm_);
+ cdm_.set_error_handler([](zx_status_t status) {
+ // Error will be handled in CdmSession::OnSessionError.
+ ZX_LOG(ERROR, status) << "The fuchsia.media.drm.ContentDecryptionModule"
+ << " channel was terminated.";
+ });
+}
+
+FuchsiaCdm::~FuchsiaCdm() = default;
+
+void FuchsiaCdm::SetServerCertificate(
+ const std::vector<uint8_t>& certificate,
+ std::unique_ptr<SimpleCdmPromise> promise) {
+ uint32_t promise_id = promises_.SavePromise(std::move(promise));
+ cdm_->SetServerCertificate(
+ certificate,
+ [this, promise_id](
+ fuchsia::media::drm::
+ ContentDecryptionModule_SetServerCertificate_Result result) {
+ if (result.is_err()) {
+ promises_.RejectPromise(promise_id,
+ ToCdmPromiseException(result.err()), 0,
+                                  "Failed to set server certificate.");
+ return;
+ }
+
+ promises_.ResolvePromise(promise_id);
+ });
+}
+
+void FuchsiaCdm::CreateSessionAndGenerateRequest(
+ CdmSessionType session_type,
+ EmeInitDataType init_data_type,
+ const std::vector<uint8_t>& init_data,
+ std::unique_ptr<NewSessionCdmPromise> promise) {
+ // TODO(yucliu): Support persistent license.
+ if (session_type != CdmSessionType::kTemporary) {
+ promise->reject(CdmPromise::Exception::NOT_SUPPORTED_ERROR, 0,
+ "session type is not supported.");
+ return;
+ }
+
+ if (init_data_type == EmeInitDataType::UNKNOWN) {
+ promise->reject(CdmPromise::Exception::NOT_SUPPORTED_ERROR, 0,
+ "init data type is not supported.");
+ return;
+ }
+
+ uint32_t promise_id = promises_.SavePromise(std::move(promise));
+
+ auto session = std::make_unique<CdmSession>(&session_callbacks_);
+ CdmSession* session_ptr = session.get();
+
+ cdm_->CreateLicenseSession(
+ fuchsia::media::drm::LicenseSessionType::TEMPORARY,
+ session_ptr->NewRequest(),
+ [this, promise_id,
+ session = std::move(session)](std::string session_id) mutable {
+ OnCreateSession(std::move(session), promise_id, session_id);
+ });
+
+  // It's safe to pass the raw pointer |session_ptr| because |session| owns the
+  // callback, so the session is guaranteed to outlive the callback.
+ session_ptr->GenerateLicenseRequest(
+ init_data_type, init_data,
+ base::BindOnce(&FuchsiaCdm::OnGenerateLicenseRequestStatus,
+ base::Unretained(this), session_ptr, promise_id));
+}
+
+void FuchsiaCdm::OnCreateSession(std::unique_ptr<CdmSession> session,
+ uint32_t promise_id,
+ const std::string& session_id) {
+ if (session_id.empty()) {
+ promises_.RejectPromise(promise_id,
+ CdmPromise::Exception::NOT_SUPPORTED_ERROR, 0,
+                             "Failed to create license session.");
+ return;
+ }
+
+ session->set_session_id(session_id);
+ DCHECK(session_map_.find(session_id) == session_map_.end())
+ << "Duplicated session id " << session_id;
+ session_map_[session_id] = std::move(session);
+}
+
+void FuchsiaCdm::OnGenerateLicenseRequestStatus(
+ CdmSession* session,
+ uint32_t promise_id,
+ base::Optional<CdmPromise::Exception> exception) {
+ DCHECK(session);
+ std::string session_id = session->session_id();
+
+ if (exception.has_value()) {
+ promises_.RejectPromise(promise_id, exception.value(), 0,
+                            "Failed to generate license.");
+ session_map_.erase(session_id);
+ return;
+ }
+
+ DCHECK(!session_id.empty());
+ promises_.ResolvePromise(promise_id, session_id);
+}
+
+void FuchsiaCdm::LoadSession(CdmSessionType session_type,
+ const std::string& session_id,
+ std::unique_ptr<NewSessionCdmPromise> promise) {
+ NOTIMPLEMENTED();
+}
+
+void FuchsiaCdm::UpdateSession(const std::string& session_id,
+ const std::vector<uint8_t>& response,
+ std::unique_ptr<SimpleCdmPromise> promise) {
+ auto it = session_map_.find(session_id);
+ if (it == session_map_.end()) {
+ promise->reject(CdmPromise::Exception::INVALID_STATE_ERROR, 0,
+ "session doesn't exist.");
+ return;
+ }
+
+ // Caller should NOT pass in an empty response.
+ DCHECK(!response.empty());
+
+ uint32_t promise_id = promises_.SavePromise(std::move(promise));
+
+ CdmSession* session = it->second.get();
+ DCHECK(session);
+
+ session->ProcessLicenseResponse(
+ response, base::BindOnce(&FuchsiaCdm::OnProcessLicenseServerMessageStatus,
+ base::Unretained(this), promise_id));
+}
+
+void FuchsiaCdm::OnProcessLicenseServerMessageStatus(
+ uint32_t promise_id,
+ base::Optional<CdmPromise::Exception> exception) {
+ if (exception.has_value()) {
+ promises_.RejectPromise(promise_id, exception.value(), 0,
+                            "Failed to process license.");
+ return;
+ }
+
+ promises_.ResolvePromise(promise_id);
+}
+
+void FuchsiaCdm::CloseSession(const std::string& session_id,
+ std::unique_ptr<SimpleCdmPromise> promise) {
+  // There's a small window in which the app can call close twice before
+  // receiving the closed event; in that case we still want to resolve the
+  // promise. See AesDecryptor::CloseSession for more details.
+ //
+ // Resolve the promise before deleting CdmSession. CdmSession will call
+ // SessionClosedCB in its destructor.
+ promise->resolve();
+ session_map_.erase(session_id);
+}
+
+void FuchsiaCdm::RemoveSession(const std::string& session_id,
+ std::unique_ptr<SimpleCdmPromise> promise) {
+ NOTIMPLEMENTED();
+ promise->reject(CdmPromise::Exception::NOT_SUPPORTED_ERROR, 0,
+ "not implemented");
+}
+
+CdmContext* FuchsiaCdm::GetCdmContext() {
+ return this;
+}
+
+std::unique_ptr<CallbackRegistration> FuchsiaCdm::RegisterEventCB(
+ EventCB event_cb) {
+ NOTIMPLEMENTED();
+ return nullptr;
+}
+
+Decryptor* FuchsiaCdm::GetDecryptor() {
+ NOTIMPLEMENTED();
+ return nullptr;
+}
+
+int FuchsiaCdm::GetCdmId() const {
+ return kInvalidCdmId;
+}
+
+} // namespace media
diff --git a/chromium/media/fuchsia/cdm/fuchsia_cdm.h b/chromium/media/fuchsia/cdm/fuchsia_cdm.h
new file mode 100644
index 00000000000..93e2a7efa9b
--- /dev/null
+++ b/chromium/media/fuchsia/cdm/fuchsia_cdm.h
@@ -0,0 +1,94 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_FUCHSIA_CDM_FUCHSIA_CDM_H_
+#define MEDIA_FUCHSIA_CDM_FUCHSIA_CDM_H_
+
+#include <fuchsia/media/drm/cpp/fidl.h>
+
+#include "base/containers/flat_map.h"
+#include "base/macros.h"
+#include "base/optional.h"
+#include "media/base/cdm_context.h"
+#include "media/base/cdm_promise_adapter.h"
+#include "media/base/content_decryption_module.h"
+
+namespace media {
+
+class FuchsiaCdm : public ContentDecryptionModule, public CdmContext {
+ public:
+ struct SessionCallbacks {
+ SessionCallbacks();
+ SessionCallbacks(SessionCallbacks&&);
+ ~SessionCallbacks();
+
+ SessionCallbacks& operator=(SessionCallbacks&&);
+
+ SessionMessageCB message_cb;
+ SessionClosedCB closed_cb;
+ SessionKeysChangeCB keys_change_cb;
+ SessionExpirationUpdateCB expiration_update_cb;
+
+ DISALLOW_COPY_AND_ASSIGN(SessionCallbacks);
+ };
+
+ FuchsiaCdm(fuchsia::media::drm::ContentDecryptionModulePtr cdm,
+ SessionCallbacks callbacks);
+
+ // ContentDecryptionModule implementation:
+ void SetServerCertificate(const std::vector<uint8_t>& certificate,
+ std::unique_ptr<SimpleCdmPromise> promise) override;
+ void CreateSessionAndGenerateRequest(
+ CdmSessionType session_type,
+ EmeInitDataType init_data_type,
+ const std::vector<uint8_t>& init_data,
+ std::unique_ptr<NewSessionCdmPromise> promise) override;
+ void LoadSession(CdmSessionType session_type,
+ const std::string& session_id,
+ std::unique_ptr<NewSessionCdmPromise> promise) override;
+ void UpdateSession(const std::string& session_id,
+ const std::vector<uint8_t>& response,
+ std::unique_ptr<SimpleCdmPromise> promise) override;
+ void CloseSession(const std::string& session_id,
+ std::unique_ptr<SimpleCdmPromise> promise) override;
+ void RemoveSession(const std::string& session_id,
+ std::unique_ptr<SimpleCdmPromise> promise) override;
+ CdmContext* GetCdmContext() override;
+
+ // CdmContext implementation:
+ std::unique_ptr<CallbackRegistration> RegisterEventCB(
+ EventCB event_cb) override;
+ Decryptor* GetDecryptor() override;
+ int GetCdmId() const override;
+
+ private:
+ class CdmSession;
+
+ ~FuchsiaCdm() override;
+
+ void OnCreateSession(std::unique_ptr<CdmSession> session,
+ uint32_t promise_id,
+ const std::string& session_id);
+ void OnGenerateLicenseRequestStatus(
+ CdmSession* session,
+ uint32_t promise_id,
+ base::Optional<CdmPromise::Exception> exception);
+ void OnProcessLicenseServerMessageStatus(
+ uint32_t promise_id,
+ base::Optional<CdmPromise::Exception> exception);
+
+ void OnCdmError(zx_status_t status);
+
+ CdmPromiseAdapter promises_;
+ base::flat_map<std::string, std::unique_ptr<CdmSession>> session_map_;
+
+ fuchsia::media::drm::ContentDecryptionModulePtr cdm_;
+ SessionCallbacks session_callbacks_;
+
+ DISALLOW_COPY_AND_ASSIGN(FuchsiaCdm);
+};
+
+} // namespace media
+
+#endif // MEDIA_FUCHSIA_CDM_FUCHSIA_CDM_H_
diff --git a/chromium/media/cdm/fuchsia/fuchsia_cdm_factory.cc b/chromium/media/fuchsia/cdm/fuchsia_cdm_factory.cc
index 612afdd9019..6e8059968ae 100644
--- a/chromium/media/cdm/fuchsia/fuchsia_cdm_factory.cc
+++ b/chromium/media/fuchsia/cdm/fuchsia_cdm_factory.cc
@@ -2,16 +2,26 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/cdm/fuchsia/fuchsia_cdm_factory.h"
+#include "media/fuchsia/cdm/fuchsia_cdm_factory.h"
+#include <fuchsia/media/drm/cpp/fidl.h>
+
+#include "base/bind.h"
#include "media/base/bind_to_current_loop.h"
+#include "media/base/cdm_config.h"
#include "media/base/key_systems.h"
#include "media/cdm/aes_decryptor.h"
+#include "media/fuchsia/cdm/fuchsia_cdm.h"
+#include "services/service_manager/public/cpp/interface_provider.h"
#include "url/origin.h"
namespace media {
-FuchsiaCdmFactory::FuchsiaCdmFactory() = default;
+FuchsiaCdmFactory::FuchsiaCdmFactory(
+ service_manager::InterfaceProvider* interface_provider)
+ : interface_provider_(interface_provider) {
+ DCHECK(interface_provider_);
+}
FuchsiaCdmFactory::~FuchsiaCdmFactory() = default;
@@ -35,12 +45,26 @@ void FuchsiaCdmFactory::Create(
auto cdm = base::MakeRefCounted<AesDecryptor>(
session_message_cb, session_closed_cb, session_keys_change_cb,
session_expiration_update_cb);
- std::move(bound_cdm_created_cb).Run(cdm, std::string());
+ std::move(bound_cdm_created_cb).Run(std::move(cdm), "");
return;
}
- // TODO(yucliu): Create CDM with platform support.
- std::move(bound_cdm_created_cb).Run(nullptr, "Unsupported key system.");
+ if (!cdm_provider_)
+ interface_provider_->GetInterface(mojo::MakeRequest(&cdm_provider_));
+
+ fuchsia::media::drm::ContentDecryptionModulePtr cdm_ptr;
+ cdm_provider_->CreateCdmInterface(key_system, cdm_ptr.NewRequest());
+
+ FuchsiaCdm::SessionCallbacks callbacks;
+ callbacks.message_cb = session_message_cb;
+ callbacks.closed_cb = session_closed_cb;
+ callbacks.keys_change_cb = session_keys_change_cb;
+ callbacks.expiration_update_cb = session_expiration_update_cb;
+
+ auto cdm = base::MakeRefCounted<FuchsiaCdm>(std::move(cdm_ptr),
+ std::move(callbacks));
+
+ std::move(bound_cdm_created_cb).Run(std::move(cdm), "");
}
} // namespace media
diff --git a/chromium/media/cdm/fuchsia/fuchsia_cdm_factory.h b/chromium/media/fuchsia/cdm/fuchsia_cdm_factory.h
index 4e41d22ec12..6fdba9dac04 100644
--- a/chromium/media/cdm/fuchsia/fuchsia_cdm_factory.h
+++ b/chromium/media/fuchsia/cdm/fuchsia_cdm_factory.h
@@ -2,18 +2,25 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef MEDIA_CDM_FUCHSIA_FUCHSIA_CDM_FACTORY_H_
-#define MEDIA_CDM_FUCHSIA_FUCHSIA_CDM_FACTORY_H_
+#ifndef MEDIA_FUCHSIA_CDM_FUCHSIA_CDM_FACTORY_H_
+#define MEDIA_FUCHSIA_CDM_FUCHSIA_CDM_FACTORY_H_
#include "base/macros.h"
#include "media/base/cdm_factory.h"
#include "media/base/media_export.h"
+#include "media/fuchsia/mojom/fuchsia_cdm_provider.mojom.h"
+
+namespace service_manager {
+class InterfaceProvider;
+}
namespace media {
class MEDIA_EXPORT FuchsiaCdmFactory : public CdmFactory {
public:
- FuchsiaCdmFactory();
+ // |interface_provider| must outlive this class.
+ explicit FuchsiaCdmFactory(
+ service_manager::InterfaceProvider* interface_provider);
~FuchsiaCdmFactory() final;
// CdmFactory implementation.
@@ -27,9 +34,12 @@ class MEDIA_EXPORT FuchsiaCdmFactory : public CdmFactory {
const CdmCreatedCB& cdm_created_cb) final;
private:
+ service_manager::InterfaceProvider* const interface_provider_;
+ media::mojom::FuchsiaCdmProviderPtr cdm_provider_;
+
DISALLOW_COPY_AND_ASSIGN(FuchsiaCdmFactory);
};
} // namespace media
-#endif // MEDIA_CDM_FUCHSIA_FUCHSIA_CDM_FACTORY_H_
+#endif // MEDIA_FUCHSIA_CDM_FUCHSIA_CDM_FACTORY_H_
diff --git a/chromium/media/fuchsia/cdm/service/BUILD.gn b/chromium/media/fuchsia/cdm/service/BUILD.gn
new file mode 100644
index 00000000000..1c9fe6ba82c
--- /dev/null
+++ b/chromium/media/fuchsia/cdm/service/BUILD.gn
@@ -0,0 +1,23 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_fuchsia)
+
+source_set("service") {
+ sources = [
+ "fuchsia_cdm_manager.cc",
+ "fuchsia_cdm_manager.h",
+ ]
+
+ public_deps = [
+ "//third_party/fuchsia-sdk/sdk:media_drm",
+ ]
+
+ deps = [
+ "//fuchsia/base",
+ "//media",
+ "//media/fuchsia/mojom",
+ "//url",
+ ]
+}
diff --git a/chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager.cc b/chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager.cc
new file mode 100644
index 00000000000..a47aa03a6e3
--- /dev/null
+++ b/chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager.cc
@@ -0,0 +1,282 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/fuchsia/cdm/service/fuchsia_cdm_manager.h"
+
+#include "base/bind.h"
+#include "base/callback.h"
+#include "base/fuchsia/fuchsia_logging.h"
+#include "base/logging.h"
+#include "fuchsia/base/mem_buffer_util.h"
+#include "url/origin.h"
+
+namespace media {
+
+// Provisioner for one origin. It ensures that only one provisioning request is
+// sent to the provisioning server at a time. Any concurrent provisioning
+// requests are queued until the current request finishes.
+class FuchsiaCdmManager::OriginProvisioner {
+ public:
+ explicit OriginProvisioner(KeySystemHandler* handler);
+ ~OriginProvisioner();
+
+ void CheckOrProvision(CreateFetcherCB create_fecther_cb,
+ base::OnceCallback<void(bool)> provision_cb);
+
+ private:
+ enum class ProvisionStatus {
+ UNKNOWN,
+ PENDING,
+ SUCCESS,
+ FAIL,
+ };
+
+  // Called when any error happens during the provisioning flow.
+ void OnProvisionFail();
+
+  // The following functions complete the origin provisioning flow. If any
+  // error happens, the flow is stopped.
+  // 1. Check whether the current origin is provisioned.
+ void CheckOrProvisionImpl();
+ void OnProvisionStatus(fuchsia::media::drm::ProvisioningStatus status);
+ // 2. Generate provision request if current origin is not provisioned.
+ void HandleDeviceProvision();
+ // 3. Send provision request to provisioning server.
+ void OnProvisioningRequest(fuchsia::media::drm::ProvisioningRequest request);
+ // 4. Provide provision response to CDM.
+ void OnProvisioningResponse(bool success, const std::string& response);
+ void OnProvisioningResponseResult(
+ fuchsia::media::drm::Provisioner_ProcessProvisioningResponse_Result
+ result);
+
+ void ProcessPendingCallbacks();
+
+ KeySystemHandler* const handler_;
+
+ ProvisionStatus provision_status_ = ProvisionStatus::UNKNOWN;
+ std::vector<base::OnceCallback<void(bool)>> pending_cbs_;
+
+  // Provisioner channel used for origin provisioning.
+ fuchsia::media::drm::ProvisionerPtr provisioner_;
+
+ CreateFetcherCB create_fetcher_cb_;
+ std::unique_ptr<ProvisionFetcher> provision_fetcher_;
+};
+
+FuchsiaCdmManager::OriginProvisioner::OriginProvisioner(
+ KeySystemHandler* handler)
+ : handler_(handler) {
+ DCHECK(handler_);
+}
+
+FuchsiaCdmManager::OriginProvisioner::~OriginProvisioner() = default;
+
+void FuchsiaCdmManager::OriginProvisioner::CheckOrProvision(
+ CreateFetcherCB create_fetcher_cb,
+ base::OnceCallback<void(bool)> provision_cb) {
+ pending_cbs_.push_back(std::move(provision_cb));
+
+ if (provision_status_ == ProvisionStatus::UNKNOWN) {
+ DCHECK(!provisioner_);
+
+ provision_status_ = ProvisionStatus::PENDING;
+ create_fetcher_cb_ = std::move(create_fetcher_cb);
+ CheckOrProvisionImpl();
+ return;
+ }
+
+  // A provisioning request is already pending. Wait for it to finish.
+ if (provision_status_ == ProvisionStatus::PENDING) {
+ return;
+ }
+
+ ProcessPendingCallbacks();
+}
+
+void FuchsiaCdmManager::OriginProvisioner::CheckOrProvisionImpl() {
+ if (!provisioner_) {
+ provisioner_ = handler_->CreateProvisioner();
+ if (!provisioner_) {
+      // No provisioner means provisioning is not needed at all.
+ provision_status_ = ProvisionStatus::SUCCESS;
+ ProcessPendingCallbacks();
+ return;
+ }
+
+ provisioner_.set_error_handler([this](zx_status_t status) {
+ ZX_DLOG(ERROR, status) << "The fuchsia.media.drm.Provisioner"
+ << " channel was terminated.";
+ OnProvisionFail();
+ });
+ }
+
+ provisioner_->GetStatus(
+ fit::bind_member(this, &OriginProvisioner::OnProvisionStatus));
+}
+
+void FuchsiaCdmManager::OriginProvisioner::OnProvisionStatus(
+ fuchsia::media::drm::ProvisioningStatus status) {
+ if (status == fuchsia::media::drm::ProvisioningStatus::PROVISIONED) {
+ provision_status_ = ProvisionStatus::SUCCESS;
+ ProcessPendingCallbacks();
+ return;
+ }
+
+ DCHECK_EQ(status, fuchsia::media::drm::ProvisioningStatus::NOT_PROVISIONED);
+ HandleDeviceProvision();
+}
+
+void FuchsiaCdmManager::OriginProvisioner::HandleDeviceProvision() {
+ DCHECK(provisioner_);
+
+ DVLOG(2) << "Start device provision.";
+
+ provisioner_->GenerateProvisioningRequest(
+ fit::bind_member(this, &OriginProvisioner::OnProvisioningRequest));
+}
+
+void FuchsiaCdmManager::OriginProvisioner::OnProvisioningRequest(
+ fuchsia::media::drm::ProvisioningRequest request) {
+ std::string request_str;
+ if (!cr_fuchsia::StringFromMemBuffer(request.message, &request_str)) {
+ DLOG(ERROR) << "Failed to get provision request.";
+ OnProvisionFail();
+ return;
+ }
+ if (!request.default_provisioning_server_url) {
+ DLOG(ERROR) << "Missing default provisioning server URL.";
+ OnProvisionFail();
+ return;
+ }
+
+ DCHECK(create_fetcher_cb_);
+ provision_fetcher_ = std::move(create_fetcher_cb_).Run();
+ DCHECK(provision_fetcher_);
+ provision_fetcher_->Retrieve(
+ request.default_provisioning_server_url.value(), request_str,
+ base::BindRepeating(&OriginProvisioner::OnProvisioningResponse,
+ base::Unretained(this)));
+}
+
+void FuchsiaCdmManager::OriginProvisioner::OnProvisioningResponse(
+ bool success,
+ const std::string& response) {
+ provision_fetcher_ = nullptr;
+
+ if (!success) {
+    LOG(ERROR) << "Failed to fetch provisioning response: " << response;
+ OnProvisionFail();
+ return;
+ }
+
+ fuchsia::media::drm::ProvisioningResponse provision_response;
+ provision_response.message =
+ cr_fuchsia::MemBufferFromString(response, "cr-drm-provision-response");
+
+ provisioner_->ProcessProvisioningResponse(
+ std::move(provision_response),
+ fit::bind_member(this, &OriginProvisioner::OnProvisioningResponseResult));
+}
+
+void FuchsiaCdmManager::OriginProvisioner::OnProvisioningResponseResult(
+ fuchsia::media::drm::Provisioner_ProcessProvisioningResponse_Result
+ result) {
+ if (result.is_err()) {
+    LOG(ERROR) << "Failed to process provisioning response: "
+               << static_cast<int>(result.err());
+ OnProvisionFail();
+ return;
+ }
+
+ DVLOG(2) << "Provision success!";
+
+ provision_status_ = ProvisionStatus::SUCCESS;
+ ProcessPendingCallbacks();
+}
+
+void FuchsiaCdmManager::OriginProvisioner::ProcessPendingCallbacks() {
+ DCHECK_NE(provision_status_, ProvisionStatus::UNKNOWN);
+ DCHECK_NE(provision_status_, ProvisionStatus::PENDING);
+ DCHECK(!pending_cbs_.empty());
+
+ if (provisioner_) {
+ // Close the channel by dropping the returned InterfaceHandle.
+ auto close_channel = provisioner_.Unbind();
+ }
+
+ auto pending_cbs = std::move(pending_cbs_);
+ for (auto& cb : pending_cbs) {
+ std::move(cb).Run(provision_status_ == ProvisionStatus::SUCCESS);
+ }
+}
+
+void FuchsiaCdmManager::OriginProvisioner::OnProvisionFail() {
+ provision_status_ = ProvisionStatus::FAIL;
+ ProcessPendingCallbacks();
+}
+
+FuchsiaCdmManager::FuchsiaCdmManager(KeySystemHandlerMap handlers)
+ : handlers_(std::move(handlers)) {
+ DETACH_FROM_THREAD(thread_checker_);
+}
+
+FuchsiaCdmManager::~FuchsiaCdmManager() = default;
+
+void FuchsiaCdmManager::CreateAndProvision(
+ const std::string& key_system,
+ const url::Origin& origin,
+ CreateFetcherCB create_fetcher_cb,
+ fidl::InterfaceRequest<fuchsia::media::drm::ContentDecryptionModule>
+ request) {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ auto it = handlers_.find(key_system);
+ if (it == handlers_.end()) {
+ DLOG(ERROR) << "Key system is not supported: " << key_system;
+ return;
+ }
+ KeySystemHandler* handler = it->second.get();
+
+ OriginProvisioner* origin_provisioner =
+ GetProvisioner(key_system, origin, handler);
+ DCHECK(origin_provisioner);
+
+ origin_provisioner->CheckOrProvision(
+ std::move(create_fetcher_cb),
+ base::BindOnce(&FuchsiaCdmManager::OnProvisionResult,
+ base::Unretained(this), handler, std::move(request)));
+}
+
+// TODO(yucliu): This should return a different OriginProvisioner for each
+// origin. Due to a platform limitation, we can only support one for all
+// origins. (crbug.com/991723)
+FuchsiaCdmManager::OriginProvisioner* FuchsiaCdmManager::GetProvisioner(
+ const std::string& key_system,
+ const url::Origin&,
+ KeySystemHandler* handler) {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ auto it = origin_provisioners_.find(key_system);
+ if (it == origin_provisioners_.end()) {
+ it = origin_provisioners_
+ .emplace(key_system, std::make_unique<OriginProvisioner>(handler))
+ .first;
+ }
+
+ return it->second.get();
+}
+
+void FuchsiaCdmManager::OnProvisionResult(
+ KeySystemHandler* handler,
+ fidl::InterfaceRequest<fuchsia::media::drm::ContentDecryptionModule>
+ request,
+ bool success) {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ if (!success) {
+ LOG(ERROR) << "Failed to provision origin";
+ return;
+ }
+
+ handler->CreateCdm(std::move(request));
+}
+
+} // namespace media
diff --git a/chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager.h b/chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager.h
new file mode 100644
index 00000000000..497bfedbb4c
--- /dev/null
+++ b/chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager.h
@@ -0,0 +1,87 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_FUCHSIA_CDM_SERVICE_FUCHSIA_CDM_MANAGER_H_
+#define MEDIA_FUCHSIA_CDM_SERVICE_FUCHSIA_CDM_MANAGER_H_
+
+#include <fuchsia/media/drm/cpp/fidl.h>
+#include <string>
+
+#include "base/containers/flat_map.h"
+#include "base/macros.h"
+#include "base/threading/thread_checker.h"
+#include "media/base/provision_fetcher.h"
+
+namespace url {
+class Origin;
+} // namespace url
+
+namespace media {
+
+// Creates and connects to the Fuchsia CDM service, provisioning the origin if
+// needed. When provisioning is needed by multiple web pages for the same
+// origin, it chains the concurrent provisioning requests and makes sure only
+// one provisioning request is handled for the origin at a time. This is mainly
+// because the latest provisioning response invalidates the old provisioned
+// certificate, as well as the license sessions. We want to make sure that once
+// the channel to the CDM service is established, nothing from Chromium breaks
+// it.
+class FuchsiaCdmManager {
+ public:
+ // Handler for key system specific logic.
+ class KeySystemHandler {
+ public:
+ virtual ~KeySystemHandler() = default;
+
+ // Create CDM for license management and decryption.
+ virtual void CreateCdm(
+ fidl::InterfaceRequest<fuchsia::media::drm::ContentDecryptionModule>
+ request) = 0;
+
+    // Creates a Provisioner for origin provisioning. The implementation may
+    // return nullptr if provisioning is not supported, in which case the
+    // caller should assume the origin is already provisioned.
+ virtual fuchsia::media::drm::ProvisionerPtr CreateProvisioner() = 0;
+ };
+
+ // A map from key system to its KeySystemHandler.
+ using KeySystemHandlerMap =
+ base::flat_map<std::string, std::unique_ptr<KeySystemHandler>>;
+
+ explicit FuchsiaCdmManager(KeySystemHandlerMap handlers);
+ ~FuchsiaCdmManager();
+
+ void CreateAndProvision(
+ const std::string& key_system,
+ const url::Origin& origin,
+ CreateFetcherCB create_fetcher_cb,
+ fidl::InterfaceRequest<fuchsia::media::drm::ContentDecryptionModule>
+ request);
+
+ private:
+ class OriginProvisioner;
+
+ OriginProvisioner* GetProvisioner(const std::string& key_system,
+ const url::Origin& origin,
+ KeySystemHandler* handler);
+
+ void OnProvisionResult(
+ KeySystemHandler* handler,
+ fidl::InterfaceRequest<fuchsia::media::drm::ContentDecryptionModule>
+ request,
+ bool success);
+
+ const KeySystemHandlerMap handlers_;
+
+ // key system -> OriginProvisioner
+ base::flat_map<std::string, std::unique_ptr<OriginProvisioner>>
+ origin_provisioners_;
+
+ THREAD_CHECKER(thread_checker_);
+
+ DISALLOW_COPY_AND_ASSIGN(FuchsiaCdmManager);
+};
+
+} // namespace media
+
+#endif // MEDIA_FUCHSIA_CDM_SERVICE_FUCHSIA_CDM_MANAGER_H_
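// A minimal sketch (not part of this change), assuming a hypothetical key
// system that needs no origin provisioning: its KeySystemHandler returns an
// unbound ProvisionerPtr, so FuchsiaCdmManager treats the origin as already
// provisioned. The class name, key system string, and factory function below
// are illustrative assumptions only.
#include <memory>
#include <utility>

#include "media/fuchsia/cdm/service/fuchsia_cdm_manager.h"

class NoProvisionKeySystemHandler
    : public media::FuchsiaCdmManager::KeySystemHandler {
 public:
  void CreateCdm(
      fidl::InterfaceRequest<fuchsia::media::drm::ContentDecryptionModule>
          request) override {
    // Forward |request| to the platform DRM service here (omitted in this
    // sketch).
  }

  fuchsia::media::drm::ProvisionerPtr CreateProvisioner() override {
    // Return an unbound ProvisionerPtr; the manager then assumes the origin
    // is already provisioned.
    return {};
  }
};

std::unique_ptr<media::FuchsiaCdmManager> MakeExampleCdmManager() {
  media::FuchsiaCdmManager::KeySystemHandlerMap handlers;
  handlers.emplace("org.example.drm",
                   std::make_unique<NoProvisionKeySystemHandler>());
  return std::make_unique<media::FuchsiaCdmManager>(std::move(handlers));
}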
diff --git a/chromium/media/fuchsia/mojom/BUILD.gn b/chromium/media/fuchsia/mojom/BUILD.gn
new file mode 100644
index 00000000000..d61221b7c5d
--- /dev/null
+++ b/chromium/media/fuchsia/mojom/BUILD.gn
@@ -0,0 +1,11 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//mojo/public/tools/bindings/mojom.gni")
+
+mojom("mojom") {
+ sources = [
+ "fuchsia_cdm_provider.mojom",
+ ]
+}
diff --git a/chromium/media/fuchsia/mojom/DEPS b/chromium/media/fuchsia/mojom/DEPS
new file mode 100644
index 00000000000..76ac06459a2
--- /dev/null
+++ b/chromium/media/fuchsia/mojom/DEPS
@@ -0,0 +1,3 @@
+include_rules = [
+ "+fuchsia/mojom",
+]
diff --git a/chromium/media/mojo/interfaces/OWNERS b/chromium/media/fuchsia/mojom/OWNERS
index 8e9e507beb2..ae29a36aac8 100644
--- a/chromium/media/mojo/interfaces/OWNERS
+++ b/chromium/media/fuchsia/mojom/OWNERS
@@ -2,7 +2,5 @@ per-file *.mojom=set noparent
per-file *.mojom=file://ipc/SECURITY_OWNERS
per-file *_mojom_traits*.*=set noparent
per-file *_mojom_traits*.*=file://ipc/SECURITY_OWNERS
-per-file *_struct_traits*.*=set noparent
-per-file *_struct_traits*.*=file://ipc/SECURITY_OWNERS
per-file *.typemap=set noparent
per-file *.typemap=file://ipc/SECURITY_OWNERS
diff --git a/chromium/media/fuchsia/mojom/cdm_request.typemap b/chromium/media/fuchsia/mojom/cdm_request.typemap
new file mode 100644
index 00000000000..daed02bbc57
--- /dev/null
+++ b/chromium/media/fuchsia/mojom/cdm_request.typemap
@@ -0,0 +1,16 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+mojom = "//media/fuchsia/mojom/fuchsia_cdm_provider.mojom"
+os_whitelist = [ "fuchsia" ]
+public_headers = [ "fuchsia/media/drm/cpp/fidl.h" ]
+traits_headers = [ "//media/fuchsia/mojom/cdm_request_mojom_traits.h" ]
+sources = [
+ "//media/fuchsia/mojom/cdm_request_mojom_traits.h",
+]
+public_deps = [
+ "//fuchsia/mojom:traits",
+ "//third_party/fuchsia-sdk/sdk:media_drm",
+]
+type_mappings = [ "media.mojom.CdmRequest=::fidl::InterfaceRequest<::fuchsia::media::drm::ContentDecryptionModule>[move_only]" ]
diff --git a/chromium/media/fuchsia/mojom/cdm_request_mojom_traits.h b/chromium/media/fuchsia/mojom/cdm_request_mojom_traits.h
new file mode 100644
index 00000000000..c9c2e6c80e3
--- /dev/null
+++ b/chromium/media/fuchsia/mojom/cdm_request_mojom_traits.h
@@ -0,0 +1,24 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_FUCHSIA_MOJOM_CDM_REQUEST_MOJOM_TRAITS_H_
+#define MEDIA_FUCHSIA_MOJOM_CDM_REQUEST_MOJOM_TRAITS_H_
+
+#include <fuchsia/media/drm/cpp/fidl.h>
+
+#include "fuchsia/mojom/fidl_interface_request_mojom_traits.h"
+
+namespace mojo {
+
+template <>
+struct StructTraits<
+ media::mojom::CdmRequestDataView,
+ fidl::InterfaceRequest<fuchsia::media::drm::ContentDecryptionModule>>
+ : public FidlInterfaceRequestStructTraits<
+ media::mojom::CdmRequestDataView,
+ fuchsia::media::drm::ContentDecryptionModule> {};
+
+} // namespace mojo
+
+#endif // MEDIA_FUCHSIA_MOJOM_CDM_REQUEST_MOJOM_TRAITS_H_
diff --git a/chromium/media/fuchsia/mojom/fuchsia_cdm_provider.mojom b/chromium/media/fuchsia/mojom/fuchsia_cdm_provider.mojom
new file mode 100644
index 00000000000..2971aa3da72
--- /dev/null
+++ b/chromium/media/fuchsia/mojom/fuchsia_cdm_provider.mojom
@@ -0,0 +1,21 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+module media.mojom;
+
+// Mojo struct for
+// fidl::InterfaceRequest<fuchsia::media::drm::ContentDecryptionModule>.
+struct CdmRequest {
+ handle request;
+};
+
+// Interface for asking the privileged process to create a connection to the
+// Fuchsia CDM service.
+interface FuchsiaCdmProvider {
+  // Creates a connection to fuchsia::media::drm::ContentDecryptionModule for
+  // |key_system|.
+  // The implementation should make sure the persistent storage is isolated
+  // for different web origins.
+ CreateCdmInterface(string key_system, CdmRequest cdm_request);
+};
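
For reference, a rough sketch of a renderer-side call into this interface, assuming a bound media::mojom::FuchsiaCdmProviderPtr and relying on the CdmRequest typemap above to carry the FIDL request across Mojo; the key-system string and variable names are illustrative only:

  fuchsia::media::drm::ContentDecryptionModulePtr cdm;
  // |cdm_provider| is assumed to be connected to the browser-side
  // implementation of FuchsiaCdmProvider.
  cdm_provider->CreateCdmInterface("com.widevine.alpha", cdm.NewRequest());
  // |cdm| can now be used to create sessions for the requesting origin.
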
diff --git a/chromium/media/fuchsia/mojom/typemaps.gni b/chromium/media/fuchsia/mojom/typemaps.gni
new file mode 100644
index 00000000000..e25ac0443d4
--- /dev/null
+++ b/chromium/media/fuchsia/mojom/typemaps.gni
@@ -0,0 +1,5 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+typemaps = [ "//media/fuchsia/mojom/cdm_request.typemap" ]
diff --git a/chromium/media/gpu/BUILD.gn b/chromium/media/gpu/BUILD.gn
index a16a60049bf..8e8adcbb8c1 100644
--- a/chromium/media/gpu/BUILD.gn
+++ b/chromium/media/gpu/BUILD.gn
@@ -18,6 +18,7 @@ buildflag_header("buildflags") {
"USE_VAAPI=$use_vaapi",
"USE_V4L2_CODEC=$use_v4l2_codec",
"USE_LIBV4L2=$use_v4lplugin",
+ "USE_CHROMEOS_MEDIA_ACCELERATION=$use_vaapi||$use_v4l2_codec",
]
}
@@ -28,9 +29,10 @@ component("gpu") {
# media-related content code should access //media/gpu.
visibility = [
"//chrome/gpu",
- "//components/arc/common:media",
+ "//components/arc/mojom:media",
"//components/arc/video_accelerator",
"//components/mirroring/service:mirroring_service",
+ "//components/chromeos_camera/*",
"//components/viz/service/main",
"//content/gpu:*",
"//content/renderer:*",
@@ -103,6 +105,8 @@ component("gpu") {
"android/android_video_surface_chooser_impl.h",
"android/codec_allocator.cc",
"android/codec_allocator.h",
+ "android/codec_buffer_wait_coordinator.cc",
+ "android/codec_buffer_wait_coordinator.h",
"android/codec_image.cc",
"android/codec_image.h",
"android/codec_image_group.cc",
@@ -115,8 +119,6 @@ component("gpu") {
"android/device_info.h",
"android/direct_shared_image_video_provider.cc",
"android/direct_shared_image_video_provider.h",
- "android/image_reader_gl_owner.cc",
- "android/image_reader_gl_owner.h",
"android/maybe_render_early_manager.cc",
"android/maybe_render_early_manager.h",
"android/media_codec_video_decoder.cc",
@@ -124,23 +126,19 @@ component("gpu") {
"android/promotion_hint_aggregator.h",
"android/promotion_hint_aggregator_impl.cc",
"android/promotion_hint_aggregator_impl.h",
- "android/shared_image_video.cc",
- "android/shared_image_video.h",
"android/shared_image_video_provider.cc",
"android/shared_image_video_provider.h",
"android/surface_chooser_helper.cc",
"android/surface_chooser_helper.h",
- "android/surface_texture_gl_owner.cc",
- "android/surface_texture_gl_owner.h",
- "android/texture_owner.cc",
- "android/texture_owner.h",
"android/video_frame_factory.h",
"android/video_frame_factory_impl.cc",
"android/video_frame_factory_impl.h",
]
libs += [ "android" ]
deps += [
+ "//gpu/command_buffer/service:shared_image_video",
"//gpu/ipc/common:android_image_reader_utils",
+ "//gpu/ipc/common:android_texture_owner",
# TODO(crbug.com/789435): This can be removed once CdmManager is removed.
"//gpu/ipc/common:ipc_common_sources",
@@ -429,6 +427,7 @@ if (is_win || is_android || use_v4l2_codec || use_vaapi) {
if (is_android) {
deps += [
":android_video_decode_accelerator_unittests",
+ "//gpu/ipc/common:android_texture_owner_unittests",
"//media/base/android:media_java",
"//media/test:run_all_unittests",
"//ui/android:ui_java",
@@ -466,24 +465,20 @@ source_set("android_video_decode_accelerator_unittests") {
"android/codec_wrapper_unittest.cc",
"android/fake_codec_allocator.cc",
"android/fake_codec_allocator.h",
- "android/image_reader_gl_owner_unittest.cc",
"android/maybe_render_early_manager_unittest.cc",
"android/media_codec_video_decoder_unittest.cc",
- "android/mock_abstract_texture.cc",
- "android/mock_abstract_texture.h",
"android/mock_android_video_surface_chooser.cc",
"android/mock_android_video_surface_chooser.h",
+ "android/mock_codec_buffer_wait_coordinator.cc",
+ "android/mock_codec_buffer_wait_coordinator.h",
"android/mock_codec_image.cc",
"android/mock_codec_image.h",
"android/mock_device_info.cc",
"android/mock_device_info.h",
"android/mock_promotion_hint_aggregator.cc",
"android/mock_promotion_hint_aggregator.h",
- "android/mock_texture_owner.cc",
- "android/mock_texture_owner.h",
"android/promotion_hint_aggregator_impl_unittest.cc",
"android/surface_chooser_helper_unittest.cc",
- "android/surface_texture_gl_owner_unittest.cc",
"android/video_frame_factory_impl_unittest.cc",
]
deps = [
@@ -491,6 +486,7 @@ source_set("android_video_decode_accelerator_unittests") {
":gpu",
"//base/test:test_support",
"//gpu:test_support",
+ "//gpu/ipc/common:android_texture_owner_test_support",
"//media",
"//media:test_support",
"//testing/gmock",
@@ -535,7 +531,7 @@ if (use_v4l2_codec || use_vaapi || is_mac || is_win) {
}
static_library("test_support") {
- visibility = [ "//media:test_support" ]
+ visibility = [ "//media/gpu/*" ]
testonly = true
sources = [
"test/fake_command_buffer_helper.cc",
@@ -633,6 +629,7 @@ test("image_processor_test") {
deps = [
":buildflags",
":gpu",
+ "test:frame_file_writer",
"test:frame_validator",
"test:helpers",
"test:image_processor",
diff --git a/chromium/media/gpu/OWNERS b/chromium/media/gpu/OWNERS
index be483dde5d2..18fb4fa7ad2 100644
--- a/chromium/media/gpu/OWNERS
+++ b/chromium/media/gpu/OWNERS
@@ -1,9 +1,12 @@
-acourbot@chromium.org
dalecurtis@chromium.org
dcastagna@chromium.org
+liberato@chromium.org
+sandersd@chromium.org
+
+# For chromeos/, linux/, v4l2/, and vaapi/ -specific changes.
+acourbot@chromium.org
hiroh@chromium.org
jcliang@chromium.org
kcwu@chromium.org
-liberato@chromium.org
+mcasas@chromium.org
posciak@chromium.org
-sandersd@chromium.org
diff --git a/chromium/media/gpu/android/android_video_surface_chooser_impl.cc b/chromium/media/gpu/android/android_video_surface_chooser_impl.cc
index e76784096f6..a9102422e5a 100644
--- a/chromium/media/gpu/android/android_video_surface_chooser_impl.cc
+++ b/chromium/media/gpu/android/android_video_surface_chooser_impl.cc
@@ -18,9 +18,7 @@ constexpr base::TimeDelta MinimumDelayAfterFailedOverlay =
AndroidVideoSurfaceChooserImpl::AndroidVideoSurfaceChooserImpl(
bool allow_dynamic,
const base::TickClock* tick_clock)
- : allow_dynamic_(allow_dynamic),
- tick_clock_(tick_clock),
- weak_factory_(this) {
+ : allow_dynamic_(allow_dynamic), tick_clock_(tick_clock) {
// Use a DefaultTickClock if one wasn't provided.
if (!tick_clock_)
tick_clock_ = base::DefaultTickClock::GetInstance();
diff --git a/chromium/media/gpu/android/android_video_surface_chooser_impl.h b/chromium/media/gpu/android/android_video_surface_chooser_impl.h
index f5a5ecd5ada..84f5f486f1c 100644
--- a/chromium/media/gpu/android/android_video_surface_chooser_impl.h
+++ b/chromium/media/gpu/android/android_video_surface_chooser_impl.h
@@ -89,7 +89,7 @@ class MEDIA_GPU_EXPORT AndroidVideoSurfaceChooserImpl
// Time at which we most recently got a failed overlay request.
base::TimeTicks most_recent_overlay_failure_;
- base::WeakPtrFactory<AndroidVideoSurfaceChooserImpl> weak_factory_;
+ base::WeakPtrFactory<AndroidVideoSurfaceChooserImpl> weak_factory_{this};
DISALLOW_COPY_AND_ASSIGN(AndroidVideoSurfaceChooserImpl);
};
diff --git a/chromium/media/gpu/android/codec_allocator.cc b/chromium/media/gpu/android/codec_allocator.cc
index 4bd8b0e00c2..f6ca0eb58a9 100644
--- a/chromium/media/gpu/android/codec_allocator.cc
+++ b/chromium/media/gpu/android/codec_allocator.cc
@@ -45,8 +45,8 @@ void ReleaseMediaCodecInternal(std::unique_ptr<MediaCodecBridge> codec) {
}
scoped_refptr<base::SequencedTaskRunner> CreateCodecTaskRunner() {
- return base::CreateSequencedTaskRunnerWithTraits(
- {base::TaskPriority::USER_VISIBLE, base::MayBlock(),
+ return base::CreateSequencedTaskRunner(
+ {base::ThreadPool(), base::TaskPriority::USER_VISIBLE, base::MayBlock(),
base::TaskShutdownBehavior::SKIP_ON_SHUTDOWN});
}
diff --git a/chromium/media/gpu/android/codec_allocator_unittest.cc b/chromium/media/gpu/android/codec_allocator_unittest.cc
index d57e7b4370f..90d25b8ccaf 100644
--- a/chromium/media/gpu/android/codec_allocator_unittest.cc
+++ b/chromium/media/gpu/android/codec_allocator_unittest.cc
@@ -12,8 +12,8 @@
#include "base/bind_helpers.h"
#include "base/logging.h"
#include "base/single_thread_task_runner.h"
-#include "base/test/scoped_task_environment.h"
#include "base/test/simple_test_tick_clock.h"
+#include "base/test/task_environment.h"
#include "base/threading/sequenced_task_runner_handle.h"
#include "base/threading/thread.h"
#include "base/time/tick_clock.h"
@@ -72,8 +72,8 @@ class CodecAllocatorTest : public testing::Test {
std::unique_ptr<MediaCodecBridge> codec) {
// This should always be called on the main thread, despite whatever thread
// the allocator happens to be running on.
- ASSERT_TRUE(scoped_task_environment_.GetMainThreadTaskRunner()
- ->BelongsToCurrentThread());
+ ASSERT_TRUE(
+ task_environment_.GetMainThreadTaskRunner()->BelongsToCurrentThread());
last_created_codec_.reset(
reinterpret_cast<MockMediaCodecBridge*>(codec.release()));
@@ -85,8 +85,8 @@ class CodecAllocatorTest : public testing::Test {
void OnCodecReleasedInternal(base::OnceClosure quit_closure) {
// This should always be called on the main thread, despite whatever thread
// the allocator happens to be running on.
- ASSERT_TRUE(scoped_task_environment_.GetMainThreadTaskRunner()
- ->BelongsToCurrentThread());
+ ASSERT_TRUE(
+ task_environment_.GetMainThreadTaskRunner()->BelongsToCurrentThread());
OnCodecReleased();
std::move(quit_closure).Run();
}
@@ -110,7 +110,7 @@ class CodecAllocatorTest : public testing::Test {
protected:
// So that we can get the thread's task runner.
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
base::Thread allocator_thread_;
@@ -265,7 +265,7 @@ TEST_F(CodecAllocatorTest, SecureCreationFailsWhenHung) {
// QuitClosure may run before the initial release processes, so RunUntilIdle
// here such that hung status is cleared.
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
// Running the loop should clear hung status.
ASSERT_FALSE(IsPrimaryTaskRunnerLikelyHung());
@@ -294,7 +294,7 @@ TEST_F(CodecAllocatorTest, SoftwareCodecUsedWhenHung) {
// QuitClosure may run before the initial release processes, so RunUntilIdle
// here such that hung status is cleared.
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
// Running the loop should clear hung status.
ASSERT_FALSE(IsPrimaryTaskRunnerLikelyHung());
@@ -341,7 +341,7 @@ TEST_F(CodecAllocatorTest, CodecReleasedOnRightTaskRunnerWhenHung) {
// QuitClosure may run before the initial release processes, so RunUntilIdle
// here such that hung status is cleared.
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
// Running the loop should clear hung status.
ASSERT_FALSE(IsPrimaryTaskRunnerLikelyHung());
diff --git a/chromium/media/gpu/android/codec_buffer_wait_coordinator.cc b/chromium/media/gpu/android/codec_buffer_wait_coordinator.cc
new file mode 100644
index 00000000000..7f3a1e03f75
--- /dev/null
+++ b/chromium/media/gpu/android/codec_buffer_wait_coordinator.cc
@@ -0,0 +1,86 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/android/codec_buffer_wait_coordinator.h"
+
+#include "base/android/scoped_hardware_buffer_fence_sync.h"
+#include "base/metrics/histogram_macros.h"
+#include "base/threading/thread_task_runner_handle.h"
+
+namespace media {
+
+// FrameAvailableEvent is a RefCounted wrapper for a WaitableEvent
+// (it's not possible to put one in RefCountedData).
+// This lets us safely signal an event on any thread.
+struct FrameAvailableEvent
+ : public base::RefCountedThreadSafe<FrameAvailableEvent> {
+ FrameAvailableEvent()
+ : event(base::WaitableEvent::ResetPolicy::AUTOMATIC,
+ base::WaitableEvent::InitialState::NOT_SIGNALED) {}
+ void Signal() { event.Signal(); }
+ base::WaitableEvent event;
+
+ private:
+ friend class RefCountedThreadSafe<FrameAvailableEvent>;
+ ~FrameAvailableEvent() = default;
+};
+
+CodecBufferWaitCoordinator::CodecBufferWaitCoordinator(
+ scoped_refptr<gpu::TextureOwner> texture_owner)
+ : texture_owner_(std::move(texture_owner)),
+ frame_available_event_(new FrameAvailableEvent()),
+ task_runner_(base::ThreadTaskRunnerHandle::Get()) {
+ DCHECK(texture_owner_);
+ texture_owner_->SetFrameAvailableCallback(base::BindRepeating(
+ &FrameAvailableEvent::Signal, frame_available_event_));
+}
+
+CodecBufferWaitCoordinator::~CodecBufferWaitCoordinator() {
+ DCHECK(texture_owner_);
+}
+
+void CodecBufferWaitCoordinator::SetReleaseTimeToNow() {
+ release_time_ = base::TimeTicks::Now();
+}
+
+bool CodecBufferWaitCoordinator::IsExpectingFrameAvailable() {
+ return !release_time_.is_null();
+}
+
+void CodecBufferWaitCoordinator::WaitForFrameAvailable() {
+ DCHECK(!release_time_.is_null());
+
+ // 5msec covers >99.9% of cases, so just wait for up to that much before
+ // giving up. If an error occurs, we might not ever get a notification.
+ const base::TimeDelta max_wait = base::TimeDelta::FromMilliseconds(5);
+ const base::TimeTicks call_time = base::TimeTicks::Now();
+ const base::TimeDelta elapsed = call_time - release_time_;
+ const base::TimeDelta remaining = max_wait - elapsed;
+ release_time_ = base::TimeTicks();
+ bool timed_out = false;
+
+ if (remaining <= base::TimeDelta()) {
+ if (!frame_available_event_->event.IsSignaled()) {
+ DVLOG(1) << "Deferred WaitForFrameAvailable() timed out, elapsed: "
+ << elapsed.InMillisecondsF() << "ms";
+ timed_out = true;
+ }
+ } else {
+ DCHECK_LE(remaining, max_wait);
+ SCOPED_UMA_HISTOGRAM_TIMER(
+ "Media.CodecImage.CodecBufferWaitCoordinator.WaitTimeForFrame");
+ if (!frame_available_event_->event.TimedWait(remaining)) {
+ DVLOG(1) << "WaitForFrameAvailable() timed out, elapsed: "
+ << elapsed.InMillisecondsF()
+ << "ms, additionally waited: " << remaining.InMillisecondsF()
+ << "ms, total: " << (elapsed + remaining).InMillisecondsF()
+ << "ms";
+ timed_out = true;
+ }
+ }
+ UMA_HISTOGRAM_BOOLEAN(
+ "Media.CodecImage.CodecBufferWaitCoordinator.FrameTimedOut", timed_out);
+}
+
+} // namespace media
diff --git a/chromium/media/gpu/android/codec_buffer_wait_coordinator.h b/chromium/media/gpu/android/codec_buffer_wait_coordinator.h
new file mode 100644
index 00000000000..e6523b23c31
--- /dev/null
+++ b/chromium/media/gpu/android/codec_buffer_wait_coordinator.h
@@ -0,0 +1,66 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_ANDROID_CODEC_BUFFER_WAIT_COORDINATOR_H_
+#define MEDIA_GPU_ANDROID_CODEC_BUFFER_WAIT_COORDINATOR_H_
+
+#include "base/memory/ref_counted.h"
+#include "base/synchronization/waitable_event.h"
+#include "base/threading/thread_checker.h"
+#include "base/time/time.h"
+#include "gpu/ipc/common/android/texture_owner.h"
+#include "media/gpu/media_gpu_export.h"
+
+namespace media {
+
+struct FrameAvailableEvent;
+
+// This class supports waiting for codec buffers to be released/rendered before
+// using them. This class is RefCountedThreadSafe to make sure it's safe to
+// keep and drop refptrs to it on any thread.
+class MEDIA_GPU_EXPORT CodecBufferWaitCoordinator
+ : public base::RefCountedThreadSafe<CodecBufferWaitCoordinator> {
+ public:
+ explicit CodecBufferWaitCoordinator(
+ scoped_refptr<gpu::TextureOwner> texture_owner);
+
+ scoped_refptr<gpu::TextureOwner> texture_owner() const {
+ DCHECK(texture_owner_);
+ return texture_owner_;
+ }
+
+  // Codec buffer wait management APIs.
+  // Sets the expectation of onFrameAvailable for a new frame because a buffer
+  // was just released to this surface.
+ virtual void SetReleaseTimeToNow();
+
+ // Whether we're expecting onFrameAvailable. True when SetReleaseTimeToNow()
+  // was called but WaitForFrameAvailable() has not been called since.
+ virtual bool IsExpectingFrameAvailable();
+
+ // Waits for onFrameAvailable until it's been 5ms since the buffer was
+ // released. This must only be called if IsExpectingFrameAvailable().
+ virtual void WaitForFrameAvailable();
+
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner() {
+ return task_runner_;
+ }
+
+ protected:
+ virtual ~CodecBufferWaitCoordinator();
+
+ private:
+ friend class base::RefCountedThreadSafe<CodecBufferWaitCoordinator>;
+
+ scoped_refptr<gpu::TextureOwner> texture_owner_;
+ base::TimeTicks release_time_;
+ scoped_refptr<FrameAvailableEvent> frame_available_event_;
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
+
+ DISALLOW_COPY_AND_ASSIGN(CodecBufferWaitCoordinator);
+};
+
+} // namespace media
+
+#endif // MEDIA_GPU_ANDROID_CODEC_BUFFER_WAIT_COORDINATOR_H_
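
A short usage sketch of the release/wait handshake declared above; the texture owner and the MediaCodec release call are placeholders for whatever the caller already has, not part of this change:

  // |texture_owner| is an assumed scoped_refptr<gpu::TextureOwner>.
  auto coordinator =
      base::MakeRefCounted<media::CodecBufferWaitCoordinator>(texture_owner);

  // Release a codec buffer to the TextureOwner's surface, then record that an
  // onFrameAvailable notification is now expected.
  ReleaseOutputBufferToSurface();  // placeholder for the MediaCodec call
  coordinator->SetReleaseTimeToNow();

  // Later, before consuming the frame: wait at most ~5 ms past the release.
  if (coordinator->IsExpectingFrameAvailable())
    coordinator->WaitForFrameAvailable();
  coordinator->texture_owner()->UpdateTexImage();
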
diff --git a/chromium/media/gpu/android/codec_image.cc b/chromium/media/gpu/android/codec_image.cc
index 257b0ad016c..9f4ec226c50 100644
--- a/chromium/media/gpu/android/codec_image.cc
+++ b/chromium/media/gpu/android/codec_image.cc
@@ -19,7 +19,7 @@ namespace {
// Makes |texture_owner|'s context current if it isn't already.
std::unique_ptr<ui::ScopedMakeCurrent> MakeCurrentIfNeeded(
- TextureOwner* texture_owner) {
+ gpu::TextureOwner* texture_owner) {
gl::GLContext* context = texture_owner->GetContext();
// Note: this works for virtual contexts too, because IsCurrent() returns true
// if their shared platform context is current, regardless of which virtual
@@ -52,12 +52,12 @@ CodecImage::~CodecImage() {
void CodecImage::Initialize(
std::unique_ptr<CodecOutputBuffer> output_buffer,
- scoped_refptr<TextureOwner> texture_owner,
+ scoped_refptr<CodecBufferWaitCoordinator> codec_buffer_wait_coordinator,
PromotionHintAggregator::NotifyPromotionHintCB promotion_hint_cb) {
DCHECK(output_buffer);
phase_ = Phase::kInCodec;
output_buffer_ = std::move(output_buffer);
- texture_owner_ = std::move(texture_owner);
+ codec_buffer_wait_coordinator_ = std::move(codec_buffer_wait_coordinator);
promotion_hint_cb_ = std::move(promotion_hint_cb);
}
@@ -83,7 +83,7 @@ unsigned CodecImage::GetInternalFormat() {
CodecImage::BindOrCopy CodecImage::ShouldBindOrCopy() {
// If we're using an overlay, then pretend it's bound. That way, we'll get
// calls to ScheduleOverlayPlane. Otherwise, CopyTexImage needs to be called.
- return !texture_owner_ ? BIND : COPY;
+ return !codec_buffer_wait_coordinator_ ? BIND : COPY;
}
bool CodecImage::BindTexImage(unsigned target) {
@@ -103,7 +103,9 @@ bool CodecImage::CopyTexImage(unsigned target) {
GLint bound_service_id = 0;
glGetIntegerv(GL_TEXTURE_BINDING_EXTERNAL_OES, &bound_service_id);
// The currently bound texture should be the texture owner's texture.
- if (bound_service_id != static_cast<GLint>(texture_owner_->GetTextureId()))
+ if (bound_service_id !=
+ static_cast<GLint>(
+ codec_buffer_wait_coordinator_->texture_owner()->GetTextureId()))
return false;
RenderToTextureOwnerFrontBuffer(BindingsMode::kEnsureTexImageBound);
@@ -125,27 +127,41 @@ bool CodecImage::ScheduleOverlayPlane(
bool enable_blend,
std::unique_ptr<gfx::GpuFence> gpu_fence) {
TRACE_EVENT0("media", "CodecImage::ScheduleOverlayPlane");
- if (texture_owner_) {
+ if (codec_buffer_wait_coordinator_) {
DVLOG(1) << "Invalid call to ScheduleOverlayPlane; this image is "
"TextureOwner backed.";
return false;
}
- // Move the overlay if needed.
- if (most_recent_bounds_ != bounds_rect) {
- most_recent_bounds_ = bounds_rect;
- // Note that, if we're actually promoted to overlay, that this is where the
- // hint is sent to the callback. NotifyPromotionHint detects this case and
- // lets us do it. If we knew that we were going to get promotion hints,
- // then we could always let NotifyPromotionHint do it. Unfortunately, we
- // don't know that.
- promotion_hint_cb_.Run(PromotionHintAggregator::Hint(bounds_rect, true));
- }
-
+ NotifyOverlayPromotion(true, bounds_rect);
RenderToOverlay();
return true;
}
+void CodecImage::NotifyOverlayPromotion(bool promotion,
+ const gfx::Rect& bounds) {
+ if (!codec_buffer_wait_coordinator_ && promotion) {
+    // The |CodecImage| is already backed by a SurfaceView and should be used
+    // as an overlay.
+
+ // Move the overlay if needed.
+ if (most_recent_bounds_ != bounds) {
+ most_recent_bounds_ = bounds;
+      // Note that, if we're actually promoted to an overlay, this is where
+      // the hint is sent to the callback. NotifyPromotionHint detects this
+ // case and lets us do it. If we knew that we were going to get promotion
+ // hints, then we could always let NotifyPromotionHint do it.
+ // Unfortunately, we don't know that.
+ promotion_hint_cb_.Run(PromotionHintAggregator::Hint(bounds, promotion));
+ }
+ } else {
+ // This could be when |CodecImage| is backed by SurfaceTexture but should be
+ // promoted, or when this is backed by either SurfaceView or SurfaceTexture
+ // but should not be promoted.
+ promotion_hint_cb_.Run(PromotionHintAggregator::Hint(bounds, promotion));
+ }
+}
+
void CodecImage::OnMemoryDump(base::trace_event::ProcessMemoryDump* pmd,
uint64_t process_tracing_id,
const std::string& dump_name) {}
@@ -159,14 +175,14 @@ void CodecImage::GetTextureMatrix(float matrix[16]) {
0, 1, 0, 1 //
};
memcpy(matrix, kYInvertedIdentity, sizeof(kYInvertedIdentity));
- if (!texture_owner_)
+ if (!codec_buffer_wait_coordinator_)
return;
// The matrix is available after we render to the front buffer. If that fails
// we'll return the matrix from the previous frame, which is more likely to be
// correct than the identity matrix anyway.
RenderToTextureOwnerFrontBuffer(BindingsMode::kDontRestoreIfBound);
- texture_owner_->GetTransformMatrix(matrix);
+ codec_buffer_wait_coordinator_->texture_owner()->GetTransformMatrix(matrix);
YInvertMatrix(matrix);
}
@@ -175,26 +191,48 @@ void CodecImage::NotifyPromotionHint(bool promotion_hint,
int display_y,
int display_width,
int display_height) {
- // If this is promotable, and we're using an overlay, then skip sending this
- // hint. ScheduleOverlayPlane will do it.
- if (promotion_hint && !texture_owner_)
+  // TODO(crbug.com/1004859): Add back the early skip; it was removed because
+  // it is suspected of affecting video smoothness.
+ if (promotion_hint && !codec_buffer_wait_coordinator_)
return;
- promotion_hint_cb_.Run(PromotionHintAggregator::Hint(
- gfx::Rect(display_x, display_y, display_width, display_height),
- promotion_hint));
+ NotifyOverlayPromotion(
+ promotion_hint,
+ gfx::Rect(display_x, display_y, display_width, display_height));
+}
+
+void CodecImage::ReleaseResources() {
+ ReleaseCodecBuffer();
+}
+
+bool CodecImage::IsUsingGpuMemory() const {
+  // Only images which are bound to a texture account for GPU memory.
+ return was_tex_image_bound_;
+}
+
+void CodecImage::UpdateAndBindTexImage() {
+ RenderToTextureOwnerFrontBuffer(BindingsMode::kEnsureTexImageBound);
+}
+
+bool CodecImage::HasTextureOwner() const {
+ return !!texture_owner();
+}
+
+gpu::gles2::Texture* CodecImage::GetTexture() const {
+ DCHECK(texture_owner());
+ return gpu::gles2::Texture::CheckedCast(texture_owner()->GetTextureBase());
}
bool CodecImage::RenderToFrontBuffer() {
// This code is used to trigger early rendering of the image before it is used
// for compositing, there is no need to bind the image.
- return texture_owner_
+ return codec_buffer_wait_coordinator_
? RenderToTextureOwnerFrontBuffer(BindingsMode::kRestoreIfBound)
: RenderToOverlay();
}
bool CodecImage::RenderToTextureOwnerBackBuffer() {
- DCHECK(texture_owner_);
+ DCHECK(codec_buffer_wait_coordinator_);
DCHECK_NE(phase_, Phase::kInFrontBuffer);
if (phase_ == Phase::kInBackBuffer)
return true;
@@ -203,19 +241,19 @@ bool CodecImage::RenderToTextureOwnerBackBuffer() {
// Wait for a previous frame available so we don't confuse it with the one
// we're about to release.
- if (texture_owner_->IsExpectingFrameAvailable())
- texture_owner_->WaitForFrameAvailable();
+ if (codec_buffer_wait_coordinator_->IsExpectingFrameAvailable())
+ codec_buffer_wait_coordinator_->WaitForFrameAvailable();
if (!output_buffer_->ReleaseToSurface()) {
phase_ = Phase::kInvalidated;
return false;
}
phase_ = Phase::kInBackBuffer;
- texture_owner_->SetReleaseTimeToNow();
+ codec_buffer_wait_coordinator_->SetReleaseTimeToNow();
return true;
}
bool CodecImage::RenderToTextureOwnerFrontBuffer(BindingsMode bindings_mode) {
- DCHECK(texture_owner_);
+ DCHECK(codec_buffer_wait_coordinator_);
if (phase_ == Phase::kInFrontBuffer) {
EnsureBoundIfNeeded(bindings_mode);
@@ -230,21 +268,23 @@ bool CodecImage::RenderToTextureOwnerFrontBuffer(BindingsMode bindings_mode) {
// The image is now in the back buffer, so promote it to the front buffer.
phase_ = Phase::kInFrontBuffer;
- if (texture_owner_->IsExpectingFrameAvailable())
- texture_owner_->WaitForFrameAvailable();
+ if (codec_buffer_wait_coordinator_->IsExpectingFrameAvailable())
+ codec_buffer_wait_coordinator_->WaitForFrameAvailable();
std::unique_ptr<ui::ScopedMakeCurrent> scoped_make_current =
- MakeCurrentIfNeeded(texture_owner_.get());
+ MakeCurrentIfNeeded(
+ codec_buffer_wait_coordinator_->texture_owner().get());
// If updating the image will implicitly update the texture bindings then
// restore if requested or the update needed a context switch.
bool should_restore_bindings =
- texture_owner_->binds_texture_on_update() &&
+ codec_buffer_wait_coordinator_->texture_owner()
+ ->binds_texture_on_update() &&
(bindings_mode == BindingsMode::kRestoreIfBound || !!scoped_make_current);
GLint bound_service_id = 0;
if (should_restore_bindings)
glGetIntegerv(GL_TEXTURE_BINDING_EXTERNAL_OES, &bound_service_id);
- texture_owner_->UpdateTexImage();
+ codec_buffer_wait_coordinator_->texture_owner()->UpdateTexImage();
EnsureBoundIfNeeded(bindings_mode);
if (should_restore_bindings)
glBindTexture(GL_TEXTURE_EXTERNAL_OES, bound_service_id);
@@ -252,15 +292,16 @@ bool CodecImage::RenderToTextureOwnerFrontBuffer(BindingsMode bindings_mode) {
}
void CodecImage::EnsureBoundIfNeeded(BindingsMode mode) {
- DCHECK(texture_owner_);
+ DCHECK(codec_buffer_wait_coordinator_);
- if (texture_owner_->binds_texture_on_update()) {
+ if (codec_buffer_wait_coordinator_->texture_owner()
+ ->binds_texture_on_update()) {
was_tex_image_bound_ = true;
return;
}
if (mode != BindingsMode::kEnsureTexImageBound)
return;
- texture_owner_->EnsureTexImageBound();
+ codec_buffer_wait_coordinator_->texture_owner()->EnsureTexImageBound();
was_tex_image_bound_ = true;
}
@@ -285,10 +326,10 @@ void CodecImage::ReleaseCodecBuffer() {
std::unique_ptr<base::android::ScopedHardwareBufferFenceSync>
CodecImage::GetAHardwareBuffer() {
- DCHECK(texture_owner_);
+ DCHECK(codec_buffer_wait_coordinator_);
RenderToTextureOwnerFrontBuffer(BindingsMode::kDontRestoreIfBound);
- return texture_owner_->GetAHardwareBuffer();
+ return codec_buffer_wait_coordinator_->texture_owner()->GetAHardwareBuffer();
}
CodecImageHolder::CodecImageHolder(
diff --git a/chromium/media/gpu/android/codec_image.h b/chromium/media/gpu/android/codec_image.h
index 1525597f84c..39b67808b17 100644
--- a/chromium/media/gpu/android/codec_image.h
+++ b/chromium/media/gpu/android/codec_image.h
@@ -13,9 +13,10 @@
#include "base/macros.h"
#include "base/memory/ref_counted_delete_on_sequence.h"
#include "gpu/command_buffer/service/gl_stream_texture_image.h"
+#include "gpu/command_buffer/service/stream_texture_shared_image_interface.h"
+#include "media/gpu/android/codec_buffer_wait_coordinator.h"
#include "media/gpu/android/codec_wrapper.h"
#include "media/gpu/android/promotion_hint_aggregator.h"
-#include "media/gpu/android/surface_texture_gl_owner.h"
#include "media/gpu/media_gpu_export.h"
namespace base {
@@ -28,7 +29,8 @@ namespace media {
// A GLImage that renders MediaCodec buffers to a TextureOwner or overlay
// as needed in order to draw them.
-class MEDIA_GPU_EXPORT CodecImage : public gpu::gles2::GLStreamTextureImage {
+class MEDIA_GPU_EXPORT CodecImage
+ : public gpu::StreamTextureSharedImageInterface {
public:
// Callback to notify that a codec image is now unused in the sense of not
// being out for display. This lets us signal interested folks once a video
@@ -52,7 +54,7 @@ class MEDIA_GPU_EXPORT CodecImage : public gpu::gles2::GLStreamTextureImage {
// not in use.
void Initialize(
std::unique_ptr<CodecOutputBuffer> output_buffer,
- scoped_refptr<TextureOwner> texture_owner,
+ scoped_refptr<CodecBufferWaitCoordinator> codec_buffer_wait_coordinator,
PromotionHintAggregator::NotifyPromotionHintCB promotion_hint_cb);
void SetNowUnusedCB(NowUnusedCB now_unused_cb);
@@ -68,6 +70,9 @@ class MEDIA_GPU_EXPORT CodecImage : public gpu::gles2::GLStreamTextureImage {
bool CopyTexSubImage(unsigned target,
const gfx::Point& offset,
const gfx::Rect& rect) override;
+  // Currently this API depends on the implementation of
+  // NotifyOverlayPromotion, since we expect overlays to use SharedImage in the
+  // future.
bool ScheduleOverlayPlane(gfx::AcceleratedWidget widget,
int z_order,
gfx::OverlayTransform transform,
@@ -84,12 +89,25 @@ class MEDIA_GPU_EXPORT CodecImage : public gpu::gles2::GLStreamTextureImage {
GetAHardwareBuffer() override;
// gpu::gles2::GLStreamTextureMatrix implementation
void GetTextureMatrix(float xform[16]) override;
+  // Currently this API is implemented via NotifyOverlayPromotion, since this
+  // API is expected to be removed.
void NotifyPromotionHint(bool promotion_hint,
int display_x,
int display_y,
int display_width,
int display_height) override;
+ // gpu::StreamTextureSharedImageInterface implementation.
+ void ReleaseResources() override;
+ bool IsUsingGpuMemory() const override;
+ void UpdateAndBindTexImage() override;
+ bool HasTextureOwner() const override;
+ gpu::gles2::Texture* GetTexture() const override;
+ void NotifyOverlayPromotion(bool promotion, const gfx::Rect& bounds) override;
+ // Renders this image to the overlay. Returns true if the buffer is in the
+ // overlay front buffer. Returns false if the buffer was invalidated.
+ bool RenderToOverlay() override;
+
// Whether the codec buffer has been rendered to the front buffer.
bool was_rendered_to_front_buffer() const {
return phase_ == Phase::kInFrontBuffer;
@@ -100,9 +118,19 @@ class MEDIA_GPU_EXPORT CodecImage : public gpu::gles2::GLStreamTextureImage {
bool was_tex_image_bound() const { return was_tex_image_bound_; }
// Whether this image is backed by a texture owner.
- bool is_texture_owner_backed() const { return !!texture_owner_; }
+  // The texture owner is held by |codec_buffer_wait_coordinator_|, so checking
+  // for |codec_buffer_wait_coordinator_| alone is enough here.
+ // TODO(vikassoni): Update the method name in future refactorings.
+ bool is_texture_owner_backed() const {
+ return !!codec_buffer_wait_coordinator_;
+ }
- scoped_refptr<TextureOwner> texture_owner() const { return texture_owner_; }
+ scoped_refptr<gpu::TextureOwner> texture_owner() const {
+ return codec_buffer_wait_coordinator_
+ ? codec_buffer_wait_coordinator_->texture_owner()
+ : nullptr;
+ }
// Renders this image to the front buffer of its backing surface.
// Returns true if the buffer is in the front buffer. Returns false if the
@@ -134,35 +162,18 @@ class MEDIA_GPU_EXPORT CodecImage : public gpu::gles2::GLStreamTextureImage {
// Renders this image to the texture owner front buffer by first rendering
// it to the back buffer if it's not already there, and then waiting for the
// frame available event before calling UpdateTexImage().
- enum class BindingsMode {
- // Ensures that the TextureOwner's texture is bound to the latest image, if
- // it requires explicit binding.
- kEnsureTexImageBound,
-
- // Updates the current image but does not bind it. If updating the image
- // implicitly binds the texture, the current bindings will be restored.
- kRestoreIfBound,
-
- // Updates the current image but does not bind it. If updating the image
- // implicitly binds the texture, the current bindings will not be restored.
- kDontRestoreIfBound
- };
bool RenderToTextureOwnerFrontBuffer(BindingsMode bindings_mode);
void EnsureBoundIfNeeded(BindingsMode mode);
- // Renders this image to the overlay. Returns true if the buffer is in the
- // overlay front buffer. Returns false if the buffer was invalidated.
- bool RenderToOverlay();
-
// The phase of the image buffer's lifecycle.
Phase phase_ = Phase::kInvalidated;
// The buffer backing this image.
std::unique_ptr<CodecOutputBuffer> output_buffer_;
- // The TextureOwner that |output_buffer_| will be rendered to. Or null, if
- // this image is backed by an overlay.
- scoped_refptr<TextureOwner> texture_owner_;
+ // The CodecBufferWaitCoordinator that |output_buffer_| will be rendered to.
+ // Or null, if this image is backed by an overlay.
+ scoped_refptr<CodecBufferWaitCoordinator> codec_buffer_wait_coordinator_;
// The bounds last sent to the overlay.
gfx::Rect most_recent_bounds_;
diff --git a/chromium/media/gpu/android/codec_image_group.cc b/chromium/media/gpu/android/codec_image_group.cc
index e84d8f82633..6e1979e37a2 100644
--- a/chromium/media/gpu/android/codec_image_group.cc
+++ b/chromium/media/gpu/android/codec_image_group.cc
@@ -13,7 +13,8 @@ namespace media {
CodecImageGroup::CodecImageGroup(
scoped_refptr<base::SequencedTaskRunner> task_runner,
scoped_refptr<CodecSurfaceBundle> surface_bundle)
- : surface_bundle_(std::move(surface_bundle)), weak_this_factory_(this) {
+ : surface_bundle_(std::move(surface_bundle)),
+ task_runner_(std::move(task_runner)) {
// If the surface bundle has an overlay, then register for destruction
// callbacks. We thread-hop to the right thread, which means that we might
// find out about destruction asynchronously. Remember that the wp will be
@@ -26,7 +27,7 @@ CodecImageGroup::CodecImageGroup(
task_runner->PostTask(FROM_HERE,
base::BindOnce(std::move(cb), overlay));
},
- std::move(task_runner),
+ task_runner_,
base::BindOnce(&CodecImageGroup::OnSurfaceDestroyed,
weak_this_factory_.GetWeakPtr())));
}
@@ -36,9 +37,13 @@ CodecImageGroup::CodecImageGroup(
// adding a new image.
}
-CodecImageGroup::~CodecImageGroup() {}
+CodecImageGroup::~CodecImageGroup() {
+ CHECK(task_runner_->RunsTasksInCurrentSequence());
+}
void CodecImageGroup::AddCodecImage(CodecImage* image) {
+ // Temporary: crbug.com/986783 .
+ CHECK(task_runner_->RunsTasksInCurrentSequence());
// If somebody adds an image after the surface has been destroyed, fail the
// image immediately. This can happen due to thread hopping.
if (!surface_bundle_) {
@@ -56,16 +61,22 @@ void CodecImageGroup::AddCodecImage(CodecImage* image) {
}
void CodecImageGroup::RemoveCodecImage(CodecImage* image) {
+ // Temporary: crbug.com/986783 .
+ CHECK(task_runner_->RunsTasksInCurrentSequence());
images_.erase(image);
// Clear the destruction CB, since it has a strong ref to us.
image->SetDestructionCB(CodecImage::DestructionCB());
}
void CodecImageGroup::OnCodecImageDestroyed(CodecImage* image) {
+ // Temporary: crbug.com/986783 .
+ CHECK(task_runner_->RunsTasksInCurrentSequence());
images_.erase(image);
}
void CodecImageGroup::OnSurfaceDestroyed(AndroidOverlay* overlay) {
+ // Temporary: crbug.com/986783 .
+ CHECK(task_runner_->RunsTasksInCurrentSequence());
// Release any codec buffer, so that the image doesn't try to render to the
// overlay. If it already did, that's fine.
for (CodecImage* image : images_)
diff --git a/chromium/media/gpu/android/codec_image_group.h b/chromium/media/gpu/android/codec_image_group.h
index ad0336fa613..a595aaca6b6 100644
--- a/chromium/media/gpu/android/codec_image_group.h
+++ b/chromium/media/gpu/android/codec_image_group.h
@@ -68,7 +68,10 @@ class MEDIA_GPU_EXPORT CodecImageGroup
// All the images that use |surface_bundle_|.
std::unordered_set<CodecImage*> images_;
- base::WeakPtrFactory<CodecImageGroup> weak_this_factory_;
+ // Task runner for everything.
+ scoped_refptr<base::SequencedTaskRunner> task_runner_;
+
+ base::WeakPtrFactory<CodecImageGroup> weak_this_factory_{this};
};
} // namespace media
diff --git a/chromium/media/gpu/android/codec_image_group_unittest.cc b/chromium/media/gpu/android/codec_image_group_unittest.cc
index 788ed2b9475..6b02e977b44 100644
--- a/chromium/media/gpu/android/codec_image_group_unittest.cc
+++ b/chromium/media/gpu/android/codec_image_group_unittest.cc
@@ -8,7 +8,7 @@
#include "base/callback.h"
#include "base/memory/ref_counted.h"
#include "base/sequenced_task_runner.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/test/test_simple_task_runner.h"
#include "base/threading/thread.h"
#include "media/base/android/mock_android_overlay.h"
@@ -79,7 +79,7 @@ class CodecImageGroupTest : public testing::Test {
// Handy method to check that CodecImage destruction is relayed properly.
MOCK_METHOD1(OnCodecImageDestroyed, void(CodecImage*));
- base::test::ScopedTaskEnvironment env_;
+ base::test::TaskEnvironment env_;
// Our thread is the mcvd thread. This is the task runner for the gpu thread.
scoped_refptr<base::TestSimpleTaskRunner> gpu_task_runner_;
diff --git a/chromium/media/gpu/android/codec_image_unittest.cc b/chromium/media/gpu/android/codec_image_unittest.cc
index 7e3686ee774..3bf7bcdfbf8 100644
--- a/chromium/media/gpu/android/codec_image_unittest.cc
+++ b/chromium/media/gpu/android/codec_image_unittest.cc
@@ -8,14 +8,15 @@
#include "base/bind_helpers.h"
#include "base/logging.h"
#include "base/test/mock_callback.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/sequenced_task_runner_handle.h"
#include "gpu/command_buffer/service/texture_manager.h"
+#include "gpu/ipc/common/android/mock_abstract_texture.h"
+#include "gpu/ipc/common/android/mock_texture_owner.h"
#include "media/base/android/media_codec_bridge.h"
#include "media/base/android/mock_media_codec_bridge.h"
#include "media/gpu/android/codec_image.h"
-#include "media/gpu/android/mock_abstract_texture.h"
-#include "media/gpu/android/mock_texture_owner.h"
+#include "media/gpu/android/mock_codec_buffer_wait_coordinator.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "ui/gfx/geometry/rect.h"
@@ -61,8 +62,11 @@ class CodecImageTest : public testing::Test {
// The tests rely on this texture being bound.
glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture_id_);
- texture_owner_ = new NiceMock<MockTextureOwner>(
+ auto texture_owner = base::MakeRefCounted<NiceMock<gpu::MockTextureOwner>>(
texture_id_, context_.get(), surface_.get(), BindsTextureOnUpdate());
+ codec_buffer_wait_coordinator_ =
+ base::MakeRefCounted<NiceMock<MockCodecBufferWaitCoordinator>>(
+ std::move(texture_owner));
}
void TearDown() override {
@@ -83,7 +87,8 @@ class CodecImageTest : public testing::Test {
wrapper_->DequeueOutputBuffer(nullptr, nullptr, &buffer);
scoped_refptr<CodecImage> image = new CodecImage();
image->Initialize(
- std::move(buffer), kind == kTextureOwner ? texture_owner_ : nullptr,
+ std::move(buffer),
+ kind == kTextureOwner ? codec_buffer_wait_coordinator_ : nullptr,
base::BindRepeating(&PromotionHintReceiver::OnPromotionHint,
base::Unretained(&promotion_hint_receiver_)));
@@ -93,10 +98,11 @@ class CodecImageTest : public testing::Test {
virtual bool BindsTextureOnUpdate() { return true; }
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
NiceMock<MockMediaCodecBridge>* codec_;
std::unique_ptr<CodecWrapper> wrapper_;
- scoped_refptr<NiceMock<MockTextureOwner>> texture_owner_;
+ scoped_refptr<NiceMock<MockCodecBufferWaitCoordinator>>
+ codec_buffer_wait_coordinator_;
scoped_refptr<gl::GLContext> context_;
scoped_refptr<gl::GLShareGroup> share_group_;
scoped_refptr<gl::GLSurface> surface_;
@@ -170,8 +176,9 @@ TEST_F(CodecImageTest, CopyTexImageTriggersFrontBufferRendering) {
// Verify that the release comes before the wait.
InSequence s;
EXPECT_CALL(*codec_, ReleaseOutputBuffer(_, true));
- EXPECT_CALL(*texture_owner_, WaitForFrameAvailable());
- EXPECT_CALL(*texture_owner_, UpdateTexImage());
+ EXPECT_CALL(*codec_buffer_wait_coordinator_, WaitForFrameAvailable());
+ EXPECT_CALL(*codec_buffer_wait_coordinator_->texture_owner(),
+ UpdateTexImage());
i->CopyTexImage(GL_TEXTURE_EXTERNAL_OES);
ASSERT_TRUE(i->was_rendered_to_front_buffer());
}
@@ -181,9 +188,11 @@ TEST_F(CodecImageTestExplicitBind, CopyTexImageTriggersFrontBufferRendering) {
// Verify that the release comes before the wait.
InSequence s;
EXPECT_CALL(*codec_, ReleaseOutputBuffer(_, true));
- EXPECT_CALL(*texture_owner_, WaitForFrameAvailable());
- EXPECT_CALL(*texture_owner_, UpdateTexImage());
- EXPECT_CALL(*texture_owner_, EnsureTexImageBound());
+ EXPECT_CALL(*codec_buffer_wait_coordinator_, WaitForFrameAvailable());
+ EXPECT_CALL(*codec_buffer_wait_coordinator_->texture_owner(),
+ UpdateTexImage());
+ EXPECT_CALL(*codec_buffer_wait_coordinator_->texture_owner(),
+ EnsureTexImageBound());
i->CopyTexImage(GL_TEXTURE_EXTERNAL_OES);
ASSERT_TRUE(i->was_rendered_to_front_buffer());
}
@@ -192,9 +201,11 @@ TEST_F(CodecImageTest, GetTextureMatrixTriggersFrontBufferRendering) {
auto i = NewImage(kTextureOwner);
InSequence s;
EXPECT_CALL(*codec_, ReleaseOutputBuffer(_, true));
- EXPECT_CALL(*texture_owner_, WaitForFrameAvailable());
- EXPECT_CALL(*texture_owner_, UpdateTexImage());
- EXPECT_CALL(*texture_owner_, GetTransformMatrix(_));
+ EXPECT_CALL(*codec_buffer_wait_coordinator_, WaitForFrameAvailable());
+ EXPECT_CALL(*codec_buffer_wait_coordinator_->texture_owner(),
+ UpdateTexImage());
+ EXPECT_CALL(*codec_buffer_wait_coordinator_->texture_owner(),
+ GetTransformMatrix(_));
float matrix[16];
i->GetTextureMatrix(matrix);
ASSERT_TRUE(i->was_rendered_to_front_buffer());
@@ -203,14 +214,17 @@ TEST_F(CodecImageTest, GetTextureMatrixTriggersFrontBufferRendering) {
TEST_F(CodecImageTestExplicitBind,
GetTextureMatrixTriggersFrontBufferRendering) {
// GetTextureMatrix should not bind the image.
- texture_owner_->expect_update_tex_image = false;
+ codec_buffer_wait_coordinator_->texture_owner()->expect_update_tex_image =
+ false;
auto i = NewImage(kTextureOwner);
InSequence s;
EXPECT_CALL(*codec_, ReleaseOutputBuffer(_, true));
- EXPECT_CALL(*texture_owner_, WaitForFrameAvailable());
- EXPECT_CALL(*texture_owner_, UpdateTexImage());
- EXPECT_CALL(*texture_owner_, GetTransformMatrix(_));
+ EXPECT_CALL(*codec_buffer_wait_coordinator_, WaitForFrameAvailable());
+ EXPECT_CALL(*codec_buffer_wait_coordinator_->texture_owner(),
+ UpdateTexImage());
+ EXPECT_CALL(*codec_buffer_wait_coordinator_->texture_owner(),
+ GetTransformMatrix(_));
float matrix[16];
i->GetTextureMatrix(matrix);
ASSERT_TRUE(i->was_rendered_to_front_buffer());
@@ -254,16 +268,17 @@ TEST_F(CodecImageTest, RenderToBackBufferDoesntWait) {
auto i = NewImage(kTextureOwner);
InSequence s;
EXPECT_CALL(*codec_, ReleaseOutputBuffer(_, true));
- EXPECT_CALL(*texture_owner_, SetReleaseTimeToNow());
- EXPECT_CALL(*texture_owner_, WaitForFrameAvailable()).Times(0);
+ EXPECT_CALL(*codec_buffer_wait_coordinator_, SetReleaseTimeToNow());
+ EXPECT_CALL(*codec_buffer_wait_coordinator_, WaitForFrameAvailable())
+ .Times(0);
ASSERT_TRUE(i->RenderToTextureOwnerBackBuffer());
}
TEST_F(CodecImageTest, PromotingTheBackBufferWaits) {
auto i = NewImage(kTextureOwner);
- EXPECT_CALL(*texture_owner_, SetReleaseTimeToNow()).Times(1);
+ EXPECT_CALL(*codec_buffer_wait_coordinator_, SetReleaseTimeToNow()).Times(1);
i->RenderToTextureOwnerBackBuffer();
- EXPECT_CALL(*texture_owner_, WaitForFrameAvailable());
+ EXPECT_CALL(*codec_buffer_wait_coordinator_, WaitForFrameAvailable());
ASSERT_TRUE(i->RenderToFrontBuffer());
}
@@ -288,7 +303,8 @@ TEST_F(CodecImageTest, RenderToFrontBufferRestoresTextureBindings) {
glGenTextures(1, &pre_bound_texture);
glBindTexture(GL_TEXTURE_EXTERNAL_OES, pre_bound_texture);
auto i = NewImage(kTextureOwner);
- EXPECT_CALL(*texture_owner_, UpdateTexImage());
+ EXPECT_CALL(*codec_buffer_wait_coordinator_->texture_owner(),
+ UpdateTexImage());
i->RenderToFrontBuffer();
GLint post_bound_texture = 0;
glGetIntegerv(GL_TEXTURE_BINDING_EXTERNAL_OES, &post_bound_texture);
@@ -296,13 +312,15 @@ TEST_F(CodecImageTest, RenderToFrontBufferRestoresTextureBindings) {
}
TEST_F(CodecImageTestExplicitBind, RenderToFrontBufferDoesNotBindTexture) {
- texture_owner_->expect_update_tex_image = false;
+ codec_buffer_wait_coordinator_->texture_owner()->expect_update_tex_image =
+ false;
GLuint pre_bound_texture = 0;
glGenTextures(1, &pre_bound_texture);
glBindTexture(GL_TEXTURE_EXTERNAL_OES, pre_bound_texture);
auto i = NewImage(kTextureOwner);
- EXPECT_CALL(*texture_owner_, UpdateTexImage());
+ EXPECT_CALL(*codec_buffer_wait_coordinator_->texture_owner(),
+ UpdateTexImage());
i->RenderToFrontBuffer();
GLint post_bound_texture = 0;
glGetIntegerv(GL_TEXTURE_BINDING_EXTERNAL_OES, &post_bound_texture);
@@ -321,9 +339,10 @@ TEST_F(CodecImageTest, RenderToFrontBufferRestoresGLContext) {
auto i = NewImage(kTextureOwner);
// Our context should not be current when UpdateTexImage() is called.
- EXPECT_CALL(*texture_owner_, UpdateTexImage()).WillOnce(Invoke([&]() {
- ASSERT_FALSE(context->IsCurrent(surface.get()));
- }));
+ EXPECT_CALL(*codec_buffer_wait_coordinator_->texture_owner(),
+ UpdateTexImage())
+ .WillOnce(
+ Invoke([&]() { ASSERT_FALSE(context->IsCurrent(surface.get())); }));
i->RenderToFrontBuffer();
// Our context should have been restored.
ASSERT_TRUE(context->IsCurrent(surface.get()));
@@ -353,12 +372,17 @@ TEST_F(CodecImageTest, ScheduleOverlayPlaneDoesntSendDuplicateHints) {
TEST_F(CodecImageTest, GetAHardwareBuffer) {
auto i = NewImage(kTextureOwner);
- EXPECT_EQ(texture_owner_->get_a_hardware_buffer_count, 0);
+ EXPECT_EQ(codec_buffer_wait_coordinator_->texture_owner()
+ ->get_a_hardware_buffer_count,
+ 0);
EXPECT_FALSE(i->was_rendered_to_front_buffer());
- EXPECT_CALL(*texture_owner_, UpdateTexImage());
+ EXPECT_CALL(*codec_buffer_wait_coordinator_->texture_owner(),
+ UpdateTexImage());
i->GetAHardwareBuffer();
- EXPECT_EQ(texture_owner_->get_a_hardware_buffer_count, 1);
+ EXPECT_EQ(codec_buffer_wait_coordinator_->texture_owner()
+ ->get_a_hardware_buffer_count,
+ 1);
EXPECT_TRUE(i->was_rendered_to_front_buffer());
}
diff --git a/chromium/media/gpu/android/codec_surface_bundle.cc b/chromium/media/gpu/android/codec_surface_bundle.cc
index 16093ae4ec0..616d66f85ac 100644
--- a/chromium/media/gpu/android/codec_surface_bundle.cc
+++ b/chromium/media/gpu/android/codec_surface_bundle.cc
@@ -12,22 +12,22 @@ namespace media {
CodecSurfaceBundle::CodecSurfaceBundle()
: RefCountedDeleteOnSequence<CodecSurfaceBundle>(
- base::SequencedTaskRunnerHandle::Get()),
- weak_factory_(this) {}
+ base::SequencedTaskRunnerHandle::Get()) {}
CodecSurfaceBundle::CodecSurfaceBundle(std::unique_ptr<AndroidOverlay> overlay)
: RefCountedDeleteOnSequence<CodecSurfaceBundle>(
base::SequencedTaskRunnerHandle::Get()),
- overlay_(std::move(overlay)),
- weak_factory_(this) {}
+ overlay_(std::move(overlay)) {}
CodecSurfaceBundle::CodecSurfaceBundle(
- scoped_refptr<TextureOwner> texture_owner)
+ scoped_refptr<gpu::TextureOwner> texture_owner)
: RefCountedDeleteOnSequence<CodecSurfaceBundle>(
base::SequencedTaskRunnerHandle::Get()),
- texture_owner_(std::move(texture_owner)),
- texture_owner_surface_(texture_owner_->CreateJavaSurface()),
- weak_factory_(this) {}
+ codec_buffer_wait_coordinator_(
+ base::MakeRefCounted<CodecBufferWaitCoordinator>(
+ std::move(texture_owner))),
+ texture_owner_surface_(codec_buffer_wait_coordinator_->texture_owner()
+ ->CreateJavaSurface()) {}
CodecSurfaceBundle::~CodecSurfaceBundle() {
// Explicitly free the surface first, just to be sure that it's deleted before
@@ -35,16 +35,18 @@ CodecSurfaceBundle::~CodecSurfaceBundle() {
texture_owner_surface_ = gl::ScopedJavaSurface();
// Also release the back buffers.
- if (!texture_owner_)
+ if (!codec_buffer_wait_coordinator_)
return;
- auto task_runner = texture_owner_->task_runner();
+ auto task_runner =
+ codec_buffer_wait_coordinator_->texture_owner()->task_runner();
if (task_runner->RunsTasksInCurrentSequence()) {
- texture_owner_->ReleaseBackBuffers();
+ codec_buffer_wait_coordinator_->texture_owner()->ReleaseBackBuffers();
} else {
task_runner->PostTask(
FROM_HERE,
- base::BindRepeating(&TextureOwner::ReleaseBackBuffers, texture_owner_));
+ base::BindRepeating(&gpu::TextureOwner::ReleaseBackBuffers,
+ codec_buffer_wait_coordinator_->texture_owner()));
}
}
diff --git a/chromium/media/gpu/android/codec_surface_bundle.h b/chromium/media/gpu/android/codec_surface_bundle.h
index a8ee763bca4..86a3ad0ee03 100644
--- a/chromium/media/gpu/android/codec_surface_bundle.h
+++ b/chromium/media/gpu/android/codec_surface_bundle.h
@@ -6,8 +6,9 @@
#define MEDIA_GPU_ANDROID_CODEC_SURFACE_BUNDLE_H_
#include "base/memory/ref_counted_delete_on_sequence.h"
+#include "gpu/ipc/common/android/texture_owner.h"
#include "media/base/android/android_overlay.h"
-#include "media/gpu/android/surface_texture_gl_owner.h"
+#include "media/gpu/android/codec_buffer_wait_coordinator.h"
#include "media/gpu/media_gpu_export.h"
#include "ui/gl/android/scoped_java_surface.h"
@@ -27,7 +28,7 @@ class MEDIA_GPU_EXPORT CodecSurfaceBundle
// Create an empty bundle to be manually populated.
CodecSurfaceBundle();
explicit CodecSurfaceBundle(std::unique_ptr<AndroidOverlay> overlay);
- explicit CodecSurfaceBundle(scoped_refptr<TextureOwner> texture_owner);
+ explicit CodecSurfaceBundle(scoped_refptr<gpu::TextureOwner> texture_owner);
const base::android::JavaRef<jobject>& GetJavaSurface() const;
@@ -36,7 +37,10 @@ class MEDIA_GPU_EXPORT CodecSurfaceBundle
// |this|; the cb will do nothing if |this| is destroyed.
ScheduleLayoutCB GetScheduleLayoutCB();
- scoped_refptr<TextureOwner> texture_owner() const { return texture_owner_; }
+ scoped_refptr<CodecBufferWaitCoordinator> codec_buffer_wait_coordinator()
+ const {
+ return codec_buffer_wait_coordinator_;
+ }
AndroidOverlay* overlay() const { return overlay_.get(); }
private:
@@ -49,15 +53,16 @@ class MEDIA_GPU_EXPORT CodecSurfaceBundle
// The Overlay or TextureOwner.
std::unique_ptr<AndroidOverlay> overlay_;
- scoped_refptr<TextureOwner> texture_owner_;
+ // |codec_buffer_wait_coordinator_| owns the TextureOwner.
+ scoped_refptr<CodecBufferWaitCoordinator> codec_buffer_wait_coordinator_;
- // The Java surface for |texture_owner_|.
+ // The Java surface for |codec_buffer_wait_coordinator_|'s TextureOwner.
gl::ScopedJavaSurface texture_owner_surface_;
// The last updated layout rect position for the |overlay|.
gfx::Rect layout_rect_;
- base::WeakPtrFactory<CodecSurfaceBundle> weak_factory_;
+ base::WeakPtrFactory<CodecSurfaceBundle> weak_factory_{this};
DISALLOW_COPY_AND_ASSIGN(CodecSurfaceBundle);
};
diff --git a/chromium/media/gpu/android/codec_wrapper_unittest.cc b/chromium/media/gpu/android/codec_wrapper_unittest.cc
index 2abcd6c459a..fd30d7f07d1 100644
--- a/chromium/media/gpu/android/codec_wrapper_unittest.cc
+++ b/chromium/media/gpu/android/codec_wrapper_unittest.cc
@@ -9,7 +9,7 @@
#include "base/memory/ref_counted.h"
#include "base/synchronization/waitable_event.h"
#include "base/test/mock_callback.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/sequenced_task_runner_handle.h"
#include "base/threading/thread.h"
#include "media/base/android/media_codec_bridge.h"
@@ -66,7 +66,7 @@ class CodecWrapperTest : public testing::Test {
}
// So that we can get the thread's task runner.
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
NiceMock<MockMediaCodecBridge>* codec_;
std::unique_ptr<CodecWrapper> wrapper_;
diff --git a/chromium/media/gpu/android/direct_shared_image_video_provider.cc b/chromium/media/gpu/android/direct_shared_image_video_provider.cc
index e50dc1b44d2..5620d53e63f 100644
--- a/chromium/media/gpu/android/direct_shared_image_video_provider.cc
+++ b/chromium/media/gpu/android/direct_shared_image_video_provider.cc
@@ -18,13 +18,13 @@
#include "gpu/command_buffer/service/abstract_texture.h"
#include "gpu/command_buffer/service/mailbox_manager.h"
#include "gpu/command_buffer/service/shared_image_factory.h"
+#include "gpu/command_buffer/service/shared_image_video.h"
#include "gpu/command_buffer/service/texture_manager.h"
#include "gpu/ipc/service/command_buffer_stub.h"
#include "gpu/ipc/service/gpu_channel.h"
#include "gpu/ipc/service/gpu_channel_manager.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/media_switches.h"
-#include "media/gpu/android/shared_image_video.h"
#include "mojo/public/cpp/bindings/callback_helpers.h"
#include "ui/gl/gl_bindings.h"
#include "ui/gl/scoped_make_current.h"
@@ -73,7 +73,7 @@ void DirectSharedImageVideoProvider::Initialize(GpuInitCB gpu_init_cb) {
void DirectSharedImageVideoProvider::RequestImage(
ImageReadyCB cb,
const ImageSpec& spec,
- scoped_refptr<TextureOwner> texture_owner) {
+ scoped_refptr<gpu::TextureOwner> texture_owner) {
// It's unclear that we should handle the image group, but since CodecImages
// have to be registered on it, we do. If the CodecImage is ever re-used,
// then part of that re-use would be to call the (then mis-named)
@@ -89,8 +89,7 @@ void DirectSharedImageVideoProvider::RequestImage(
}
GpuSharedImageVideoFactory::GpuSharedImageVideoFactory(
- SharedImageVideoProvider::GetStubCB get_stub_cb)
- : weak_factory_(this) {
+ SharedImageVideoProvider::GetStubCB get_stub_cb) {
DETACH_FROM_THREAD(thread_checker_);
stub_ = get_stub_cb.Run();
if (stub_)
@@ -142,7 +141,7 @@ void GpuSharedImageVideoFactory::Initialize(
void GpuSharedImageVideoFactory::CreateImage(
FactoryImageReadyCB image_ready_cb,
const SharedImageVideoProvider::ImageSpec& spec,
- scoped_refptr<TextureOwner> texture_owner) {
+ scoped_refptr<gpu::TextureOwner> texture_owner) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
// Generate a shared image mailbox.
@@ -187,7 +186,7 @@ void GpuSharedImageVideoFactory::CreateImage(
bool GpuSharedImageVideoFactory::CreateImageInternal(
const SharedImageVideoProvider::ImageSpec& spec,
- scoped_refptr<TextureOwner> texture_owner,
+ scoped_refptr<gpu::TextureOwner> texture_owner,
gpu::Mailbox mailbox,
scoped_refptr<CodecImage> image) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
@@ -239,8 +238,8 @@ bool GpuSharedImageVideoFactory::CreateImageInternal(
// colorspace and wire it here.
// TODO(vikassoni): This shared image need to be thread safe eventually for
// webview to work with shared images.
- auto shared_image = std::make_unique<SharedImageVideo>(
- mailbox, gfx::ColorSpace::CreateSRGB(), std::move(image),
+ auto shared_image = std::make_unique<gpu::SharedImageVideo>(
+ mailbox, size, gfx::ColorSpace::CreateSRGB(), std::move(image),
std::move(texture), std::move(shared_context),
false /* is_thread_safe */);
diff --git a/chromium/media/gpu/android/direct_shared_image_video_provider.h b/chromium/media/gpu/android/direct_shared_image_video_provider.h
index ff0a08d2001..7225062b999 100644
--- a/chromium/media/gpu/android/direct_shared_image_video_provider.h
+++ b/chromium/media/gpu/android/direct_shared_image_video_provider.h
@@ -14,13 +14,13 @@
#include "gpu/command_buffer/service/gles2_cmd_decoder.h"
#include "gpu/command_buffer/service/shared_image_representation.h"
#include "gpu/command_buffer/service/texture_manager.h"
+#include "gpu/ipc/common/android/texture_owner.h"
#include "gpu/ipc/common/vulkan_ycbcr_info.h"
#include "gpu/ipc/service/command_buffer_stub.h"
#include "media/base/video_frame.h"
#include "media/gpu/android/codec_image.h"
#include "media/gpu/android/maybe_render_early_manager.h"
#include "media/gpu/android/shared_image_video_provider.h"
-#include "media/gpu/android/surface_texture_gl_owner.h"
#include "media/gpu/android/video_frame_factory.h"
#include "media/gpu/gles2_decoder_helper.h"
#include "media/gpu/media_gpu_export.h"
@@ -43,7 +43,7 @@ class MEDIA_GPU_EXPORT DirectSharedImageVideoProvider
void Initialize(GpuInitCB get_stub_cb) override;
void RequestImage(ImageReadyCB cb,
const ImageSpec& spec,
- scoped_refptr<TextureOwner> texture_owner) override;
+ scoped_refptr<gpu::TextureOwner> texture_owner) override;
private:
base::SequenceBound<GpuSharedImageVideoFactory> gpu_factory_;
@@ -80,12 +80,12 @@ class GpuSharedImageVideoFactory
// mailbox support, where we have to have one texture per CodecImage.
void CreateImage(FactoryImageReadyCB cb,
const SharedImageVideoProvider::ImageSpec& spec,
- scoped_refptr<TextureOwner> texture_owner);
+ scoped_refptr<gpu::TextureOwner> texture_owner);
private:
// Creates a SharedImage for |mailbox|, and returns success or failure.
bool CreateImageInternal(const SharedImageVideoProvider::ImageSpec& spec,
- scoped_refptr<TextureOwner> texture_owner,
+ scoped_refptr<gpu::TextureOwner> texture_owner,
gpu::Mailbox mailbox,
scoped_refptr<CodecImage> image);
@@ -102,7 +102,7 @@ class GpuSharedImageVideoFactory
THREAD_CHECKER(thread_checker_);
- base::WeakPtrFactory<GpuSharedImageVideoFactory> weak_factory_;
+ base::WeakPtrFactory<GpuSharedImageVideoFactory> weak_factory_{this};
DISALLOW_COPY_AND_ASSIGN(GpuSharedImageVideoFactory);
};
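The weak-factory changes in the files above follow the general base::WeakPtrFactory pattern of initializing the factory with a brace default member initializer ({this}) instead of the constructor initializer list. A minimal sketch of that pattern, assuming a hypothetical ExampleConsumer class that is not part of this change:

#include "base/bind.h"
#include "base/memory/weak_ptr.h"
#include "base/threading/thread_task_runner_handle.h"

class ExampleConsumer {
 public:
  ExampleConsumer() = default;  // No explicit weak_factory_(this) needed.

  void PostWork() {
    // Callbacks bound to a weak pointer are dropped automatically if the
    // object is destroyed before they run.
    base::ThreadTaskRunnerHandle::Get()->PostTask(
        FROM_HERE, base::BindOnce(&ExampleConsumer::DoWork,
                                  weak_factory_.GetWeakPtr()));
  }

 private:
  void DoWork() {}

  // Keep the factory last so it is destroyed first, invalidating any
  // outstanding weak pointers before the other members go away.
  base::WeakPtrFactory<ExampleConsumer> weak_factory_{this};
};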
diff --git a/chromium/media/gpu/android/image_reader_gl_owner.cc b/chromium/media/gpu/android/image_reader_gl_owner.cc
deleted file mode 100644
index e2d764f8b98..00000000000
--- a/chromium/media/gpu/android/image_reader_gl_owner.cc
+++ /dev/null
@@ -1,485 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/gpu/android/image_reader_gl_owner.h"
-
-#include <android/native_window_jni.h>
-#include <jni.h>
-#include <stdint.h>
-
-#include "base/android/jni_android.h"
-#include "base/android/scoped_hardware_buffer_fence_sync.h"
-#include "base/logging.h"
-#include "base/memory/ptr_util.h"
-#include "base/metrics/histogram_functions.h"
-#include "base/metrics/histogram_macros.h"
-#include "base/posix/eintr_wrapper.h"
-#include "base/synchronization/waitable_event.h"
-#include "base/threading/thread_task_runner_handle.h"
-#include "gpu/command_buffer/service/abstract_texture.h"
-#include "gpu/ipc/common/android/android_image_reader_utils.h"
-#include "ui/gl/android/android_surface_control_compat.h"
-#include "ui/gl/gl_fence_android_native_fence_sync.h"
-#include "ui/gl/gl_utils.h"
-#include "ui/gl/scoped_binders.h"
-#include "ui/gl/scoped_make_current.h"
-
-namespace media {
-
-namespace {
-bool IsSurfaceControl(TextureOwner::Mode mode) {
- switch (mode) {
- case TextureOwner::Mode::kAImageReaderInsecureSurfaceControl:
- case TextureOwner::Mode::kAImageReaderSecureSurfaceControl:
- return true;
- case TextureOwner::Mode::kAImageReaderInsecure:
- return false;
- case TextureOwner::Mode::kSurfaceTextureInsecure:
- NOTREACHED();
- return false;
- }
- NOTREACHED();
- return false;
-}
-} // namespace
-
-// FrameAvailableEvent_ImageReader is a RefCounted wrapper for a WaitableEvent
-// (it's not possible to put one in RefCountedData). This lets us safely signal
-// an event on any thread.
-struct FrameAvailableEvent_ImageReader
- : public base::RefCountedThreadSafe<FrameAvailableEvent_ImageReader> {
- FrameAvailableEvent_ImageReader()
- : event(base::WaitableEvent::ResetPolicy::AUTOMATIC,
- base::WaitableEvent::InitialState::NOT_SIGNALED) {}
- void Signal() { event.Signal(); }
- base::WaitableEvent event;
-
- // This callback function will be called when there is a new image available
- // in the image reader's queue.
- static void CallbackSignal(void* context, AImageReader* reader) {
- (reinterpret_cast<FrameAvailableEvent_ImageReader*>(context))->Signal();
- }
-
- private:
- friend class RefCountedThreadSafe<FrameAvailableEvent_ImageReader>;
-
- ~FrameAvailableEvent_ImageReader() = default;
-};
-
-class ImageReaderGLOwner::ScopedHardwareBufferImpl
- : public base::android::ScopedHardwareBufferFenceSync {
- public:
- ScopedHardwareBufferImpl(scoped_refptr<ImageReaderGLOwner> texture_owner,
- AImage* image,
- base::android::ScopedHardwareBufferHandle handle,
- base::ScopedFD fence_fd)
- : base::android::ScopedHardwareBufferFenceSync(std::move(handle),
- std::move(fence_fd)),
- texture_owner_(std::move(texture_owner)),
- image_(image) {
- DCHECK(image_);
- texture_owner_->RegisterRefOnImage(image_);
- }
- ~ScopedHardwareBufferImpl() override {
- texture_owner_->ReleaseRefOnImage(image_, std::move(read_fence_));
- }
-
- void SetReadFence(base::ScopedFD fence_fd, bool has_context) final {
- // Client can call this method multiple times for a hardware buffer. Hence
- // all the client provided sync_fd should be merged. Eg: BeginReadAccess()
- // can be called multiple times for a SharedImageVideo representation.
- read_fence_ = gl::MergeFDs(std::move(read_fence_), std::move(fence_fd));
- }
-
- private:
- base::ScopedFD read_fence_;
- scoped_refptr<ImageReaderGLOwner> texture_owner_;
- AImage* image_;
-};
-
-ImageReaderGLOwner::ImageReaderGLOwner(
- std::unique_ptr<gpu::gles2::AbstractTexture> texture,
- Mode mode)
- : TextureOwner(false /* binds_texture_on_image_update */,
- std::move(texture)),
- loader_(base::android::AndroidImageReader::GetInstance()),
- context_(gl::GLContext::GetCurrent()),
- surface_(gl::GLSurface::GetCurrent()),
- frame_available_event_(new FrameAvailableEvent_ImageReader()) {
- DCHECK(context_);
- DCHECK(surface_);
-
- // Set the width, height and format to some default values. These parameters
- // are/may be overridden by the producer sending buffers to this imageReader's
- // Surface.
- int32_t width = 1, height = 1;
-
- // This should be as small as possible to limit the memory usage.
- // ImageReader needs 2 images to mimic the behavior of SurfaceTexture. For
- // SurfaceControl we need 3 images instead of 2 since 1 frame (and hence the
- // image associated with it) will be with the system compositor and 2 frames
- // will be in flight. Also note that we always acquire an image before
- // deleting the previously acquired image. This causes 2 acquired images to
- // be in flight at the acquisition point until the previous image is deleted.
- max_images_ = IsSurfaceControl(mode) ? 3 : 2;
- AIMAGE_FORMATS format = mode == Mode::kAImageReaderSecureSurfaceControl
- ? AIMAGE_FORMAT_PRIVATE
- : AIMAGE_FORMAT_YUV_420_888;
- AImageReader* reader = nullptr;
-
- // The usage flag below should be used when the buffer will be read from by
- // the GPU as a texture.
- uint64_t usage = mode == Mode::kAImageReaderSecureSurfaceControl
- ? AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT
- : AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
- usage |= gl::SurfaceControl::RequiredUsage();
-
- // Create a new reader for images of the desired size and format.
- media_status_t return_code = loader_.AImageReader_newWithUsage(
- width, height, format, usage, max_images_, &reader);
- if (return_code != AMEDIA_OK) {
- LOG(ERROR) << " Image reader creation failed.";
- if (return_code == AMEDIA_ERROR_INVALID_PARAMETER)
- LOG(ERROR) << "Either reader is NULL, or one or more of width, height, "
- "format, maxImages arguments is not supported";
- else
- LOG(ERROR) << "unknown error";
- return;
- }
- DCHECK(reader);
- image_reader_ = reader;
-
- // Create a new image listener.
- listener_ = std::make_unique<AImageReader_ImageListener>();
- listener_->context = reinterpret_cast<void*>(frame_available_event_.get());
- listener_->onImageAvailable =
- &FrameAvailableEvent_ImageReader::CallbackSignal;
-
- // Set the onImageAvailable listener of this image reader.
- if (loader_.AImageReader_setImageListener(image_reader_, listener_.get()) !=
- AMEDIA_OK) {
- LOG(ERROR) << " Failed to register AImageReader listener";
- return;
- }
-}
-
-ImageReaderGLOwner::~ImageReaderGLOwner() {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
-
- // Clear the texture before we return, so that OnTextureDestroyed() runs if
- // it hasn't already. This will do nothing if it has already been destroyed.
- ClearAbstractTexture();
-
- DCHECK_EQ(image_refs_.size(), 0u);
-}
-
-void ImageReaderGLOwner::OnTextureDestroyed(gpu::gles2::AbstractTexture*) {
- // The AbstractTexture is being destroyed. This can happen if, for example,
- // the video decoder's gl context is lost. Remember that the platform texture
- // might not be gone; it's possible for the gl decoder (and AbstractTexture)
- // to be destroyed via, e.g., renderer crash, but the platform texture is
- // still shared with some other gl context.
-
- // This should only be called once. Note that even during construction,
- // there's a check that |image_reader_| is constructed. Otherwise, errors
- // during init might cause us to get here without an image reader.
- DCHECK(image_reader_);
-
- // Now we can stop listening to new images.
- loader_.AImageReader_setImageListener(image_reader_, NULL);
-
- // Delete all images before closing the associated image reader.
- for (auto& image_ref : image_refs_)
- loader_.AImage_delete(image_ref.first);
-
- // Delete the image reader.
- loader_.AImageReader_delete(image_reader_);
- image_reader_ = nullptr;
-
- // Clean up the ImageRefs which should now be a no-op since there is no valid
- // |image_reader_|.
- image_refs_.clear();
- current_image_ref_.reset();
-}
-
-gl::ScopedJavaSurface ImageReaderGLOwner::CreateJavaSurface() const {
- // If we've already lost the texture, then do nothing.
- if (!image_reader_) {
- DLOG(ERROR) << "Already lost texture / image reader";
- return gl::ScopedJavaSurface::AcquireExternalSurface(nullptr);
- }
-
- // Get the android native window from the image reader.
- ANativeWindow* window = nullptr;
- if (loader_.AImageReader_getWindow(image_reader_, &window) != AMEDIA_OK) {
- DLOG(ERROR) << "unable to get a window from image reader.";
- return gl::ScopedJavaSurface::AcquireExternalSurface(nullptr);
- }
-
- // Get the java surface object from the Android native window.
- JNIEnv* env = base::android::AttachCurrentThread();
- jobject j_surface = loader_.ANativeWindow_toSurface(env, window);
- DCHECK(j_surface);
-
- // Get the scoped java surface that is owned externally.
- return gl::ScopedJavaSurface::AcquireExternalSurface(j_surface);
-}
-
-void ImageReaderGLOwner::UpdateTexImage() {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
-
- // If we've lost the texture, then do nothing.
- if (!texture())
- return;
-
- DCHECK(image_reader_);
-
- // Acquire the latest image asynchronously
- AImage* image = nullptr;
- int acquire_fence_fd = -1;
- media_status_t return_code = AMEDIA_OK;
- DCHECK_GT(max_images_, static_cast<int32_t>(image_refs_.size()));
- if (max_images_ - image_refs_.size() < 2) {
- // acquireNextImageAsync is required here since, per the spec, calling
- // AImageReader_acquireLatestImage with less than two images of margin (that
- // is, maxImages - currentAcquiredImages < 2) will not discard as expected.
- // We always have currentAcquiredImages as 1 since we delete the previous
- // image only after acquiring a new one.
- return_code = loader_.AImageReader_acquireNextImageAsync(
- image_reader_, &image, &acquire_fence_fd);
- } else {
- return_code = loader_.AImageReader_acquireLatestImageAsync(
- image_reader_, &image, &acquire_fence_fd);
- }
-
- // TODO(http://crbug.com/846050).
- // Need to add better error handling if one of the errors below occurs.
- // Currently we just return on error.
- switch (return_code) {
- case AMEDIA_ERROR_INVALID_PARAMETER:
- LOG(ERROR) << " Image is NULL";
- base::UmaHistogramSparse("Media.AImageReaderGLOwner.AcquireImageResult",
- return_code);
- return;
- case AMEDIA_IMGREADER_MAX_IMAGES_ACQUIRED:
- LOG(ERROR)
- << "number of concurrently acquired images has reached the limit";
- base::UmaHistogramSparse("Media.AImageReaderGLOwner.AcquireImageResult",
- return_code);
- return;
- case AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE:
- LOG(ERROR) << "no buffers currently available in the reader queue";
- base::UmaHistogramSparse("Media.AImageReaderGLOwner.AcquireImageResult",
- return_code);
- return;
- case AMEDIA_ERROR_UNKNOWN:
- LOG(ERROR) << "method fails for some other reasons";
- base::UmaHistogramSparse("Media.AImageReaderGLOwner.AcquireImageResult",
- return_code);
- return;
- case AMEDIA_OK:
- // Method call succeeded.
- break;
- default:
- // No other error code should be returned.
- NOTREACHED();
- return;
- }
- base::ScopedFD scoped_acquire_fence_fd(acquire_fence_fd);
-
- // If there is no new image, simply return. At this point the previous image
- // will still be bound to the texture.
- if (!image) {
- return;
- }
-
- // Make the newly acquired image as current image.
- current_image_ref_.emplace(this, image, std::move(scoped_acquire_fence_fd));
-}
-
-void ImageReaderGLOwner::EnsureTexImageBound() {
- if (current_image_ref_)
- current_image_ref_->EnsureBound();
-}
-
-std::unique_ptr<base::android::ScopedHardwareBufferFenceSync>
-ImageReaderGLOwner::GetAHardwareBuffer() {
- if (!current_image_ref_)
- return nullptr;
-
- AHardwareBuffer* buffer = nullptr;
- loader_.AImage_getHardwareBuffer(current_image_ref_->image(), &buffer);
- if (!buffer)
- return nullptr;
-
- return std::make_unique<ScopedHardwareBufferImpl>(
- this, current_image_ref_->image(),
- base::android::ScopedHardwareBufferHandle::Create(buffer),
- current_image_ref_->GetReadyFence());
-}
-
-void ImageReaderGLOwner::RegisterRefOnImage(AImage* image) {
- DCHECK(image_reader_);
-
- // Add a ref that the caller will release.
- image_refs_[image].count++;
-}
-
-void ImageReaderGLOwner::ReleaseRefOnImage(AImage* image,
- base::ScopedFD fence_fd) {
- // During cleanup on losing the texture, all images are synchronously released
- // and the |image_reader_| is destroyed.
- if (!image_reader_)
- return;
-
- auto it = image_refs_.find(image);
- DCHECK(it != image_refs_.end());
-
- auto& image_ref = it->second;
- DCHECK_GT(image_ref.count, 0u);
- image_ref.count--;
- image_ref.release_fence_fd =
- gl::MergeFDs(std::move(image_ref.release_fence_fd), std::move(fence_fd));
-
- if (image_ref.count > 0)
- return;
-
- if (image_ref.release_fence_fd.is_valid()) {
- loader_.AImage_deleteAsync(image,
- std::move(image_ref.release_fence_fd.release()));
- } else {
- loader_.AImage_delete(image);
- }
-
- image_refs_.erase(it);
-}
-
-void ImageReaderGLOwner::GetTransformMatrix(float mtx[]) {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
-
- // Assign a Y-inverted identity matrix. Both the MCVD and AVDA paths perform a
- // Y inversion of this matrix later. Hence if we assign a Y-inverted matrix
- // here, it simply becomes an identity matrix later and will have no effect
- // on the image data.
- static constexpr float kYInvertedIdentity[16]{1, 0, 0, 0, 0, -1, 0, 0,
- 0, 0, 1, 0, 0, 1, 0, 1};
- memcpy(mtx, kYInvertedIdentity, sizeof(kYInvertedIdentity));
-}
-
-void ImageReaderGLOwner::ReleaseBackBuffers() {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- // ReleaseBackBuffers() call is not required with image reader.
-}
-
-gl::GLContext* ImageReaderGLOwner::GetContext() const {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- return context_.get();
-}
-
-gl::GLSurface* ImageReaderGLOwner::GetSurface() const {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- return surface_.get();
-}
-
-void ImageReaderGLOwner::SetReleaseTimeToNow() {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- release_time_ = base::TimeTicks::Now();
-}
-
-void ImageReaderGLOwner::IgnorePendingRelease() {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- release_time_ = base::TimeTicks();
-}
-
-bool ImageReaderGLOwner::IsExpectingFrameAvailable() {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- return !release_time_.is_null();
-}
-
-void ImageReaderGLOwner::WaitForFrameAvailable() {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- DCHECK(!release_time_.is_null());
-
- // 5msec covers >99.9% of cases, so just wait for up to that much before
- // giving up. If an error occurs, we might not ever get a notification.
- const base::TimeDelta max_wait = base::TimeDelta::FromMilliseconds(5);
- const base::TimeTicks call_time = base::TimeTicks::Now();
- const base::TimeDelta elapsed = call_time - release_time_;
- const base::TimeDelta remaining = max_wait - elapsed;
- release_time_ = base::TimeTicks();
- bool timed_out = false;
-
- if (remaining <= base::TimeDelta()) {
- if (!frame_available_event_->event.IsSignaled()) {
- DVLOG(1) << "Deferred WaitForFrameAvailable() timed out, elapsed: "
- << elapsed.InMillisecondsF() << "ms";
- timed_out = true;
- }
- } else {
- DCHECK_LE(remaining, max_wait);
- SCOPED_UMA_HISTOGRAM_TIMER(
- "Media.CodecImage.ImageReaderGLOwner.WaitTimeForFrame");
- if (!frame_available_event_->event.TimedWait(remaining)) {
- DVLOG(1) << "WaitForFrameAvailable() timed out, elapsed: "
- << elapsed.InMillisecondsF()
- << "ms, additionally waited: " << remaining.InMillisecondsF()
- << "ms, total: " << (elapsed + remaining).InMillisecondsF()
- << "ms";
- timed_out = true;
- }
- }
- UMA_HISTOGRAM_BOOLEAN("Media.CodecImage.ImageReaderGLOwner.FrameTimedOut",
- timed_out);
-}
-
-ImageReaderGLOwner::ImageRef::ImageRef() = default;
-ImageReaderGLOwner::ImageRef::~ImageRef() = default;
-ImageReaderGLOwner::ImageRef::ImageRef(ImageRef&& other) = default;
-ImageReaderGLOwner::ImageRef& ImageReaderGLOwner::ImageRef::operator=(
- ImageRef&& other) = default;
-
-ImageReaderGLOwner::ScopedCurrentImageRef::ScopedCurrentImageRef(
- ImageReaderGLOwner* texture_owner,
- AImage* image,
- base::ScopedFD ready_fence)
- : texture_owner_(texture_owner),
- image_(image),
- ready_fence_(std::move(ready_fence)) {
- DCHECK(image_);
- texture_owner_->RegisterRefOnImage(image_);
-}
-
-ImageReaderGLOwner::ScopedCurrentImageRef::~ScopedCurrentImageRef() {
- base::ScopedFD release_fence;
- // If there is no |image_reader_|, we are in tear down so no fence is
- // required.
- if (image_bound_ && texture_owner_->image_reader_)
- release_fence = gpu::CreateEglFenceAndExportFd();
- else
- release_fence = std::move(ready_fence_);
- texture_owner_->ReleaseRefOnImage(image_, std::move(release_fence));
-}
-
-base::ScopedFD ImageReaderGLOwner::ScopedCurrentImageRef::GetReadyFence()
- const {
- return base::ScopedFD(HANDLE_EINTR(dup(ready_fence_.get())));
-}
-
-void ImageReaderGLOwner::ScopedCurrentImageRef::EnsureBound() {
- if (image_bound_)
- return;
-
- // Insert an EGL fence and make server wait for image to be available.
- if (!gpu::InsertEglFenceAndWait(GetReadyFence()))
- return;
-
- // Create EGL image from the AImage and bind it to the texture.
- if (!gpu::CreateAndBindEglImage(image_, texture_owner_->GetTextureId(),
- &texture_owner_->loader_))
- return;
-
- image_bound_ = true;
-}
-
-} // namespace media
diff --git a/chromium/media/gpu/android/image_reader_gl_owner.h b/chromium/media/gpu/android/image_reader_gl_owner.h
deleted file mode 100644
index 8d9fd9c3236..00000000000
--- a/chromium/media/gpu/android/image_reader_gl_owner.h
+++ /dev/null
@@ -1,137 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_GPU_ANDROID_IMAGE_READER_GL_OWNER_H_
-#define MEDIA_GPU_ANDROID_IMAGE_READER_GL_OWNER_H_
-
-#include <memory>
-
-#include "base/android/android_image_reader_compat.h"
-#include "base/containers/flat_map.h"
-#include "media/gpu/android/texture_owner.h"
-#include "ui/gl/gl_fence_egl.h"
-#include "ui/gl/gl_image_ahardwarebuffer.h"
-
-namespace base {
-namespace android {
-class ScopedHardwareBufferFenceSync;
-} // namespace android
-} // namespace base
-
-namespace media {
-
-struct FrameAvailableEvent_ImageReader;
-
-// This class wraps AImageReader usage: it creates a GL texture using the
-// current platform GL context and returns a new ImageReaderGLOwner attached
-// to it. The surface handle of the AImageReader is attached to decoded media
-// frames. Media frames can update the attached surface handle with image
-// data, and this class helps to create an EGLImage from the image data
-// present in the surface.
-class MEDIA_GPU_EXPORT ImageReaderGLOwner : public TextureOwner {
- public:
- gl::GLContext* GetContext() const override;
- gl::GLSurface* GetSurface() const override;
- gl::ScopedJavaSurface CreateJavaSurface() const override;
- void UpdateTexImage() override;
- void EnsureTexImageBound() override;
- void GetTransformMatrix(float mtx[16]) override;
- void ReleaseBackBuffers() override;
- void SetReleaseTimeToNow() override;
- void IgnorePendingRelease() override;
- bool IsExpectingFrameAvailable() override;
- void WaitForFrameAvailable() override;
- std::unique_ptr<base::android::ScopedHardwareBufferFenceSync>
- GetAHardwareBuffer() override;
-
- const AImageReader* image_reader_for_testing() const { return image_reader_; }
- int32_t max_images_for_testing() const { return max_images_; }
-
- protected:
- void OnTextureDestroyed(gpu::gles2::AbstractTexture*) override;
-
- private:
- friend class TextureOwner;
- class ScopedHardwareBufferImpl;
-
- // Manages ownership of the latest image retrieved from AImageReader and
- // ensuring synchronization of its use in GL using fences.
- class ScopedCurrentImageRef {
- public:
- ScopedCurrentImageRef(ImageReaderGLOwner* texture_owner,
- AImage* image,
- base::ScopedFD ready_fence);
- ~ScopedCurrentImageRef();
- AImage* image() const { return image_; }
- base::ScopedFD GetReadyFence() const;
- void EnsureBound();
-
- private:
- ImageReaderGLOwner* texture_owner_;
- AImage* image_;
- base::ScopedFD ready_fence_;
-
- // Set to true if the current image is bound to |texture_id_|.
- bool image_bound_ = false;
-
- DISALLOW_COPY_AND_ASSIGN(ScopedCurrentImageRef);
- };
-
- ImageReaderGLOwner(std::unique_ptr<gpu::gles2::AbstractTexture> texture,
- Mode secure_mode);
- ~ImageReaderGLOwner() override;
-
- // Registers and releases a ref on the image. Once the ref-count for an image
- // goes to 0, it is released back to the AImageReader with an optional release
- // fence if needed.
- void RegisterRefOnImage(AImage* image);
- void ReleaseRefOnImage(AImage* image, base::ScopedFD fence_fd);
-
- // AImageReader instance
- AImageReader* image_reader_;
-
- // Most recently acquired image from the image reader. This works like a
- // cached image until the next image is acquired, which overwrites it.
- base::Optional<ScopedCurrentImageRef> current_image_ref_;
- std::unique_ptr<AImageReader_ImageListener> listener_;
-
- // A map of pending refs on each AImage. An image is automatically released
- // back to the reader once its ref-count drops to 0.
- struct ImageRef {
- ImageRef();
- ~ImageRef();
-
- ImageRef(ImageRef&& other);
- ImageRef& operator=(ImageRef&& other);
-
- size_t count = 0u;
- base::ScopedFD release_fence_fd;
-
- DISALLOW_COPY_AND_ASSIGN(ImageRef);
- };
- using AImageRefMap = base::flat_map<AImage*, ImageRef>;
- AImageRefMap image_refs_;
-
- // Reference to the class instance that is used to dynamically load the
- // functions from Android libraries at runtime.
- base::android::AndroidImageReader& loader_;
-
- // The context and surface that were used to create |texture_id_|.
- scoped_refptr<gl::GLContext> context_;
- scoped_refptr<gl::GLSurface> surface_;
-
- // When SetReleaseTimeToNow() was last called, i.e., when the last codec
- // buffer was released to this surface. Null if IgnorePendingRelease() or
- // WaitForFrameAvailable() has been called since.
- base::TimeTicks release_time_;
- scoped_refptr<FrameAvailableEvent_ImageReader> frame_available_event_;
- int32_t max_images_ = 0;
-
- THREAD_CHECKER(thread_checker_);
- DISALLOW_COPY_AND_ASSIGN(ImageReaderGLOwner);
-};
-
-} // namespace media
-
-#endif // MEDIA_GPU_ANDROID_IMAGE_READER_GL_OWNER_H_
diff --git a/chromium/media/gpu/android/image_reader_gl_owner_unittest.cc b/chromium/media/gpu/android/image_reader_gl_owner_unittest.cc
deleted file mode 100644
index 3c1060296b2..00000000000
--- a/chromium/media/gpu/android/image_reader_gl_owner_unittest.cc
+++ /dev/null
@@ -1,205 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/gpu/android/texture_owner.h"
-
-#include <stdint.h>
-#include <memory>
-#include <utility>
-
-#include "base/test/scoped_feature_list.h"
-#include "base/test/scoped_task_environment.h"
-#include "gpu/command_buffer/service/abstract_texture.h"
-#include "media/base/media_switches.h"
-#include "media/gpu/android/image_reader_gl_owner.h"
-#include "media/gpu/android/mock_abstract_texture.h"
-#include "testing/gtest/include/gtest/gtest.h"
-#include "ui/gl/gl_bindings.h"
-#include "ui/gl/gl_context_egl.h"
-#include "ui/gl/gl_surface_egl.h"
-#include "ui/gl/init/gl_factory.h"
-
-namespace media {
-
-class ImageReaderGLOwnerTest : public testing::Test {
- public:
- ImageReaderGLOwnerTest() {}
- ~ImageReaderGLOwnerTest() override {}
-
- protected:
- void SetUp() override {
- if (!IsImageReaderSupported())
- return;
-
- scoped_feature_list_.InitAndEnableFeature(media::kAImageReaderVideoOutput);
- gl::init::InitializeGLOneOffImplementation(gl::kGLImplementationEGLGLES2,
- false, false, false, true);
- surface_ = new gl::PbufferGLSurfaceEGL(gfx::Size(320, 240));
- surface_->Initialize();
-
- share_group_ = new gl::GLShareGroup();
- context_ = new gl::GLContextEGL(share_group_.get());
- context_->Initialize(surface_.get(), gl::GLContextAttribs());
- ASSERT_TRUE(context_->MakeCurrent(surface_.get()));
-
- // Create a texture.
- glGenTextures(1, &texture_id_);
-
- std::unique_ptr<MockAbstractTexture> texture =
- std::make_unique<MockAbstractTexture>(texture_id_);
- abstract_texture_ = texture->AsWeakPtr();
- image_reader_ = TextureOwner::Create(std::move(texture), SecureMode());
- }
-
- virtual TextureOwner::Mode SecureMode() {
- return TextureOwner::Mode::kAImageReaderInsecure;
- }
-
- void TearDown() override {
- if (texture_id_ && context_->MakeCurrent(surface_.get()))
- glDeleteTextures(1, &texture_id_);
- image_reader_ = nullptr;
- context_ = nullptr;
- share_group_ = nullptr;
- surface_ = nullptr;
- gl::init::ShutdownGL(false);
- }
-
- bool IsImageReaderSupported() const {
- return base::android::AndroidImageReader::GetInstance().IsSupported();
- }
-
- base::test::ScopedFeatureList scoped_feature_list_;
- scoped_refptr<TextureOwner> image_reader_;
- GLuint texture_id_ = 0;
-
- base::WeakPtr<MockAbstractTexture> abstract_texture_;
-
- scoped_refptr<gl::GLContext> context_;
- scoped_refptr<gl::GLShareGroup> share_group_;
- scoped_refptr<gl::GLSurface> surface_;
- base::test::ScopedTaskEnvironment scoped_task_environment_;
-};
-
-TEST_F(ImageReaderGLOwnerTest, ImageReaderObjectCreation) {
- if (!IsImageReaderSupported())
- return;
-
- ASSERT_TRUE(image_reader_);
-}
-
-TEST_F(ImageReaderGLOwnerTest, ScopedJavaSurfaceCreation) {
- if (!IsImageReaderSupported())
- return;
-
- gl::ScopedJavaSurface temp = image_reader_->CreateJavaSurface();
- ASSERT_TRUE(temp.IsValid());
-}
-
-// Verify that ImageReaderGLOwner creates a bindable GL texture, and deletes
-// it during destruction.
-TEST_F(ImageReaderGLOwnerTest, GLTextureIsCreatedAndDestroyed) {
- if (!IsImageReaderSupported())
- return;
-
- // |texture_id| should not work anymore after we delete image_reader_.
- image_reader_ = nullptr;
- EXPECT_FALSE(abstract_texture_);
-}
-
-// Make sure that image_reader_ remembers the correct context and surface.
-TEST_F(ImageReaderGLOwnerTest, ContextAndSurfaceAreCaptured) {
- if (!IsImageReaderSupported())
- return;
-
- ASSERT_EQ(context_, image_reader_->GetContext());
- ASSERT_EQ(surface_, image_reader_->GetSurface());
-}
-
-// Verify that destruction works even if some other context is current.
-TEST_F(ImageReaderGLOwnerTest, DestructionWorksWithWrongContext) {
- if (!IsImageReaderSupported())
- return;
-
- scoped_refptr<gl::GLSurface> new_surface(
- new gl::PbufferGLSurfaceEGL(gfx::Size(320, 240)));
- new_surface->Initialize();
-
- scoped_refptr<gl::GLShareGroup> new_share_group(new gl::GLShareGroup());
- scoped_refptr<gl::GLContext> new_context(
- new gl::GLContextEGL(new_share_group.get()));
- new_context->Initialize(new_surface.get(), gl::GLContextAttribs());
- ASSERT_TRUE(new_context->MakeCurrent(new_surface.get()));
-
- image_reader_ = nullptr;
- EXPECT_FALSE(abstract_texture_);
-
- // |new_context| should still be current.
- ASSERT_TRUE(new_context->IsCurrent(new_surface.get()));
-
- new_context = nullptr;
- new_share_group = nullptr;
- new_surface = nullptr;
-}
-
-// The max number of images used by the ImageReader must be 2 for non-Surface
-// control.
-TEST_F(ImageReaderGLOwnerTest, MaxImageExpectation) {
- if (!IsImageReaderSupported())
- return;
- EXPECT_EQ(static_cast<ImageReaderGLOwner*>(image_reader_.get())
- ->max_images_for_testing(),
- 2);
-}
-
-class ImageReaderGLOwnerSecureSurfaceControlTest
- : public ImageReaderGLOwnerTest {
- public:
- TextureOwner::Mode SecureMode() final {
- return TextureOwner::Mode::kAImageReaderSecureSurfaceControl;
- }
-};
-
-TEST_F(ImageReaderGLOwnerSecureSurfaceControlTest, CreatesSecureAImageReader) {
- if (!IsImageReaderSupported())
- return;
-
- ASSERT_TRUE(image_reader_);
- auto* a_image_reader = static_cast<ImageReaderGLOwner*>(image_reader_.get())
- ->image_reader_for_testing();
- int32_t format = AIMAGE_FORMAT_YUV_420_888;
- base::android::AndroidImageReader::GetInstance().AImageReader_getFormat(
- a_image_reader, &format);
- EXPECT_EQ(format, AIMAGE_FORMAT_PRIVATE);
-}
-
-// The max number of images used by the ImageReader must be 3 for Surface
-// control.
-TEST_F(ImageReaderGLOwnerSecureSurfaceControlTest, MaxImageExpectation) {
- if (!IsImageReaderSupported())
- return;
- EXPECT_EQ(static_cast<ImageReaderGLOwner*>(image_reader_.get())
- ->max_images_for_testing(),
- 3);
-}
-
-class ImageReaderGLOwnerInsecureSurfaceControlTest
- : public ImageReaderGLOwnerTest {
- public:
- TextureOwner::Mode SecureMode() final {
- return TextureOwner::Mode::kAImageReaderInsecureSurfaceControl;
- }
-};
-
-// The max number of images used by the ImageReader must be 3 for Surface
-// control.
-TEST_F(ImageReaderGLOwnerInsecureSurfaceControlTest, MaxImageExpectation) {
- if (!IsImageReaderSupported())
- return;
- EXPECT_EQ(static_cast<ImageReaderGLOwner*>(image_reader_.get())
- ->max_images_for_testing(),
- 3);
-}
-
-} // namespace media
diff --git a/chromium/media/gpu/android/maybe_render_early_manager.cc b/chromium/media/gpu/android/maybe_render_early_manager.cc
index 878a7b90a37..a206d3bb93a 100644
--- a/chromium/media/gpu/android/maybe_render_early_manager.cc
+++ b/chromium/media/gpu/android/maybe_render_early_manager.cc
@@ -18,7 +18,7 @@ namespace media {
// the actual rendering.
class GpuMaybeRenderEarlyImpl {
public:
- GpuMaybeRenderEarlyImpl() : weak_factory_(this) {}
+ GpuMaybeRenderEarlyImpl() {}
~GpuMaybeRenderEarlyImpl() = default;
void SetCodecImageGroup(scoped_refptr<CodecImageGroup> image_group) {
@@ -51,7 +51,13 @@ class GpuMaybeRenderEarlyImpl {
// adding a DestructionCB to CodecImage. However, since we use a weak ptr
// for the callback, it isn't safe. CodecImageGroup uses a strong ref.
DCHECK(std::find(images_.begin(), images_.end(), image) != images_.end());
- image_group_->RemoveCodecImage(image);
+ // Remember that |image_group_| might not be the same one that |image|
+ // belongs to. So, we can't remove it. Instead, trust that the destruction
+ // cb will also be run.
+ // TODO(liberato): Simplify this. We don't need both callbacks. The
+ // destruction CB can be renamed to the unused cb, and CodecImageGroup can
+ // set it like it used to, and notify us about it. It would be called both
+ // on CodecImage destruction and (with pooling) when it's unused.
base::Erase(images_, image);
internal::MaybeRenderEarly(&images_);
}
@@ -63,7 +69,7 @@ class GpuMaybeRenderEarlyImpl {
// replace this when SetImageGroup() is called.
scoped_refptr<CodecImageGroup> image_group_;
- base::WeakPtrFactory<GpuMaybeRenderEarlyImpl> weak_factory_;
+ base::WeakPtrFactory<GpuMaybeRenderEarlyImpl> weak_factory_{this};
DISALLOW_COPY_AND_ASSIGN(GpuMaybeRenderEarlyImpl);
};
diff --git a/chromium/media/gpu/android/media_codec_video_decoder.cc b/chromium/media/gpu/android/media_codec_video_decoder.cc
index 89b9581bbad..33398f5b11f 100644
--- a/chromium/media/gpu/android/media_codec_video_decoder.cc
+++ b/chromium/media/gpu/android/media_codec_video_decoder.cc
@@ -177,6 +177,11 @@ std::vector<SupportedVideoDecoderConfig> GetSupportedConfigsInternal(
} // namespace
+// If re-initializing the codec changes the resolution to more than
+// |kReallocateThreshold| times the old one, force a codec reallocation to
+// update the hints that we provide to MediaCodec. crbug.com/989182.
+constexpr static float kReallocateThreshold = 4;
+
// static
PendingDecode PendingDecode::CreateEos() {
return {DecoderBuffer::CreateEOSBuffer(), base::DoNothing()};
@@ -218,9 +223,7 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
overlay_factory_cb_(std::move(overlay_factory_cb)),
device_info_(device_info),
enable_threaded_texture_mailboxes_(
- gpu_preferences.enable_threaded_texture_mailboxes),
- weak_factory_(this),
- codec_allocator_weak_factory_(this) {
+ gpu_preferences.enable_threaded_texture_mailboxes) {
DVLOG(2) << __func__;
surface_chooser_helper_.chooser()->SetClientCallbacks(
base::Bind(&MediaCodecVideoDecoder::OnSurfaceChosen,
@@ -329,6 +332,25 @@ void MediaCodecVideoDecoder::Initialize(const VideoDecoderConfig& config,
// Do the rest of the initialization lazily on the first decode.
BindToCurrentLoop(std::move(init_cb)).Run(true);
+
+ const int width = config.coded_size().width();
+ // On re-init, reallocate the codec if the size has changed too much.
+ // Restrict this behavior to Q, where the behavior changed.
+ if (first_init) {
+ last_width_ = width;
+ } else if (width > last_width_ * kReallocateThreshold && device_info_ &&
+ device_info_->SdkVersion() > base::android::SDK_VERSION_P) {
+ DCHECK(codec_);
+ // Reallocate the codec the next time we queue input, once there are no
+ // outstanding output buffers. Note that |deferred_flush_pending_| might
+ // already be set, which is fine. We're just upgrading the flush.
+ //
+ // If the codec IsDrained(), then we'll flush anyway. However, just to be
+ // sure, request a deferred flush.
+ deferred_flush_pending_ = true;
+ deferred_reallocation_pending_ = true;
+ last_width_ = width;
+ } // else leave |last_width_| unmodified, since we're re-using the codec.
}
void MediaCodecVideoDecoder::SetCdm(CdmContext* cdm_context, InitCB init_cb) {
@@ -433,7 +455,7 @@ void MediaCodecVideoDecoder::StartLazyInit() {
}
void MediaCodecVideoDecoder::OnVideoFrameFactoryInitialized(
- scoped_refptr<TextureOwner> texture_owner) {
+ scoped_refptr<gpu::TextureOwner> texture_owner) {
DVLOG(2) << __func__;
TRACE_EVENT0("media",
"MediaCodecVideoDecoder::OnVideoFrameFactoryInitialized");
@@ -657,6 +679,28 @@ void MediaCodecVideoDecoder::FlushCodec() {
// If a deferred flush was pending, then it isn't anymore.
deferred_flush_pending_ = false;
+ // Release and re-allocate the codec, if needed, for a resolution change.
+ // This also counts as a flush. Note that we could also stop / configure /
+ // start the codec, but there's a fair bit of complexity in that. Timing
+ // tests didn't show any big advantage. During a resolution change, the time
+ // between queueing the next input buffer and getting the next output buffer
+ // was:
+ //
+ // flush only: 0.04 s
+ // stop / configure / start: 0.026 s
+ // release / create: 0.03 s
+ //
+ // So, it seems that flushing the codec defers some work (buffer reallocation
+ // or similar) that ends up on the critical path. I didn't verify what
+ // happens when we're flushing without a resolution change, nor can I quite
+ // explain how anything can be done off the critical path when a flush is
+ // deferred to the first queued input.
+ if (deferred_reallocation_pending_) {
+ deferred_reallocation_pending_ = false;
+ ReleaseCodec();
+ CreateCodec();
+ }
+
if (!codec_ || codec_->IsFlushed())
return;
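The re-initialization logic added above reduces to a width comparison against the previously configured size, gated on the Android SDK level. A standalone sketch of that decision under the same 4x threshold (ShouldReallocateCodec and the literal SDK constant are hypothetical; the real code compares against base::android::SDK_VERSION_P):

// Illustrative only; mirrors the threshold logic in the change above.
constexpr float kReallocateThreshold = 4;
constexpr int kSdkVersionP = 28;  // Android 9 (Pie) API level.

bool ShouldReallocateCodec(int new_width, int last_width, int sdk_version) {
  // Reallocate only on Q and later, and only when the new width exceeds the
  // previous width by more than the threshold factor.
  return sdk_version > kSdkVersionP &&
         new_width > last_width * kReallocateThreshold;
}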
diff --git a/chromium/media/gpu/android/media_codec_video_decoder.h b/chromium/media/gpu/android/media_codec_video_decoder.h
index 4c2c17b8f25..c92548cd4e3 100644
--- a/chromium/media/gpu/android/media_codec_video_decoder.h
+++ b/chromium/media/gpu/android/media_codec_video_decoder.h
@@ -128,7 +128,7 @@ class MEDIA_GPU_EXPORT MediaCodecVideoDecoder : public VideoDecoder {
// Finishes initialization.
void StartLazyInit();
void OnVideoFrameFactoryInitialized(
- scoped_refptr<TextureOwner> texture_owner);
+ scoped_refptr<gpu::TextureOwner> texture_owner);
// Resets |waiting_for_key_| to false, indicating that MediaCodec might now
// accept buffers.
@@ -308,11 +308,20 @@ class MEDIA_GPU_EXPORT MediaCodecVideoDecoder : public VideoDecoder {
// in some random state, possibly with output buffers pending.
bool deferred_flush_pending_ = false;
+ // Should we upgrade the next flush to a full release / reallocation of the
+ // codec? This lets us update our hints to the decoder about the size of the
+ // expected video.
+ bool deferred_reallocation_pending_ = false;
+
+ // Width, in pixels, of the resolution that we last told the codec about.
+ int last_width_ = 0;
+
// Optional crypto object from the Cdm.
base::android::ScopedJavaGlobalRef<jobject> media_crypto_;
- base::WeakPtrFactory<MediaCodecVideoDecoder> weak_factory_;
- base::WeakPtrFactory<MediaCodecVideoDecoder> codec_allocator_weak_factory_;
+ base::WeakPtrFactory<MediaCodecVideoDecoder> weak_factory_{this};
+ base::WeakPtrFactory<MediaCodecVideoDecoder> codec_allocator_weak_factory_{
+ this};
DISALLOW_COPY_AND_ASSIGN(MediaCodecVideoDecoder);
};
diff --git a/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc b/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc
index 1b258ea0e0a..daf210590fb 100644
--- a/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc
+++ b/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc
@@ -10,9 +10,10 @@
#include "base/run_loop.h"
#include "base/test/gmock_callback_support.h"
#include "base/test/mock_callback.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/thread_task_runner_handle.h"
#include "gpu/config/gpu_preferences.h"
+#include "gpu/ipc/common/android/mock_texture_owner.h"
#include "media/base/android/media_codec_util.h"
#include "media/base/android/mock_android_overlay.h"
#include "media/base/android/mock_media_crypto_context.h"
@@ -23,7 +24,6 @@
#include "media/gpu/android/fake_codec_allocator.h"
#include "media/gpu/android/mock_android_video_surface_chooser.h"
#include "media/gpu/android/mock_device_info.h"
-#include "media/gpu/android/mock_texture_owner.h"
#include "media/gpu/android/video_frame_factory.h"
#include "media/video/supported_video_decoder_config.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -66,7 +66,7 @@ class MockVideoFrameFactory : public VideoFrameFactory {
MOCK_METHOD5(
MockCreateVideoFrame,
void(CodecOutputBuffer* raw_output_buffer,
- scoped_refptr<TextureOwner> texture_owner,
+ scoped_refptr<gpu::TextureOwner> texture_owner,
base::TimeDelta timestamp,
gfx::Size natural_size,
PromotionHintAggregator::NotifyPromotionHintCB promotion_hint_cb));
@@ -80,8 +80,10 @@ class MockVideoFrameFactory : public VideoFrameFactory {
if (!surface_bundle) {
texture_owner_ = nullptr;
} else {
- texture_owner_ =
- surface_bundle->overlay() ? nullptr : surface_bundle->texture_owner();
+ texture_owner_ = surface_bundle->overlay()
+ ? nullptr
+ : surface_bundle->codec_buffer_wait_coordinator()
+ ->texture_owner();
}
}
@@ -103,7 +105,7 @@ class MockVideoFrameFactory : public VideoFrameFactory {
}
std::unique_ptr<CodecOutputBuffer> last_output_buffer_;
- scoped_refptr<TextureOwner> texture_owner_;
+ scoped_refptr<gpu::TextureOwner> texture_owner_;
base::OnceClosure last_closure_;
};
@@ -135,8 +137,8 @@ class MediaCodecVideoDecoderTest : public testing::TestWithParam<VideoCodec> {
std::make_unique<NiceMock<MockAndroidVideoSurfaceChooser>>();
surface_chooser_ = surface_chooser.get();
- auto texture_owner =
- base::MakeRefCounted<NiceMock<MockTextureOwner>>(0, nullptr, nullptr);
+ auto texture_owner = base::MakeRefCounted<NiceMock<gpu::MockTextureOwner>>(
+ 0, nullptr, nullptr);
texture_owner_ = texture_owner.get();
auto video_frame_factory =
@@ -278,13 +280,13 @@ class MediaCodecVideoDecoderTest : public testing::TestWithParam<VideoCodec> {
protected:
const VideoCodec codec_;
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
base::android::ScopedJavaGlobalRef<jobject> java_surface_;
scoped_refptr<DecoderBuffer> fake_decoder_buffer_;
std::unique_ptr<MockDeviceInfo> device_info_;
std::unique_ptr<FakeCodecAllocator> codec_allocator_;
MockAndroidVideoSurfaceChooser* surface_chooser_;
- MockTextureOwner* texture_owner_;
+ gpu::MockTextureOwner* texture_owner_;
MockVideoFrameFactory* video_frame_factory_;
NiceMock<base::MockCallback<VideoDecoder::DecodeCB>> decode_cb_;
std::unique_ptr<DestructionObserver> destruction_observer_;
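The test fixture changes above replace base::test::ScopedTaskEnvironment with base::test::TaskEnvironment. A minimal fixture sketch using the renamed type (ExampleTest and the posted task are hypothetical, not part of this change):

#include "base/bind.h"
#include "base/test/task_environment.h"
#include "base/threading/thread_task_runner_handle.h"
#include "testing/gtest/include/gtest/gtest.h"

class ExampleTest : public testing::Test {
 protected:
  // Provides a main-thread task runner and task scheduling for the test.
  base::test::TaskEnvironment task_environment_;
};

TEST_F(ExampleTest, RunsPostedTasks) {
  bool ran = false;
  base::ThreadTaskRunnerHandle::Get()->PostTask(
      FROM_HERE, base::BindOnce([](bool* flag) { *flag = true; }, &ran));
  task_environment_.RunUntilIdle();
  EXPECT_TRUE(ran);
}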
diff --git a/chromium/media/gpu/android/mock_abstract_texture.cc b/chromium/media/gpu/android/mock_abstract_texture.cc
deleted file mode 100644
index 2adda1ff146..00000000000
--- a/chromium/media/gpu/android/mock_abstract_texture.cc
+++ /dev/null
@@ -1,19 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/gpu/android/mock_abstract_texture.h"
-
-namespace media {
-
-MockAbstractTexture::MockAbstractTexture() = default;
-
-MockAbstractTexture::MockAbstractTexture(GLuint service_id)
- : texture_base_(std::make_unique<gpu::TextureBase>(service_id)) {
- ON_CALL(*this, GetTextureBase())
- .WillByDefault(::testing::Return(texture_base_.get()));
-}
-
-MockAbstractTexture::~MockAbstractTexture() = default;
-
-} // namespace media
diff --git a/chromium/media/gpu/android/mock_abstract_texture.h b/chromium/media/gpu/android/mock_abstract_texture.h
deleted file mode 100644
index 737c95f9de5..00000000000
--- a/chromium/media/gpu/android/mock_abstract_texture.h
+++ /dev/null
@@ -1,45 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_GPU_ANDROID_MOCK_ABSTRACT_TEXTURE_H_
-#define MEDIA_GPU_ANDROID_MOCK_ABSTRACT_TEXTURE_H_
-
-#include "base/memory/weak_ptr.h"
-#include "gpu/command_buffer/service/abstract_texture.h"
-#include "testing/gmock/include/gmock/gmock.h"
-
-namespace media {
-
-// SupportsWeakPtr so it's easy to tell when it has been destroyed.
-class MockAbstractTexture
- : public ::testing::NiceMock<gpu::gles2::AbstractTexture>,
- public base::SupportsWeakPtr<MockAbstractTexture> {
- public:
- MockAbstractTexture();
- // If provided, we'll make a TextureBase that returns this id. We do not
- // delete this texture.
- explicit MockAbstractTexture(GLuint service_id);
- ~MockAbstractTexture() override;
-
- MOCK_METHOD0(ForceContextLost, void());
- MOCK_CONST_METHOD0(GetTextureBase, gpu::TextureBase*());
- MOCK_METHOD2(SetParameteri, void(GLenum pname, GLint param));
- MOCK_METHOD2(BindStreamTextureImage,
- void(gpu::gles2::GLStreamTextureImage* image,
- GLuint service_id));
- MOCK_METHOD2(BindImage, void(gl::GLImage* image, bool client_managed));
- MOCK_METHOD0(ReleaseImage, void());
- MOCK_CONST_METHOD0(GetImage, gl::GLImage*());
- MOCK_METHOD0(SetCleared, void());
- MOCK_METHOD1(SetCleanupCallback, void(CleanupCallback));
-
- private:
- // May be null.
- std::unique_ptr<gpu::TextureBase> texture_base_;
- CleanupCallback cleanup_callback_;
-};
-
-} // namespace media
-
-#endif // MEDIA_GPU_ANDROID_MOCK_ABSTRACT_TEXTURE_H_
diff --git a/chromium/media/gpu/android/mock_codec_buffer_wait_coordinator.cc b/chromium/media/gpu/android/mock_codec_buffer_wait_coordinator.cc
new file mode 100644
index 00000000000..ea2752219d8
--- /dev/null
+++ b/chromium/media/gpu/android/mock_codec_buffer_wait_coordinator.cc
@@ -0,0 +1,33 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/android/mock_codec_buffer_wait_coordinator.h"
+
+namespace media {
+
+using testing::Invoke;
+using testing::Return;
+
+MockCodecBufferWaitCoordinator::MockCodecBufferWaitCoordinator(
+ scoped_refptr<NiceMock<gpu::MockTextureOwner>> texture_owner)
+ : CodecBufferWaitCoordinator(texture_owner),
+ mock_texture_owner(std::move(texture_owner)),
+ expecting_frame_available(false) {
+ ON_CALL(*this, texture_owner()).WillByDefault(Return(mock_texture_owner));
+
+ ON_CALL(*this, SetReleaseTimeToNow())
+ .WillByDefault(Invoke(
+ this, &MockCodecBufferWaitCoordinator::FakeSetReleaseTimeToNow));
+ ON_CALL(*this, IsExpectingFrameAvailable())
+ .WillByDefault(Invoke(
+ this,
+ &MockCodecBufferWaitCoordinator::FakeIsExpectingFrameAvailable));
+ ON_CALL(*this, WaitForFrameAvailable())
+ .WillByDefault(Invoke(
+ this, &MockCodecBufferWaitCoordinator::FakeWaitForFrameAvailable));
+}
+
+MockCodecBufferWaitCoordinator::~MockCodecBufferWaitCoordinator() = default;
+
+} // namespace media
diff --git a/chromium/media/gpu/android/mock_codec_buffer_wait_coordinator.h b/chromium/media/gpu/android/mock_codec_buffer_wait_coordinator.h
new file mode 100644
index 00000000000..14fe033bdc0
--- /dev/null
+++ b/chromium/media/gpu/android/mock_codec_buffer_wait_coordinator.h
@@ -0,0 +1,39 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_ANDROID_MOCK_CODEC_BUFFER_WAIT_COORDINATOR_H_
+#define MEDIA_GPU_ANDROID_MOCK_CODEC_BUFFER_WAIT_COORDINATOR_H_
+
+#include "gpu/ipc/common/android/mock_texture_owner.h"
+#include "media/gpu/android/codec_buffer_wait_coordinator.h"
+
+namespace media {
+
+// Mock class with mostly fake functions.
+class MockCodecBufferWaitCoordinator : public CodecBufferWaitCoordinator {
+ public:
+ MockCodecBufferWaitCoordinator(
+ scoped_refptr<NiceMock<gpu::MockTextureOwner>> texture_owner);
+
+ MOCK_CONST_METHOD0(texture_owner,
+ scoped_refptr<NiceMock<gpu::MockTextureOwner>>());
+ MOCK_METHOD0(SetReleaseTimeToNow, void());
+ MOCK_METHOD0(IsExpectingFrameAvailable, bool());
+ MOCK_METHOD0(WaitForFrameAvailable, void());
+
+ // Fake implementations that the mocks will call by default.
+ void FakeSetReleaseTimeToNow() { expecting_frame_available = true; }
+ bool FakeIsExpectingFrameAvailable() { return expecting_frame_available; }
+ void FakeWaitForFrameAvailable() { expecting_frame_available = false; }
+
+ scoped_refptr<NiceMock<gpu::MockTextureOwner>> mock_texture_owner;
+ bool expecting_frame_available;
+
+ protected:
+ ~MockCodecBufferWaitCoordinator();
+};
+
+} // namespace media
+
+#endif // MEDIA_GPU_ANDROID_MOCK_CODEC_BUFFER_WAIT_COORDINATOR_H_
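The new mock above uses the common gmock pattern of routing ON_CALL defaults to small fake member functions so the mock keeps simple state between calls. A self-contained sketch of the same pattern against a hypothetical interface (Waiter and MockWaiter are illustrative names only):

#include "testing/gmock/include/gmock/gmock.h"

class Waiter {
 public:
  virtual ~Waiter() = default;
  virtual void SetReady() = 0;
  virtual bool IsReady() = 0;
};

class MockWaiter : public Waiter {
 public:
  MockWaiter() {
    // By default, delegate the mocked calls to the fakes below; tests can
    // still add EXPECT_CALLs on top of these defaults.
    ON_CALL(*this, SetReady())
        .WillByDefault(::testing::Invoke(this, &MockWaiter::FakeSetReady));
    ON_CALL(*this, IsReady())
        .WillByDefault(::testing::Invoke(this, &MockWaiter::FakeIsReady));
  }
  MOCK_METHOD0(SetReady, void());
  MOCK_METHOD0(IsReady, bool());

  // Fake implementations that track a single bit of state.
  void FakeSetReady() { ready_ = true; }
  bool FakeIsReady() { return ready_; }

 private:
  bool ready_ = false;
};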
diff --git a/chromium/media/gpu/android/mock_texture_owner.cc b/chromium/media/gpu/android/mock_texture_owner.cc
deleted file mode 100644
index 037ccd28279..00000000000
--- a/chromium/media/gpu/android/mock_texture_owner.cc
+++ /dev/null
@@ -1,47 +0,0 @@
-// Copyright 2017 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/gpu/android/mock_texture_owner.h"
-
-#include "media/gpu/android/mock_abstract_texture.h"
-
-namespace media {
-
-using testing::Invoke;
-using testing::Return;
-
-MockTextureOwner::MockTextureOwner(GLuint fake_texture_id,
- gl::GLContext* fake_context,
- gl::GLSurface* fake_surface,
- bool binds_texture_on_update)
- : TextureOwner(binds_texture_on_update,
- std::make_unique<MockAbstractTexture>(fake_texture_id)),
- fake_context(fake_context),
- fake_surface(fake_surface),
- expecting_frame_available(false),
- expect_update_tex_image(!binds_texture_on_update) {
- ON_CALL(*this, GetTextureId()).WillByDefault(Return(fake_texture_id));
- ON_CALL(*this, GetContext()).WillByDefault(Return(fake_context));
- ON_CALL(*this, GetSurface()).WillByDefault(Return(fake_surface));
- ON_CALL(*this, SetReleaseTimeToNow())
- .WillByDefault(Invoke(this, &MockTextureOwner::FakeSetReleaseTimeToNow));
- ON_CALL(*this, IgnorePendingRelease())
- .WillByDefault(Invoke(this, &MockTextureOwner::FakeIgnorePendingRelease));
- ON_CALL(*this, IsExpectingFrameAvailable())
- .WillByDefault(
- Invoke(this, &MockTextureOwner::FakeIsExpectingFrameAvailable));
- ON_CALL(*this, WaitForFrameAvailable())
- .WillByDefault(
- Invoke(this, &MockTextureOwner::FakeWaitForFrameAvailable));
- ON_CALL(*this, EnsureTexImageBound()).WillByDefault(Invoke([this] {
- CHECK(expect_update_tex_image);
- }));
-}
-
-MockTextureOwner::~MockTextureOwner() {
- // TextureOwner requires this.
- ClearAbstractTexture();
-}
-
-} // namespace media
diff --git a/chromium/media/gpu/android/mock_texture_owner.h b/chromium/media/gpu/android/mock_texture_owner.h
deleted file mode 100644
index c5f06c0b532..00000000000
--- a/chromium/media/gpu/android/mock_texture_owner.h
+++ /dev/null
@@ -1,66 +0,0 @@
-// Copyright 2017 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_GPU_ANDROID_MOCK_TEXTURE_OWNER_H_
-#define MEDIA_GPU_ANDROID_MOCK_TEXTURE_OWNER_H_
-
-#include <memory>
-
-#include "base/android/scoped_hardware_buffer_fence_sync.h"
-#include "media/gpu/android/texture_owner.h"
-#include "testing/gmock/include/gmock/gmock.h"
-#include "testing/gtest/include/gtest/gtest.h"
-#include "ui/gl/gl_bindings.h"
-#include "ui/gl/gl_context.h"
-#include "ui/gl/gl_surface.h"
-
-namespace media {
-
-// This is a mock with a small amount of fake functionality too.
-class MockTextureOwner : public TextureOwner {
- public:
- MockTextureOwner(GLuint fake_texture_id,
- gl::GLContext* fake_context,
- gl::GLSurface* fake_surface,
- bool binds_texture_on_update = false);
-
- MOCK_CONST_METHOD0(GetTextureId, GLuint());
- MOCK_CONST_METHOD0(GetContext, gl::GLContext*());
- MOCK_CONST_METHOD0(GetSurface, gl::GLSurface*());
- MOCK_CONST_METHOD0(CreateJavaSurface, gl::ScopedJavaSurface());
- MOCK_METHOD0(UpdateTexImage, void());
- MOCK_METHOD0(EnsureTexImageBound, void());
- MOCK_METHOD1(GetTransformMatrix, void(float mtx[16]));
- MOCK_METHOD0(ReleaseBackBuffers, void());
- MOCK_METHOD0(SetReleaseTimeToNow, void());
- MOCK_METHOD0(IgnorePendingRelease, void());
- MOCK_METHOD0(IsExpectingFrameAvailable, bool());
- MOCK_METHOD0(WaitForFrameAvailable, void());
- MOCK_METHOD1(OnTextureDestroyed, void(gpu::gles2::AbstractTexture*));
-
- std::unique_ptr<base::android::ScopedHardwareBufferFenceSync>
- GetAHardwareBuffer() override {
- get_a_hardware_buffer_count++;
- return nullptr;
- }
-
- // Fake implementations that the mocks will call by default.
- void FakeSetReleaseTimeToNow() { expecting_frame_available = true; }
- void FakeIgnorePendingRelease() { expecting_frame_available = false; }
- bool FakeIsExpectingFrameAvailable() { return expecting_frame_available; }
- void FakeWaitForFrameAvailable() { expecting_frame_available = false; }
-
- gl::GLContext* fake_context;
- gl::GLSurface* fake_surface;
- bool expecting_frame_available;
- int get_a_hardware_buffer_count = 0;
- bool expect_update_tex_image;
-
- protected:
- ~MockTextureOwner();
-};
-
-} // namespace media
-
-#endif // MEDIA_GPU_ANDROID_MOCK_TEXTURE_OWNER_H_
diff --git a/chromium/media/gpu/android/promotion_hint_aggregator_impl.cc b/chromium/media/gpu/android/promotion_hint_aggregator_impl.cc
index 880bce817cb..cf98994cc1c 100644
--- a/chromium/media/gpu/android/promotion_hint_aggregator_impl.cc
+++ b/chromium/media/gpu/android/promotion_hint_aggregator_impl.cc
@@ -28,8 +28,7 @@ constexpr base::TimeDelta MinimumUnpromotableFrameTime =
base::TimeDelta::FromMilliseconds(2000);
PromotionHintAggregatorImpl::PromotionHintAggregatorImpl(
- const base::TickClock* tick_clock)
- : weak_ptr_factory_(this) {
+ const base::TickClock* tick_clock) {
if (!tick_clock)
tick_clock = base::DefaultTickClock::GetInstance();
tick_clock_ = tick_clock;
diff --git a/chromium/media/gpu/android/promotion_hint_aggregator_impl.h b/chromium/media/gpu/android/promotion_hint_aggregator_impl.h
index 6fb8a26f2f4..9130800b34d 100644
--- a/chromium/media/gpu/android/promotion_hint_aggregator_impl.h
+++ b/chromium/media/gpu/android/promotion_hint_aggregator_impl.h
@@ -41,7 +41,7 @@ class MEDIA_GPU_EXPORT PromotionHintAggregatorImpl
// Number of frames which were promotable in a row.
int consecutive_promotable_frames_ = 0;
- base::WeakPtrFactory<PromotionHintAggregatorImpl> weak_ptr_factory_;
+ base::WeakPtrFactory<PromotionHintAggregatorImpl> weak_ptr_factory_{this};
DISALLOW_COPY_AND_ASSIGN(PromotionHintAggregatorImpl);
};
diff --git a/chromium/media/gpu/android/shared_image_video.cc b/chromium/media/gpu/android/shared_image_video.cc
deleted file mode 100644
index 4908964f1af..00000000000
--- a/chromium/media/gpu/android/shared_image_video.cc
+++ /dev/null
@@ -1,524 +0,0 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/gpu/android/shared_image_video.h"
-
-#include <utility>
-
-#include "base/android/scoped_hardware_buffer_fence_sync.h"
-#include "base/android/scoped_hardware_buffer_handle.h"
-#include "components/viz/common/gpu/vulkan_context_provider.h"
-#include "components/viz/common/resources/resource_format_utils.h"
-#include "components/viz/common/resources/resource_sizes.h"
-#include "gpu/command_buffer/common/shared_image_usage.h"
-#include "gpu/command_buffer/service/abstract_texture.h"
-#include "gpu/command_buffer/service/mailbox_manager.h"
-#include "gpu/command_buffer/service/memory_tracking.h"
-#include "gpu/command_buffer/service/shared_context_state.h"
-#include "gpu/command_buffer/service/shared_image_representation.h"
-#include "gpu/command_buffer/service/shared_image_representation_skia_gl.h"
-#include "gpu/command_buffer/service/skia_utils.h"
-#include "gpu/command_buffer/service/texture_manager.h"
-#include "gpu/vulkan/vulkan_device_queue.h"
-#include "gpu/vulkan/vulkan_fence_helper.h"
-#include "gpu/vulkan/vulkan_function_pointers.h"
-#include "gpu/vulkan/vulkan_implementation.h"
-#include "gpu/vulkan/vulkan_util.h"
-#include "media/gpu/android/codec_image.h"
-#include "third_party/skia/include/core/SkPromiseImageTexture.h"
-#include "third_party/skia/include/gpu/GrBackendSemaphore.h"
-#include "third_party/skia/include/gpu/GrBackendSurface.h"
-
-namespace media {
-
-namespace {
-sk_sp<SkPromiseImageTexture> CreatePromiseTexture(
- viz::VulkanContextProvider* context_provider,
- base::android::ScopedHardwareBufferHandle ahb_handle,
- gfx::Size size,
- viz::ResourceFormat format) {
- gpu::VulkanImplementation* vk_implementation =
- context_provider->GetVulkanImplementation();
- VkDevice vk_device = context_provider->GetDeviceQueue()->GetVulkanDevice();
- VkPhysicalDevice vk_physical_device =
- context_provider->GetDeviceQueue()->GetVulkanPhysicalDevice();
-
- // Create a VkImage and import AHB.
- VkImage vk_image;
- VkImageCreateInfo vk_image_info;
- VkDeviceMemory vk_device_memory;
- VkDeviceSize mem_allocation_size;
- gpu::VulkanYCbCrInfo ycbcr_info;
- if (!vk_implementation->CreateVkImageAndImportAHB(
- vk_device, vk_physical_device, size, std::move(ahb_handle), &vk_image,
- &vk_image_info, &vk_device_memory, &mem_allocation_size,
- &ycbcr_info)) {
- return nullptr;
- }
-
- GrVkYcbcrConversionInfo fYcbcrConversionInfo(
- static_cast<VkSamplerYcbcrModelConversion>(
- ycbcr_info.suggested_ycbcr_model),
- static_cast<VkSamplerYcbcrRange>(ycbcr_info.suggested_ycbcr_range),
- static_cast<VkChromaLocation>(ycbcr_info.suggested_xchroma_offset),
- static_cast<VkChromaLocation>(ycbcr_info.suggested_ychroma_offset),
- VK_FILTER_LINEAR, // VkFilter
- 0, // VkBool32 forceExplicitReconstruction
- ycbcr_info.external_format,
- static_cast<VkFormatFeatureFlags>(ycbcr_info.format_features));
-
- // Create backend texture from the VkImage.
- GrVkAlloc alloc = {vk_device_memory, 0, mem_allocation_size, 0};
- GrVkImageInfo vk_info = {vk_image,
- alloc,
- vk_image_info.tiling,
- vk_image_info.initialLayout,
- vk_image_info.format,
- vk_image_info.mipLevels,
- VK_QUEUE_FAMILY_EXTERNAL,
- GrProtected::kNo,
- fYcbcrConversionInfo};
-
- // TODO(bsalomon): Determine whether it makes sense to attempt to reuse this
- // if the vk_info stays the same on subsequent calls.
- auto promise_texture = SkPromiseImageTexture::Make(
- GrBackendTexture(size.width(), size.height(), vk_info));
- if (!promise_texture) {
- vkDestroyImage(vk_device, vk_image, nullptr);
- vkFreeMemory(vk_device, vk_device_memory, nullptr);
- return nullptr;
- }
-
- return promise_texture;
-}
-
-void DestroyVkPromiseTexture(viz::VulkanContextProvider* context_provider,
- sk_sp<SkPromiseImageTexture> promise_texture) {
- DCHECK(promise_texture);
- DCHECK(promise_texture->unique());
-
- GrVkImageInfo vk_image_info;
- bool result =
- promise_texture->backendTexture().getVkImageInfo(&vk_image_info);
- DCHECK(result);
-
- gpu::VulkanFenceHelper* fence_helper =
- context_provider->GetDeviceQueue()->GetFenceHelper();
- fence_helper->EnqueueImageCleanupForSubmittedWork(
- vk_image_info.fImage, vk_image_info.fAlloc.fMemory);
-}
-
-} // namespace
-
-SharedImageVideo::SharedImageVideo(
- const gpu::Mailbox& mailbox,
- const gfx::ColorSpace color_space,
- scoped_refptr<CodecImage> codec_image,
- std::unique_ptr<gpu::gles2::AbstractTexture> abstract_texture,
- scoped_refptr<gpu::SharedContextState> context_state,
- bool is_thread_safe)
- : SharedImageBacking(
- mailbox,
- viz::RGBA_8888,
- codec_image->GetSize(),
- color_space,
- (gpu::SHARED_IMAGE_USAGE_DISPLAY | gpu::SHARED_IMAGE_USAGE_GLES2),
- viz::ResourceSizes::UncheckedSizeInBytes<size_t>(
- codec_image->GetSize(),
- viz::RGBA_8888),
- is_thread_safe),
- codec_image_(std::move(codec_image)),
- abstract_texture_(std::move(abstract_texture)),
- context_state_(std::move(context_state)) {
- DCHECK(codec_image_);
- DCHECK(context_state_);
-
- // Currently this backing is not thread safe.
- DCHECK(!is_thread_safe);
- context_state_->AddContextLostObserver(this);
-}
-
-SharedImageVideo::~SharedImageVideo() {
- codec_image_->ReleaseCodecBuffer();
- if (context_state_)
- context_state_->RemoveContextLostObserver(this);
-}
-
-bool SharedImageVideo::IsCleared() const {
- return true;
-}
-
-void SharedImageVideo::SetCleared() {}
-
-void SharedImageVideo::Update(std::unique_ptr<gfx::GpuFence> in_fence) {
- DCHECK(!in_fence);
-}
-
-bool SharedImageVideo::ProduceLegacyMailbox(
- gpu::MailboxManager* mailbox_manager) {
- DCHECK(abstract_texture_);
- mailbox_manager->ProduceTexture(mailbox(),
- abstract_texture_->GetTextureBase());
- return true;
-}
-
-void SharedImageVideo::Destroy() {}
-
-size_t SharedImageVideo::EstimatedSizeForMemTracking() const {
- // This backing contributes to gpu memory only if it's bound to the texture and
- // not when the backing is created.
- return codec_image_->was_tex_image_bound() ? estimated_size() : 0;
-}
-
-void SharedImageVideo::OnContextLost() {
- // We release codec buffers when the shared image context is lost, because the
- // texture owner's texture was created on the shared context. Once that context
- // is lost, no one should try to use the texture.
- codec_image_->ReleaseCodecBuffer();
- context_state_->RemoveContextLostObserver(this);
- context_state_ = nullptr;
-}
-
-base::Optional<gpu::VulkanYCbCrInfo> SharedImageVideo::GetYcbcrInfo() {
- // For non-vulkan context, return null.
- if (!context_state_->GrContextIsVulkan())
- return base::nullopt;
-
- // Render the codec image.
- codec_image_->RenderToFrontBuffer();
-
- // Get the AHB from the latest image.
- auto scoped_hardware_buffer =
- codec_image_->texture_owner()->GetAHardwareBuffer();
- if (!scoped_hardware_buffer) {
- return base::nullopt;
- }
-
- DCHECK(scoped_hardware_buffer->buffer());
- auto* context_provider = context_state_->vk_context_provider();
- gpu::VulkanImplementation* vk_implementation =
- context_provider->GetVulkanImplementation();
- VkDevice vk_device = context_provider->GetDeviceQueue()->GetVulkanDevice();
-
- gpu::VulkanYCbCrInfo ycbcr_info;
- if (!vk_implementation->GetSamplerYcbcrConversionInfo(
- vk_device, scoped_hardware_buffer->TakeBuffer(), &ycbcr_info)) {
- LOG(ERROR) << "Failed to get the ycbcr info.";
- return base::nullopt;
- }
- return base::Optional<gpu::VulkanYCbCrInfo>(ycbcr_info);
-}
-
-// Representation of SharedImageVideo as a GL Texture.
-class SharedImageRepresentationGLTextureVideo
- : public gpu::SharedImageRepresentationGLTexture {
- public:
- SharedImageRepresentationGLTextureVideo(gpu::SharedImageManager* manager,
- SharedImageVideo* backing,
- gpu::MemoryTypeTracker* tracker,
- gpu::gles2::Texture* texture)
- : gpu::SharedImageRepresentationGLTexture(manager, backing, tracker),
- texture_(texture) {}
-
- gpu::gles2::Texture* GetTexture() override { return texture_; }
-
- bool BeginAccess(GLenum mode) override {
- // This representation should only be called for read.
- DCHECK_EQ(mode,
- static_cast<GLenum>(GL_SHARED_IMAGE_ACCESS_MODE_READ_CHROMIUM));
-
- auto* video_backing = static_cast<SharedImageVideo*>(backing());
- video_backing->BeginGLReadAccess();
- return true;
- }
-
- void EndAccess() override {}
-
- private:
- gpu::gles2::Texture* texture_;
-
- DISALLOW_COPY_AND_ASSIGN(SharedImageRepresentationGLTextureVideo);
-};
-
-// Representation of SharedImageVideo as a GL Texture.
-class SharedImageRepresentationGLTexturePassthroughVideo
- : public gpu::SharedImageRepresentationGLTexturePassthrough {
- public:
- SharedImageRepresentationGLTexturePassthroughVideo(
- gpu::SharedImageManager* manager,
- SharedImageVideo* backing,
- gpu::MemoryTypeTracker* tracker,
- scoped_refptr<gpu::gles2::TexturePassthrough> texture)
- : gpu::SharedImageRepresentationGLTexturePassthrough(manager,
- backing,
- tracker),
- texture_(std::move(texture)) {}
-
- const scoped_refptr<gpu::gles2::TexturePassthrough>& GetTexturePassthrough()
- override {
- return texture_;
- }
-
- bool BeginAccess(GLenum mode) override {
- // This representation should only be called for read.
- DCHECK_EQ(mode,
- static_cast<GLenum>(GL_SHARED_IMAGE_ACCESS_MODE_READ_CHROMIUM));
-
- auto* video_backing = static_cast<SharedImageVideo*>(backing());
- video_backing->BeginGLReadAccess();
- return true;
- }
-
- void EndAccess() override {}
-
- private:
- scoped_refptr<gpu::gles2::TexturePassthrough> texture_;
-
- DISALLOW_COPY_AND_ASSIGN(SharedImageRepresentationGLTexturePassthroughVideo);
-};
-
-// Vulkan backed Skia representation of SharedImageVideo.
-class SharedImageRepresentationVideoSkiaVk
- : public gpu::SharedImageRepresentationSkia {
- public:
- SharedImageRepresentationVideoSkiaVk(
- gpu::SharedImageManager* manager,
- gpu::SharedImageBacking* backing,
- scoped_refptr<gpu::SharedContextState> context_state,
- gpu::MemoryTypeTracker* tracker)
- : gpu::SharedImageRepresentationSkia(manager, backing, tracker),
- context_state_(std::move(context_state)) {
- DCHECK(context_state_);
- DCHECK(context_state_->vk_context_provider());
- }
-
- ~SharedImageRepresentationVideoSkiaVk() override {
- DCHECK(end_access_semaphore_ == VK_NULL_HANDLE);
-
- // |promise_texture_| could be null if a read access never began.
- if (!promise_texture_)
- return;
- DestroyVkPromiseTexture(context_state_->vk_context_provider(),
- std::move(promise_texture_));
- }
-
- sk_sp<SkSurface> BeginWriteAccess(
- int final_msaa_count,
- const SkSurfaceProps& surface_props,
- std::vector<GrBackendSemaphore>* begin_semaphores,
- std::vector<GrBackendSemaphore>* end_semaphores) override {
- // Writes are not intended to be used for video-backed representations.
- NOTIMPLEMENTED();
- return nullptr;
- }
-
- void EndWriteAccess(sk_sp<SkSurface> surface) override { NOTIMPLEMENTED(); }
-
- sk_sp<SkPromiseImageTexture> BeginReadAccess(
- std::vector<GrBackendSemaphore>* begin_semaphores,
- std::vector<GrBackendSemaphore>* end_semaphores) override {
- if (!scoped_hardware_buffer_) {
- auto* video_backing = static_cast<SharedImageVideo*>(backing());
- DCHECK(video_backing);
- auto* codec_image = video_backing->codec_image_.get();
- auto* texture_owner = codec_image->texture_owner().get();
-
- // Render the codec image and get AHB from latest image.
- codec_image->RenderToFrontBuffer();
- scoped_hardware_buffer_ = texture_owner->GetAHardwareBuffer();
- if (!scoped_hardware_buffer_) {
- LOG(ERROR) << "Failed to get the hardware buffer.";
- return nullptr;
- }
- }
- DCHECK(scoped_hardware_buffer_->buffer());
-
- // Wait on the sync fd attached to the buffer to make sure the buffer is
- // ready before the read. This is done by inserting the sync fd semaphore
- // into the |begin_semaphores| vector, which the client will wait on.
- base::ScopedFD sync_fd = scoped_hardware_buffer_->TakeFence();
- if (!BeginRead(begin_semaphores, end_semaphores, std::move(sync_fd))) {
- return nullptr;
- }
-
- if (!promise_texture_) {
- // Create the promise texture.
- promise_texture_ = CreatePromiseTexture(
- context_state_->vk_context_provider(),
- scoped_hardware_buffer_->TakeBuffer(), size(), format());
- }
- return promise_texture_;
- }
-
- void EndReadAccess() override {
- DCHECK(end_access_semaphore_ != VK_NULL_HANDLE);
-
- gpu::SemaphoreHandle semaphore_handle =
- vk_implementation()->GetSemaphoreHandle(vk_device(),
- end_access_semaphore_);
- auto sync_fd = semaphore_handle.TakeHandle();
- DCHECK(sync_fd.is_valid());
-
- // Pass the end access sync fd to the scoped hardware buffer. This will make
- // sure that the AImage associated with the hardware buffer will be deleted
- // only when the read access is ending.
- scoped_hardware_buffer_->SetReadFence(std::move(sync_fd), true);
- fence_helper()->EnqueueSemaphoreCleanupForSubmittedWork(
- end_access_semaphore_);
- end_access_semaphore_ = VK_NULL_HANDLE;
- }
-
- private:
- bool BeginRead(std::vector<GrBackendSemaphore>* begin_semaphores,
- std::vector<GrBackendSemaphore>* end_semaphores,
- base::ScopedFD sync_fd) {
- DCHECK(begin_semaphores);
- DCHECK(end_semaphores);
- DCHECK(end_access_semaphore_ == VK_NULL_HANDLE);
-
- VkSemaphore begin_access_semaphore = VK_NULL_HANDLE;
- if (sync_fd.is_valid()) {
- begin_access_semaphore = vk_implementation()->ImportSemaphoreHandle(
- vk_device(),
- gpu::SemaphoreHandle(VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
- std::move(sync_fd)));
- if (begin_access_semaphore == VK_NULL_HANDLE) {
- DLOG(ERROR) << "Failed to import semaphore from sync_fd.";
- return false;
- }
- }
-
- end_access_semaphore_ =
- vk_implementation()->CreateExternalSemaphore(vk_device());
-
- if (end_access_semaphore_ == VK_NULL_HANDLE) {
- DLOG(ERROR) << "Failed to create the external semaphore.";
- if (begin_access_semaphore != VK_NULL_HANDLE) {
- vkDestroySemaphore(vk_device(), begin_access_semaphore,
- nullptr /* pAllocator */);
- }
- return false;
- }
- end_semaphores->emplace_back();
- end_semaphores->back().initVulkan(end_access_semaphore_);
-
- if (begin_access_semaphore != VK_NULL_HANDLE) {
- begin_semaphores->emplace_back();
- begin_semaphores->back().initVulkan(begin_access_semaphore);
- }
- return true;
- }
-
- VkDevice vk_device() {
- return context_state_->vk_context_provider()
- ->GetDeviceQueue()
- ->GetVulkanDevice();
- }
-
- gpu::VulkanImplementation* vk_implementation() {
- return context_state_->vk_context_provider()->GetVulkanImplementation();
- }
-
- gpu::VulkanFenceHelper* fence_helper() {
- return context_state_->vk_context_provider()
- ->GetDeviceQueue()
- ->GetFenceHelper();
- }
-
- sk_sp<SkPromiseImageTexture> promise_texture_;
- scoped_refptr<gpu::SharedContextState> context_state_;
- std::unique_ptr<base::android::ScopedHardwareBufferFenceSync>
- scoped_hardware_buffer_;
- VkSemaphore end_access_semaphore_ = VK_NULL_HANDLE;
-};
-
-// TODO(vikassoni): Currently GLRenderer doesn't support overlays with shared
-// image. Add support for overlays in GLRenderer as well as overlay
-// representations of shared image.
-std::unique_ptr<gpu::SharedImageRepresentationGLTexture>
-SharedImageVideo::ProduceGLTexture(gpu::SharedImageManager* manager,
- gpu::MemoryTypeTracker* tracker) {
- // For (old) overlays, we don't have a texture owner, but overlay promotion
- // might not happen for some reason. In that case, it will try to draw,
- // which should result in no image.
- if (!codec_image_->texture_owner())
- return nullptr;
- // TODO(vikassoni): We would want to give the TextureOwner's underlying
- // Texture, but it was not set with the correct size. The AbstractTexture,
- // that we use for legacy mailbox, is correctly set.
- auto* texture =
- gpu::gles2::Texture::CheckedCast(abstract_texture_->GetTextureBase());
- DCHECK(texture);
-
- return std::make_unique<SharedImageRepresentationGLTextureVideo>(
- manager, this, tracker, texture);
-}
-
-// TODO(vikassoni): Currently GLRenderer doesn't support overlays with shared
-// image. Add support for overlays in GLRenderer as well as overlay
-// representations of shared image.
-std::unique_ptr<gpu::SharedImageRepresentationGLTexturePassthrough>
-SharedImageVideo::ProduceGLTexturePassthrough(gpu::SharedImageManager* manager,
- gpu::MemoryTypeTracker* tracker) {
- // For (old) overlays, we don't have a texture owner, but overlay promotion
- // might not happen for some reason. In that case, it will try to draw,
- // which should result in no image.
- if (!codec_image_->texture_owner())
- return nullptr;
- // TODO(vikassoni): We would want to give the TextureOwner's underlying
- // Texture, but it was not set with the correct size. The AbstractTexture,
- // that we use for legacy mailbox, is correctly set.
- scoped_refptr<gpu::gles2::TexturePassthrough> texture =
- gpu::gles2::TexturePassthrough::CheckedCast(
- abstract_texture_->GetTextureBase());
- DCHECK(texture);
-
- return std::make_unique<SharedImageRepresentationGLTexturePassthroughVideo>(
- manager, this, tracker, std::move(texture));
-}
-
-// Currently SkiaRenderer doesn't support overlays.
-std::unique_ptr<gpu::SharedImageRepresentationSkia>
-SharedImageVideo::ProduceSkia(
- gpu::SharedImageManager* manager,
- gpu::MemoryTypeTracker* tracker,
- scoped_refptr<gpu::SharedContextState> context_state) {
- DCHECK(context_state);
-
- // For (old) overlays, we don't have a texture owner, but overlay promotion
- // might not happen for some reason. In that case, it will try to draw,
- // which should result in no image.
- if (!codec_image_->texture_owner())
- return nullptr;
-
- if (context_state->GrContextIsVulkan()) {
- return std::make_unique<SharedImageRepresentationVideoSkiaVk>(
- manager, this, std::move(context_state), tracker);
- }
-
- DCHECK(context_state->GrContextIsGL());
- auto* texture = gpu::gles2::Texture::CheckedCast(
- codec_image_->texture_owner()->GetTextureBase());
- DCHECK(texture);
-
- // In GL mode, create the SharedImageRepresentationGLTextureVideo
- // representation to use with SharedImageRepresentationVideoSkiaGL.
- auto gl_representation =
- std::make_unique<SharedImageRepresentationGLTextureVideo>(
- manager, this, tracker, texture);
- return gpu::SharedImageRepresentationSkiaGL::Create(
- std::move(gl_representation), nullptr, manager, this, tracker);
-}
-
-void SharedImageVideo::BeginGLReadAccess() {
- // Render the codec image.
- codec_image_->RenderToFrontBuffer();
-
- // Bind the tex image if it's not already bound.
- auto* texture_owner = codec_image_->texture_owner().get();
- if (!texture_owner->binds_texture_on_update())
- texture_owner->EnsureTexImageBound();
-}
-
-} // namespace media
diff --git a/chromium/media/gpu/android/shared_image_video.h b/chromium/media/gpu/android/shared_image_video.h
deleted file mode 100644
index cffd661cbea..00000000000
--- a/chromium/media/gpu/android/shared_image_video.h
+++ /dev/null
@@ -1,98 +0,0 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_GPU_ANDROID_SHARED_IMAGE_VIDEO_H_
-#define MEDIA_GPU_ANDROID_SHARED_IMAGE_VIDEO_H_
-
-#include <memory>
-
-#include "base/memory/scoped_refptr.h"
-#include "base/optional.h"
-#include "gpu/command_buffer/service/shared_context_state.h"
-#include "gpu/command_buffer/service/shared_image_backing.h"
-#include "gpu/ipc/common/vulkan_ycbcr_info.h"
-#include "media/gpu/media_gpu_export.h"
-
-namespace gpu {
-class SharedImageRepresentationGLTexture;
-class SharedImageRepresentationSkia;
-struct Mailbox;
-
-namespace gles2 {
-class AbstractTexture;
-} // namespace gles2
-
-} // namespace gpu
-
-namespace media {
-class CodecImage;
-
-// Implementation of SharedImageBacking that renders MediaCodec buffers to a
-// TextureOwner or overlay as needed in order to draw them.
-class MEDIA_GPU_EXPORT SharedImageVideo
- : public gpu::SharedImageBacking,
- public gpu::SharedContextState::ContextLostObserver {
- public:
- SharedImageVideo(
- const gpu::Mailbox& mailbox,
- const gfx::ColorSpace color_space,
- scoped_refptr<CodecImage> codec_image,
- std::unique_ptr<gpu::gles2::AbstractTexture> abstract_texture,
- scoped_refptr<gpu::SharedContextState> shared_context_state,
- bool is_thread_safe);
-
- ~SharedImageVideo() override;
-
- // SharedImageBacking implementation.
- bool IsCleared() const override;
- void SetCleared() override;
- void Update(std::unique_ptr<gfx::GpuFence> in_fence) override;
- bool ProduceLegacyMailbox(gpu::MailboxManager* mailbox_manager) override;
- void Destroy() override;
- size_t EstimatedSizeForMemTracking() const override;
-
- // SharedContextState::ContextLostObserver implementation.
- void OnContextLost() override;
-
- // Returns ycbcr information. This is only valid in vulkan context and
- // nullopt for other context.
- base::Optional<gpu::VulkanYCbCrInfo> GetYcbcrInfo();
-
- protected:
- std::unique_ptr<gpu::SharedImageRepresentationGLTexture> ProduceGLTexture(
- gpu::SharedImageManager* manager,
- gpu::MemoryTypeTracker* tracker) override;
-
- std::unique_ptr<gpu::SharedImageRepresentationGLTexturePassthrough>
- ProduceGLTexturePassthrough(gpu::SharedImageManager* manager,
- gpu::MemoryTypeTracker* tracker) override;
-
- std::unique_ptr<gpu::SharedImageRepresentationSkia> ProduceSkia(
- gpu::SharedImageManager* manager,
- gpu::MemoryTypeTracker* tracker,
- scoped_refptr<gpu::SharedContextState> context_state) override;
-
- // TODO(vikassoni): Add overlay and AHardwareBuffer representations in future
- // patch. Overlays are anyways using legacy mailbox for now.
-
- private:
- friend class SharedImageRepresentationGLTextureVideo;
- friend class SharedImageRepresentationGLTexturePassthroughVideo;
- friend class SharedImageRepresentationVideoSkiaGL;
- friend class SharedImageRepresentationVideoSkiaVk;
-
- void BeginGLReadAccess();
-
- scoped_refptr<CodecImage> codec_image_;
-
- // |abstract_texture_| is only used for legacy mailbox.
- std::unique_ptr<gpu::gles2::AbstractTexture> abstract_texture_;
- scoped_refptr<gpu::SharedContextState> context_state_;
-
- DISALLOW_COPY_AND_ASSIGN(SharedImageVideo);
-};
-
-} // namespace media
-
-#endif // MEDIA_GPU_ANDROID_SHARED_IMAGE_VIDEO_H_
diff --git a/chromium/media/gpu/android/shared_image_video_provider.h b/chromium/media/gpu/android/shared_image_video_provider.h
index 1b9cedb0308..269fe0de2cb 100644
--- a/chromium/media/gpu/android/shared_image_video_provider.h
+++ b/chromium/media/gpu/android/shared_image_video_provider.h
@@ -82,7 +82,7 @@ class MEDIA_GPU_EXPORT SharedImageVideoProvider {
// |cb| back before returning, or we might post it for later.
virtual void RequestImage(ImageReadyCB cb,
const ImageSpec& spec,
- scoped_refptr<TextureOwner> texture_owner) = 0;
+ scoped_refptr<gpu::TextureOwner> texture_owner) = 0;
private:
DISALLOW_COPY_AND_ASSIGN(SharedImageVideoProvider);
diff --git a/chromium/media/gpu/android/surface_texture_gl_owner.cc b/chromium/media/gpu/android/surface_texture_gl_owner.cc
deleted file mode 100644
index f19f2ad4a42..00000000000
--- a/chromium/media/gpu/android/surface_texture_gl_owner.cc
+++ /dev/null
@@ -1,164 +0,0 @@
-// Copyright 2017 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/gpu/android/surface_texture_gl_owner.h"
-
-#include <memory>
-
-#include "base/android/scoped_hardware_buffer_fence_sync.h"
-#include "base/bind.h"
-#include "base/logging.h"
-#include "base/memory/ptr_util.h"
-#include "base/metrics/histogram_macros.h"
-#include "base/synchronization/waitable_event.h"
-#include "base/threading/thread_task_runner_handle.h"
-#include "gpu/command_buffer/service/abstract_texture.h"
-#include "ui/gl/scoped_binders.h"
-#include "ui/gl/scoped_make_current.h"
-
-namespace media {
-
-// FrameAvailableEvent is a RefCounted wrapper for a WaitableEvent
-// (it's not possible to put one in RefCountedData).
-// This lets us safely signal an event on any thread.
-struct FrameAvailableEvent
- : public base::RefCountedThreadSafe<FrameAvailableEvent> {
- FrameAvailableEvent()
- : event(base::WaitableEvent::ResetPolicy::AUTOMATIC,
- base::WaitableEvent::InitialState::NOT_SIGNALED) {}
- void Signal() { event.Signal(); }
- base::WaitableEvent event;
-
- private:
- friend class RefCountedThreadSafe<FrameAvailableEvent>;
- ~FrameAvailableEvent() = default;
-};
-
-SurfaceTextureGLOwner::SurfaceTextureGLOwner(
- std::unique_ptr<gpu::gles2::AbstractTexture> texture)
- : TextureOwner(true /*binds_texture_on_update */, std::move(texture)),
- surface_texture_(gl::SurfaceTexture::Create(GetTextureId())),
- context_(gl::GLContext::GetCurrent()),
- surface_(gl::GLSurface::GetCurrent()),
- frame_available_event_(new FrameAvailableEvent()) {
- DCHECK(context_);
- DCHECK(surface_);
- surface_texture_->SetFrameAvailableCallbackOnAnyThread(base::BindRepeating(
- &FrameAvailableEvent::Signal, frame_available_event_));
-}
-
-SurfaceTextureGLOwner::~SurfaceTextureGLOwner() {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
-
- // Clear the texture before we return, so that OnTextureDestroyed() can run if
- // it hasn't already.
- ClearAbstractTexture();
-}
-
-void SurfaceTextureGLOwner::OnTextureDestroyed(gpu::gles2::AbstractTexture*) {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
-
- // Make sure that the SurfaceTexture isn't using the GL objects.
- surface_texture_ = nullptr;
-}
-
-gl::ScopedJavaSurface SurfaceTextureGLOwner::CreateJavaSurface() const {
- // |surface_texture_| might be null, but that's okay.
- return gl::ScopedJavaSurface(surface_texture_.get());
-}
-
-void SurfaceTextureGLOwner::UpdateTexImage() {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- if (surface_texture_)
- surface_texture_->UpdateTexImage();
-}
-
-void SurfaceTextureGLOwner::EnsureTexImageBound() {
- NOTREACHED();
-}
-
-void SurfaceTextureGLOwner::GetTransformMatrix(float mtx[]) {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- // If we don't have a SurfaceTexture, then the matrix doesn't matter. We
- // still initialize it for good measure.
- if (surface_texture_)
- surface_texture_->GetTransformMatrix(mtx);
- else
- memset(mtx, 0, sizeof(mtx[0]) * 16);
-}
-
-void SurfaceTextureGLOwner::ReleaseBackBuffers() {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- if (surface_texture_)
- surface_texture_->ReleaseBackBuffers();
-}
-
-gl::GLContext* SurfaceTextureGLOwner::GetContext() const {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- return context_.get();
-}
-
-gl::GLSurface* SurfaceTextureGLOwner::GetSurface() const {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- return surface_.get();
-}
-
-void SurfaceTextureGLOwner::SetReleaseTimeToNow() {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- release_time_ = base::TimeTicks::Now();
-}
-
-void SurfaceTextureGLOwner::IgnorePendingRelease() {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- release_time_ = base::TimeTicks();
-}
-
-bool SurfaceTextureGLOwner::IsExpectingFrameAvailable() {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- return !release_time_.is_null();
-}
-
-void SurfaceTextureGLOwner::WaitForFrameAvailable() {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- DCHECK(!release_time_.is_null());
-
- // 5msec covers >99.9% of cases, so just wait for up to that much before
- // giving up. If an error occurs, we might not ever get a notification.
- const base::TimeDelta max_wait = base::TimeDelta::FromMilliseconds(5);
- const base::TimeTicks call_time = base::TimeTicks::Now();
- const base::TimeDelta elapsed = call_time - release_time_;
- const base::TimeDelta remaining = max_wait - elapsed;
- release_time_ = base::TimeTicks();
- bool timed_out = false;
-
- if (remaining <= base::TimeDelta()) {
- if (!frame_available_event_->event.IsSignaled()) {
- DVLOG(1) << "Deferred WaitForFrameAvailable() timed out, elapsed: "
- << elapsed.InMillisecondsF() << "ms";
- timed_out = true;
- }
- } else {
- DCHECK_LE(remaining, max_wait);
- SCOPED_UMA_HISTOGRAM_TIMER(
- "Media.CodecImage.SurfaceTextureGLOwner.WaitTimeForFrame");
- if (!frame_available_event_->event.TimedWait(remaining)) {
- DVLOG(1) << "WaitForFrameAvailable() timed out, elapsed: "
- << elapsed.InMillisecondsF()
- << "ms, additionally waited: " << remaining.InMillisecondsF()
- << "ms, total: " << (elapsed + remaining).InMillisecondsF()
- << "ms";
- timed_out = true;
- }
- }
- UMA_HISTOGRAM_BOOLEAN("Media.CodecImage.SurfaceTextureGLOwner.FrameTimedOut",
- timed_out);
-}
-
-std::unique_ptr<base::android::ScopedHardwareBufferFenceSync>
-SurfaceTextureGLOwner::GetAHardwareBuffer() {
- NOTREACHED() << "Don't use AHardwareBuffers with SurfaceTextureGLOwner";
- return nullptr;
-}
-
-} // namespace media
diff --git a/chromium/media/gpu/android/surface_texture_gl_owner.h b/chromium/media/gpu/android/surface_texture_gl_owner.h
deleted file mode 100644
index f2634aaac4f..00000000000
--- a/chromium/media/gpu/android/surface_texture_gl_owner.h
+++ /dev/null
@@ -1,72 +0,0 @@
-// Copyright 2017 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_GPU_ANDROID_SURFACE_TEXTURE_GL_OWNER_H_
-#define MEDIA_GPU_ANDROID_SURFACE_TEXTURE_GL_OWNER_H_
-
-#include "media/gpu/android/texture_owner.h"
-
-#include "base/memory/ref_counted.h"
-#include "base/threading/thread_checker.h"
-#include "media/gpu/media_gpu_export.h"
-#include "ui/gl/android/surface_texture.h"
-
-namespace base {
-namespace android {
-class ScopedHardwareBufferFenceSync;
-} // namespace android
-} // namespace base
-
-namespace media {
-
-struct FrameAvailableEvent;
-
-// This class wraps the Surface Texture usage. It is used to create a surface
-// texture attached to a new texture of the current platform GL context. The
-// surface handle of the SurfaceTexture is attached to the decoded media
-// frames. Media frames can update the attached surface handle with image data.
-// This class helps to update the attached texture using that image data
-// present in the surface.
-class MEDIA_GPU_EXPORT SurfaceTextureGLOwner : public TextureOwner {
- public:
- gl::GLContext* GetContext() const override;
- gl::GLSurface* GetSurface() const override;
- gl::ScopedJavaSurface CreateJavaSurface() const override;
- void UpdateTexImage() override;
- void EnsureTexImageBound() override;
- void GetTransformMatrix(float mtx[16]) override;
- void ReleaseBackBuffers() override;
- void SetReleaseTimeToNow() override;
- void IgnorePendingRelease() override;
- bool IsExpectingFrameAvailable() override;
- void WaitForFrameAvailable() override;
- std::unique_ptr<base::android::ScopedHardwareBufferFenceSync>
- GetAHardwareBuffer() override;
-
- protected:
- void OnTextureDestroyed(gpu::gles2::AbstractTexture*) override;
-
- private:
- friend class TextureOwner;
-
- SurfaceTextureGLOwner(std::unique_ptr<gpu::gles2::AbstractTexture> texture);
- ~SurfaceTextureGLOwner() override;
-
- scoped_refptr<gl::SurfaceTexture> surface_texture_;
- // The context and surface that were used to create |surface_texture_|.
- scoped_refptr<gl::GLContext> context_;
- scoped_refptr<gl::GLSurface> surface_;
- // When SetReleaseTimeToNow() was last called. i.e., when the last
- // codec buffer was released to this surface. Or null if
- // IgnorePendingRelease() or WaitForFrameAvailable() have been called since.
- base::TimeTicks release_time_;
- scoped_refptr<FrameAvailableEvent> frame_available_event_;
-
- THREAD_CHECKER(thread_checker_);
- DISALLOW_COPY_AND_ASSIGN(SurfaceTextureGLOwner);
-};
-
-} // namespace media
-
-#endif // MEDIA_GPU_ANDROID_SURFACE_TEXTURE_GL_OWNER_H_
diff --git a/chromium/media/gpu/android/surface_texture_gl_owner_unittest.cc b/chromium/media/gpu/android/surface_texture_gl_owner_unittest.cc
deleted file mode 100644
index 9eace9c4737..00000000000
--- a/chromium/media/gpu/android/surface_texture_gl_owner_unittest.cc
+++ /dev/null
@@ -1,128 +0,0 @@
-// Copyright 2017 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/gpu/android/surface_texture_gl_owner.h"
-
-#include <stdint.h>
-
-#include <memory>
-
-#include "base/bind.h"
-#include "base/logging.h"
-#include "base/test/scoped_task_environment.h"
-#include "media/gpu/android/mock_abstract_texture.h"
-#include "testing/gmock/include/gmock/gmock.h"
-#include "testing/gtest/include/gtest/gtest.h"
-#include "ui/gl/gl_bindings.h"
-#include "ui/gl/gl_context_egl.h"
-#include "ui/gl/gl_implementation.h"
-#include "ui/gl/gl_share_group.h"
-#include "ui/gl/gl_surface_egl.h"
-#include "ui/gl/init/gl_factory.h"
-
-using testing::Invoke;
-using testing::NiceMock;
-using testing::_;
-
-namespace media {
-
-class SurfaceTextureGLOwnerTest : public testing::Test {
- public:
- SurfaceTextureGLOwnerTest() {}
- ~SurfaceTextureGLOwnerTest() override {}
-
- protected:
- void SetUp() override {
- gl::init::InitializeGLOneOffImplementation(gl::kGLImplementationEGLGLES2,
- false, false, false, true);
- surface_ = new gl::PbufferGLSurfaceEGL(gfx::Size(320, 240));
- surface_->Initialize();
-
- share_group_ = new gl::GLShareGroup();
- context_ = new gl::GLContextEGL(share_group_.get());
- context_->Initialize(surface_.get(), gl::GLContextAttribs());
- ASSERT_TRUE(context_->MakeCurrent(surface_.get()));
-
- // Create a texture.
- glGenTextures(1, &texture_id_);
-
- std::unique_ptr<MockAbstractTexture> texture =
- std::make_unique<MockAbstractTexture>(texture_id_);
- abstract_texture_ = texture->AsWeakPtr();
- surface_texture_ = SurfaceTextureGLOwner::Create(
- std::move(texture), TextureOwner::Mode::kSurfaceTextureInsecure);
- texture_id_ = surface_texture_->GetTextureId();
- EXPECT_TRUE(abstract_texture_);
- }
-
- void TearDown() override {
- if (texture_id_ && context_->MakeCurrent(surface_.get()))
- glDeleteTextures(1, &texture_id_);
- surface_texture_ = nullptr;
- context_ = nullptr;
- share_group_ = nullptr;
- surface_ = nullptr;
- gl::init::ShutdownGL(false);
- }
-
- scoped_refptr<TextureOwner> surface_texture_;
- GLuint texture_id_ = 0;
-
- base::WeakPtr<MockAbstractTexture> abstract_texture_;
-
- scoped_refptr<gl::GLContext> context_;
- scoped_refptr<gl::GLShareGroup> share_group_;
- scoped_refptr<gl::GLSurface> surface_;
- base::test::ScopedTaskEnvironment scoped_task_environment_;
-};
-
-TEST_F(SurfaceTextureGLOwnerTest, OwnerReturnsServiceId) {
- // The owner should give us back the same service id we provided.
- EXPECT_EQ(texture_id_, surface_texture_->GetTextureId());
-}
-
-// Verify that SurfaceTextureGLOwner creates a bindable GL texture, and deletes
-// it during destruction.
-TEST_F(SurfaceTextureGLOwnerTest, GLTextureIsCreatedAndDestroyed) {
- // |texture_id| should not work anymore after we delete |surface_texture|.
- surface_texture_ = nullptr;
- EXPECT_FALSE(abstract_texture_);
-}
-
-// Calling ReleaseBackBuffers shouldn't deallocate the texture handle.
-TEST_F(SurfaceTextureGLOwnerTest, ReleaseDoesntDestroyTexture) {
- surface_texture_->ReleaseBackBuffers();
- EXPECT_TRUE(abstract_texture_);
-}
-
-// Make sure that |surface_texture_| remembers the correct context and surface.
-TEST_F(SurfaceTextureGLOwnerTest, ContextAndSurfaceAreCaptured) {
- ASSERT_EQ(context_, surface_texture_->GetContext());
- ASSERT_EQ(surface_, surface_texture_->GetSurface());
-}
-
-// Verify that destruction works even if some other context is current.
-TEST_F(SurfaceTextureGLOwnerTest, DestructionWorksWithWrongContext) {
- scoped_refptr<gl::GLSurface> new_surface(
- new gl::PbufferGLSurfaceEGL(gfx::Size(320, 240)));
- new_surface->Initialize();
-
- scoped_refptr<gl::GLShareGroup> new_share_group(new gl::GLShareGroup());
- scoped_refptr<gl::GLContext> new_context(
- new gl::GLContextEGL(new_share_group.get()));
- new_context->Initialize(new_surface.get(), gl::GLContextAttribs());
- ASSERT_TRUE(new_context->MakeCurrent(new_surface.get()));
-
- surface_texture_ = nullptr;
- EXPECT_FALSE(abstract_texture_);
-
- // |new_context| should still be current.
- ASSERT_TRUE(new_context->IsCurrent(new_surface.get()));
-
- new_context = nullptr;
- new_share_group = nullptr;
- new_surface = nullptr;
-}
-
-} // namespace media
diff --git a/chromium/media/gpu/android/texture_owner.cc b/chromium/media/gpu/android/texture_owner.cc
deleted file mode 100644
index da02e54e010..00000000000
--- a/chromium/media/gpu/android/texture_owner.cc
+++ /dev/null
@@ -1,92 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/gpu/android/texture_owner.h"
-
-#include <memory>
-
-#include "base/bind.h"
-#include "base/feature_list.h"
-#include "base/threading/thread_task_runner_handle.h"
-#include "gpu/command_buffer/service/abstract_texture.h"
-#include "gpu/command_buffer/service/abstract_texture_impl_shared_context_state.h"
-#include "gpu/command_buffer/service/decoder_context.h"
-#include "gpu/command_buffer/service/feature_info.h"
-#include "gpu/command_buffer/service/texture_base.h"
-#include "media/gpu/android/image_reader_gl_owner.h"
-#include "media/gpu/android/surface_texture_gl_owner.h"
-#include "ui/gl/scoped_binders.h"
-
-namespace media {
-
-TextureOwner::TextureOwner(bool binds_texture_on_update,
- std::unique_ptr<gpu::gles2::AbstractTexture> texture)
- : base::RefCountedDeleteOnSequence<TextureOwner>(
- base::ThreadTaskRunnerHandle::Get()),
- binds_texture_on_update_(binds_texture_on_update),
- texture_(std::move(texture)),
- task_runner_(base::ThreadTaskRunnerHandle::Get()) {
- // Notify the subclass when the texture is destroyed.
- // Unretained is safe, since we insist that |texture_| is dropped before we're
- // destroyed, which implies that the callback has run.
- texture_->SetCleanupCallback(base::BindOnce(&TextureOwner::OnTextureDestroyed,
- base::Unretained(this)));
-}
-
-TextureOwner::~TextureOwner() {
- // The subclass must delete the texture before now.
- DCHECK(!texture_);
-}
-
-// static
-scoped_refptr<TextureOwner> TextureOwner::Create(
- std::unique_ptr<gpu::gles2::AbstractTexture> texture,
- Mode mode) {
- switch (mode) {
- case Mode::kAImageReaderInsecure:
- case Mode::kAImageReaderInsecureSurfaceControl:
- case Mode::kAImageReaderSecureSurfaceControl:
- return new ImageReaderGLOwner(std::move(texture), mode);
- case Mode::kSurfaceTextureInsecure:
- return new SurfaceTextureGLOwner(std::move(texture));
- }
-
- NOTREACHED();
- return nullptr;
-}
-
-// static
-std::unique_ptr<gpu::gles2::AbstractTexture> TextureOwner::CreateTexture(
- scoped_refptr<gpu::SharedContextState> context_state) {
- DCHECK(context_state);
-
- gpu::gles2::FeatureInfo* feature_info = context_state->feature_info();
- if (feature_info && feature_info->is_passthrough_cmd_decoder()) {
- return std::make_unique<
- gpu::gles2::AbstractTextureImplOnSharedContextPassthrough>(
- GL_TEXTURE_EXTERNAL_OES, std::move(context_state));
- }
-
- return std::make_unique<gpu::gles2::AbstractTextureImplOnSharedContext>(
- GL_TEXTURE_EXTERNAL_OES, GL_RGBA,
- 0, // width
- 0, // height
- 1, // depth
- 0, // border
- GL_RGBA, GL_UNSIGNED_BYTE, std::move(context_state));
-}
-
-GLuint TextureOwner::GetTextureId() const {
- return texture_->service_id();
-}
-
-gpu::TextureBase* TextureOwner::GetTextureBase() const {
- return texture_->GetTextureBase();
-}
-
-void TextureOwner::ClearAbstractTexture() {
- texture_.reset();
-}
-
-} // namespace media
diff --git a/chromium/media/gpu/android/texture_owner.h b/chromium/media/gpu/android/texture_owner.h
deleted file mode 100644
index af807f09a19..00000000000
--- a/chromium/media/gpu/android/texture_owner.h
+++ /dev/null
@@ -1,153 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_GPU_ANDROID_TEXTURE_OWNER_H_
-#define MEDIA_GPU_ANDROID_TEXTURE_OWNER_H_
-
-#include <android/hardware_buffer.h>
-
-#include "base/memory/ref_counted.h"
-#include "base/memory/ref_counted_delete_on_sequence.h"
-#include "base/single_thread_task_runner.h"
-#include "media/gpu/media_gpu_export.h"
-#include "ui/gl/android/scoped_java_surface.h"
-#include "ui/gl/gl_bindings.h"
-#include "ui/gl/gl_context.h"
-#include "ui/gl/gl_image.h"
-#include "ui/gl/gl_surface.h"
-
-namespace base {
-namespace android {
-class ScopedHardwareBufferFenceSync;
-} // namespace android
-} // namespace base
-
-namespace gpu {
-class SharedContextState;
-class TextureBase;
-namespace gles2 {
-class AbstractTexture;
-} // namespace gles2
-} // namespace gpu
-
-namespace media {
-
-// A Texture wrapper interface that creates and maintains ownership of the
-// attached GL or Vulkan texture. The texture is destroyed with the object.
-// It should only be accessed on the thread it was created on, with the
-// exception of CreateJavaSurface(), which can be called on any thread. It's
-// safe to keep and drop refptrs to it on any thread; it will be automatically
-// destructed on the thread it was constructed on.
-class MEDIA_GPU_EXPORT TextureOwner
- : public base::RefCountedDeleteOnSequence<TextureOwner> {
- public:
- // Creates a GL texture using the current platform GL context and returns a
- // new TextureOwner attached to it. Returns null on failure.
- // |texture| should be either from CreateAbstractTexture() or a mock. The
- // corresponding GL context must be current.
- // Mode indicates which framework API to use and whether the video textures
- // created using this owner should be hardware protected. It also indicates
- // whether SurfaceControl is being used or not.
- enum class Mode {
- kAImageReaderInsecure,
- kAImageReaderInsecureSurfaceControl,
- kAImageReaderSecureSurfaceControl,
- kSurfaceTextureInsecure
- };
- static scoped_refptr<TextureOwner> Create(
- std::unique_ptr<gpu::gles2::AbstractTexture> texture,
- Mode mode);
-
- // Create a texture that's appropriate for a TextureOwner.
- static std::unique_ptr<gpu::gles2::AbstractTexture> CreateTexture(
- scoped_refptr<gpu::SharedContextState> context_state);
-
- scoped_refptr<base::SingleThreadTaskRunner> task_runner() {
- return task_runner_;
- }
-
- // Returns the GL texture id that the TextureOwner is attached to.
- GLuint GetTextureId() const;
- gpu::TextureBase* GetTextureBase() const;
- virtual gl::GLContext* GetContext() const = 0;
- virtual gl::GLSurface* GetSurface() const = 0;
-
- // Create a java surface for the TextureOwner.
- virtual gl::ScopedJavaSurface CreateJavaSurface() const = 0;
-
- // Update the texture image using the latest available image data.
- virtual void UpdateTexImage() = 0;
-
- // Ensures that the latest texture image is bound to the texture target.
- // Should only be used if the TextureOwner requires explicit binding of the
- // image after an update.
- virtual void EnsureTexImageBound() = 0;
-
- // Transformation matrix if any associated with the texture image.
- virtual void GetTransformMatrix(float mtx[16]) = 0;
- virtual void ReleaseBackBuffers() = 0;
-
- // Sets the expectation of onFrameAvailable for a new frame because a buffer
- // was just released to this surface.
- virtual void SetReleaseTimeToNow() = 0;
-
- // Ignores a pending release that was previously indicated with
- // SetReleaseTimeToNow(). TODO(watk): This doesn't seem necessary. It
- // actually may be detrimental because the next time we release a buffer we
- // may confuse its onFrameAvailable with the one we're ignoring.
- virtual void IgnorePendingRelease() = 0;
-
- // Whether we're expecting onFrameAvailable. True when SetReleaseTimeToNow()
- // was called but neither IgnorePendingRelease() nor WaitForFrameAvailable()
- // have been called since.
- virtual bool IsExpectingFrameAvailable() = 0;
-
- // Waits for onFrameAvailable until it's been 5ms since the buffer was
- // released. This must only be called if IsExpectingFrameAvailable().
- virtual void WaitForFrameAvailable() = 0;
-
- // Retrieves the AHardwareBuffer from the latest available image data.
- // Note that the object must be used and destroyed on the same thread the
- // TextureOwner is bound to.
- virtual std::unique_ptr<base::android::ScopedHardwareBufferFenceSync>
- GetAHardwareBuffer() = 0;
-
- bool binds_texture_on_update() const { return binds_texture_on_update_; }
-
- protected:
- friend class base::RefCountedDeleteOnSequence<TextureOwner>;
- friend class base::DeleteHelper<TextureOwner>;
-
- // |texture| is the texture that we'll own.
- TextureOwner(bool binds_texture_on_update,
- std::unique_ptr<gpu::gles2::AbstractTexture> texture);
- virtual ~TextureOwner();
-
- // Drop |texture_| immediately. Will call OnTextureDestroyed immediately if
- // it hasn't been called before (e.g., due to lost context).
- // Subclasses must call this before they complete destruction, else
- // OnTextureDestroyed might be called when we drop |texture_|, which is not
- // defined once subclass destruction has completed.
- void ClearAbstractTexture();
-
- // Called when |texture_| signals that the platform texture will be destroyed.
- // See AbstractTexture::SetCleanupCallback.
- virtual void OnTextureDestroyed(gpu::gles2::AbstractTexture*) = 0;
-
- gpu::gles2::AbstractTexture* texture() const { return texture_.get(); }
-
- private:
- // Set to true if updating the image for this owner will automatically
- // bind it to the texture target.
- const bool binds_texture_on_update_;
-
- std::unique_ptr<gpu::gles2::AbstractTexture> texture_;
- scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
-
- DISALLOW_COPY_AND_ASSIGN(TextureOwner);
-};
-
-} // namespace media
-
-#endif // MEDIA_GPU_ANDROID_TEXTURE_OWNER_H_
diff --git a/chromium/media/gpu/android/texture_pool_unittest.cc b/chromium/media/gpu/android/texture_pool_unittest.cc
index cc21988fda7..c9d78ea6653 100644
--- a/chromium/media/gpu/android/texture_pool_unittest.cc
+++ b/chromium/media/gpu/android/texture_pool_unittest.cc
@@ -7,7 +7,7 @@
#include <memory>
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/thread_task_runner_handle.h"
#include "gpu/command_buffer/common/command_buffer_id.h"
#include "gpu/command_buffer/common/constants.h"
@@ -55,7 +55,7 @@ class TexturePoolTest : public testing::Test {
return texture_weak;
}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
diff --git a/chromium/media/gpu/android/video_frame_factory.h b/chromium/media/gpu/android/video_frame_factory.h
index ee5e80a3595..5b7cc895ae7 100644
--- a/chromium/media/gpu/android/video_frame_factory.h
+++ b/chromium/media/gpu/android/video_frame_factory.h
@@ -19,14 +19,14 @@ namespace media {
class CodecOutputBuffer;
class CodecSurfaceBundle;
-class TextureOwner;
class VideoFrame;
// VideoFrameFactory creates CodecOutputBuffer backed VideoFrames. Not thread
// safe. Virtual for testing; see VideoFrameFactoryImpl.
class MEDIA_GPU_EXPORT VideoFrameFactory {
public:
- using InitCb = base::RepeatingCallback<void(scoped_refptr<TextureOwner>)>;
+ using InitCb =
+ base::RepeatingCallback<void(scoped_refptr<gpu::TextureOwner>)>;
using OnceOutputCb = base::OnceCallback<void(scoped_refptr<VideoFrame>)>;
VideoFrameFactory() = default;
diff --git a/chromium/media/gpu/android/video_frame_factory_impl.cc b/chromium/media/gpu/android/video_frame_factory_impl.cc
index ca472ea42b8..1b02953c7d4 100644
--- a/chromium/media/gpu/android/video_frame_factory_impl.cc
+++ b/chromium/media/gpu/android/video_frame_factory_impl.cc
@@ -17,6 +17,8 @@
#include "base/task_runner_util.h"
#include "base/trace_event/trace_event.h"
#include "gpu/command_buffer/service/abstract_texture.h"
+#include "gpu/command_buffer/service/shared_context_state.h"
+#include "gpu/ipc/common/android/texture_owner.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/media_switches.h"
#include "media/base/video_frame.h"
@@ -24,7 +26,6 @@
#include "media/gpu/android/codec_image_group.h"
#include "media/gpu/android/codec_wrapper.h"
#include "media/gpu/android/maybe_render_early_manager.h"
-#include "media/gpu/android/shared_image_video.h"
#include "media/gpu/command_buffer_helper.h"
#include "mojo/public/cpp/bindings/callback_helpers.h"
#include "ui/gl/scoped_make_current.h"
@@ -32,7 +33,7 @@
namespace media {
namespace {
-TextureOwner::Mode GetTextureOwnerMode(
+gpu::TextureOwner::Mode GetTextureOwnerMode(
VideoFrameFactory::OverlayMode overlay_mode) {
const bool a_image_reader_supported =
base::android::AndroidImageReader::GetInstance().IsSupported();
@@ -42,18 +43,18 @@ TextureOwner::Mode GetTextureOwnerMode(
case VideoFrameFactory::OverlayMode::kRequestPromotionHints:
return a_image_reader_supported && base::FeatureList::IsEnabled(
media::kAImageReaderVideoOutput)
- ? TextureOwner::Mode::kAImageReaderInsecure
- : TextureOwner::Mode::kSurfaceTextureInsecure;
+ ? gpu::TextureOwner::Mode::kAImageReaderInsecure
+ : gpu::TextureOwner::Mode::kSurfaceTextureInsecure;
case VideoFrameFactory::OverlayMode::kSurfaceControlSecure:
DCHECK(a_image_reader_supported);
- return TextureOwner::Mode::kAImageReaderSecureSurfaceControl;
+ return gpu::TextureOwner::Mode::kAImageReaderSecureSurfaceControl;
case VideoFrameFactory::OverlayMode::kSurfaceControlInsecure:
DCHECK(a_image_reader_supported);
- return TextureOwner::Mode::kAImageReaderInsecureSurfaceControl;
+ return gpu::TextureOwner::Mode::kAImageReaderInsecureSurfaceControl;
}
NOTREACHED();
- return TextureOwner::Mode::kSurfaceTextureInsecure;
+ return gpu::TextureOwner::Mode::kSurfaceTextureInsecure;
}
// Run on the GPU main thread to allocate the texture owner, and return it
@@ -67,9 +68,9 @@ static void AllocateTextureOwnerOnGpuThread(
return;
}
- std::move(init_cb).Run(
- TextureOwner::Create(TextureOwner::CreateTexture(shared_context_state),
- GetTextureOwnerMode(overlay_mode)));
+ std::move(init_cb).Run(gpu::TextureOwner::Create(
+ gpu::TextureOwner::CreateTexture(shared_context_state),
+ GetTextureOwnerMode(overlay_mode)));
}
} // namespace
@@ -85,8 +86,7 @@ VideoFrameFactoryImpl::VideoFrameFactoryImpl(
gpu_task_runner_(std::move(gpu_task_runner)),
enable_threaded_texture_mailboxes_(
gpu_preferences.enable_threaded_texture_mailboxes),
- mre_manager_(std::move(mre_manager)),
- weak_factory_(this) {}
+ mre_manager_(std::move(mre_manager)) {}
VideoFrameFactoryImpl::~VideoFrameFactoryImpl() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
@@ -109,15 +109,17 @@ void VideoFrameFactoryImpl::SetSurfaceBundle(
scoped_refptr<CodecImageGroup> image_group;
if (!surface_bundle) {
// Clear everything, just so we're not holding a reference.
- texture_owner_ = nullptr;
+ codec_buffer_wait_coordinator_ = nullptr;
} else {
- // If |surface_bundle| is using a TextureOwner, then get it. Note that the
- // only reason we need this is for legacy mailbox support; we send it to
- // the SharedImageVideoProvider so that (eventually) it can get the service
- // id from the owner for the legacy mailbox texture. Otherwise, this would
- // be a lot simpler.
- texture_owner_ =
- surface_bundle->overlay() ? nullptr : surface_bundle->texture_owner();
+ // If |surface_bundle| is using a CodecBufferWaitCoordinator, then get it.
+ // Note that the only reason we need this is for legacy mailbox support; we
+ // send it to the SharedImageVideoProvider so that (eventually) it can get
+ // the service id from the owner for the legacy mailbox texture. Otherwise,
+ // this would be a lot simpler.
+ codec_buffer_wait_coordinator_ =
+ surface_bundle->overlay()
+ ? nullptr
+ : surface_bundle->codec_buffer_wait_coordinator();
// TODO(liberato): When we enable pooling, do we need to clear the pool
// here because the CodecImageGroup has changed? It's unclear, since the
@@ -155,12 +157,15 @@ void VideoFrameFactoryImpl::CreateVideoFrame(
auto image_ready_cb = base::BindOnce(
&VideoFrameFactoryImpl::OnImageReady, weak_factory_.GetWeakPtr(),
std::move(output_cb), timestamp, coded_size, natural_size,
- std::move(output_buffer), texture_owner_, std::move(promotion_hint_cb),
- pixel_format, overlay_mode_, enable_threaded_texture_mailboxes_,
- gpu_task_runner_);
-
- image_provider_->RequestImage(std::move(image_ready_cb), spec,
- texture_owner_);
+ std::move(output_buffer), codec_buffer_wait_coordinator_,
+ std::move(promotion_hint_cb), pixel_format, overlay_mode_,
+ enable_threaded_texture_mailboxes_, gpu_task_runner_);
+
+ image_provider_->RequestImage(
+ std::move(image_ready_cb), spec,
+ codec_buffer_wait_coordinator_
+ ? codec_buffer_wait_coordinator_->texture_owner()
+ : nullptr);
}
// static
@@ -171,7 +176,7 @@ void VideoFrameFactoryImpl::OnImageReady(
gfx::Size coded_size,
gfx::Size natural_size,
std::unique_ptr<CodecOutputBuffer> output_buffer,
- scoped_refptr<TextureOwner> texture_owner,
+ scoped_refptr<CodecBufferWaitCoordinator> codec_buffer_wait_coordinator,
PromotionHintAggregator::NotifyPromotionHintCB promotion_hint_cb,
VideoPixelFormat pixel_format,
OverlayMode overlay_mode,
@@ -188,7 +193,8 @@ void VideoFrameFactoryImpl::OnImageReady(
// used at this point. Alternatively, we could post it, or hand it off to the
// MaybeRenderEarlyManager to save a post.
record.codec_image_holder->codec_image_raw()->Initialize(
- std::move(output_buffer), texture_owner, std::move(promotion_hint_cb));
+ std::move(output_buffer), codec_buffer_wait_coordinator,
+ std::move(promotion_hint_cb));
// Send the CodecImage (via holder, since we can't touch the refcount here) to
// the MaybeRenderEarlyManager.
@@ -236,17 +242,17 @@ void VideoFrameFactoryImpl::OnImageReady(
const bool wants_promotion_hints =
overlay_mode == OverlayMode::kRequestPromotionHints;
- // Remember that we can't access |texture_owner|, but we can check if we have
- // one here.
+ // Remember that we can't access |codec_buffer_wait_coordinator|, but we can
+ // check if we have one here.
bool allow_overlay = false;
if (is_surface_control) {
- DCHECK(texture_owner);
+ DCHECK(codec_buffer_wait_coordinator);
allow_overlay = true;
} else {
// We unconditionally mark the picture as overlayable, even if
- // |!texture_owner|, if we want to get hints. It's required, else we won't
- // get hints.
- allow_overlay = !texture_owner || wants_promotion_hints;
+ // |!codec_buffer_wait_coordinator|, if we want to get hints. It's
+ // required, else we won't get hints.
+ allow_overlay = !codec_buffer_wait_coordinator || wants_promotion_hints;
}
frame->metadata()->SetBoolean(VideoFrameMetadata::ALLOW_OVERLAY,
@@ -254,7 +260,7 @@ void VideoFrameFactoryImpl::OnImageReady(
frame->metadata()->SetBoolean(VideoFrameMetadata::WANTS_PROMOTION_HINT,
wants_promotion_hints);
frame->metadata()->SetBoolean(VideoFrameMetadata::TEXTURE_OWNER,
- !!texture_owner);
+ !!codec_buffer_wait_coordinator);
frame->SetReleaseMailboxCB(std::move(record.release_cb));
diff --git a/chromium/media/gpu/android/video_frame_factory_impl.h b/chromium/media/gpu/android/video_frame_factory_impl.h
index 21a04baf4b4..bef5d831a67 100644
--- a/chromium/media/gpu/android/video_frame_factory_impl.h
+++ b/chromium/media/gpu/android/video_frame_factory_impl.h
@@ -12,11 +12,11 @@
#include "base/single_thread_task_runner.h"
#include "gpu/config/gpu_preferences.h"
#include "media/base/video_frame.h"
+#include "media/gpu/android/codec_buffer_wait_coordinator.h"
#include "media/gpu/android/codec_image.h"
#include "media/gpu/android/codec_wrapper.h"
#include "media/gpu/android/maybe_render_early_manager.h"
#include "media/gpu/android/shared_image_video_provider.h"
-#include "media/gpu/android/surface_texture_gl_owner.h"
#include "media/gpu/android/video_frame_factory.h"
#include "media/gpu/media_gpu_export.h"
#include "ui/gl/gl_bindings.h"
@@ -58,6 +58,12 @@ class MEDIA_GPU_EXPORT VideoFrameFactoryImpl : public VideoFrameFactory {
OnceOutputCb output_cb) override;
void RunAfterPendingVideoFrames(base::OnceClosure closure) override;
+ // This should only be used for testing.
+ void SetCodecBufferWaitCorrdinatorForTesting(
+ scoped_refptr<CodecBufferWaitCoordinator> codec_buffer_wait_coordinator) {
+ codec_buffer_wait_coordinator_ = std::move(codec_buffer_wait_coordinator);
+ }
+
private:
// ImageReadyCB that will construct a VideoFrame, and forward it to
// |output_cb| if construction succeeds. This is static for two reasons.
@@ -77,7 +83,7 @@ class MEDIA_GPU_EXPORT VideoFrameFactoryImpl : public VideoFrameFactory {
gfx::Size coded_size,
gfx::Size natural_size,
std::unique_ptr<CodecOutputBuffer> output_buffer,
- scoped_refptr<TextureOwner> texture_owner,
+ scoped_refptr<CodecBufferWaitCoordinator> codec_buffer_wait_coordinator,
PromotionHintAggregator::NotifyPromotionHintCB promotion_hint_cb,
VideoPixelFormat pixel_format,
OverlayMode overlay_mode,
@@ -90,8 +96,8 @@ class MEDIA_GPU_EXPORT VideoFrameFactoryImpl : public VideoFrameFactory {
std::unique_ptr<SharedImageVideoProvider> image_provider_;
scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner_;
- // The texture owner that video frames should use, or nullptr.
- scoped_refptr<TextureOwner> texture_owner_;
+ // The CodecBufferWaitCoordinator that video frames should use, or nullptr.
+ scoped_refptr<CodecBufferWaitCoordinator> codec_buffer_wait_coordinator_;
OverlayMode overlay_mode_ = OverlayMode::kDontRequestPromotionHints;
@@ -107,7 +113,7 @@ class MEDIA_GPU_EXPORT VideoFrameFactoryImpl : public VideoFrameFactory {
SEQUENCE_CHECKER(sequence_checker_);
- base::WeakPtrFactory<VideoFrameFactoryImpl> weak_factory_;
+ base::WeakPtrFactory<VideoFrameFactoryImpl> weak_factory_{this};
DISALLOW_COPY_AND_ASSIGN(VideoFrameFactoryImpl);
};
diff --git a/chromium/media/gpu/android/video_frame_factory_impl_unittest.cc b/chromium/media/gpu/android/video_frame_factory_impl_unittest.cc
index 9805d208f96..5296e54be63 100644
--- a/chromium/media/gpu/android/video_frame_factory_impl_unittest.cc
+++ b/chromium/media/gpu/android/video_frame_factory_impl_unittest.cc
@@ -8,11 +8,13 @@
#include "base/single_thread_task_runner.h"
#include "base/test/gmock_callback_support.h"
#include "base/test/mock_callback.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/thread_task_runner_handle.h"
#include "gpu/command_buffer/service/shared_context_state.h"
#include "gpu/config/gpu_preferences.h"
+#include "gpu/ipc/common/android/mock_texture_owner.h"
#include "media/base/limits.h"
+#include "media/gpu/android/codec_buffer_wait_coordinator.h"
#include "media/gpu/android/maybe_render_early_manager.h"
#include "media/gpu/android/mock_codec_image.h"
#include "media/gpu/android/shared_image_video_provider.h"
@@ -46,7 +48,7 @@ class MockSharedImageVideoProvider : public SharedImageVideoProvider {
void RequestImage(ImageReadyCB cb,
const ImageSpec& spec,
- scoped_refptr<TextureOwner> texture_owner) override {
+ scoped_refptr<gpu::TextureOwner> texture_owner) override {
cb_ = std::move(cb);
spec_ = spec;
texture_owner_ = std::move(texture_owner);
@@ -59,7 +61,7 @@ class MockSharedImageVideoProvider : public SharedImageVideoProvider {
// Most recent arguments to RequestImage.
ImageReadyCB cb_;
ImageSpec spec_;
- scoped_refptr<TextureOwner> texture_owner_;
+ scoped_refptr<gpu::TextureOwner> texture_owner_;
};
class VideoFrameFactoryImplTest : public testing::Test {
@@ -78,6 +80,15 @@ class VideoFrameFactoryImplTest : public testing::Test {
impl_ = std::make_unique<VideoFrameFactoryImpl>(
task_runner_, gpu_preferences_, std::move(image_provider),
std::move(mre_manager));
+ auto texture_owner = base::MakeRefCounted<NiceMock<gpu::MockTextureOwner>>(
+ 0, nullptr, nullptr, true);
+ auto codec_buffer_wait_coordinator =
+ base::MakeRefCounted<CodecBufferWaitCoordinator>(
+ std::move(texture_owner));
+
+ // Provide a non-null |codec_buffer_wait_coordinator| to |impl_|.
+ impl_->SetCodecBufferWaitCorrdinatorForTesting(
+ std::move(codec_buffer_wait_coordinator));
}
~VideoFrameFactoryImplTest() override = default;
@@ -103,7 +114,7 @@ class VideoFrameFactoryImplTest : public testing::Test {
base::RunLoop().RunUntilIdle();
}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
std::unique_ptr<VideoFrameFactoryImpl> impl_;
@@ -127,7 +138,7 @@ TEST_F(VideoFrameFactoryImplTest, ImageProviderInitFailure) {
.Times(1)
.WillOnce(RunOnceCallback<0>(nullptr));
base::MockCallback<VideoFrameFactory::InitCb> init_cb;
- EXPECT_CALL(init_cb, Run(scoped_refptr<TextureOwner>(nullptr)));
+ EXPECT_CALL(init_cb, Run(scoped_refptr<gpu::TextureOwner>(nullptr)));
impl_->Initialize(VideoFrameFactory::OverlayMode::kDontRequestPromotionHints,
init_cb.Get());
base::RunLoop().RunUntilIdle();
@@ -140,8 +151,11 @@ TEST_F(VideoFrameFactoryImplTest, ImageProviderInitFailure) {
TEST_F(VideoFrameFactoryImplTest,
SetSurfaceBundleForwardsToMaybeRenderEarlyManager) {
// Sending a non-null CodecSurfaceBundle should forward it to |mre_manager|.
+ // Also provide a non-null TextureOwner to it.
scoped_refptr<CodecSurfaceBundle> surface_bundle =
- base::MakeRefCounted<CodecSurfaceBundle>();
+ base::MakeRefCounted<CodecSurfaceBundle>(
+ base::MakeRefCounted<NiceMock<gpu::MockTextureOwner>>(0, nullptr,
+ nullptr, true));
EXPECT_CALL(*mre_manager_raw_, SetSurfaceBundle(surface_bundle));
impl_->SetSurfaceBundle(surface_bundle);
base::RunLoop().RunUntilIdle();
diff --git a/chromium/media/gpu/chromeos/chromeos_video_decoder_factory.cc b/chromium/media/gpu/chromeos/chromeos_video_decoder_factory.cc
index 8497e3786c4..153c8c74e18 100644
--- a/chromium/media/gpu/chromeos/chromeos_video_decoder_factory.cc
+++ b/chromium/media/gpu/chromeos/chromeos_video_decoder_factory.cc
@@ -9,8 +9,12 @@
#include "base/sequenced_task_runner.h"
#include "media/base/video_decoder.h"
#include "media/gpu/buildflags.h"
+
+#if BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
#include "media/gpu/linux/mailbox_video_frame_converter.h"
#include "media/gpu/linux/platform_video_frame_pool.h"
+#include "media/gpu/linux/video_decoder_pipeline.h"
+#endif // BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
#if BUILDFLAG(USE_VAAPI)
#include "media/gpu/vaapi/vaapi_video_decoder.h"
@@ -20,12 +24,35 @@
#include "media/gpu/v4l2/v4l2_slice_video_decoder.h"
#endif
-#if BUILDFLAG(USE_VAAPI) || BUILDFLAG(USE_V4L2_CODEC)
-#include "media/gpu/linux/video_decoder_pipeline.h"
-#endif
-
namespace media {
+namespace {
+
+#if BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
+// Get a list of the available functions for creating VideoDecoder.
+base::queue<VideoDecoderPipeline::CreateVDFunc> GetCreateVDFunctions(
+ VideoDecoderPipeline::CreateVDFunc cur_create_vd_func) {
+ static constexpr VideoDecoderPipeline::CreateVDFunc kCreateVDFuncs[] = {
+#if BUILDFLAG(USE_V4L2_CODEC)
+ &V4L2SliceVideoDecoder::Create,
+#endif // BUILDFLAG(USE_V4L2_CODEC)
+
+#if BUILDFLAG(USE_VAAPI)
+ &VaapiVideoDecoder::Create,
+#endif // BUILDFLAG(USE_VAAPI)
+ };
+
+ base::queue<VideoDecoderPipeline::CreateVDFunc> ret;
+ for (const auto& func : kCreateVDFuncs) {
+ if (func != cur_create_vd_func)
+ ret.push(func);
+ }
+ return ret;
+}
+#endif // BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
+
+} // namespace
+
// static
SupportedVideoDecoderConfigs
ChromeosVideoDecoderFactory::GetSupportedConfigs() {
@@ -52,29 +79,13 @@ std::unique_ptr<VideoDecoder> ChromeosVideoDecoderFactory::Create(
scoped_refptr<base::SequencedTaskRunner> client_task_runner,
std::unique_ptr<DmabufVideoFramePool> frame_pool,
std::unique_ptr<VideoFrameConverter> frame_converter) {
- if (!client_task_runner || !frame_pool || !frame_converter)
- return nullptr;
-
- std::unique_ptr<VideoDecoder> decoder;
+#if BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
+ return VideoDecoderPipeline::Create(
+ std::move(client_task_runner), std::move(frame_pool),
+ std::move(frame_converter), base::BindRepeating(&GetCreateVDFunctions));
+#endif // BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
- // TODO(dstaessens@): We first try VAAPI as USE_V4L2_CODEC might also be
- // set, even though initialization of V4L2SliceVideoDecoder would fail. We
- // need to implement a better way to select the correct decoder.
-#if BUILDFLAG(USE_VAAPI)
- decoder =
- VaapiVideoDecoder::Create(client_task_runner, std::move(frame_pool));
-#elif BUILDFLAG(USE_V4L2_CODEC)
- decoder =
- V4L2SliceVideoDecoder::Create(client_task_runner, std::move(frame_pool));
-#endif
-
-#if BUILDFLAG(USE_VAAPI) || BUILDFLAG(USE_V4L2_CODEC)
- return std::make_unique<VideoDecoderPipeline>(std::move(client_task_runner),
- std::move(decoder),
- std::move(frame_converter));
-#else
return nullptr;
-#endif
}
} // namespace media
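GetCreateVDFunctions() above hands VideoDecoderPipeline a queue of candidate decoder factories, excluding the one currently in use. As a rough illustration of how such a queue is meant to be drained — a synchronous sketch only; the pipeline itself, further down in this patch, does this asynchronously via CreateAndInitializeVD() and OnInitializeDone():

// Sketch only: synchronous version of the fallback idea, using the
// CreateVDFunc signature introduced by this patch.
std::unique_ptr<VideoDecoder> CreateFirstAvailableDecoder(
    base::queue<VideoDecoderPipeline::CreateVDFunc> candidates,
    scoped_refptr<base::SequencedTaskRunner> client_task_runner,
    scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
    base::RepeatingCallback<DmabufVideoFramePool*()> get_pool_cb) {
  while (!candidates.empty()) {
    VideoDecoderPipeline::CreateVDFunc create_func = candidates.front();
    candidates.pop();
    auto decoder =
        create_func(client_task_runner, decoder_task_runner, get_pool_cb);
    if (decoder)
      return decoder;  // The real pipeline also requires Initialize() to pass.
  }
  return nullptr;
}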
diff --git a/chromium/media/gpu/gpu_video_decode_accelerator_factory.cc b/chromium/media/gpu/gpu_video_decode_accelerator_factory.cc
index f9787be0a3a..6f521463770 100644
--- a/chromium/media/gpu/gpu_video_decode_accelerator_factory.cc
+++ b/chromium/media/gpu/gpu_video_decode_accelerator_factory.cc
@@ -13,6 +13,7 @@
#include "media/base/media_switches.h"
#include "media/gpu/buildflags.h"
#include "media/gpu/gpu_video_accelerator_util.h"
+#include "media/gpu/macros.h"
#include "media/gpu/media_gpu_export.h"
#if defined(OS_WIN)
@@ -55,7 +56,7 @@ gpu::VideoDecodeAcceleratorCapabilities GetDecoderCapabilitiesInternal(
capabilities.supported_profiles =
DXVAVideoDecodeAccelerator::GetSupportedProfiles(gpu_preferences,
workarounds);
-#elif BUILDFLAG(USE_V4L2_CODEC) || BUILDFLAG(USE_VAAPI)
+#elif BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
VideoDecodeAccelerator::SupportedProfiles vda_profiles;
#if BUILDFLAG(USE_V4L2_CODEC)
vda_profiles = V4L2VideoDecodeAccelerator::GetSupportedProfiles();
@@ -124,8 +125,21 @@ GpuVideoDecodeAcceleratorFactory::GetDecoderCapabilities(
// change between calls.
// TODO(sandersd): Move cache to GpuMojoMediaClient once
// |video_decode_accelerator_capabilities| is removed from GPUInfo.
- static const gpu::VideoDecodeAcceleratorCapabilities capabilities =
+ static gpu::VideoDecodeAcceleratorCapabilities capabilities =
GetDecoderCapabilitiesInternal(gpu_preferences, workarounds);
+
+#if BUILDFLAG(USE_V4L2_CODEC)
+ // V4L2-only: the decoder devices may not be visible at the time the GPU
+ // process is starting. If the capabilities vector is empty, try to query the
+ // devices again in the hope that they will have appeared in the meantime.
+ // TODO(crbug.com/948147): trigger query when a device add/remove event
+ // (e.g. via udev) has happened instead.
+ if (capabilities.supported_profiles.empty()) {
+ VLOGF(1) << "Capabilities empty, querying again...";
+ capabilities = GetDecoderCapabilitiesInternal(gpu_preferences, workarounds);
+ }
+#endif
+
return capabilities;
}
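The hunk above drops const from the function-local static so the cached capabilities can be refreshed when the first query ran before any V4L2 decoder node was visible. A hedged sketch of that cache-then-retry shape, with QueryCapabilities() standing in for GetDecoderCapabilitiesInternal():

// Sketch of the "cache, but re-query while the result is still empty" pattern.
gpu::VideoDecodeAcceleratorCapabilities GetCachedCapabilities() {
  static gpu::VideoDecodeAcceleratorCapabilities capabilities =
      QueryCapabilities();  // Placeholder for the real internal query.
  // Devices may not have been enumerable yet; retry until something shows up.
  if (capabilities.supported_profiles.empty())
    capabilities = QueryCapabilities();
  return capabilities;
}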
diff --git a/chromium/media/gpu/image_processor.cc b/chromium/media/gpu/image_processor.cc
index f6da2f1578a..4a7e6857b16 100644
--- a/chromium/media/gpu/image_processor.cc
+++ b/chromium/media/gpu/image_processor.cc
@@ -12,7 +12,18 @@ ImageProcessor::PortConfig::PortConfig(
const VideoFrameLayout& layout,
const gfx::Size& visible_size,
const std::vector<VideoFrame::StorageType>& preferred_storage_types)
+ : PortConfig(layout,
+ kUnassignedFourCC,
+ visible_size,
+ preferred_storage_types) {}
+
+ImageProcessor::PortConfig::PortConfig(
+ const VideoFrameLayout& layout,
+ uint32_t fourcc,
+ const gfx::Size& visible_size,
+ const std::vector<VideoFrame::StorageType>& preferred_storage_types)
: layout(layout),
+ fourcc(fourcc),
visible_size(visible_size),
preferred_storage_types(preferred_storage_types) {}
diff --git a/chromium/media/gpu/image_processor.h b/chromium/media/gpu/image_processor.h
index d7b35ebacb6..a2a71782066 100644
--- a/chromium/media/gpu/image_processor.h
+++ b/chromium/media/gpu/image_processor.h
@@ -5,6 +5,8 @@
#ifndef MEDIA_GPU_IMAGE_PROCESSOR_H_
#define MEDIA_GPU_IMAGE_PROCESSOR_H_
+#include <stdint.h>
+
#include <vector>
#include "base/callback_forward.h"
@@ -46,15 +48,26 @@ class MEDIA_GPU_EXPORT ImageProcessor {
};
// Encapsulates ImageProcessor input / output configurations.
+ // Note that |fourcc| is used when the format cannot be described by |layout|,
+ // e.g. a platform-specific format not listed in VideoPixelFormat. The default
+ // value of |fourcc| is kUnassignedFourCC.
struct MEDIA_GPU_EXPORT PortConfig {
PortConfig() = delete;
PortConfig(
const VideoFrameLayout& layout,
const gfx::Size& visible_size,
const std::vector<VideoFrame::StorageType>& preferred_storage_types);
+ PortConfig(
+ const VideoFrameLayout& layout,
+ uint32_t fourcc,
+ const gfx::Size& visible_size,
+ const std::vector<VideoFrame::StorageType>& preferred_storage_types);
~PortConfig();
+ static const uint32_t kUnassignedFourCC = 0u;
+
const VideoFrameLayout layout;
+ const uint32_t fourcc;
const gfx::Size visible_size;
const std::vector<VideoFrame::StorageType> preferred_storage_types;
};
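To illustrate the two PortConfig forms added above: the fourcc overload is only needed when |layout|'s VideoPixelFormat cannot describe the buffer. In the sketch below, |layout|, |visible_size| and kSomePlatformFourcc are assumed placeholders, not values taken from this patch:

// Sketch only; kSomePlatformFourcc stands for a platform-reported fourcc code.
constexpr uint32_t kSomePlatformFourcc = 0x3231564d;

// Common case: the layout's VideoPixelFormat is enough, so |fourcc| keeps its
// default of ImageProcessor::PortConfig::kUnassignedFourCC.
ImageProcessor::PortConfig input_config(layout, visible_size,
                                        {VideoFrame::STORAGE_DMABUFS});

// Platform-specific case: pass the fourcc explicitly alongside the layout.
ImageProcessor::PortConfig output_config(layout, kSomePlatformFourcc,
                                         visible_size,
                                         {VideoFrame::STORAGE_DMABUFS});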
diff --git a/chromium/media/gpu/image_processor_test.cc b/chromium/media/gpu/image_processor_test.cc
index 0a358c9e1e6..90c2f22ac4d 100644
--- a/chromium/media/gpu/image_processor_test.cc
+++ b/chromium/media/gpu/image_processor_test.cc
@@ -17,6 +17,7 @@
#include "media/gpu/image_processor.h"
#include "media/gpu/test/image.h"
#include "media/gpu/test/image_processor/image_processor_client.h"
+#include "media/gpu/test/video_frame_file_writer.h"
#include "media/gpu/test/video_frame_helpers.h"
#include "media/gpu/test/video_frame_validator.h"
#include "media/gpu/test/video_test_environment.h"
@@ -27,21 +28,49 @@
namespace media {
namespace {
+const char* usage_msg =
+ "usage: image_processor_test\n"
+ "[--gtest_help] [--help] [-v=<level>] [--vmodule=<config>] "
+ "[--save_images]\n";
+
+const char* help_msg =
+ "Run the image processor tests.\n\n"
+ "The following arguments are supported:\n"
+ " --gtest_help display the gtest help and exit.\n"
+ " --help display this help and exit.\n"
+ " -v enable verbose mode, e.g. -v=2.\n"
+ " --vmodule enable verbose mode for the specified module.\n"
+ " --save_images write images processed by a image processor to\n"
+ " the \"<testname>\" folder.\n";
+
+bool g_save_images = false;
+
+media::test::VideoTestEnvironment* g_env;
+
+// Files for pixel format conversion test.
// TODO(crbug.com/944822): Use kI420Image for I420 -> NV12 test case. It is
// currently disabled because there is currently no way of creating DMABUF I420
// buffer by NativePixmap.
// constexpr const base::FilePath::CharType* kI420Image =
// FILE_PATH_LITERAL("bear_320x192.i420.yuv");
-constexpr const base::FilePath::CharType* kNV12Image =
+const base::FilePath::CharType* kNV12Image =
FILE_PATH_LITERAL("bear_320x192.nv12.yuv");
-constexpr const base::FilePath::CharType* kRGBAImage =
+const base::FilePath::CharType* kRGBAImage =
FILE_PATH_LITERAL("bear_320x192.rgba");
-constexpr const base::FilePath::CharType* kBGRAImage =
+const base::FilePath::CharType* kBGRAImage =
FILE_PATH_LITERAL("bear_320x192.bgra");
-constexpr const base::FilePath::CharType* kYV12Image =
+const base::FilePath::CharType* kYV12Image =
FILE_PATH_LITERAL("bear_320x192.yv12.yuv");
-class ImageProcessorSimpleParamTest
+// Files for scaling test.
+const base::FilePath::CharType* kNV12Image720P =
+ FILE_PATH_LITERAL("puppets-1280x720.nv12.yuv");
+const base::FilePath::CharType* kNV12Image360P =
+ FILE_PATH_LITERAL("puppets-640x360.nv12.yuv");
+const base::FilePath::CharType* kNV12Image180P =
+ FILE_PATH_LITERAL("puppets-320x180.nv12.yuv");
+
+class ImageProcessorParamTest
: public ::testing::Test,
public ::testing::WithParamInterface<
std::tuple<base::FilePath, base::FilePath>> {
@@ -71,11 +100,28 @@ class ImageProcessorSimpleParamTest
LOG_ASSERT(output_image.IsMetadataLoaded());
std::vector<std::unique_ptr<test::VideoFrameProcessor>> frame_processors;
// TODO(crbug.com/944823): Use VideoFrameValidator for RGB formats.
- if (IsYuvPlanar(input_format) && IsYuvPlanar(output_format)) {
+ // TODO(crbug.com/917951): We should validate a scaled image with SSIM.
+ // Validating processed frames is currently not supported when a format is
+ // not YUV or when scaling images.
+ if (IsYuvPlanar(input_format) && IsYuvPlanar(output_format) &&
+ input_image.Size() == output_image.Size()) {
auto vf_validator = test::VideoFrameValidator::Create(
{output_image.Checksum()}, output_image.PixelFormat());
frame_processors.push_back(std::move(vf_validator));
}
+
+ if (g_save_images) {
+ base::FilePath output_dir =
+ base::FilePath(base::FilePath::kCurrentDirectory)
+ .Append(base::FilePath(g_env->GetTestName()));
+ test::VideoFrameFileWriter::OutputFormat saved_file_format =
+ IsYuvPlanar(output_format)
+ ? test::VideoFrameFileWriter::OutputFormat::kYUV
+ : test::VideoFrameFileWriter::OutputFormat::kPNG;
+ frame_processors.push_back(
+ test::VideoFrameFileWriter::Create(output_dir, saved_file_format));
+ }
+
auto ip_client = test::ImageProcessorClient::Create(
input_config, output_config, kNumBuffers, std::move(frame_processors));
LOG_ASSERT(ip_client) << "Failed to create ImageProcessorClient";
@@ -83,13 +129,20 @@ class ImageProcessorSimpleParamTest
}
};
-TEST_P(ImageProcessorSimpleParamTest, ConvertOneTime_MemToMem) {
+TEST_P(ImageProcessorParamTest, ConvertOneTime_MemToMem) {
// Load the test input image. We only need the output image's metadata so we
// can compare checksums.
test::Image input_image(std::get<0>(GetParam()));
test::Image output_image(std::get<1>(GetParam()));
ASSERT_TRUE(input_image.Load());
ASSERT_TRUE(output_image.LoadMetadata());
+ if (input_image.PixelFormat() == output_image.PixelFormat()) {
+ // If the input format is the same as the output format, this test case
+ // exercises scaling rather than format conversion. LibyuvImageProcessor
+ // doesn't support scaling yet, so skip it.
+ // TODO(hiroh): Remove this skip once LibyuvIP supports scaling.
+ GTEST_SKIP();
+ }
auto ip_client = CreateImageProcessorClient(
input_image, {VideoFrame::STORAGE_OWNED_MEMORY}, output_image,
@@ -106,16 +159,23 @@ TEST_P(ImageProcessorSimpleParamTest, ConvertOneTime_MemToMem) {
#if defined(OS_CHROMEOS)
// We don't yet have the function to create Dmabuf-backed VideoFrame on
// platforms except ChromeOS. So MemToDmabuf test is limited on ChromeOS.
-TEST_P(ImageProcessorSimpleParamTest, ConvertOneTime_MemToDmabuf) {
+TEST_P(ImageProcessorParamTest, ConvertOneTime_DmabufToMem) {
// Load the test input image. We only need the output image's metadata so we
// can compare checksums.
test::Image input_image(std::get<0>(GetParam()));
test::Image output_image(std::get<1>(GetParam()));
ASSERT_TRUE(input_image.Load());
ASSERT_TRUE(output_image.LoadMetadata());
+ if (input_image.PixelFormat() == output_image.PixelFormat()) {
+ // If the input format is the same as the output format, this test case
+ // exercises scaling rather than format conversion. LibyuvImageProcessor
+ // doesn't support scaling yet, so skip it.
+ // TODO(hiroh): Remove this skip once LibyuvIP supports scaling.
+ GTEST_SKIP();
+ }
auto ip_client = CreateImageProcessorClient(
- input_image, {VideoFrame::STORAGE_OWNED_MEMORY}, output_image,
+ input_image, {VideoFrame::STORAGE_DMABUFS}, output_image,
{VideoFrame::STORAGE_OWNED_MEMORY});
ip_client->Process(input_image, output_image);
@@ -125,6 +185,27 @@ TEST_P(ImageProcessorSimpleParamTest, ConvertOneTime_MemToDmabuf) {
EXPECT_EQ(ip_client->GetNumOfProcessedImages(), 1u);
EXPECT_TRUE(ip_client->WaitForFrameProcessors());
}
+
+TEST_P(ImageProcessorParamTest, ConvertOneTime_DmabufToDmabuf) {
+ // Load the test input image. We only need the output image's metadata so we
+ // can compare checksums.
+ test::Image input_image(std::get<0>(GetParam()));
+ test::Image output_image(std::get<1>(GetParam()));
+ ASSERT_TRUE(input_image.Load());
+ ASSERT_TRUE(output_image.LoadMetadata());
+
+ auto ip_client =
+ CreateImageProcessorClient(input_image, {VideoFrame::STORAGE_DMABUFS},
+ output_image, {VideoFrame::STORAGE_DMABUFS});
+
+ ip_client->Process(input_image, output_image);
+
+ EXPECT_TRUE(ip_client->WaitUntilNumImageProcessed(1u));
+ EXPECT_EQ(ip_client->GetErrorCount(), 0u);
+ EXPECT_EQ(ip_client->GetNumOfProcessedImages(), 1u);
+ EXPECT_TRUE(ip_client->WaitForFrameProcessors());
+}
+
#endif // defined(OS_CHROMEOS)
// BGRA -> NV12
@@ -132,8 +213,8 @@ TEST_P(ImageProcessorSimpleParamTest, ConvertOneTime_MemToDmabuf) {
// RGBA -> NV12
// YV12 -> NV12
INSTANTIATE_TEST_SUITE_P(
- ConvertToNV12,
- ImageProcessorSimpleParamTest,
+ PixelFormatConversionToNV12,
+ ImageProcessorParamTest,
::testing::Values(std::make_tuple(kBGRAImage, kNV12Image),
// TODO(crbug.com/944822): Add I420 -> NV12 test case.
// There is currently no way of creating DMABUF
@@ -142,20 +223,53 @@ INSTANTIATE_TEST_SUITE_P(
std::make_tuple(kRGBAImage, kNV12Image),
std::make_tuple(kYV12Image, kNV12Image)));
+INSTANTIATE_TEST_SUITE_P(
+ NV12DownScaling,
+ ImageProcessorParamTest,
+ ::testing::Values(std::make_tuple(kNV12Image720P, kNV12Image360P),
+ std::make_tuple(kNV12Image720P, kNV12Image180P),
+ std::make_tuple(kNV12Image360P, kNV12Image180P)));
+
#if defined(OS_CHROMEOS)
// TODO(hiroh): Add more tests.
// MEM->DMABUF (V4L2VideoEncodeAccelerator),
-// DMABUF->DMABUF (GpuArcVideoEncodeAccelerator),
#endif
-
} // namespace
} // namespace media
int main(int argc, char** argv) {
- testing::InitGoogleTest(&argc, argv);
base::CommandLine::Init(argc, argv);
+ // Print the help message if requested. This needs to be done before
+ // initializing gtest, to overwrite the default gtest help message.
+ const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess();
+ LOG_ASSERT(cmd_line);
+ if (cmd_line->HasSwitch("help")) {
+ std::cout << media::usage_msg << "\n" << media::help_msg;
+ return 0;
+ }
+
+ base::CommandLine::SwitchMap switches = cmd_line->GetSwitches();
+ for (base::CommandLine::SwitchMap::const_iterator it = switches.begin();
+ it != switches.end(); ++it) {
+ if (it->first.find("gtest_") == 0 || // Handled by GoogleTest
+ it->first == "v" || it->first == "vmodule") { // Handled by Chrome
+ continue;
+ }
+
+ if (it->first == "save_images") {
+ media::g_save_images = true;
+ } else {
+ std::cout << "unknown option: --" << it->first << "\n"
+ << media::usage_msg;
+ return EXIT_FAILURE;
+ }
+ }
+
+ testing::InitGoogleTest(&argc, argv);
+
auto* const test_environment = new media::test::VideoTestEnvironment;
- testing::AddGlobalTestEnvironment(test_environment);
+ media::g_env = reinterpret_cast<media::test::VideoTestEnvironment*>(
+ testing::AddGlobalTestEnvironment(test_environment));
return RUN_ALL_TESTS();
}
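Two details of the rewritten main() are easy to miss: the custom --help check runs before InitGoogleTest() so it takes precedence over gtest's own help output, and testing::AddGlobalTestEnvironment() takes ownership of the environment while returning the same pointer, which is why the result can be stored in |g_env|. A stripped-down sketch of that ordering, with the switch-filtering loop elided:

// Sketch of the main() skeleton used above.
int main(int argc, char** argv) {
  base::CommandLine::Init(argc, argv);
  if (base::CommandLine::ForCurrentProcess()->HasSwitch("help")) {
    std::cout << media::usage_msg << "\n" << media::help_msg;
    return 0;  // Handled before gtest ever sees argv.
  }

  testing::InitGoogleTest(&argc, argv);

  // gtest owns the environment and returns the pointer passed in, so keeping
  // a raw pointer around for the tests is safe for the process lifetime.
  auto* env = new media::test::VideoTestEnvironment;
  media::g_env = reinterpret_cast<media::test::VideoTestEnvironment*>(
      testing::AddGlobalTestEnvironment(env));
  return RUN_ALL_TESTS();
}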
diff --git a/chromium/media/gpu/ipc/service/BUILD.gn b/chromium/media/gpu/ipc/service/BUILD.gn
index e50a46b8882..b2f80462add 100644
--- a/chromium/media/gpu/ipc/service/BUILD.gn
+++ b/chromium/media/gpu/ipc/service/BUILD.gn
@@ -68,6 +68,7 @@ source_set("unit_tests") {
"//base",
"//base/test:test_support",
"//media:test_support",
+ "//media/gpu:test_support",
"//testing/gmock",
"//testing/gtest",
]
diff --git a/chromium/media/gpu/ipc/service/picture_buffer_manager_unittest.cc b/chromium/media/gpu/ipc/service/picture_buffer_manager_unittest.cc
index 2d3e28409ed..38b0482c6f8 100644
--- a/chromium/media/gpu/ipc/service/picture_buffer_manager_unittest.cc
+++ b/chromium/media/gpu/ipc/service/picture_buffer_manager_unittest.cc
@@ -9,7 +9,7 @@
#include "base/macros.h"
#include "base/memory/scoped_refptr.h"
#include "base/test/mock_callback.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/base/simple_sync_token_client.h"
#include "media/gpu/test/fake_command_buffer_helper.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -71,7 +71,7 @@ class PictureBufferManagerImplTest : public testing::Test {
return sync_token;
}
- base::test::ScopedTaskEnvironment environment_;
+ base::test::TaskEnvironment environment_;
uint64_t next_release_count_ = 1;
testing::StrictMock<
diff --git a/chromium/media/gpu/ipc/service/vda_video_decoder_unittest.cc b/chromium/media/gpu/ipc/service/vda_video_decoder_unittest.cc
index a7ba09d625b..b81c1c35cce 100644
--- a/chromium/media/gpu/ipc/service/vda_video_decoder_unittest.cc
+++ b/chromium/media/gpu/ipc/service/vda_video_decoder_unittest.cc
@@ -12,7 +12,7 @@
#include "base/single_thread_task_runner.h"
#include "base/stl_util.h"
#include "base/test/mock_callback.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/thread.h"
#include "base/time/time.h"
#include "gpu/command_buffer/common/sync_token.h"
@@ -288,7 +288,7 @@ class VdaVideoDecoderTest : public testing::TestWithParam<bool> {
return std::move(owned_vda_);
}
- base::test::ScopedTaskEnvironment environment_;
+ base::test::TaskEnvironment environment_;
base::Thread gpu_thread_;
testing::NiceMock<MockMediaLog> media_log_;
diff --git a/chromium/media/gpu/linux/mailbox_video_frame_converter.cc b/chromium/media/gpu/linux/mailbox_video_frame_converter.cc
index 2991e95ff30..b2dde5de56c 100644
--- a/chromium/media/gpu/linux/mailbox_video_frame_converter.cc
+++ b/chromium/media/gpu/linux/mailbox_video_frame_converter.cc
@@ -6,7 +6,7 @@
#include "base/bind.h"
#include "base/bind_helpers.h"
-#include "base/callback_helpers.h"
+#include "base/memory/ptr_util.h"
#include "base/optional.h"
#include "base/task/post_task.h"
#include "media/gpu/format_utils.h"
@@ -70,21 +70,55 @@ void WaitForSyncToken(
} // namespace
+// static
+std::unique_ptr<VideoFrameConverter> MailboxVideoFrameConverter::Create(
+ UnwrapFrameCB unwrap_frame_cb,
+ scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner,
+ GetCommandBufferStubCB get_stub_cb) {
+ if (!unwrap_frame_cb || !gpu_task_runner || !get_stub_cb)
+ return nullptr;
+
+ return base::WrapUnique<VideoFrameConverter>(new MailboxVideoFrameConverter(
+ std::move(unwrap_frame_cb), std::move(gpu_task_runner),
+ std::move(get_stub_cb)));
+}
+
MailboxVideoFrameConverter::MailboxVideoFrameConverter(
UnwrapFrameCB unwrap_frame_cb,
scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner,
GetCommandBufferStubCB get_stub_cb)
: unwrap_frame_cb_(std::move(unwrap_frame_cb)),
gpu_task_runner_(std::move(gpu_task_runner)),
- get_stub_cb_(std::move(get_stub_cb)),
- weak_this_factory_(this) {
- weak_this_ = weak_this_factory_.GetWeakPtr();
+ get_stub_cb_(std::move(get_stub_cb)) {
+ DVLOGF(2);
+
+ parent_weak_this_ = parent_weak_this_factory_.GetWeakPtr();
+ gpu_weak_this_ = gpu_weak_this_factory_.GetWeakPtr();
}
-MailboxVideoFrameConverter::~MailboxVideoFrameConverter() {
- DCHECK(parent_task_runner_->RunsTasksInCurrentSequence());
+void MailboxVideoFrameConverter::Destroy() {
+ DCHECK(!parent_task_runner_ ||
+ parent_task_runner_->RunsTasksInCurrentSequence());
+ DVLOGF(2);
+
+ parent_weak_this_factory_.InvalidateWeakPtrs();
+ gpu_task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&MailboxVideoFrameConverter::DestroyOnGPUThread,
+ gpu_weak_this_));
+}
+
+void MailboxVideoFrameConverter::DestroyOnGPUThread() {
+ DCHECK(gpu_task_runner_->RunsTasksInCurrentSequence());
+ DVLOGF(2);
+
+ gpu_weak_this_factory_.InvalidateWeakPtrs();
+ delete this;
+}
- weak_this_factory_.InvalidateWeakPtrs();
+MailboxVideoFrameConverter::~MailboxVideoFrameConverter() {
+ // |gpu_weak_this_factory_| is already invalidated here.
+ DCHECK(gpu_task_runner_->RunsTasksInCurrentSequence());
+ DVLOGF(2);
}
bool MailboxVideoFrameConverter::CreateCommandBufferHelper() {
@@ -102,83 +136,86 @@ bool MailboxVideoFrameConverter::CreateCommandBufferHelper() {
return command_buffer_helper_ != nullptr;
}
-scoped_refptr<VideoFrame> MailboxVideoFrameConverter::ConvertFrame(
- scoped_refptr<VideoFrame> frame) {
+void MailboxVideoFrameConverter::ConvertFrame(scoped_refptr<VideoFrame> frame) {
DCHECK(parent_task_runner_->RunsTasksInCurrentSequence());
DVLOGF(4);
- if (!frame) {
- DVLOGF(1) << "nullptr input.";
- return nullptr;
- }
- if (!frame->HasDmaBufs()) {
- DVLOGF(1) << "Only converting DMA-buf frames is supported.";
- return nullptr;
- }
+ if (!frame || !frame->HasDmaBufs())
+ return OnError("Invalid frame.");
VideoFrame* origin_frame = unwrap_frame_cb_.Run(*frame);
- gpu::Mailbox mailbox;
- auto it = mailbox_table_.find(origin_frame->unique_id());
- if (it != mailbox_table_.end())
- mailbox = it->second;
-
- if (mailbox.IsZero()) {
- base::WaitableEvent event;
- // We wait until GenerateMailbox() finished, so base::Unretained(this) is
- // safe.
+ if (!origin_frame)
+ return OnError("Failed to get origin frame.");
+
+ // Generate the mailbox on the GPU thread if we haven't already.
+ const int origin_frame_id = origin_frame->unique_id();
+ if (mailbox_table_.find(origin_frame_id) == mailbox_table_.end()) {
+ DVLOGF(4) << "Generate mailbox for frame: " << origin_frame_id;
+ // Set an empty mailbox first to prevent generating mailbox multiple times.
+ mailbox_table_.emplace(origin_frame_id, gpu::Mailbox());
+
+ // |frame| holds a reference to |origin_frame|, keeping it alive, so it's
+ // safe to use base::Unretained here.
gpu_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&MailboxVideoFrameConverter::GenerateMailbox,
- base::Unretained(this), base::Unretained(origin_frame),
- base::Unretained(&mailbox), base::Unretained(&event)));
- event.Wait();
+ gpu_weak_this_, base::Unretained(origin_frame), frame));
+ }
+ input_frame_queue_.emplace(std::move(frame), origin_frame_id);
+ TryOutputFrames();
+}
+
+void MailboxVideoFrameConverter::TryOutputFrames() {
+ DCHECK(parent_task_runner_->RunsTasksInCurrentSequence());
+ DVLOGF(4) << "input_frame_queue_ size: " << input_frame_queue_.size();
+
+ while (!input_frame_queue_.empty()) {
+ const int origin_frame_id = input_frame_queue_.front().second;
+ const gpu::Mailbox& mailbox = mailbox_table_[origin_frame_id];
if (mailbox.IsZero()) {
- VLOGF(1) << "Failed to create mailbox.";
- return nullptr;
+ DVLOGF(4) << "Mailbox for frame: " << origin_frame_id
+ << " is not generated yet.";
+ return;
}
- RegisterMailbox(origin_frame, mailbox);
+ auto frame = std::move(input_frame_queue_.front().first);
+ input_frame_queue_.pop();
+
+ gpu::MailboxHolder mailbox_holders[VideoFrame::kMaxPlanes];
+ mailbox_holders[0] =
+ gpu::MailboxHolder(mailbox, gpu::SyncToken(), kTextureTarget);
+ scoped_refptr<VideoFrame> mailbox_frame = VideoFrame::WrapNativeTextures(
+ frame->format(), mailbox_holders,
+ base::BindOnce(&WaitForSyncToken, gpu_task_runner_,
+ command_buffer_helper_, frame),
+ frame->coded_size(), frame->visible_rect(), frame->natural_size(),
+ frame->timestamp());
+ mailbox_frame->metadata()->MergeMetadataFrom(frame->metadata());
+ output_cb_.Run(mailbox_frame);
}
-
- gpu::MailboxHolder mailbox_holders[VideoFrame::kMaxPlanes];
- mailbox_holders[0] =
- gpu::MailboxHolder(mailbox, gpu::SyncToken(), kTextureTarget);
- scoped_refptr<VideoFrame> mailbox_frame = VideoFrame::WrapNativeTextures(
- frame->format(), mailbox_holders,
- base::BindOnce(&WaitForSyncToken, gpu_task_runner_,
- command_buffer_helper_, frame),
- frame->coded_size(), frame->visible_rect(), frame->natural_size(),
- frame->timestamp());
- mailbox_frame->metadata()->MergeMetadataFrom(frame->metadata());
- return mailbox_frame;
}
-void MailboxVideoFrameConverter::GenerateMailbox(VideoFrame* origin_frame,
- gpu::Mailbox* mailbox,
- base::WaitableEvent* event) {
+void MailboxVideoFrameConverter::GenerateMailbox(
+ VideoFrame* origin_frame,
+ scoped_refptr<VideoFrame> frame) {
DCHECK(gpu_task_runner_->BelongsToCurrentThread());
- DVLOGF(4);
-
- // Signal the event when leaving the method.
- base::ScopedClosureRunner signal_event(
- base::BindOnce(&base::WaitableEvent::Signal, base::Unretained(event)));
+ DVLOGF(4) << "frame: " << origin_frame->unique_id();
// CreateCommandBufferHelper() should be called on |gpu_task_runner_| so we
// call it here lazily instead of at constructor.
- if (!command_buffer_helper_ && !CreateCommandBufferHelper()) {
- VLOGF(1) << "Failed to create command buffer helper.";
- return;
- }
+ if (!command_buffer_helper_ && !CreateCommandBufferHelper())
+ return OnError("Failed to create command buffer helper.");
// Get NativePixmap.
scoped_refptr<gfx::NativePixmap> pixmap;
auto buffer_format =
VideoPixelFormatToGfxBufferFormat(origin_frame->format());
if (!buffer_format) {
- VLOGF(1) << "Unsupported format: " << origin_frame->format();
- return;
- };
+ return OnError("Unsupported format: " +
+ VideoPixelFormatToString(origin_frame->format()));
+ }
+
#if defined(USE_OZONE)
gfx::GpuMemoryBufferHandle handle = CreateGpuMemoryBufferHandle(origin_frame);
DCHECK(!handle.is_null());
@@ -188,24 +225,19 @@ void MailboxVideoFrameConverter::GenerateMailbox(VideoFrame* origin_frame,
gfx::kNullAcceleratedWidget, origin_frame->coded_size(),
*buffer_format, std::move(handle.native_pixmap_handle));
#endif // defined(USE_OZONE)
- if (!pixmap) {
- VLOGF(1) << "Cannot create NativePixmap.";
- return;
- }
+ if (!pixmap)
+ return OnError("Cannot create NativePixmap.");
// Create GLImage.
auto image = base::MakeRefCounted<gl::GLImageNativePixmap>(
origin_frame->coded_size(), *buffer_format);
- if (!image->Initialize(std::move(pixmap))) {
- VLOGF(1) << "Failed to initialize GLImage.";
- return;
- }
+ if (!image->Initialize(std::move(pixmap)))
+ return OnError("Failed to initialize GLImage.");
// Create texture and bind image to texture.
- if (!command_buffer_helper_->MakeContextCurrent()) {
- VLOGF(1) << "Failed to make context current.";
- return;
- }
+ if (!command_buffer_helper_->MakeContextCurrent())
+ return OnError("Failed to make context current.");
+
GLuint service_id = command_buffer_helper_->CreateTexture(
kTextureTarget, GL_RGBA, origin_frame->coded_size().width(),
origin_frame->coded_size().height(), GL_RGBA, GL_UNSIGNED_BYTE);
@@ -215,35 +247,45 @@ void MailboxVideoFrameConverter::GenerateMailbox(VideoFrame* origin_frame,
DCHECK(ret);
command_buffer_helper_->BindImage(service_id, image.get(), true);
command_buffer_helper_->SetCleared(service_id);
- *mailbox = command_buffer_helper_->CreateMailbox(service_id);
+ gpu::Mailbox mailbox = command_buffer_helper_->CreateMailbox(service_id);
// Destroy the texture after the DMA-buf VideoFrame is destructed.
origin_frame->AddDestructionObserver(base::BindOnce(
&DestroyTexture, gpu_task_runner_, command_buffer_helper_, service_id));
- return;
+
+ // |frame| holds a reference to |origin_frame|, keeping it alive, so it's
+ // safe to use base::Unretained here.
+ parent_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&MailboxVideoFrameConverter::RegisterMailbox,
+ parent_weak_this_, base::Unretained(origin_frame), mailbox,
+ std::move(frame)));
}
-void MailboxVideoFrameConverter::RegisterMailbox(VideoFrame* origin_frame,
- const gpu::Mailbox& mailbox) {
+void MailboxVideoFrameConverter::RegisterMailbox(
+ VideoFrame* origin_frame,
+ const gpu::Mailbox& mailbox,
+ scoped_refptr<VideoFrame> frame) {
DCHECK(parent_task_runner_->RunsTasksInCurrentSequence());
DCHECK(!mailbox.IsZero());
- DVLOGF(4);
+ DVLOGF(4) << "frame: " << origin_frame->unique_id();
- auto ret =
- mailbox_table_.insert(std::make_pair(origin_frame->unique_id(), mailbox));
- DCHECK(ret.second);
+ mailbox_table_[origin_frame->unique_id()] = mailbox;
origin_frame->AddDestructionObserver(base::BindOnce(
&MailboxVideoFrameConverter::UnregisterMailboxThunk, parent_task_runner_,
- weak_this_, origin_frame->unique_id()));
+ parent_weak_this_, origin_frame->unique_id()));
+
+ // The mailbox has been generated, so try to output pending frames again.
+ TryOutputFrames();
}
// static
void MailboxVideoFrameConverter::UnregisterMailboxThunk(
scoped_refptr<base::SequencedTaskRunner> task_runner,
base::Optional<base::WeakPtr<MailboxVideoFrameConverter>> converter,
- int origin_frame_id) {
+ const int origin_frame_id) {
DCHECK(converter);
- DVLOGF(4);
+ DVLOGF(4) << "frame: " << origin_frame_id;
// MailboxVideoFrameConverter might have already been destroyed when this
// method is called. In this case, the WeakPtr will have been invalidated at
@@ -253,7 +295,7 @@ void MailboxVideoFrameConverter::UnregisterMailboxThunk(
*converter, origin_frame_id));
}
-void MailboxVideoFrameConverter::UnregisterMailbox(int origin_frame_id) {
+void MailboxVideoFrameConverter::UnregisterMailbox(const int origin_frame_id) {
DCHECK(parent_task_runner_->RunsTasksInCurrentSequence());
DVLOGF(4);
@@ -262,4 +304,30 @@ void MailboxVideoFrameConverter::UnregisterMailbox(int origin_frame_id) {
mailbox_table_.erase(it);
}
+void MailboxVideoFrameConverter::AbortPendingFrames() {
+ DCHECK(parent_task_runner_->RunsTasksInCurrentSequence());
+ DVLOGF(4) << "Number of pending frames: " << input_frame_queue_.size();
+
+ input_frame_queue_ = {};
+}
+
+bool MailboxVideoFrameConverter::HasPendingFrames() const {
+ DCHECK(parent_task_runner_->RunsTasksInCurrentSequence());
+ DVLOGF(4) << "Number of pending frames: " << input_frame_queue_.size();
+
+ return !input_frame_queue_.empty();
+}
+
+void MailboxVideoFrameConverter::OnError(const std::string& msg) {
+ VLOGF(1) << msg;
+
+ parent_task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&MailboxVideoFrameConverter::AbortPendingFrames,
+ parent_weak_this_));
+ // Currently we don't have a dedicated callback to notify the client that an
+ // error has occurred, so output a null frame to signal the error.
+ // TODO(akahuang): Create an error notification callback.
+ parent_task_runner_->PostTask(FROM_HERE, base::BindOnce(output_cb_, nullptr));
+}
+
} // namespace media
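Summarizing the new control flow in this file: the blocking WaitableEvent round-trip is gone; ConvertFrame() now queues the input on the parent sequence and, at most once per origin frame, posts GenerateMailbox() to the GPU thread, which posts RegisterMailbox() back; TryOutputFrames() then drains the queue in order, so output order always matches input order. The parent-thread half, condensed from the patch (error handling and GPU-thread details omitted):

// Condensed sketch of ConvertFrame(); see the full implementation above.
void MailboxVideoFrameConverter::ConvertFrame(scoped_refptr<VideoFrame> frame) {
  VideoFrame* origin_frame = unwrap_frame_cb_.Run(*frame);
  const int id = origin_frame->unique_id();
  if (mailbox_table_.find(id) == mailbox_table_.end()) {
    // Empty placeholder so the mailbox is only generated once per frame.
    mailbox_table_.emplace(id, gpu::Mailbox());
    gpu_task_runner_->PostTask(
        FROM_HERE,
        base::BindOnce(&MailboxVideoFrameConverter::GenerateMailbox,
                       gpu_weak_this_, base::Unretained(origin_frame), frame));
  }
  input_frame_queue_.emplace(std::move(frame), id);
  TryOutputFrames();  // Emits nothing while the mailbox is still a placeholder.
}

RegisterMailbox() later overwrites the placeholder and calls TryOutputFrames() again, which is what keeps frames queued behind a still-pending mailbox from being reordered.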
diff --git a/chromium/media/gpu/linux/mailbox_video_frame_converter.h b/chromium/media/gpu/linux/mailbox_video_frame_converter.h
index 67a5d235e22..65a26d0c557 100644
--- a/chromium/media/gpu/linux/mailbox_video_frame_converter.h
+++ b/chromium/media/gpu/linux/mailbox_video_frame_converter.h
@@ -7,11 +7,12 @@
#include <map>
+#include "base/callback_forward.h"
+#include "base/containers/queue.h"
#include "base/memory/ref_counted.h"
#include "base/memory/scoped_refptr.h"
#include "base/memory/weak_ptr.h"
#include "base/single_thread_task_runner.h"
-#include "base/synchronization/waitable_event.h"
#include "media/base/video_decoder.h"
#include "media/base/video_frame.h"
#include "media/gpu/command_buffer_helper.h"
@@ -32,39 +33,59 @@ class MEDIA_GPU_EXPORT MailboxVideoFrameConverter : public VideoFrameConverter {
base::RepeatingCallback<VideoFrame*(const VideoFrame& wrapped_frame)>;
using GetCommandBufferStubCB = base::OnceCallback<gpu::CommandBufferStub*()>;
+ // Create a MailboxVideoFrameConverter instance. Return nullptr if any
+ // argument is invalid.
+ static std::unique_ptr<VideoFrameConverter> Create(
+ UnwrapFrameCB unwrap_frame_cb,
+ scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner,
+ GetCommandBufferStubCB get_stub_cb);
+
+ // Convert a DMA-buf VideoFrame to a mailbox-backed VideoFrame.
+ // For each frame, we bind the DMA-buf to a GL texture and create a mailbox
+ // on the GPU main thread.
+ // The mailbox of each frame is stored in |mailbox_table_|. When a frame is
+ // converted a second time, we look up the table instead of creating the
+ // texture and mailbox again.
+ void ConvertFrame(scoped_refptr<VideoFrame> frame) override;
+ void AbortPendingFrames() override;
+ bool HasPendingFrames() const override;
+
+ private:
// In order to recycle VideoFrame, the DmabufVideoFramePool implementation may
// wrap the frame. We want to create texture only once for the same buffer, so
// we need to get the original frame at ConvertFrame(). |unwrap_frame_cb| is
// the callback used to get the original frame.
+ // |gpu_task_runner| is the task runner of the GPU main thread, on which the
+ // mailboxes are generated.
// |get_stub_cb| is the callback used to get the CommandBufferStub, which is
// used to create CommandBufferHelper.
MailboxVideoFrameConverter(
UnwrapFrameCB unwrap_frame_cb,
scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner,
GetCommandBufferStubCB get_stub_cb);
+ // Destructor runs on the GPU main thread.
~MailboxVideoFrameConverter() override;
+ void Destroy() override;
+ void DestroyOnGPUThread();
- // Convert DMA-buf VideoFrame to mailbox VideoFrame.
- // For each frame, we bind DMA-buf to GL texture and create mailbox at GPU
- // thread, and block working thread waiting for the result.
- // The mailbox of each frame will be stored at |mailbox_table_|. When
- // converting a frame second time, we just lookup the table instead of
- // creating texture and mailbox at GPU thread.
- scoped_refptr<VideoFrame> ConvertFrame(
- scoped_refptr<VideoFrame> frame) override;
-
- private:
bool CreateCommandBufferHelper();
+ // Try to convert frames in |input_frame_queue_| and output the converted
+ // frames to the client.
+ void TryOutputFrames();
+
// Generate mailbox for the DMA-buf VideoFrame. This method runs on the GPU
- // thread.
+ // main thread.
// |origin_frame| is unwrapped from |frame| passed from ConvertFrame().
+ // |frame| is passed only for keeping |origin_frame| alive.
void GenerateMailbox(VideoFrame* origin_frame,
- gpu::Mailbox* mailbox,
- base::WaitableEvent* event);
+ scoped_refptr<VideoFrame> frame);
// Register the mapping between DMA-buf VideoFrame and the mailbox.
- void RegisterMailbox(VideoFrame* origin_frame, const gpu::Mailbox& mailbox);
+ // |frame| is passed only for keeping |origin_frame| alive.
+ void RegisterMailbox(VideoFrame* origin_frame,
+ const gpu::Mailbox& mailbox,
+ scoped_refptr<VideoFrame> frame);
// Thunk for calling UnregisterMailbox() on |task_runner|.
// Because this thunk may be called in any thread, We cannot dereference
@@ -73,14 +94,13 @@ class MEDIA_GPU_EXPORT MailboxVideoFrameConverter : public VideoFrameConverter {
static void UnregisterMailboxThunk(
scoped_refptr<base::SequencedTaskRunner> task_runner,
base::Optional<base::WeakPtr<MailboxVideoFrameConverter>> converter,
- int origin_frame_id);
- // Remove the mapping between DMA-buf VideoFrame and the mailbox.
- void UnregisterMailbox(int origin_frame_id);
+ const int origin_frame_id);
+ // Remove the mapping between the frame whose unique id is |origin_frame_id|
+ // and the mailbox.
+ void UnregisterMailbox(const int origin_frame_id);
- // Destruction callback of converted frame. |frame| is the frame passed from
- // ConvertFrame().
- void OnMailboxHoldersReleased(scoped_refptr<VideoFrame> frame,
- const gpu::SyncToken& sync_token);
+ // Invoked when any error occurs. |msg| is the error message.
+ void OnError(const std::string& msg);
// In DmabufVideoFramePool, we recycle the unused frames. To do that, each
// time a frame is requested from the pool it is wrapped inside another frame.
@@ -91,19 +111,29 @@ class MEDIA_GPU_EXPORT MailboxVideoFrameConverter : public VideoFrameConverter {
// |unwrap_frame_cb_| is used to get the origin frame.
UnwrapFrameCB unwrap_frame_cb_;
- scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner_;
+ const scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner_;
GetCommandBufferStubCB get_stub_cb_;
// The interface to communicate with command buffer. We use this to create and
// destroy texture, wait for SyncToken, and generate mailbox.
scoped_refptr<CommandBufferHelper> command_buffer_helper_;
- // Mapping from the unique_id of origin frame to its corresponding mailbox.
+ // Mapping from the unique id of the frame to its corresponding mailbox.
+ // Accessed only on |parent_task_runner_|.
std::map<int, gpu::Mailbox> mailbox_table_;
+ // The queue of input frames and the unique_id of their origin frame.
+ // Accessed only on |parent_task_runner_|.
+ base::queue<std::pair<scoped_refptr<VideoFrame>, int>> input_frame_queue_;
+
// The weak pointer of this, bound to |parent_task_runner_|.
// Used at the VideoFrame destruction callback.
- base::WeakPtr<MailboxVideoFrameConverter> weak_this_;
- base::WeakPtrFactory<MailboxVideoFrameConverter> weak_this_factory_;
+ base::WeakPtr<MailboxVideoFrameConverter> parent_weak_this_;
+ // The weak pointer of this, bound to |gpu_task_runner_|.
+ // Used to generate mailbox on the GPU main thread.
+ base::WeakPtr<MailboxVideoFrameConverter> gpu_weak_this_;
+ base::WeakPtrFactory<MailboxVideoFrameConverter> parent_weak_this_factory_{
+ this};
+ base::WeakPtrFactory<MailboxVideoFrameConverter> gpu_weak_this_factory_{this};
DISALLOW_COPY_AND_ASSIGN(MailboxVideoFrameConverter);
};
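Because the converter now lives on two sequences, it keeps one WeakPtrFactory per sequence and is destroyed in two hops: Destroy() runs on the parent sequence and DestroyOnGPUThread() deletes the object on the GPU thread. A compressed sketch of that teardown pattern, written for a hypothetical class T rather than the converter itself:

// Sketch of the two-sequence teardown used above; T is a placeholder type.
void T::Destroy() {                           // Parent sequence.
  parent_weak_factory_.InvalidateWeakPtrs();  // Cancel parent-side callbacks.
  gpu_task_runner_->PostTask(
      FROM_HERE, base::BindOnce(&T::DestroyOnGpuThread, gpu_weak_this_));
}

void T::DestroyOnGpuThread() {                // GPU sequence.
  gpu_weak_factory_.InvalidateWeakPtrs();     // Cancel GPU-side callbacks.
  delete this;  // Destructor may then touch GPU-thread-only state safely.
}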
diff --git a/chromium/media/gpu/linux/platform_video_frame_pool_unittest.cc b/chromium/media/gpu/linux/platform_video_frame_pool_unittest.cc
index f01b48967cf..6a2d5293756 100644
--- a/chromium/media/gpu/linux/platform_video_frame_pool_unittest.cc
+++ b/chromium/media/gpu/linux/platform_video_frame_pool_unittest.cc
@@ -11,8 +11,8 @@
#include "base/files/file_path.h"
#include "base/files/file_util.h"
#include "base/files/scoped_file.h"
-#include "base/test/scoped_task_environment.h"
#include "base/test/simple_test_tick_clock.h"
+#include "base/test/task_environment.h"
#include "base/threading/thread_task_runner_handle.h"
#include "media/gpu/linux/platform_video_frame_pool.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -54,8 +54,7 @@ class PlatformVideoFramePoolTest
using DmabufId = PlatformVideoFramePool::DmabufId;
PlatformVideoFramePoolTest()
- : scoped_task_environment_(
- base::test::ScopedTaskEnvironment::TimeSource::MOCK_TIME) {
+ : task_environment_(base::test::TaskEnvironment::TimeSource::MOCK_TIME) {
pool_.reset(new PlatformVideoFramePool(
base::BindRepeating(&CreateDmabufVideoFrame), &test_clock_));
pool_->set_parent_task_runner(base::ThreadTaskRunnerHandle::Get());
@@ -90,7 +89,7 @@ class PlatformVideoFramePoolTest
DmabufId GetDmabufId(const VideoFrame& frame) { return &(frame.DmabufFds()); }
protected:
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
base::SimpleTestTickClock test_clock_;
std::unique_ptr<PlatformVideoFramePool,
std::default_delete<DmabufVideoFramePool>>
@@ -114,7 +113,7 @@ TEST_F(PlatformVideoFramePoolTest, SingleFrameReuse) {
// Clear frame reference to return the frame to the pool.
frame = nullptr;
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
// Verify that the next frame from the pool uses the same memory.
scoped_refptr<VideoFrame> new_frame = GetFrame(20);
@@ -129,18 +128,18 @@ TEST_F(PlatformVideoFramePoolTest, MultipleFrameReuse) {
DmabufId id2 = GetDmabufId(*frame2);
frame1 = nullptr;
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
frame1 = GetFrame(30);
EXPECT_EQ(id1, GetDmabufId(*frame1));
frame2 = nullptr;
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
frame2 = GetFrame(40);
EXPECT_EQ(id2, GetDmabufId(*frame2));
frame1 = nullptr;
frame2 = nullptr;
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
CheckPoolSize(2u);
}
@@ -152,7 +151,7 @@ TEST_F(PlatformVideoFramePoolTest, FormatChange) {
// Clear frame references to return the frames to the pool.
frame_a = nullptr;
frame_b = nullptr;
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
// Verify that both frames are in the pool.
CheckPoolSize(2u);
@@ -173,16 +172,16 @@ TEST_F(PlatformVideoFramePoolTest, StaleFramesAreExpired) {
// Drop frame and verify that resources are still available for reuse.
frame_1 = nullptr;
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
CheckPoolSize(1u);
// Advance clock far enough to hit stale timer; ensure only frame_1 has its
// resources released.
base::TimeDelta time_forward = base::TimeDelta::FromMinutes(1);
test_clock_.Advance(time_forward);
- scoped_task_environment_.FastForwardBy(time_forward);
+ task_environment_.FastForwardBy(time_forward);
frame_2 = nullptr;
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
CheckPoolSize(1u);
}
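The ScopedTaskEnvironment to TaskEnvironment rename recurs throughout the test files in this patch; only the type name changes, so mock-time fixtures keep the same shape. A self-contained sketch of the renamed API, not tied to the pool test above:

// Minimal base::test::TaskEnvironment fixture using mock time.
#include "base/bind.h"
#include "base/test/task_environment.h"
#include "base/threading/thread_task_runner_handle.h"
#include "base/time/time.h"
#include "testing/gtest/include/gtest/gtest.h"

class MockTimeTest : public testing::Test {
 protected:
  MockTimeTest()
      : task_environment_(base::test::TaskEnvironment::TimeSource::MOCK_TIME) {}

  base::test::TaskEnvironment task_environment_;
};

TEST_F(MockTimeTest, FastForwardRunsDelayedTasks) {
  bool ran = false;
  base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
      FROM_HERE, base::BindOnce([](bool* flag) { *flag = true; }, &ran),
      base::TimeDelta::FromMinutes(1));
  task_environment_.FastForwardBy(base::TimeDelta::FromMinutes(1));
  EXPECT_TRUE(ran);
}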
diff --git a/chromium/media/gpu/linux/video_decoder_pipeline.cc b/chromium/media/gpu/linux/video_decoder_pipeline.cc
index 8a7391f850f..d1acc03d5a2 100644
--- a/chromium/media/gpu/linux/video_decoder_pipeline.cc
+++ b/chromium/media/gpu/linux/video_decoder_pipeline.cc
@@ -7,65 +7,126 @@
#include "base/bind.h"
#include "base/sequenced_task_runner.h"
#include "base/task/post_task.h"
+#include "base/task/task_traits.h"
+#include "media/gpu/linux/dmabuf_video_frame_pool.h"
#include "media/gpu/macros.h"
-#include "media/gpu/video_frame_converter.h"
namespace media {
+// static
+std::unique_ptr<VideoDecoder> VideoDecoderPipeline::Create(
+ scoped_refptr<base::SequencedTaskRunner> client_task_runner,
+ std::unique_ptr<DmabufVideoFramePool> frame_pool,
+ std::unique_ptr<VideoFrameConverter> frame_converter,
+ GetCreateVDFunctionsCB get_create_vd_functions_cb) {
+ if (!client_task_runner || !frame_pool || !frame_converter) {
+ VLOGF(1) << "One of arguments is nullptr.";
+ return nullptr;
+ }
+
+ if (get_create_vd_functions_cb.Run(nullptr).empty()) {
+ VLOGF(1) << "No available function to create video decoder.";
+ return nullptr;
+ }
+
+ return base::WrapUnique<VideoDecoder>(new VideoDecoderPipeline(
+ std::move(client_task_runner), std::move(frame_pool),
+ std::move(frame_converter), std::move(get_create_vd_functions_cb)));
+}
+
VideoDecoderPipeline::VideoDecoderPipeline(
scoped_refptr<base::SequencedTaskRunner> client_task_runner,
- std::unique_ptr<VideoDecoder> decoder,
- std::unique_ptr<VideoFrameConverter> frame_converter)
+ std::unique_ptr<DmabufVideoFramePool> frame_pool,
+ std::unique_ptr<VideoFrameConverter> frame_converter,
+ GetCreateVDFunctionsCB get_create_vd_functions_cb)
: client_task_runner_(std::move(client_task_runner)),
- decoder_(std::move(decoder)),
+ decoder_task_runner_(base::CreateSingleThreadTaskRunner(
+ {base::ThreadPool(), base::WithBaseSyncPrimitives(),
+ base::TaskPriority::USER_VISIBLE},
+ base::SingleThreadTaskRunnerThreadMode::DEDICATED)),
+ frame_pool_(std::move(frame_pool)),
frame_converter_(std::move(frame_converter)),
- weak_this_factory_(this) {
- DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- DCHECK(decoder_);
+ get_create_vd_functions_cb_(std::move(get_create_vd_functions_cb)) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
+ DETACH_FROM_SEQUENCE(decoder_sequence_checker_);
+ DCHECK(frame_pool_);
DCHECK(frame_converter_);
DCHECK(client_task_runner_);
+ DVLOGF(2);
- frame_converter_->set_parent_task_runner(client_task_runner_);
+ weak_this_ = weak_this_factory_.GetWeakPtr();
+ frame_pool_->set_parent_task_runner(decoder_task_runner_);
+ frame_converter_->Initialize(
+ client_task_runner_,
+ base::BindRepeating(&VideoDecoderPipeline::OnFrameConverted, weak_this_));
}
VideoDecoderPipeline::~VideoDecoderPipeline() {
- DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ // We have to destroy |frame_pool_| on |decoder_task_runner_|, so the
+ // destructor is also called on |decoder_task_runner_|.
+ DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
+ DVLOGF(3);
}
void VideoDecoderPipeline::Destroy() {
- DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
+ DVLOGF(2);
+
+ weak_this_factory_.InvalidateWeakPtrs();
+
+ decoder_.reset();
+ used_create_vd_func_ = nullptr;
+ frame_converter_.reset();
+
+ decoder_task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&VideoDecoderPipeline::DestroyTask,
+ base::Unretained(this)));
+}
+
+void VideoDecoderPipeline::DestroyTask() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
+ DVLOGF(3);
+ // |frame_pool_| should be destroyed on |decoder_task_runner_|, which is set
+ // by frame_pool_->set_parent_task_runner().
+ frame_pool_.reset();
delete this;
}
std::string VideoDecoderPipeline::GetDisplayName() const {
- DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
- return decoder_->GetDisplayName();
+ return "VideoDecoderPipeline";
}
bool VideoDecoderPipeline::IsPlatformDecoder() const {
- DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
- return decoder_->IsPlatformDecoder();
+ return true;
}
int VideoDecoderPipeline::GetMaxDecodeRequests() const {
- DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
- return decoder_->GetMaxDecodeRequests();
+ if (!decoder_)
+ DVLOGF(1) << "Call before Initialize() success.";
+ return decoder_ ? decoder_->GetMaxDecodeRequests() : 1;
}
bool VideoDecoderPipeline::NeedsBitstreamConversion() const {
- DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
- return decoder_->NeedsBitstreamConversion();
+ if (!decoder_)
+ DVLOGF(1) << "Call before Initialize() success.";
+ return decoder_ ? decoder_->NeedsBitstreamConversion() : false;
}
bool VideoDecoderPipeline::CanReadWithoutStalling() const {
- DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
- return decoder_->CanReadWithoutStalling();
+ if (!decoder_)
+ DVLOGF(1) << "Call before Initialize() success.";
+ return decoder_ ? decoder_->CanReadWithoutStalling() : false;
}
void VideoDecoderPipeline::Initialize(const VideoDecoderConfig& config,
@@ -74,30 +135,153 @@ void VideoDecoderPipeline::Initialize(const VideoDecoderConfig& config,
InitCB init_cb,
const OutputCB& output_cb,
const WaitingCB& waiting_cb) {
- DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
+ DCHECK(!init_cb_);
+ VLOGF(2) << "config: " << config.AsHumanReadableString();
client_output_cb_ = std::move(output_cb);
+ init_cb_ = std::move(init_cb);
+ base::queue<VideoDecoderPipeline::CreateVDFunc> create_vd_funcs =
+ get_create_vd_functions_cb_.Run(used_create_vd_func_);
+
+ if (!decoder_) {
+ CreateAndInitializeVD(std::move(create_vd_funcs), config, low_delay,
+ cdm_context, waiting_cb);
+ } else {
+ decoder_->Initialize(
+ config, low_delay, cdm_context,
+ // If re-initializing the current |decoder_| fails, another decoder
+ // instance is created by trying the available VD creation functions
+ // again. See |OnInitializeDone| for details.
+ base::BindOnce(&VideoDecoderPipeline::OnInitializeDone, weak_this_,
+ std::move(create_vd_funcs), config, low_delay,
+ cdm_context, waiting_cb),
+ base::BindRepeating(&VideoDecoderPipeline::OnFrameDecodedThunk,
+ client_task_runner_, weak_this_),
+ waiting_cb);
+ }
+}
+
+void VideoDecoderPipeline::CreateAndInitializeVD(
+ base::queue<VideoDecoderPipeline::CreateVDFunc> create_vd_funcs,
+ VideoDecoderConfig config,
+ bool low_delay,
+ CdmContext* cdm_context,
+ WaitingCB waiting_cb) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
+ DCHECK(init_cb_);
+ DCHECK(!decoder_);
+ DCHECK(!used_create_vd_func_);
+ DVLOGF(3);
+
+ if (create_vd_funcs.empty()) {
+ DVLOGF(2) << "No available video decoder.";
+ std::move(init_cb_).Run(false);
+ return;
+ }
+
+ used_create_vd_func_ = create_vd_funcs.front();
+ create_vd_funcs.pop();
+ decoder_ = used_create_vd_func_(
+ client_task_runner_, decoder_task_runner_,
+ base::BindRepeating(&VideoDecoderPipeline::GetVideoFramePool,
+ base::Unretained(this)));
+ if (!decoder_) {
+ DVLOGF(2) << "Failed to create VideoDecoder.";
+ used_create_vd_func_ = nullptr;
+ return CreateAndInitializeVD(std::move(create_vd_funcs), config, low_delay,
+ cdm_context, std::move(waiting_cb));
+ }
decoder_->Initialize(
- config, low_delay, cdm_context, std::move(init_cb),
+ config, low_delay, cdm_context,
+ base::BindOnce(&VideoDecoderPipeline::OnInitializeDone, weak_this_,
+ std::move(create_vd_funcs), config, low_delay, cdm_context,
+ waiting_cb),
base::BindRepeating(&VideoDecoderPipeline::OnFrameDecodedThunk,
- client_task_runner_, weak_this_factory_.GetWeakPtr()),
- std::move(waiting_cb));
+ client_task_runner_, weak_this_),
+ waiting_cb);
+}
+
+void VideoDecoderPipeline::OnInitializeDone(
+ base::queue<VideoDecoderPipeline::CreateVDFunc> create_vd_funcs,
+ VideoDecoderConfig config,
+ bool low_delay,
+ CdmContext* cdm_context,
+ WaitingCB waiting_cb,
+ bool success) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
+ DCHECK(init_cb_);
+ DVLOGF(4) << "Initialization " << (success ? "success." : "failure.");
+
+ if (success) {
+ DVLOGF(2) << "Initialize VD successfully.";
+ std::move(init_cb_).Run(true);
+ return;
+ }
+
+ DVLOGF(3) << "Reset VD, try the next create function.";
+ decoder_ = nullptr;
+ used_create_vd_func_ = nullptr;
+ CreateAndInitializeVD(std::move(create_vd_funcs), config, low_delay,
+ cdm_context, std::move(waiting_cb));
}
void VideoDecoderPipeline::Reset(base::OnceClosure closure) {
- DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
+ DCHECK(decoder_);
+ DCHECK(!client_reset_cb_);
+ DVLOGF(3);
- // TODO(acourbot) make the decoder jump into our own closure and only call
- // the client's when all parts of the pipeline are properly reset.
- decoder_->Reset(std::move(closure));
+ client_reset_cb_ = std::move(closure);
+ decoder_->Reset(
+ base::BindOnce(&VideoDecoderPipeline::OnResetDone, weak_this_));
+}
+
+void VideoDecoderPipeline::OnResetDone() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
+ DCHECK(client_reset_cb_);
+ DVLOGF(3);
+
+ frame_converter_->AbortPendingFrames();
+
+ CallFlushCbIfNeeded(DecodeStatus::ABORTED);
+
+ std::move(client_reset_cb_).Run();
}
void VideoDecoderPipeline::Decode(scoped_refptr<DecoderBuffer> buffer,
DecodeCB decode_cb) {
- DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
+ DCHECK(decoder_);
+ DVLOGF(4);
+
+ bool is_flush = buffer->end_of_stream();
+ decoder_->Decode(std::move(buffer),
+ base::BindOnce(&VideoDecoderPipeline::OnDecodeDone,
+ weak_this_, is_flush, std::move(decode_cb)));
+}
+
+void VideoDecoderPipeline::OnDecodeDone(bool is_flush,
+ DecodeCB decode_cb,
+ DecodeStatus status) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
+ DVLOGF(4) << "is_flush: " << is_flush << ", status: " << status;
+
+ if (has_error_)
+ status = DecodeStatus::DECODE_ERROR;
+
+ if (is_flush && status == DecodeStatus::OK) {
+ client_flush_cb_ = std::move(decode_cb);
+ // TODO(akahuang): The order between flush cb and output cb is preserved
+ // only when OnFrameDecodedThunk() runs on |client_task_runner_|. Remove
+ // OnFrameDecodedThunk() when we make sure all VD callbacks are called on
+ // the same thread.
+ CallFlushCbIfNeeded(DecodeStatus::OK);
+ return;
+ }
- decoder_->Decode(std::move(buffer), std::move(decode_cb));
+ std::move(decode_cb).Run(status);
}
// static
@@ -107,6 +291,7 @@ void VideoDecoderPipeline::OnFrameDecodedThunk(
scoped_refptr<VideoFrame> frame) {
DCHECK(task_runner);
DCHECK(pipeline);
+ DVLOGF(4);
// Workaround for some decoder's non-conformant behavior:
// Decoders are supposed to call the output callback "as soon as possible",
@@ -136,18 +321,54 @@ void VideoDecoderPipeline::OnFrameDecodedThunk(
}
void VideoDecoderPipeline::OnFrameDecoded(scoped_refptr<VideoFrame> frame) {
- DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
DCHECK(frame_converter_);
+ DVLOGF(4);
+
+ frame_converter_->ConvertFrame(std::move(frame));
+}
+
+void VideoDecoderPipeline::OnFrameConverted(scoped_refptr<VideoFrame> frame) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
+ DVLOGF(4);
- scoped_refptr<VideoFrame> converted_frame =
- frame_converter_->ConvertFrame(frame);
- if (!converted_frame) {
- // TODO(acourbot) we need to call the decode_cb with DECODE_ERROR here!
- VLOGF(1) << "Error converting frame!";
+ if (!frame)
+ return OnError("Frame converter returns null frame.");
+ if (has_error_) {
+ DVLOGF(2) << "Skip returning frames after error occurs.";
return;
}
- client_output_cb_.Run(converted_frame);
+ client_output_cb_.Run(std::move(frame));
+
+ // After outputting a frame, flush might be completed.
+ CallFlushCbIfNeeded(DecodeStatus::OK);
+}
+
+void VideoDecoderPipeline::OnError(const std::string& msg) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
+ VLOGF(1) << msg;
+
+ has_error_ = true;
+ CallFlushCbIfNeeded(DecodeStatus::DECODE_ERROR);
+}
+
+void VideoDecoderPipeline::CallFlushCbIfNeeded(DecodeStatus status) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
+ DVLOGF(3) << "status: " << status;
+
+ if (!client_flush_cb_)
+ return;
+
+ // Flush is not completed yet.
+ if (status == DecodeStatus::OK && frame_converter_->HasPendingFrames())
+ return;
+
+ std::move(client_flush_cb_).Run(status);
+}
+
+DmabufVideoFramePool* VideoDecoderPipeline::GetVideoFramePool() const {
+ return frame_pool_.get();
}
} // namespace media
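Initialization now walks the queue of candidate factories until one both constructs and initializes. A decoder opts into that scheme simply by exposing a function matching CreateVDFunc, declared in video_decoder_pipeline.h below; the following is a hypothetical example, not code from this patch:

// Hypothetical creation function with the CreateVDFunc signature; "FakeDecoder"
// stands in for a real implementation such as VaapiVideoDecoder::Create.
std::unique_ptr<VideoDecoder> CreateFakeDecoder(
    scoped_refptr<base::SequencedTaskRunner> client_task_runner,
    scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
    base::RepeatingCallback<DmabufVideoFramePool*()> get_pool_cb) {
  // Returning nullptr makes CreateAndInitializeVD() fall through to the next
  // candidate; a decoder whose Initialize() later fails is retried the same
  // way via OnInitializeDone().
  return nullptr;
}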
diff --git a/chromium/media/gpu/linux/video_decoder_pipeline.h b/chromium/media/gpu/linux/video_decoder_pipeline.h
index 944102e9249..85014b12abd 100644
--- a/chromium/media/gpu/linux/video_decoder_pipeline.h
+++ b/chromium/media/gpu/linux/video_decoder_pipeline.h
@@ -7,11 +7,14 @@
#include <memory>
+#include "base/containers/queue.h"
#include "base/memory/weak_ptr.h"
#include "base/optional.h"
#include "base/sequence_checker.h"
#include "media/base/video_decoder.h"
+#include "media/base/video_decoder_config.h"
#include "media/gpu/media_gpu_export.h"
+#include "media/gpu/video_frame_converter.h"
namespace base {
class SequencedTaskRunner;
@@ -19,14 +22,24 @@ class SequencedTaskRunner;
namespace media {
-class VideoFrameConverter;
+class DmabufVideoFramePool;
class MEDIA_GPU_EXPORT VideoDecoderPipeline : public VideoDecoder {
public:
- VideoDecoderPipeline(
+ // Function signature for creating VideoDecoder.
+ using CreateVDFunc = std::unique_ptr<VideoDecoder> (*)(
+ scoped_refptr<base::SequencedTaskRunner>,
+ scoped_refptr<base::SequencedTaskRunner>,
+ base::RepeatingCallback<DmabufVideoFramePool*()>);
+ using GetCreateVDFunctionsCB =
+ base::RepeatingCallback<base::queue<CreateVDFunc>(CreateVDFunc)>;
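+  // A CreateVDFunc builds one concrete VideoDecoder from the client/decoder
+  // task runners and a getter for the frame pool; the callback yields the
+  // candidate creation functions to try. A minimal sketch, with a hypothetical
+  // factory:
+  //
+  //   base::queue<CreateVDFunc> GetCreateFuncs(CreateVDFunc current) {
+  //     base::queue<CreateVDFunc> funcs;
+  //     if (current != &MyV4L2Decoder::Create)  // hypothetical decoder
+  //       funcs.push(&MyV4L2Decoder::Create);
+  //     return funcs;
+  //   }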
+
+ static std::unique_ptr<VideoDecoder> Create(
scoped_refptr<base::SequencedTaskRunner> client_task_runner,
- std::unique_ptr<VideoDecoder> decoder,
- std::unique_ptr<VideoFrameConverter> frame_converter);
+ std::unique_ptr<DmabufVideoFramePool> frame_pool,
+ std::unique_ptr<VideoFrameConverter> frame_converter,
+ GetCreateVDFunctionsCB get_create_vd_functions_cb);
+
~VideoDecoderPipeline() override;
// VideoDecoder implementation
@@ -46,12 +59,34 @@ class MEDIA_GPU_EXPORT VideoDecoderPipeline : public VideoDecoder {
void Decode(scoped_refptr<DecoderBuffer> buffer, DecodeCB decode_cb) override;
private:
- void Destroy() override;
+  // Get a list of the available functions for creating VideoDecoder.
+ static base::queue<CreateVDFunc> GetCreateVDFunctions(
+ CreateVDFunc current_func);
- const scoped_refptr<base::SequencedTaskRunner> client_task_runner_;
-
- const std::unique_ptr<VideoDecoder> decoder_;
- const std::unique_ptr<VideoFrameConverter> frame_converter_;
+ VideoDecoderPipeline(
+ scoped_refptr<base::SequencedTaskRunner> client_task_runner,
+ std::unique_ptr<DmabufVideoFramePool> frame_pool,
+ std::unique_ptr<VideoFrameConverter> frame_converter,
+ GetCreateVDFunctionsCB get_create_vd_functions_cb);
+ void Destroy() override;
+ void DestroyTask();
+
+ void CreateAndInitializeVD(base::queue<CreateVDFunc> create_vd_funcs,
+ VideoDecoderConfig config,
+ bool low_delay,
+ CdmContext* cdm_context,
+ WaitingCB waiting_cb);
+ void OnInitializeDone(base::queue<CreateVDFunc> create_vd_funcs,
+ VideoDecoderConfig config,
+ bool low_delay,
+ CdmContext* cdm_context,
+ WaitingCB waiting_cb,
+ bool success);
+
+ void OnDecodeDone(bool eos_buffer, DecodeCB decode_cb, DecodeStatus status);
+ void OnResetDone();
+ void OnFrameConverted(scoped_refptr<VideoFrame> frame);
+ void OnError(const std::string& msg);
static void OnFrameDecodedThunk(
scoped_refptr<base::SequencedTaskRunner> task_runner,
@@ -59,11 +94,50 @@ class MEDIA_GPU_EXPORT VideoDecoderPipeline : public VideoDecoder {
scoped_refptr<VideoFrame> frame);
void OnFrameDecoded(scoped_refptr<VideoFrame> frame);
+  // Call |client_flush_cb_| with |status| if needed.
+ void CallFlushCbIfNeeded(DecodeStatus status);
+
+  // Get the video frame pool without transferring ownership.
+ DmabufVideoFramePool* GetVideoFramePool() const;
+
+ // The client task runner and its sequence checker. All public methods should
+ // run on this task runner.
+ const scoped_refptr<base::SequencedTaskRunner> client_task_runner_;
+ SEQUENCE_CHECKER(client_sequence_checker_);
+
+  // The decoder task runner and its sequence checker. |decoder_| should post
+  // time-consuming tasks and call |frame_pool_|'s methods on this task runner.
+ const scoped_refptr<base::SequencedTaskRunner> decoder_task_runner_;
+ SEQUENCE_CHECKER(decoder_sequence_checker_);
+
+ // The frame pool passed from the client. Destroyed on |decoder_task_runner_|.
+ std::unique_ptr<DmabufVideoFramePool> frame_pool_;
+ // The frame converter passed from the client. Destroyed on
+ // |client_task_runner_|.
+ std::unique_ptr<VideoFrameConverter> frame_converter_;
+
+  // The callback to get a list of functions for creating VideoDecoder.
+ GetCreateVDFunctionsCB get_create_vd_functions_cb_;
+
+ // The current video decoder implementation. Valid after initialization is
+ // successfully done.
+ std::unique_ptr<VideoDecoder> decoder_;
+ // The create function of |decoder_|. nullptr iff |decoder_| is nullptr.
+ CreateVDFunc used_create_vd_func_ = nullptr;
+
+  // Callbacks from the client. These callbacks are called on
+  // |client_task_runner_|.
+ InitCB init_cb_;
OutputCB client_output_cb_;
+ DecodeCB client_flush_cb_;
+ base::OnceClosure client_reset_cb_;
- SEQUENCE_CHECKER(sequence_checker_);
+ // Set to true when any unexpected error occurs.
+ bool has_error_ = false;
- base::WeakPtrFactory<VideoDecoderPipeline> weak_this_factory_;
+ // The weak pointer of this, bound to |client_task_runner_|.
+ base::WeakPtr<VideoDecoderPipeline> weak_this_;
+ base::WeakPtrFactory<VideoDecoderPipeline> weak_this_factory_{this};
};
} // namespace media
diff --git a/chromium/media/gpu/v4l2/BUILD.gn b/chromium/media/gpu/v4l2/BUILD.gn
index 3bb250aff93..4a3df4ff7ff 100644
--- a/chromium/media/gpu/v4l2/BUILD.gn
+++ b/chromium/media/gpu/v4l2/BUILD.gn
@@ -35,6 +35,8 @@ source_set("v4l2") {
"v4l2_device.h",
"v4l2_h264_accelerator.cc",
"v4l2_h264_accelerator.h",
+ "v4l2_h264_accelerator_legacy.cc",
+ "v4l2_h264_accelerator_legacy.h",
"v4l2_image_processor.cc",
"v4l2_image_processor.h",
"v4l2_slice_video_decode_accelerator.cc",
@@ -43,12 +45,16 @@ source_set("v4l2") {
"v4l2_slice_video_decoder.h",
"v4l2_stateful_workaround.cc",
"v4l2_stateful_workaround.h",
+ "v4l2_vda_helpers.cc",
+ "v4l2_vda_helpers.h",
"v4l2_video_decode_accelerator.cc",
"v4l2_video_decode_accelerator.h",
"v4l2_video_encode_accelerator.cc",
"v4l2_video_encode_accelerator.h",
"v4l2_vp8_accelerator.cc",
"v4l2_vp8_accelerator.h",
+ "v4l2_vp8_accelerator_legacy.cc",
+ "v4l2_vp8_accelerator_legacy.h",
"v4l2_vp9_accelerator.cc",
"v4l2_vp9_accelerator.h",
]
@@ -67,6 +73,10 @@ source_set("v4l2") {
configs += [ "//third_party/libyuv:libyuv_config" ]
+ public_deps = [
+ "//ui/gl",
+ ]
+
deps = [
"//base",
"//gpu/ipc/common",
@@ -96,6 +106,7 @@ source_set("v4l2") {
deps += [
"//components/chromeos_camera:jpeg_encode_accelerator",
"//components/chromeos_camera:mjpeg_decode_accelerator",
+ "//media/gpu:video_frame_mapper_common",
"//media/parsers",
]
}
diff --git a/chromium/media/gpu/v4l2/tegra_v4l2_device.cc b/chromium/media/gpu/v4l2/tegra_v4l2_device.cc
index 8c9863b91f6..175d3da9ea5 100644
--- a/chromium/media/gpu/v4l2/tegra_v4l2_device.cc
+++ b/chromium/media/gpu/v4l2/tegra_v4l2_device.cc
@@ -64,15 +64,26 @@ int TegraV4L2Device::Ioctl(int flags, void* arg) {
return ret;
// Workarounds for Tegra's broken closed-source V4L2 interface.
- struct v4l2_format* format;
switch (flags) {
- // VIDIOC_G_FMT returns 0 planes for multiplanar formats with 1 plane.
- case static_cast<int>(VIDIOC_G_FMT):
- format = static_cast<struct v4l2_format*>(arg);
- if (V4L2_TYPE_IS_MULTIPLANAR(format->type) &&
- format->fmt.pix_mp.num_planes == 0)
- format->fmt.pix_mp.num_planes = 1;
+ case static_cast<int>(VIDIOC_S_FMT): {
+ struct v4l2_format format;
+ memcpy(&format, arg, sizeof(struct v4l2_format));
+ v4l2_format_cache_[static_cast<enum v4l2_buf_type>(format.type)] = format;
break;
+ }
+ case static_cast<int>(VIDIOC_G_FMT): {
+      // The driver doesn't fill the returned v4l2_format correctly. Restore
+      // the values that were previously passed to the driver via VIDIOC_S_FMT.
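+      // For example (hypothetical values): after VIDIOC_S_FMT with
+      // pixelformat = V4L2_PIX_FMT_NV12M and num_planes = 2, a later
+      // VIDIOC_G_FMT for the same buffer type gets the cached pixelformat
+      // copied back, and num_planes restored if the driver reported 0.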
+ struct v4l2_format* format = static_cast<struct v4l2_format*>(arg);
+ const auto it = v4l2_format_cache_.find(
+ static_cast<enum v4l2_buf_type>(format->type));
+ if (it != v4l2_format_cache_.end()) {
+ format->fmt.pix_mp.pixelformat = it->second.fmt.pix_mp.pixelformat;
+ if (format->fmt.pix_mp.num_planes == 0)
+ format->fmt.pix_mp.num_planes = it->second.fmt.pix_mp.num_planes;
+ }
+ break;
+ }
default:
break;
}
diff --git a/chromium/media/gpu/v4l2/tegra_v4l2_device.h b/chromium/media/gpu/v4l2/tegra_v4l2_device.h
index bcea9520cc0..763f0991f29 100644
--- a/chromium/media/gpu/v4l2/tegra_v4l2_device.h
+++ b/chromium/media/gpu/v4l2/tegra_v4l2_device.h
@@ -11,6 +11,7 @@
#include <stddef.h>
#include <stdint.h>
+#include <map>
#include <vector>
#include "base/macros.h"
@@ -86,6 +87,10 @@ class TegraV4L2Device : public V4L2Device {
// The actual device fd.
int device_fd_ = -1;
+  // Cache of the v4l2_format structs passed to the driver via VIDIOC_S_FMT,
+  // keyed by v4l2_buf_type.
+ std::map<enum v4l2_buf_type, struct v4l2_format> v4l2_format_cache_;
+
DISALLOW_COPY_AND_ASSIGN(TegraV4L2Device);
};
diff --git a/chromium/media/gpu/v4l2/v4l2_decode_surface.cc b/chromium/media/gpu/v4l2/v4l2_decode_surface.cc
index 161d9d6368a..ec6cab3b8fc 100644
--- a/chromium/media/gpu/v4l2/v4l2_decode_surface.cc
+++ b/chromium/media/gpu/v4l2/v4l2_decode_surface.cc
@@ -6,6 +6,7 @@
#include <linux/media.h>
#include <linux/videodev2.h>
+#include <poll.h>
#include <sys/ioctl.h>
#include "base/logging.h"
@@ -15,23 +16,12 @@
namespace media {
-V4L2DecodeSurface::V4L2DecodeSurface(int input_record,
- int output_record,
- base::OnceClosure release_cb)
- : input_record_(input_record),
- output_record_(output_record),
- decoded_(false),
- release_cb_(std::move(release_cb)) {
- DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
-}
-
V4L2DecodeSurface::V4L2DecodeSurface(V4L2WritableBufferRef input_buffer,
V4L2WritableBufferRef output_buffer,
- scoped_refptr<VideoFrame> frame,
- base::OnceClosure release_cb)
- : V4L2DecodeSurface(input_buffer.BufferId(),
- output_buffer.BufferId(),
- std::move(release_cb)) {
+ scoped_refptr<VideoFrame> frame)
+ : input_record_(input_buffer.BufferId()),
+ output_record_(output_buffer.BufferId()),
+ decoded_(false) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
input_buffer_ = std::move(input_buffer);
output_buffer_ = std::move(output_buffer);
@@ -72,6 +62,11 @@ void V4L2DecodeSurface::SetReferenceSurfaces(
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(reference_surfaces_.empty());
+#if DCHECK_IS_ON()
+  for (const auto& ref : ref_surfaces)
+ DCHECK_NE(ref->output_record(), output_record_);
+#endif
+
reference_surfaces_ = std::move(ref_surfaces);
}
@@ -133,28 +128,33 @@ bool V4L2ConfigStoreDecodeSurface::Submit() const {
return true;
}
-V4L2RequestDecodeSurface::V4L2RequestDecodeSurface(int input_record,
- int output_record,
- int request_fd,
- base::OnceClosure release_cb)
- : V4L2DecodeSurface(input_record, output_record, std::move(release_cb)),
- request_fd_(request_fd) {}
-
// static
base::Optional<scoped_refptr<V4L2RequestDecodeSurface>>
-V4L2RequestDecodeSurface::Create(int input_record,
- int output_record,
- int request_fd,
- base::OnceClosure release_cb) {
- // First reinit the request to make sure we can use it for a new submission.
- int ret = HANDLE_EINTR(ioctl(request_fd, MEDIA_REQUEST_IOC_REINIT));
+V4L2RequestDecodeSurface::Create(V4L2WritableBufferRef input_buffer,
+ V4L2WritableBufferRef output_buffer,
+ scoped_refptr<VideoFrame> frame,
+ int request_fd) {
+ constexpr int kPollTimeoutMs = 500;
+ int ret;
+ struct pollfd poll_fd = {request_fd, POLLPRI, 0};
+
+  // First poll the request to ensure its previous task is done.
+ ret = poll(&poll_fd, 1, kPollTimeoutMs);
+ if (ret != 1) {
+ VPLOGF(1) << "Failed to poll request: ";
+ return base::nullopt;
+ }
+
+ // Then reinit the request to make sure we can use it for a new submission.
+ ret = HANDLE_EINTR(ioctl(request_fd, MEDIA_REQUEST_IOC_REINIT));
if (ret < 0) {
VPLOGF(1) << "Failed to reinit request: ";
return base::nullopt;
}
- return new V4L2RequestDecodeSurface(input_record, output_record, request_fd,
- std::move(release_cb));
+ return new V4L2RequestDecodeSurface(std::move(input_buffer),
+ std::move(output_buffer),
+ std::move(frame), request_fd);
}
void V4L2RequestDecodeSurface::PrepareSetCtrls(
@@ -173,16 +173,19 @@ void V4L2RequestDecodeSurface::PrepareQueueBuffer(
buffer->request_fd = request_fd_;
buffer->flags |= V4L2_BUF_FLAG_REQUEST_FD;
- // Copy the buffer index as the timestamp.
+ // Use the output buffer index as the timestamp.
+ // Since the client is supposed to keep the output buffer out of the V4L2
+ // queue for as long as it is used as a reference frame, this ensures that
+ // all the requests we submit have unique IDs at any point in time.
DCHECK_EQ(static_cast<int>(buffer->index), input_record());
buffer->timestamp.tv_sec = 0;
- buffer->timestamp.tv_usec = buffer->index;
+ buffer->timestamp.tv_usec = output_record();
}
uint64_t V4L2RequestDecodeSurface::GetReferenceID() const {
// Convert the output buffer ID to what the internal representation of
// the timestamp we submitted will be (tv_usec * 1000).
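// E.g. (hypothetical): if this surface uses output buffer 3, the buffer is
// queued with timestamp.tv_usec = 3, the kernel stores it internally as
// 3000 (tv_usec * 1000), and this returns 3 * 1000 = 3000 so the two match.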
- return input_record() * 1000;
+ return output_record() * 1000;
}
bool V4L2RequestDecodeSurface::Submit() const {
diff --git a/chromium/media/gpu/v4l2/v4l2_decode_surface.h b/chromium/media/gpu/v4l2/v4l2_decode_surface.h
index c9a7e4f1620..a830cdd34e1 100644
--- a/chromium/media/gpu/v4l2/v4l2_decode_surface.h
+++ b/chromium/media/gpu/v4l2/v4l2_decode_surface.h
@@ -27,21 +27,13 @@ class V4L2DecodeSurface : public base::RefCounted<V4L2DecodeSurface> {
public:
// V4L2DecodeSurfaceHandler maintains a list of InputRecords, which records
// the status and metadata of input buffers.
- // |input_record| is the index of the input record that corresponds to this
- // V4L2DecodeSurface instance.
- // |output_record|, similar to |input_record|, is the index of output record
- // that corresponds to this instance.
- // |release_cb| is the callback function that will be called when the instance
- // is destroyed.
- // DEPRECATED: use the other constructor for new code.
- V4L2DecodeSurface(int input_record,
- int output_record,
- base::OnceClosure release_cb);
-
+ // |input_buffer| and |output_buffer| are the buffers to be used as input and
+ // output in this transaction.
+ // |frame| is optional, and allows the caller to keep a reference to a
+ // VideoFrame for as long as this decode surface exists.
V4L2DecodeSurface(V4L2WritableBufferRef input_buffer,
V4L2WritableBufferRef output_buffer,
- scoped_refptr<VideoFrame> frame,
- base::OnceClosure release_cb);
+ scoped_refptr<VideoFrame> frame);
// Mark the surface as decoded. This will also release all surfaces used for
// reference, as they are not needed anymore and execute the done callback,
@@ -121,20 +113,12 @@ class V4L2DecodeSurface : public base::RefCounted<V4L2DecodeSurface> {
// associate controls/buffers to frames.
class V4L2ConfigStoreDecodeSurface : public V4L2DecodeSurface {
public:
- V4L2ConfigStoreDecodeSurface(int input_record,
- int output_record,
- base::OnceClosure release_cb)
- : V4L2DecodeSurface(input_record, output_record, std::move(release_cb)),
- config_store_(input_record + 1) {}
-
V4L2ConfigStoreDecodeSurface(V4L2WritableBufferRef input_buffer,
V4L2WritableBufferRef output_buffer,
- scoped_refptr<VideoFrame> frame,
- base::OnceClosure release_cb)
+ scoped_refptr<VideoFrame> frame)
: V4L2DecodeSurface(std::move(input_buffer),
std::move(output_buffer),
- std::move(frame),
- std::move(release_cb)),
+ std::move(frame)),
// config store IDs are arbitrarily defined to be buffer ID + 1
config_store_(this->input_buffer().BufferId() + 1) {}
@@ -148,6 +132,8 @@ class V4L2ConfigStoreDecodeSurface : public V4L2DecodeSurface {
// The configuration store of the input buffer.
uint32_t config_store_;
+
+ DISALLOW_COPY_AND_ASSIGN(V4L2ConfigStoreDecodeSurface);
};
// An implementation of V4L2DecodeSurface that uses requests to associate
@@ -161,10 +147,10 @@ class V4L2RequestDecodeSurface : public V4L2DecodeSurface {
// Note that it will not be closed after the request is submitted - the caller
// is responsible for managing its lifetime.
static base::Optional<scoped_refptr<V4L2RequestDecodeSurface>> Create(
- int input_record,
- int output_record,
- int request_fd,
- base::OnceClosure release_cb);
+ V4L2WritableBufferRef input_buffer,
+ V4L2WritableBufferRef output_buffer,
+ scoped_refptr<VideoFrame> frame,
+ int request_fd);
void PrepareSetCtrls(struct v4l2_ext_controls* ctrls) const override;
void PrepareQueueBuffer(struct v4l2_buffer* buffer) const override;
@@ -172,13 +158,21 @@ class V4L2RequestDecodeSurface : public V4L2DecodeSurface {
bool Submit() const override;
private:
+ ~V4L2RequestDecodeSurface() override = default;
+
// FD of the request to use.
const int request_fd_;
- V4L2RequestDecodeSurface(int input_record,
- int output_record,
- int request_fd,
- base::OnceClosure release_cb);
+ V4L2RequestDecodeSurface(V4L2WritableBufferRef input_buffer,
+ V4L2WritableBufferRef output_buffer,
+ scoped_refptr<VideoFrame> frame,
+ int request_fd)
+ : V4L2DecodeSurface(std::move(input_buffer),
+ std::move(output_buffer),
+ std::move(frame)),
+ request_fd_(request_fd) {}
+
+ DISALLOW_COPY_AND_ASSIGN(V4L2RequestDecodeSurface);
};
} // namespace media
diff --git a/chromium/media/gpu/v4l2/v4l2_device.cc b/chromium/media/gpu/v4l2/v4l2_device.cc
index 8269030ea3f..d8288d9b9fd 100644
--- a/chromium/media/gpu/v4l2/v4l2_device.cc
+++ b/chromium/media/gpu/v4l2/v4l2_device.cc
@@ -219,6 +219,9 @@ class V4L2BuffersList : public base::RefCountedThreadSafe<V4L2BuffersList> {
size_t size() const;
private:
+ friend class base::RefCountedThreadSafe<V4L2BuffersList>;
+ ~V4L2BuffersList() = default;
+
mutable base::Lock lock_;
std::set<size_t> free_buffers_ GUARDED_BY(lock_);
DISALLOW_COPY_AND_ASSIGN(V4L2BuffersList);
@@ -499,6 +502,26 @@ size_t V4L2WritableBufferRef::GetPlaneSize(const size_t plane) const {
return buffer_data_->v4l2_buffer_.m.planes[plane].length;
}
+void V4L2WritableBufferRef::SetPlaneSize(const size_t plane,
+ const size_t size) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(IsValid());
+
+ enum v4l2_memory memory = Memory();
+ if (memory == V4L2_MEMORY_MMAP) {
+ DCHECK_EQ(buffer_data_->v4l2_buffer_.m.planes[plane].length, size);
+ return;
+ }
+ DCHECK(memory == V4L2_MEMORY_USERPTR || memory == V4L2_MEMORY_DMABUF);
+
+ if (plane >= PlanesCount()) {
+ VLOGF(1) << "Invalid plane " << plane << " requested.";
+ return;
+ }
+
+ buffer_data_->v4l2_buffer_.m.planes[plane].length = size;
+}
+
void* V4L2WritableBufferRef::GetPlaneMapping(const size_t plane) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(IsValid());
@@ -551,9 +574,22 @@ size_t V4L2WritableBufferRef::GetPlaneBytesUsed(const size_t plane) const {
return buffer_data_->v4l2_buffer_.m.planes[plane].bytesused;
}
+void V4L2WritableBufferRef::SetPlaneDataOffset(const size_t plane,
+ const size_t data_offset) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(IsValid());
+
+ if (plane >= PlanesCount()) {
+ VLOGF(1) << "Invalid plane " << plane << " requested.";
+ return;
+ }
+
+ buffer_data_->v4l2_buffer_.m.planes[plane].data_offset = data_offset;
+}
+
void V4L2WritableBufferRef::PrepareQueueBuffer(
- scoped_refptr<V4L2DecodeSurface> surface) {
- surface->PrepareQueueBuffer(&(buffer_data_->v4l2_buffer_));
+ const V4L2DecodeSurface& surface) {
+ surface.PrepareQueueBuffer(&(buffer_data_->v4l2_buffer_));
}
size_t V4L2WritableBufferRef::BufferId() const {
@@ -590,6 +626,13 @@ bool V4L2ReadableBuffer::IsLast() const {
return buffer_data_->v4l2_buffer_.flags & V4L2_BUF_FLAG_LAST;
}
+bool V4L2ReadableBuffer::IsKeyframe() const {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(buffer_data_);
+
+ return buffer_data_->v4l2_buffer_.flags & V4L2_BUF_FLAG_KEYFRAME;
+}
+
struct timeval V4L2ReadableBuffer::GetTimeStamp() const {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(buffer_data_);
@@ -604,6 +647,13 @@ size_t V4L2ReadableBuffer::PlanesCount() const {
return buffer_data_->v4l2_buffer_.length;
}
+const void* V4L2ReadableBuffer::GetPlaneMapping(const size_t plane) const {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(buffer_data_);
+
+ return buffer_data_->GetPlaneMapping(plane);
+}
+
size_t V4L2ReadableBuffer::GetPlaneBytesUsed(const size_t plane) const {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(buffer_data_);
@@ -616,6 +666,18 @@ size_t V4L2ReadableBuffer::GetPlaneBytesUsed(const size_t plane) const {
return buffer_data_->v4l2_planes_[plane].bytesused;
}
+size_t V4L2ReadableBuffer::GetPlaneDataOffset(const size_t plane) const {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(buffer_data_);
+
+ if (plane >= PlanesCount()) {
+ VLOGF(1) << "Invalid plane " << plane << " requested.";
+ return 0;
+ }
+
+ return buffer_data_->v4l2_planes_[plane].data_offset;
+}
+
size_t V4L2ReadableBuffer::BufferId() const {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(buffer_data_);
@@ -1004,8 +1066,19 @@ VideoPixelFormat V4L2Device::V4L2PixFmtToVideoPixelFormat(uint32_t pix_fmt) {
case V4L2_PIX_FMT_NV12M:
return PIXEL_FORMAT_NV12;
+    // V4L2_PIX_FMT_MT21C is only used for MT8173 hardware video decoder
+    // output and should be converted by the MT8173 image processor for the
+    // compositor to render. Since it is an intermediate format for the video
+    // decoder, VideoPixelFormat should not have a mapping for it. However, we
+    // need to create a VideoFrameLayout for the format to process the
+    // intermediate frame. Hence we map V4L2_PIX_FMT_MT21C to
+    // PIXEL_FORMAT_NV12, as their layouts are the same.
case V4L2_PIX_FMT_MT21C:
- return PIXEL_FORMAT_MT21;
+    // V4L2_PIX_FMT_MM21 is used for the MT8183 hardware video decoder. It is
+    // similar to V4L2_PIX_FMT_MT21C but is not compressed; thus it can also
+    // be mapped to PIXEL_FORMAT_NV12.
+ case V4L2_PIX_FMT_MM21:
+ return PIXEL_FORMAT_NV12;
case V4L2_PIX_FMT_YUV420:
case V4L2_PIX_FMT_YUV420M:
@@ -1033,15 +1106,14 @@ uint32_t V4L2Device::VideoPixelFormatToV4L2PixFmt(const VideoPixelFormat format,
switch (format) {
case PIXEL_FORMAT_NV12:
return single_planar ? V4L2_PIX_FMT_NV12 : V4L2_PIX_FMT_NV12M;
- case PIXEL_FORMAT_MT21:
- // No single plane format for MT21.
- return single_planar ? 0 : V4L2_PIX_FMT_MT21C;
case PIXEL_FORMAT_I420:
return single_planar ? V4L2_PIX_FMT_YUV420 : V4L2_PIX_FMT_YUV420M;
case PIXEL_FORMAT_YV12:
return single_planar ? V4L2_PIX_FMT_YVU420 : V4L2_PIX_FMT_YVU420M;
default:
- LOG(FATAL) << "Add more cases as needed";
+ LOG(ERROR) << "Add more cases as needed, format: "
+ << VideoPixelFormatToString(format)
+ << ", single_planar: " << single_planar;
return 0;
}
}
@@ -1072,7 +1144,7 @@ uint32_t V4L2Device::VideoCodecProfileToV4L2PixFmt(VideoCodecProfile profile,
else
return V4L2_PIX_FMT_VP9;
} else {
- LOG(FATAL) << "Add more cases as needed";
+ LOG(ERROR) << "Unknown profile: " << GetProfileName(profile);
return 0;
}
}
@@ -1512,8 +1584,8 @@ base::Optional<VideoFrameLayout> V4L2Device::V4L2FormatToVideoFrameLayout(
// static
bool V4L2Device::IsMultiPlanarV4L2PixFmt(uint32_t pix_fmt) {
constexpr uint32_t kMultiV4L2PixFmts[] = {
- V4L2_PIX_FMT_NV12M, V4L2_PIX_FMT_MT21C, V4L2_PIX_FMT_YUV420M,
- V4L2_PIX_FMT_YVU420M, V4L2_PIX_FMT_YUV422M,
+ V4L2_PIX_FMT_NV12M, V4L2_PIX_FMT_MT21C, V4L2_PIX_FMT_MM21,
+ V4L2_PIX_FMT_YUV420M, V4L2_PIX_FMT_YVU420M, V4L2_PIX_FMT_YUV422M,
};
return std::find(std::cbegin(kMultiV4L2PixFmts), std::cend(kMultiV4L2PixFmts),
pix_fmt) != std::cend(kMultiV4L2PixFmts);
diff --git a/chromium/media/gpu/v4l2/v4l2_device.h b/chromium/media/gpu/v4l2/v4l2_device.h
index 7c69ad1491d..a0f6a09bda7 100644
--- a/chromium/media/gpu/v4l2/v4l2_device.h
+++ b/chromium/media/gpu/v4l2/v4l2_device.h
@@ -41,6 +41,12 @@
#define V4L2_CID_JPEG_CHROMA_QUANTIZATION (V4L2_CID_JPEG_CLASS_BASE + 6)
#endif
+// TODO(b/132589320): remove this once V4L2 header is updated.
+#ifndef V4L2_PIX_FMT_MM21
+// MTK 8-bit block mode, two non-contiguous planes.
+#define V4L2_PIX_FMT_MM21 v4l2_fourcc('M', 'M', '2', '1')
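+// (With v4l2_fourcc's little-endian packing this expands to 0x31324d4d.)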
+#endif
+
namespace media {
class V4L2Queue;
@@ -94,6 +100,10 @@ class MEDIA_GPU_EXPORT V4L2WritableBufferRef {
size_t PlanesCount() const;
// Returns the size of the requested |plane|, in bytes.
size_t GetPlaneSize(const size_t plane) const;
+  // Set the size of the requested |plane|, in bytes. It is only valid for
+  // USERPTR and DMABUF buffers. When using an MMAP buffer, this method
+  // triggers a DCHECK and is a no-op in release builds.
+ void SetPlaneSize(const size_t plane, const size_t size);
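+  // E.g. (hypothetical), when importing a two-plane NV12 DMABUF:
+  //   buf.SetPlaneSize(0, y_plane_size);
+  //   buf.SetPlaneSize(1, uv_plane_size);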
// This method can only be used with MMAP buffers.
// It will return a pointer to the data of the |plane|th plane.
// In case of error (invalid plane index or mapping failed), a nullptr is
@@ -107,6 +117,8 @@ class MEDIA_GPU_EXPORT V4L2WritableBufferRef {
void SetPlaneBytesUsed(const size_t plane, const size_t bytes_used);
// Returns the previously-set number of bytes used for |plane|.
size_t GetPlaneBytesUsed(const size_t plane) const;
+ // Set the data offset for |plane|, in bytes.
+ void SetPlaneDataOffset(const size_t plane, const size_t data_offset);
// Return the VideoFrame underlying this buffer. The VideoFrame's layout
// will match that of the V4L2 format. This method will *always* return the
@@ -120,7 +132,7 @@ class MEDIA_GPU_EXPORT V4L2WritableBufferRef {
// Add the request or config store information to |surface|.
// TODO(acourbot): This method is a temporary hack. Implement proper config
// store/request API support.
- void PrepareQueueBuffer(scoped_refptr<V4L2DecodeSurface> surface);
+ void PrepareQueueBuffer(const V4L2DecodeSurface& surface);
// Return the V4L2 buffer ID of the underlying buffer.
// TODO(acourbot) This is used for legacy clients but should be ultimately
@@ -160,12 +172,21 @@ class MEDIA_GPU_EXPORT V4L2ReadableBuffer
public:
// Returns whether the V4L2_BUF_FLAG_LAST flag is set for this buffer.
bool IsLast() const;
+ // Returns whether the V4L2_BUF_FLAG_KEYFRAME flag is set for this buffer.
+ bool IsKeyframe() const;
// Return the timestamp set by the driver on this buffer.
struct timeval GetTimeStamp() const;
// Returns the number of planes in this buffer.
size_t PlanesCount() const;
// Returns the number of bytes used for |plane|.
size_t GetPlaneBytesUsed(size_t plane) const;
+ // Returns the data offset for |plane|.
+ size_t GetPlaneDataOffset(size_t plane) const;
+ // This method can only be used with MMAP buffers.
+ // It will return a pointer to the data of the |plane|th plane.
+ // In case of error (invalid plane index or mapping failed), a nullptr is
+ // returned.
+ const void* GetPlaneMapping(const size_t plane) const;
// Return the V4L2 buffer ID of the underlying buffer.
// TODO(acourbot) This is used for legacy clients but should be ultimately
diff --git a/chromium/media/gpu/v4l2/v4l2_h264_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_h264_accelerator.cc
index 54b89244205..f64586f97df 100644
--- a/chromium/media/gpu/v4l2/v4l2_h264_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_h264_accelerator.cc
@@ -4,6 +4,12 @@
#include "media/gpu/v4l2/v4l2_h264_accelerator.h"
+// TODO(987856): prevent legacy headers being included from videodev2.h until
+// v4.14 support is deprecated.
+#define _H264_CTRLS_LEGACY_H_
+
+#include <linux/media/h264-ctrls.h>
#include <linux/videodev2.h>
#include <type_traits>
@@ -23,8 +29,8 @@ struct V4L2H264AcceleratorPrivate {
// TODO(posciak): This should be queried from hardware once supported.
static constexpr size_t kMaxSlices = 16;
- struct v4l2_ctrl_h264_slice_param v4l2_slice_params[kMaxSlices];
- struct v4l2_ctrl_h264_decode_param v4l2_decode_param;
+ struct v4l2_ctrl_h264_slice_params v4l2_slice_params[kMaxSlices];
+ struct v4l2_ctrl_h264_decode_params v4l2_decode_param;
};
class V4L2H264Picture : public H264Picture {
@@ -97,7 +103,7 @@ void V4L2H264Accelerator::H264DPBToV4L2DPB(
}
struct v4l2_h264_dpb_entry& entry = priv_->v4l2_decode_param.dpb[i++];
- entry.buf_index = index;
+ entry.reference_ts = index;
entry.frame_num = pic->frame_num;
entry.pic_num = pic->pic_num;
entry.top_field_order_cnt = pic->top_field_order_cnt;
@@ -306,7 +312,7 @@ H264Decoder::H264Accelerator::Status V4L2H264Accelerator::SubmitSlice(
return Status::kFail;
}
- struct v4l2_ctrl_h264_slice_param& v4l2_slice_param =
+ struct v4l2_ctrl_h264_slice_params& v4l2_slice_param =
priv_->v4l2_slice_params[num_slices_++];
memset(&v4l2_slice_param, 0, sizeof(v4l2_slice_param));
@@ -338,11 +344,12 @@ H264Decoder::H264Accelerator::Status V4L2H264Accelerator::SubmitSlice(
#define SET_V4L2_SPARM_FLAG_IF(cond, flag) \
v4l2_slice_param.flags |= ((slice_hdr->cond) ? (flag) : 0)
- SET_V4L2_SPARM_FLAG_IF(field_pic_flag, V4L2_SLICE_FLAG_FIELD_PIC);
- SET_V4L2_SPARM_FLAG_IF(bottom_field_flag, V4L2_SLICE_FLAG_BOTTOM_FIELD);
+ SET_V4L2_SPARM_FLAG_IF(field_pic_flag, V4L2_H264_SLICE_FLAG_FIELD_PIC);
+ SET_V4L2_SPARM_FLAG_IF(bottom_field_flag, V4L2_H264_SLICE_FLAG_BOTTOM_FIELD);
SET_V4L2_SPARM_FLAG_IF(direct_spatial_mv_pred_flag,
- V4L2_SLICE_FLAG_DIRECT_SPATIAL_MV_PRED);
- SET_V4L2_SPARM_FLAG_IF(sp_for_switch_flag, V4L2_SLICE_FLAG_SP_FOR_SWITCH);
+ V4L2_H264_SLICE_FLAG_DIRECT_SPATIAL_MV_PRED);
+ SET_V4L2_SPARM_FLAG_IF(sp_for_switch_flag,
+ V4L2_H264_SLICE_FLAG_SP_FOR_SWITCH);
#undef SET_V4L2_SPARM_FLAG_IF
struct v4l2_h264_pred_weight_table* pred_weight_table =
@@ -422,7 +429,9 @@ H264Decoder::H264Accelerator::Status V4L2H264Accelerator::SubmitDecode(
H264PictureToV4L2DecodeSurface(pic.get());
priv_->v4l2_decode_param.num_slices = num_slices_;
- priv_->v4l2_decode_param.idr_pic_flag = pic->idr;
+ if (pic->idr) {
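+    // Bit 0 of |flags| marks this picture as an IDR frame.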
+ priv_->v4l2_decode_param.flags |= 1;
+ }
priv_->v4l2_decode_param.top_field_order_cnt = pic->top_field_order_cnt;
priv_->v4l2_decode_param.bottom_field_order_cnt = pic->bottom_field_order_cnt;
@@ -430,13 +439,13 @@ H264Decoder::H264Accelerator::Status V4L2H264Accelerator::SubmitDecode(
std::vector<struct v4l2_ext_control> ctrls;
memset(&ctrl, 0, sizeof(ctrl));
- ctrl.id = V4L2_CID_MPEG_VIDEO_H264_SLICE_PARAM;
+ ctrl.id = V4L2_CID_MPEG_VIDEO_H264_SLICE_PARAMS;
ctrl.size = sizeof(priv_->v4l2_slice_params);
ctrl.ptr = priv_->v4l2_slice_params;
ctrls.push_back(ctrl);
memset(&ctrl, 0, sizeof(ctrl));
- ctrl.id = V4L2_CID_MPEG_VIDEO_H264_DECODE_PARAM;
+ ctrl.id = V4L2_CID_MPEG_VIDEO_H264_DECODE_PARAMS;
ctrl.size = sizeof(priv_->v4l2_decode_param);
ctrl.ptr = &priv_->v4l2_decode_param;
ctrls.push_back(ctrl);
diff --git a/chromium/media/gpu/v4l2/v4l2_h264_accelerator_legacy.cc b/chromium/media/gpu/v4l2/v4l2_h264_accelerator_legacy.cc
new file mode 100644
index 00000000000..d33b98303d9
--- /dev/null
+++ b/chromium/media/gpu/v4l2/v4l2_h264_accelerator_legacy.cc
@@ -0,0 +1,484 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/v4l2/v4l2_h264_accelerator_legacy.h"
+
+#include <linux/media/h264-ctrls-legacy.h>
+#include <linux/videodev2.h>
+#include <type_traits>
+
+#include "base/logging.h"
+#include "base/stl_util.h"
+#include "media/gpu/macros.h"
+#include "media/gpu/v4l2/v4l2_decode_surface.h"
+#include "media/gpu/v4l2/v4l2_decode_surface_handler.h"
+#include "media/gpu/v4l2/v4l2_device.h"
+
+namespace media {
+
+// This struct contains the kernel-specific parts of the H264 acceleration,
+// that we don't want to expose in the .h file since they may differ from
+// upstream.
+struct V4L2LegacyH264AcceleratorPrivate {
+ // TODO(posciak): This should be queried from hardware once supported.
+ static constexpr size_t kMaxSlices = 16;
+
+ struct v4l2_ctrl_h264_slice_param v4l2_slice_params[kMaxSlices];
+ struct v4l2_ctrl_h264_decode_param v4l2_decode_param;
+};
+
+class V4L2H264Picture : public H264Picture {
+ public:
+ explicit V4L2H264Picture(const scoped_refptr<V4L2DecodeSurface>& dec_surface)
+ : dec_surface_(dec_surface) {}
+
+ V4L2H264Picture* AsV4L2H264Picture() override { return this; }
+ scoped_refptr<V4L2DecodeSurface> dec_surface() { return dec_surface_; }
+
+ private:
+ ~V4L2H264Picture() override {}
+
+ scoped_refptr<V4L2DecodeSurface> dec_surface_;
+
+ DISALLOW_COPY_AND_ASSIGN(V4L2H264Picture);
+};
+
+V4L2LegacyH264Accelerator::V4L2LegacyH264Accelerator(
+ V4L2DecodeSurfaceHandler* surface_handler,
+ V4L2Device* device)
+ : num_slices_(0),
+ surface_handler_(surface_handler),
+ device_(device),
+ priv_(std::make_unique<V4L2LegacyH264AcceleratorPrivate>()) {
+ DCHECK(surface_handler_);
+}
+
+V4L2LegacyH264Accelerator::~V4L2LegacyH264Accelerator() {}
+
+scoped_refptr<H264Picture> V4L2LegacyH264Accelerator::CreateH264Picture() {
+ scoped_refptr<V4L2DecodeSurface> dec_surface =
+ surface_handler_->CreateSurface();
+ if (!dec_surface)
+ return nullptr;
+
+ return new V4L2H264Picture(dec_surface);
+}
+
+void V4L2LegacyH264Accelerator::H264PictureListToDPBIndicesList(
+ const H264Picture::Vector& src_pic_list,
+ uint8_t dst_list[kDPBIndicesListSize]) {
+ size_t i;
+ for (i = 0; i < src_pic_list.size() && i < kDPBIndicesListSize; ++i) {
+ const scoped_refptr<H264Picture>& pic = src_pic_list[i];
+ dst_list[i] = pic ? pic->dpb_position : VIDEO_MAX_FRAME;
+ }
+
+ while (i < kDPBIndicesListSize)
+ dst_list[i++] = VIDEO_MAX_FRAME;
+}
+
+void V4L2LegacyH264Accelerator::H264DPBToV4L2DPB(
+ const H264DPB& dpb,
+ std::vector<scoped_refptr<V4L2DecodeSurface>>* ref_surfaces) {
+ memset(priv_->v4l2_decode_param.dpb, 0, sizeof(priv_->v4l2_decode_param.dpb));
+ size_t i = 0;
+ for (const auto& pic : dpb) {
+ if (i >= base::size(priv_->v4l2_decode_param.dpb)) {
+ VLOGF(1) << "Invalid DPB size";
+ break;
+ }
+
+ int index = VIDEO_MAX_FRAME;
+ if (!pic->nonexisting) {
+ scoped_refptr<V4L2DecodeSurface> dec_surface =
+ H264PictureToV4L2DecodeSurface(pic.get());
+ index = dec_surface->GetReferenceID();
+ ref_surfaces->push_back(dec_surface);
+ }
+
+ struct v4l2_h264_dpb_entry& entry = priv_->v4l2_decode_param.dpb[i++];
+ entry.buf_index = index;
+ entry.frame_num = pic->frame_num;
+ entry.pic_num = pic->pic_num;
+ entry.top_field_order_cnt = pic->top_field_order_cnt;
+ entry.bottom_field_order_cnt = pic->bottom_field_order_cnt;
+ entry.flags = (pic->ref ? V4L2_H264_DPB_ENTRY_FLAG_ACTIVE : 0) |
+ (pic->long_term ? V4L2_H264_DPB_ENTRY_FLAG_LONG_TERM : 0);
+ }
+}
+
+H264Decoder::H264Accelerator::Status
+V4L2LegacyH264Accelerator::SubmitFrameMetadata(
+ const H264SPS* sps,
+ const H264PPS* pps,
+ const H264DPB& dpb,
+ const H264Picture::Vector& ref_pic_listp0,
+ const H264Picture::Vector& ref_pic_listb0,
+ const H264Picture::Vector& ref_pic_listb1,
+ scoped_refptr<H264Picture> pic) {
+ struct v4l2_ext_control ctrl;
+ std::vector<struct v4l2_ext_control> ctrls;
+
+ struct v4l2_ctrl_h264_sps v4l2_sps;
+ memset(&v4l2_sps, 0, sizeof(v4l2_sps));
+ v4l2_sps.constraint_set_flags =
+ (sps->constraint_set0_flag ? V4L2_H264_SPS_CONSTRAINT_SET0_FLAG : 0) |
+ (sps->constraint_set1_flag ? V4L2_H264_SPS_CONSTRAINT_SET1_FLAG : 0) |
+ (sps->constraint_set2_flag ? V4L2_H264_SPS_CONSTRAINT_SET2_FLAG : 0) |
+ (sps->constraint_set3_flag ? V4L2_H264_SPS_CONSTRAINT_SET3_FLAG : 0) |
+ (sps->constraint_set4_flag ? V4L2_H264_SPS_CONSTRAINT_SET4_FLAG : 0) |
+ (sps->constraint_set5_flag ? V4L2_H264_SPS_CONSTRAINT_SET5_FLAG : 0);
+#define SPS_TO_V4L2SPS(a) v4l2_sps.a = sps->a
+ SPS_TO_V4L2SPS(profile_idc);
+ SPS_TO_V4L2SPS(level_idc);
+ SPS_TO_V4L2SPS(seq_parameter_set_id);
+ SPS_TO_V4L2SPS(chroma_format_idc);
+ SPS_TO_V4L2SPS(bit_depth_luma_minus8);
+ SPS_TO_V4L2SPS(bit_depth_chroma_minus8);
+ SPS_TO_V4L2SPS(log2_max_frame_num_minus4);
+ SPS_TO_V4L2SPS(pic_order_cnt_type);
+ SPS_TO_V4L2SPS(log2_max_pic_order_cnt_lsb_minus4);
+ SPS_TO_V4L2SPS(offset_for_non_ref_pic);
+ SPS_TO_V4L2SPS(offset_for_top_to_bottom_field);
+ SPS_TO_V4L2SPS(num_ref_frames_in_pic_order_cnt_cycle);
+
+ static_assert(std::extent<decltype(v4l2_sps.offset_for_ref_frame)>() ==
+ std::extent<decltype(sps->offset_for_ref_frame)>(),
+ "offset_for_ref_frame arrays must be same size");
+ for (size_t i = 0; i < base::size(v4l2_sps.offset_for_ref_frame); ++i)
+ v4l2_sps.offset_for_ref_frame[i] = sps->offset_for_ref_frame[i];
+ SPS_TO_V4L2SPS(max_num_ref_frames);
+ SPS_TO_V4L2SPS(pic_width_in_mbs_minus1);
+ SPS_TO_V4L2SPS(pic_height_in_map_units_minus1);
+#undef SPS_TO_V4L2SPS
+
+#define SET_V4L2_SPS_FLAG_IF(cond, flag) \
+ v4l2_sps.flags |= ((sps->cond) ? (flag) : 0)
+ SET_V4L2_SPS_FLAG_IF(separate_colour_plane_flag,
+ V4L2_H264_SPS_FLAG_SEPARATE_COLOUR_PLANE);
+ SET_V4L2_SPS_FLAG_IF(qpprime_y_zero_transform_bypass_flag,
+ V4L2_H264_SPS_FLAG_QPPRIME_Y_ZERO_TRANSFORM_BYPASS);
+ SET_V4L2_SPS_FLAG_IF(delta_pic_order_always_zero_flag,
+ V4L2_H264_SPS_FLAG_DELTA_PIC_ORDER_ALWAYS_ZERO);
+ SET_V4L2_SPS_FLAG_IF(gaps_in_frame_num_value_allowed_flag,
+ V4L2_H264_SPS_FLAG_GAPS_IN_FRAME_NUM_VALUE_ALLOWED);
+ SET_V4L2_SPS_FLAG_IF(frame_mbs_only_flag, V4L2_H264_SPS_FLAG_FRAME_MBS_ONLY);
+ SET_V4L2_SPS_FLAG_IF(mb_adaptive_frame_field_flag,
+ V4L2_H264_SPS_FLAG_MB_ADAPTIVE_FRAME_FIELD);
+ SET_V4L2_SPS_FLAG_IF(direct_8x8_inference_flag,
+ V4L2_H264_SPS_FLAG_DIRECT_8X8_INFERENCE);
+#undef SET_V4L2_SPS_FLAG_IF
+ memset(&ctrl, 0, sizeof(ctrl));
+ ctrl.id = V4L2_CID_MPEG_VIDEO_H264_SPS;
+ ctrl.size = sizeof(v4l2_sps);
+ ctrl.ptr = &v4l2_sps;
+ ctrls.push_back(ctrl);
+
+ struct v4l2_ctrl_h264_pps v4l2_pps;
+ memset(&v4l2_pps, 0, sizeof(v4l2_pps));
+#define PPS_TO_V4L2PPS(a) v4l2_pps.a = pps->a
+ PPS_TO_V4L2PPS(pic_parameter_set_id);
+ PPS_TO_V4L2PPS(seq_parameter_set_id);
+ PPS_TO_V4L2PPS(num_slice_groups_minus1);
+ PPS_TO_V4L2PPS(num_ref_idx_l0_default_active_minus1);
+ PPS_TO_V4L2PPS(num_ref_idx_l1_default_active_minus1);
+ PPS_TO_V4L2PPS(weighted_bipred_idc);
+ PPS_TO_V4L2PPS(pic_init_qp_minus26);
+ PPS_TO_V4L2PPS(pic_init_qs_minus26);
+ PPS_TO_V4L2PPS(chroma_qp_index_offset);
+ PPS_TO_V4L2PPS(second_chroma_qp_index_offset);
+#undef PPS_TO_V4L2PPS
+
+#define SET_V4L2_PPS_FLAG_IF(cond, flag) \
+ v4l2_pps.flags |= ((pps->cond) ? (flag) : 0)
+ SET_V4L2_PPS_FLAG_IF(entropy_coding_mode_flag,
+ V4L2_H264_PPS_FLAG_ENTROPY_CODING_MODE);
+ SET_V4L2_PPS_FLAG_IF(
+ bottom_field_pic_order_in_frame_present_flag,
+ V4L2_H264_PPS_FLAG_BOTTOM_FIELD_PIC_ORDER_IN_FRAME_PRESENT);
+ SET_V4L2_PPS_FLAG_IF(weighted_pred_flag, V4L2_H264_PPS_FLAG_WEIGHTED_PRED);
+ SET_V4L2_PPS_FLAG_IF(deblocking_filter_control_present_flag,
+ V4L2_H264_PPS_FLAG_DEBLOCKING_FILTER_CONTROL_PRESENT);
+ SET_V4L2_PPS_FLAG_IF(constrained_intra_pred_flag,
+ V4L2_H264_PPS_FLAG_CONSTRAINED_INTRA_PRED);
+ SET_V4L2_PPS_FLAG_IF(redundant_pic_cnt_present_flag,
+ V4L2_H264_PPS_FLAG_REDUNDANT_PIC_CNT_PRESENT);
+ SET_V4L2_PPS_FLAG_IF(transform_8x8_mode_flag,
+ V4L2_H264_PPS_FLAG_TRANSFORM_8X8_MODE);
+ SET_V4L2_PPS_FLAG_IF(pic_scaling_matrix_present_flag,
+ V4L2_H264_PPS_FLAG_PIC_SCALING_MATRIX_PRESENT);
+#undef SET_V4L2_PPS_FLAG_IF
+ memset(&ctrl, 0, sizeof(ctrl));
+ ctrl.id = V4L2_CID_MPEG_VIDEO_H264_PPS;
+ ctrl.size = sizeof(v4l2_pps);
+ ctrl.ptr = &v4l2_pps;
+ ctrls.push_back(ctrl);
+
+ struct v4l2_ctrl_h264_scaling_matrix v4l2_scaling_matrix;
+ memset(&v4l2_scaling_matrix, 0, sizeof(v4l2_scaling_matrix));
+
+ static_assert(
+ std::extent<decltype(v4l2_scaling_matrix.scaling_list_4x4)>() <=
+ std::extent<decltype(pps->scaling_list4x4)>() &&
+ std::extent<decltype(v4l2_scaling_matrix.scaling_list_4x4[0])>() <=
+ std::extent<decltype(pps->scaling_list4x4[0])>() &&
+ std::extent<decltype(v4l2_scaling_matrix.scaling_list_8x8)>() <=
+ std::extent<decltype(pps->scaling_list8x8)>() &&
+ std::extent<decltype(v4l2_scaling_matrix.scaling_list_8x8[0])>() <=
+ std::extent<decltype(pps->scaling_list8x8[0])>(),
+ "scaling_lists must be of correct size");
+ static_assert(
+ std::extent<decltype(v4l2_scaling_matrix.scaling_list_4x4)>() <=
+ std::extent<decltype(sps->scaling_list4x4)>() &&
+ std::extent<decltype(v4l2_scaling_matrix.scaling_list_4x4[0])>() <=
+ std::extent<decltype(sps->scaling_list4x4[0])>() &&
+ std::extent<decltype(v4l2_scaling_matrix.scaling_list_8x8)>() <=
+ std::extent<decltype(sps->scaling_list8x8)>() &&
+ std::extent<decltype(v4l2_scaling_matrix.scaling_list_8x8[0])>() <=
+ std::extent<decltype(sps->scaling_list8x8[0])>(),
+ "scaling_lists must be of correct size");
+
+ const auto* scaling_list4x4 = &sps->scaling_list4x4[0];
+ const auto* scaling_list8x8 = &sps->scaling_list8x8[0];
+ if (pps->pic_scaling_matrix_present_flag) {
+ scaling_list4x4 = &pps->scaling_list4x4[0];
+ scaling_list8x8 = &pps->scaling_list8x8[0];
+ }
+
+ for (size_t i = 0; i < base::size(v4l2_scaling_matrix.scaling_list_4x4);
+ ++i) {
+ for (size_t j = 0; j < base::size(v4l2_scaling_matrix.scaling_list_4x4[i]);
+ ++j) {
+ v4l2_scaling_matrix.scaling_list_4x4[i][j] = scaling_list4x4[i][j];
+ }
+ }
+ for (size_t i = 0; i < base::size(v4l2_scaling_matrix.scaling_list_8x8);
+ ++i) {
+ for (size_t j = 0; j < base::size(v4l2_scaling_matrix.scaling_list_8x8[i]);
+ ++j) {
+ v4l2_scaling_matrix.scaling_list_8x8[i][j] = scaling_list8x8[i][j];
+ }
+ }
+
+ memset(&ctrl, 0, sizeof(ctrl));
+ ctrl.id = V4L2_CID_MPEG_VIDEO_H264_SCALING_MATRIX;
+ ctrl.size = sizeof(v4l2_scaling_matrix);
+ ctrl.ptr = &v4l2_scaling_matrix;
+ ctrls.push_back(ctrl);
+
+ scoped_refptr<V4L2DecodeSurface> dec_surface =
+ H264PictureToV4L2DecodeSurface(pic.get());
+
+ struct v4l2_ext_controls ext_ctrls;
+ memset(&ext_ctrls, 0, sizeof(ext_ctrls));
+ ext_ctrls.count = ctrls.size();
+ ext_ctrls.controls = &ctrls[0];
+ dec_surface->PrepareSetCtrls(&ext_ctrls);
+ if (device_->Ioctl(VIDIOC_S_EXT_CTRLS, &ext_ctrls) != 0) {
+ VPLOGF(1) << "ioctl() failed: VIDIOC_S_EXT_CTRLS";
+ return Status::kFail;
+ }
+
+ H264PictureListToDPBIndicesList(ref_pic_listp0,
+ priv_->v4l2_decode_param.ref_pic_list_p0);
+ H264PictureListToDPBIndicesList(ref_pic_listb0,
+ priv_->v4l2_decode_param.ref_pic_list_b0);
+ H264PictureListToDPBIndicesList(ref_pic_listb1,
+ priv_->v4l2_decode_param.ref_pic_list_b1);
+
+ std::vector<scoped_refptr<V4L2DecodeSurface>> ref_surfaces;
+ H264DPBToV4L2DPB(dpb, &ref_surfaces);
+ dec_surface->SetReferenceSurfaces(ref_surfaces);
+
+ return Status::kOk;
+}
+
+H264Decoder::H264Accelerator::Status V4L2LegacyH264Accelerator::SubmitSlice(
+ const H264PPS* pps,
+ const H264SliceHeader* slice_hdr,
+ const H264Picture::Vector& ref_pic_list0,
+ const H264Picture::Vector& ref_pic_list1,
+ scoped_refptr<H264Picture> pic,
+ const uint8_t* data,
+ size_t size,
+ const std::vector<SubsampleEntry>& subsamples) {
+ if (num_slices_ == priv_->kMaxSlices) {
+ VLOGF(1) << "Over limit of supported slices per frame";
+ return Status::kFail;
+ }
+
+ struct v4l2_ctrl_h264_slice_param& v4l2_slice_param =
+ priv_->v4l2_slice_params[num_slices_++];
+ memset(&v4l2_slice_param, 0, sizeof(v4l2_slice_param));
+
+ v4l2_slice_param.size = size;
+#define SHDR_TO_V4L2SPARM(a) v4l2_slice_param.a = slice_hdr->a
+ SHDR_TO_V4L2SPARM(header_bit_size);
+ SHDR_TO_V4L2SPARM(first_mb_in_slice);
+ SHDR_TO_V4L2SPARM(slice_type);
+ SHDR_TO_V4L2SPARM(pic_parameter_set_id);
+ SHDR_TO_V4L2SPARM(colour_plane_id);
+ SHDR_TO_V4L2SPARM(frame_num);
+ SHDR_TO_V4L2SPARM(idr_pic_id);
+ SHDR_TO_V4L2SPARM(pic_order_cnt_lsb);
+ SHDR_TO_V4L2SPARM(delta_pic_order_cnt_bottom);
+ SHDR_TO_V4L2SPARM(delta_pic_order_cnt0);
+ SHDR_TO_V4L2SPARM(delta_pic_order_cnt1);
+ SHDR_TO_V4L2SPARM(redundant_pic_cnt);
+ SHDR_TO_V4L2SPARM(dec_ref_pic_marking_bit_size);
+ SHDR_TO_V4L2SPARM(cabac_init_idc);
+ SHDR_TO_V4L2SPARM(slice_qp_delta);
+ SHDR_TO_V4L2SPARM(slice_qs_delta);
+ SHDR_TO_V4L2SPARM(disable_deblocking_filter_idc);
+ SHDR_TO_V4L2SPARM(slice_alpha_c0_offset_div2);
+ SHDR_TO_V4L2SPARM(slice_beta_offset_div2);
+ SHDR_TO_V4L2SPARM(num_ref_idx_l0_active_minus1);
+ SHDR_TO_V4L2SPARM(num_ref_idx_l1_active_minus1);
+ SHDR_TO_V4L2SPARM(pic_order_cnt_bit_size);
+#undef SHDR_TO_V4L2SPARM
+
+#define SET_V4L2_SPARM_FLAG_IF(cond, flag) \
+ v4l2_slice_param.flags |= ((slice_hdr->cond) ? (flag) : 0)
+ SET_V4L2_SPARM_FLAG_IF(field_pic_flag, V4L2_SLICE_FLAG_FIELD_PIC);
+ SET_V4L2_SPARM_FLAG_IF(bottom_field_flag, V4L2_SLICE_FLAG_BOTTOM_FIELD);
+ SET_V4L2_SPARM_FLAG_IF(direct_spatial_mv_pred_flag,
+ V4L2_SLICE_FLAG_DIRECT_SPATIAL_MV_PRED);
+ SET_V4L2_SPARM_FLAG_IF(sp_for_switch_flag, V4L2_SLICE_FLAG_SP_FOR_SWITCH);
+#undef SET_V4L2_SPARM_FLAG_IF
+
+ struct v4l2_h264_pred_weight_table* pred_weight_table =
+ &v4l2_slice_param.pred_weight_table;
+
+ if (((slice_hdr->IsPSlice() || slice_hdr->IsSPSlice()) &&
+ pps->weighted_pred_flag) ||
+ (slice_hdr->IsBSlice() && pps->weighted_bipred_idc == 1)) {
+ pred_weight_table->luma_log2_weight_denom =
+ slice_hdr->luma_log2_weight_denom;
+ pred_weight_table->chroma_log2_weight_denom =
+ slice_hdr->chroma_log2_weight_denom;
+
+ struct v4l2_h264_weight_factors* factorsl0 =
+ &pred_weight_table->weight_factors[0];
+
+ for (int i = 0; i < 32; ++i) {
+ factorsl0->luma_weight[i] =
+ slice_hdr->pred_weight_table_l0.luma_weight[i];
+ factorsl0->luma_offset[i] =
+ slice_hdr->pred_weight_table_l0.luma_offset[i];
+
+ for (int j = 0; j < 2; ++j) {
+ factorsl0->chroma_weight[i][j] =
+ slice_hdr->pred_weight_table_l0.chroma_weight[i][j];
+ factorsl0->chroma_offset[i][j] =
+ slice_hdr->pred_weight_table_l0.chroma_offset[i][j];
+ }
+ }
+
+ if (slice_hdr->IsBSlice()) {
+ struct v4l2_h264_weight_factors* factorsl1 =
+ &pred_weight_table->weight_factors[1];
+
+ for (int i = 0; i < 32; ++i) {
+ factorsl1->luma_weight[i] =
+ slice_hdr->pred_weight_table_l1.luma_weight[i];
+ factorsl1->luma_offset[i] =
+ slice_hdr->pred_weight_table_l1.luma_offset[i];
+
+ for (int j = 0; j < 2; ++j) {
+ factorsl1->chroma_weight[i][j] =
+ slice_hdr->pred_weight_table_l1.chroma_weight[i][j];
+ factorsl1->chroma_offset[i][j] =
+ slice_hdr->pred_weight_table_l1.chroma_offset[i][j];
+ }
+ }
+ }
+ }
+
+ H264PictureListToDPBIndicesList(ref_pic_list0,
+ v4l2_slice_param.ref_pic_list0);
+ H264PictureListToDPBIndicesList(ref_pic_list1,
+ v4l2_slice_param.ref_pic_list1);
+
+ scoped_refptr<V4L2DecodeSurface> dec_surface =
+ H264PictureToV4L2DecodeSurface(pic.get());
+
+ priv_->v4l2_decode_param.nal_ref_idc = slice_hdr->nal_ref_idc;
+
+ // TODO(posciak): Don't add start code back here, but have it passed from
+ // the parser.
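+  // (The three extra bytes below form the Annex-B start code 00 00 01 that
+  // the driver-facing interface expects in front of each slice.)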
+ size_t data_copy_size = size + 3;
+ std::unique_ptr<uint8_t[]> data_copy(new uint8_t[data_copy_size]);
+ memset(data_copy.get(), 0, data_copy_size);
+ data_copy[2] = 0x01;
+ memcpy(data_copy.get() + 3, data, size);
+ return surface_handler_->SubmitSlice(dec_surface, data_copy.get(),
+ data_copy_size)
+ ? Status::kOk
+ : Status::kFail;
+}
+
+H264Decoder::H264Accelerator::Status V4L2LegacyH264Accelerator::SubmitDecode(
+ scoped_refptr<H264Picture> pic) {
+ scoped_refptr<V4L2DecodeSurface> dec_surface =
+ H264PictureToV4L2DecodeSurface(pic.get());
+
+ priv_->v4l2_decode_param.num_slices = num_slices_;
+ priv_->v4l2_decode_param.idr_pic_flag = pic->idr;
+ priv_->v4l2_decode_param.top_field_order_cnt = pic->top_field_order_cnt;
+ priv_->v4l2_decode_param.bottom_field_order_cnt = pic->bottom_field_order_cnt;
+
+ struct v4l2_ext_control ctrl;
+ std::vector<struct v4l2_ext_control> ctrls;
+
+ memset(&ctrl, 0, sizeof(ctrl));
+ ctrl.id = V4L2_CID_MPEG_VIDEO_H264_SLICE_PARAM;
+ ctrl.size = sizeof(priv_->v4l2_slice_params);
+ ctrl.ptr = priv_->v4l2_slice_params;
+ ctrls.push_back(ctrl);
+
+ memset(&ctrl, 0, sizeof(ctrl));
+ ctrl.id = V4L2_CID_MPEG_VIDEO_H264_DECODE_PARAM;
+ ctrl.size = sizeof(priv_->v4l2_decode_param);
+ ctrl.ptr = &priv_->v4l2_decode_param;
+ ctrls.push_back(ctrl);
+
+ struct v4l2_ext_controls ext_ctrls;
+ memset(&ext_ctrls, 0, sizeof(ext_ctrls));
+ ext_ctrls.count = ctrls.size();
+ ext_ctrls.controls = &ctrls[0];
+ dec_surface->PrepareSetCtrls(&ext_ctrls);
+ if (device_->Ioctl(VIDIOC_S_EXT_CTRLS, &ext_ctrls) != 0) {
+ VPLOGF(1) << "ioctl() failed: VIDIOC_S_EXT_CTRLS";
+ return Status::kFail;
+ }
+
+ Reset();
+
+ DVLOGF(4) << "Submitting decode for surface: " << dec_surface->ToString();
+ surface_handler_->DecodeSurface(dec_surface);
+ return Status::kOk;
+}
+
+bool V4L2LegacyH264Accelerator::OutputPicture(scoped_refptr<H264Picture> pic) {
+ // TODO(crbug.com/647725): Insert correct color space.
+ surface_handler_->SurfaceReady(H264PictureToV4L2DecodeSurface(pic.get()),
+ pic->bitstream_id(), pic->visible_rect(),
+ VideoColorSpace());
+ return true;
+}
+
+void V4L2LegacyH264Accelerator::Reset() {
+ num_slices_ = 0;
+ memset(&priv_->v4l2_decode_param, 0, sizeof(priv_->v4l2_decode_param));
+ memset(&priv_->v4l2_slice_params, 0, sizeof(priv_->v4l2_slice_params));
+}
+
+scoped_refptr<V4L2DecodeSurface>
+V4L2LegacyH264Accelerator::H264PictureToV4L2DecodeSurface(H264Picture* pic) {
+ V4L2H264Picture* v4l2_pic = pic->AsV4L2H264Picture();
+ CHECK(v4l2_pic);
+ return v4l2_pic->dec_surface();
+}
+
+} // namespace media
diff --git a/chromium/media/gpu/v4l2/v4l2_h264_accelerator_legacy.h b/chromium/media/gpu/v4l2/v4l2_h264_accelerator_legacy.h
new file mode 100644
index 00000000000..8bad8864845
--- /dev/null
+++ b/chromium/media/gpu/v4l2/v4l2_h264_accelerator_legacy.h
@@ -0,0 +1,77 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_V4L2_V4L2_H264_ACCELERATOR_LEGACY_H_
+#define MEDIA_GPU_V4L2_V4L2_H264_ACCELERATOR_LEGACY_H_
+
+#include <memory>
+#include <vector>
+
+#include "base/macros.h"
+#include "base/memory/scoped_refptr.h"
+#include "media/gpu/h264_decoder.h"
+#include "media/gpu/h264_dpb.h"
+
+namespace media {
+
+class V4L2Device;
+class V4L2DecodeSurface;
+class V4L2DecodeSurfaceHandler;
+struct V4L2LegacyH264AcceleratorPrivate;
+
+class V4L2LegacyH264Accelerator : public H264Decoder::H264Accelerator {
+ public:
+ using Status = H264Decoder::H264Accelerator::Status;
+
+ explicit V4L2LegacyH264Accelerator(V4L2DecodeSurfaceHandler* surface_handler,
+ V4L2Device* device);
+ ~V4L2LegacyH264Accelerator() override;
+
+ // H264Decoder::H264Accelerator implementation.
+ scoped_refptr<H264Picture> CreateH264Picture() override;
+ Status SubmitFrameMetadata(const H264SPS* sps,
+ const H264PPS* pps,
+ const H264DPB& dpb,
+ const H264Picture::Vector& ref_pic_listp0,
+ const H264Picture::Vector& ref_pic_listb0,
+ const H264Picture::Vector& ref_pic_listb1,
+ scoped_refptr<H264Picture> pic) override;
+ Status SubmitSlice(const H264PPS* pps,
+ const H264SliceHeader* slice_hdr,
+ const H264Picture::Vector& ref_pic_list0,
+ const H264Picture::Vector& ref_pic_list1,
+ scoped_refptr<H264Picture> pic,
+ const uint8_t* data,
+ size_t size,
+ const std::vector<SubsampleEntry>& subsamples) override;
+ Status SubmitDecode(scoped_refptr<H264Picture> pic) override;
+ bool OutputPicture(scoped_refptr<H264Picture> pic) override;
+ void Reset() override;
+
+ private:
+ // Max size of reference list.
+ static constexpr size_t kDPBIndicesListSize = 32;
+
+ void H264PictureListToDPBIndicesList(const H264Picture::Vector& src_pic_list,
+ uint8_t dst_list[kDPBIndicesListSize]);
+ void H264DPBToV4L2DPB(
+ const H264DPB& dpb,
+ std::vector<scoped_refptr<V4L2DecodeSurface>>* ref_surfaces);
+ scoped_refptr<V4L2DecodeSurface> H264PictureToV4L2DecodeSurface(
+ H264Picture* pic);
+
+ size_t num_slices_;
+ V4L2DecodeSurfaceHandler* const surface_handler_;
+ V4L2Device* const device_;
+
+ // Contains the kernel-specific structures that we don't want to expose
+ // outside of the compilation unit.
+ const std::unique_ptr<V4L2LegacyH264AcceleratorPrivate> priv_;
+
+ DISALLOW_COPY_AND_ASSIGN(V4L2LegacyH264Accelerator);
+};
+
+} // namespace media
+
+#endif // MEDIA_GPU_V4L2_V4L2_H264_ACCELERATOR_LEGACY_H_
diff --git a/chromium/media/gpu/v4l2/v4l2_image_processor.cc b/chromium/media/gpu/v4l2/v4l2_image_processor.cc
index 38f1c02631e..a215c6195b3 100644
--- a/chromium/media/gpu/v4l2/v4l2_image_processor.cc
+++ b/chromium/media/gpu/v4l2/v4l2_image_processor.cc
@@ -172,16 +172,19 @@ std::unique_ptr<V4L2ImageProcessor> V4L2ImageProcessor::Create(
}
const VideoFrameLayout& input_layout = input_config.layout;
+
+ // Use input_config.fourcc as input format if it is specified, i.e. non-zero.
const uint32_t input_format_fourcc =
- V4L2Device::VideoFrameLayoutToV4L2PixFmt(input_layout);
+ input_config.fourcc == ImageProcessor::PortConfig::kUnassignedFourCC
+ ? V4L2Device::VideoFrameLayoutToV4L2PixFmt(input_layout)
+ : input_config.fourcc;
if (!input_format_fourcc) {
VLOGF(1) << "Invalid VideoFrameLayout: " << input_layout;
return nullptr;
}
if (!device->Open(V4L2Device::Type::kImageProcessor, input_format_fourcc)) {
- VLOGF(1) << "Failed to open device for input format: "
- << VideoPixelFormatToString(input_layout.format())
- << " fourcc: " << FourccToString(input_format_fourcc);
+ VLOGF(1) << "Failed to open device with input fourcc: "
+ << FourccToString(input_format_fourcc);
return nullptr;
}
diff --git a/chromium/media/gpu/v4l2/v4l2_jpeg_encode_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_jpeg_encode_accelerator.cc
index d5342b5ca7c..d109b73b99b 100644
--- a/chromium/media/gpu/v4l2/v4l2_jpeg_encode_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_jpeg_encode_accelerator.cc
@@ -71,7 +71,7 @@ V4L2JpegEncodeAccelerator::JobRecord::JobRecord(
output_frame(output_frame),
quality(quality),
task_id(task_id),
- output_shm(base::SharedMemoryHandle(), 0, true), // dummy
+ output_shm(base::subtle::PlatformSharedMemoryRegion(), 0, true), // dummy
exif_shm(nullptr) {
if (exif_buffer) {
exif_shm.reset(new UnalignedSharedMemory(exif_buffer->TakeRegion(),
@@ -1457,8 +1457,7 @@ bool V4L2JpegEncodeAccelerator::EncodedInstanceDmaBuf::EnqueueInputRecord() {
qbuf.m.planes[i].m.fd = (i < fds.size()) ? fds[i].get() : fds.back().get();
qbuf.m.planes[i].data_offset = planes[i].offset;
qbuf.m.planes[i].bytesused += qbuf.m.planes[i].data_offset;
- qbuf.m.planes[i].length =
- planes[i].size + qbuf.m.planes[i].data_offset;
+ qbuf.m.planes[i].length = planes[i].size + qbuf.m.planes[i].data_offset;
}
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
diff --git a/chromium/media/gpu/v4l2/v4l2_mjpeg_decode_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_mjpeg_decode_accelerator.cc
index 7262689886a..4d56fdf0267 100644
--- a/chromium/media/gpu/v4l2/v4l2_mjpeg_decode_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_mjpeg_decode_accelerator.cc
@@ -9,24 +9,38 @@
#include <string.h>
#include <sys/mman.h>
+#include <array>
#include <memory>
+#include <utility>
#include "base/big_endian.h"
#include "base/bind.h"
+#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
+#include "base/files/scoped_file.h"
#include "base/numerics/safe_conversions.h"
+#include "base/process/process_metrics.h"
#include "base/stl_util.h"
#include "base/threading/thread_task_runner_handle.h"
+#include "media/base/bitstream_buffer.h"
+#include "media/base/unaligned_shared_memory.h"
+#include "media/base/video_frame.h"
+#include "media/base/video_types.h"
+#include "media/gpu/format_utils.h"
+#include "media/gpu/linux/platform_video_frame_utils.h"
#include "media/gpu/macros.h"
+#include "media/gpu/video_frame_mapper.h"
+#include "media/gpu/video_frame_mapper_factory.h"
#include "media/parsers/jpeg_parser.h"
#include "third_party/libyuv/include/libyuv.h"
-#define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value, type_name) \
- do { \
- if (device_->Ioctl(type, arg) != 0) { \
- VPLOGF(1) << "ioctl() failed: " << type_name; \
- PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); \
- return value; \
- } \
+#define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value, type_name) \
+ do { \
+ if (device_->Ioctl(type, arg) != 0) { \
+ VPLOGF(1) << "ioctl() failed: " << type_name; \
+ PostNotifyError(kInvalidTaskId, PLATFORM_FAILURE); \
+ return value; \
+ } \
} while (0)
#define IOCTL_OR_ERROR_RETURN(type, arg) \
@@ -35,12 +49,12 @@
#define IOCTL_OR_ERROR_RETURN_FALSE(type, arg) \
IOCTL_OR_ERROR_RETURN_VALUE(type, arg, false, #type)
-#define IOCTL_OR_LOG_ERROR(type, arg) \
- do { \
- if (device_->Ioctl(type, arg) != 0) { \
- VPLOGF(1) << "ioctl() failed: " << #type; \
- PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE); \
- } \
+#define IOCTL_OR_LOG_ERROR(type, arg) \
+ do { \
+ if (device_->Ioctl(type, arg) != 0) { \
+ VPLOGF(1) << "ioctl() failed: " << #type; \
+ PostNotifyError(kInvalidTaskId, PLATFORM_FAILURE); \
+ } \
} while (0)
#define READ_U8_OR_RETURN_FALSE(reader, out) \
@@ -120,6 +134,118 @@ const uint8_t kDefaultDhtSeg[] = {
0xD5, 0xD6, 0xD7, 0xD8, 0xD9, 0xDA, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7,
0xE8, 0xE9, 0xEA, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0xF7, 0xF8, 0xF9, 0xFA};
+class V4L2MjpegDecodeAccelerator::JobRecord {
+ public:
+ virtual ~JobRecord() = default;
+
+ // Task ID passed from Decode() call.
+ virtual int32_t task_id() const = 0;
+ // Input buffer size.
+ virtual size_t size() const = 0;
+ // Input buffer offset.
+ virtual off_t offset() const = 0;
+ // Maps input buffer.
+ virtual bool map() = 0;
+ // Pointer to the input content. Only valid if map() is already called.
+ virtual const void* memory() const = 0;
+
+ // Output frame buffer.
+ virtual const scoped_refptr<VideoFrame>& out_frame() = 0;
+
+ protected:
+ JobRecord() = default;
+
+ DISALLOW_COPY_AND_ASSIGN(JobRecord);
+};
+
+// Job record when the client uses BitstreamBuffer as input in Decode().
+class JobRecordBitstreamBuffer : public V4L2MjpegDecodeAccelerator::JobRecord {
+ public:
+ JobRecordBitstreamBuffer(BitstreamBuffer bitstream_buffer,
+ scoped_refptr<VideoFrame> video_frame)
+ : task_id_(bitstream_buffer.id()),
+ shm_(bitstream_buffer.TakeRegion(),
+ bitstream_buffer.size(),
+ false /* read_only */),
+ offset_(bitstream_buffer.offset()),
+ out_frame_(video_frame) {}
+
+ int32_t task_id() const override { return task_id_; }
+ size_t size() const override { return shm_.size(); }
+ off_t offset() const override { return offset_; }
+ bool map() override { return shm_.MapAt(offset(), size()); }
+ const void* memory() const override { return shm_.memory(); }
+
+ const scoped_refptr<VideoFrame>& out_frame() override { return out_frame_; }
+
+ private:
+ int32_t task_id_;
+ UnalignedSharedMemory shm_;
+ off_t offset_;
+ scoped_refptr<VideoFrame> out_frame_;
+
+ DISALLOW_COPY_AND_ASSIGN(JobRecordBitstreamBuffer);
+};
+
+// Job record when the client uses DMA buffer as input in Decode().
+class JobRecordDmaBuf : public V4L2MjpegDecodeAccelerator::JobRecord {
+ public:
+ JobRecordDmaBuf(int32_t task_id,
+ base::ScopedFD src_dmabuf_fd,
+ size_t src_size,
+ off_t src_offset,
+ scoped_refptr<VideoFrame> dst_frame)
+ : task_id_(task_id),
+ dmabuf_fd_(std::move(src_dmabuf_fd)),
+ size_(src_size),
+ offset_(src_offset),
+ mapped_addr_(nullptr),
+ out_frame_(std::move(dst_frame)) {}
+
+ ~JobRecordDmaBuf() {
+ if (mapped_addr_) {
+ const int ret = munmap(mapped_addr_, size());
+ DPCHECK(ret == 0);
+ }
+ }
+
+ int32_t task_id() const override { return task_id_; }
+ size_t size() const override { return size_; }
+ off_t offset() const override { return offset_; }
+
+ bool map() override {
+ if (mapped_addr_)
+ return true;
+ // The DMA-buf FD should be mapped as read-only since it may only have read
+    // permission, e.g. when it comes from the camera driver.
+ DCHECK(dmabuf_fd_.is_valid());
+ DCHECK_GT(size(), 0u);
+ void* addr = mmap(nullptr, size(), PROT_READ, MAP_SHARED, dmabuf_fd_.get(),
+ offset());
+ if (addr == MAP_FAILED)
+ return false;
+ mapped_addr_ = addr;
+ return true;
+ }
+
+ const void* memory() const override {
+ DCHECK(mapped_addr_);
+ return mapped_addr_;
+ }
+
+ const scoped_refptr<VideoFrame>& out_frame() override { return out_frame_; }
+
+ private:
+ int32_t task_id_;
+ base::ScopedFD dmabuf_fd_;
+ size_t size_;
+ off_t offset_;
+ void* mapped_addr_;
+ scoped_refptr<VideoFrame> out_frame_;
+
+ DISALLOW_COPY_AND_ASSIGN(JobRecordDmaBuf);
+};
+
V4L2MjpegDecodeAccelerator::BufferRecord::BufferRecord() : at_device(false) {
memset(address, 0, sizeof(address));
memset(length, 0, sizeof(length));
@@ -127,18 +253,6 @@ V4L2MjpegDecodeAccelerator::BufferRecord::BufferRecord() : at_device(false) {
V4L2MjpegDecodeAccelerator::BufferRecord::~BufferRecord() {}
-V4L2MjpegDecodeAccelerator::JobRecord::JobRecord(
- BitstreamBuffer bitstream_buffer,
- scoped_refptr<VideoFrame> video_frame)
- : bitstream_buffer_id(bitstream_buffer.id()),
- shm(bitstream_buffer.TakeRegion(),
- bitstream_buffer.size(),
- false /* read_only */),
- offset(bitstream_buffer.offset()),
- out_frame(video_frame) {}
-
-V4L2MjpegDecodeAccelerator::JobRecord::~JobRecord() {}
-
V4L2MjpegDecodeAccelerator::V4L2MjpegDecodeAccelerator(
const scoped_refptr<V4L2Device>& device,
const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner)
@@ -183,24 +297,21 @@ void V4L2MjpegDecodeAccelerator::DestroyTask() {
DestroyOutputBuffers();
}
-void V4L2MjpegDecodeAccelerator::VideoFrameReady(int32_t bitstream_buffer_id) {
+void V4L2MjpegDecodeAccelerator::VideoFrameReady(int32_t task_id) {
DCHECK(child_task_runner_->BelongsToCurrentThread());
- client_->VideoFrameReady(bitstream_buffer_id);
+ client_->VideoFrameReady(task_id);
}
-void V4L2MjpegDecodeAccelerator::NotifyError(int32_t bitstream_buffer_id,
- Error error) {
+void V4L2MjpegDecodeAccelerator::NotifyError(int32_t task_id, Error error) {
DCHECK(child_task_runner_->BelongsToCurrentThread());
- VLOGF(1) << "Notifying of error " << error << " for buffer id "
- << bitstream_buffer_id;
- client_->NotifyError(bitstream_buffer_id, error);
+ VLOGF(1) << "Notifying of error " << error << " for task id " << task_id;
+ client_->NotifyError(task_id, error);
}
-void V4L2MjpegDecodeAccelerator::PostNotifyError(int32_t bitstream_buffer_id,
- Error error) {
+void V4L2MjpegDecodeAccelerator::PostNotifyError(int32_t task_id, Error error) {
child_task_runner_->PostTask(
FROM_HERE, base::BindOnce(&V4L2MjpegDecodeAccelerator::NotifyError,
- weak_ptr_, bitstream_buffer_id, error));
+ weak_ptr_, task_id, error));
}
bool V4L2MjpegDecodeAccelerator::Initialize(
@@ -262,18 +373,82 @@ void V4L2MjpegDecodeAccelerator::Decode(BitstreamBuffer bitstream_buffer,
return;
}
- if (video_frame->format() != PIXEL_FORMAT_I420) {
- PostNotifyError(bitstream_buffer.id(), UNSUPPORTED_JPEG);
+ // Validate output video frame.
+ if (!video_frame->IsMappable() && !video_frame->HasDmaBufs()) {
+ VLOGF(1) << "Unsupported output frame storage type";
+ PostNotifyError(bitstream_buffer.id(), INVALID_ARGUMENT);
+ return;
+ }
+ if ((video_frame->visible_rect().width() & 1) ||
+ (video_frame->visible_rect().height() & 1)) {
+ VLOGF(1) << "Output frame visible size has odd dimension";
+ PostNotifyError(bitstream_buffer.id(), PLATFORM_FAILURE);
+ return;
+ }
+
+ std::unique_ptr<JobRecord> job_record(new JobRecordBitstreamBuffer(
+ std::move(bitstream_buffer), std::move(video_frame)));
+
+ decoder_task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&V4L2MjpegDecodeAccelerator::DecodeTask,
+ base::Unretained(this), std::move(job_record)));
+}
+
+void V4L2MjpegDecodeAccelerator::Decode(
+ int32_t task_id,
+ base::ScopedFD src_dmabuf_fd,
+ size_t src_size,
+ off_t src_offset,
+ scoped_refptr<media::VideoFrame> dst_frame) {
+ DVLOGF(4) << "task_id=" << task_id;
+ DCHECK(io_task_runner_->BelongsToCurrentThread());
+
+ if (task_id < 0) {
+ VLOGF(1) << "Invalid task id: " << task_id;
+ PostNotifyError(task_id, INVALID_ARGUMENT);
+ return;
+ }
+
+ // Validate input arguments.
+ if (!src_dmabuf_fd.is_valid()) {
+ VLOGF(1) << "Invalid input buffer FD";
+ PostNotifyError(task_id, INVALID_ARGUMENT);
+ return;
+ }
+ if (src_size == 0) {
+ VLOGF(1) << "Input buffer size is zero";
+ PostNotifyError(task_id, INVALID_ARGUMENT);
+ return;
+ }
+ const size_t page_size = base::GetPageSize();
+ if (src_offset < 0 || src_offset % page_size != 0) {
+ VLOGF(1) << "Input buffer offset (" << src_offset
+ << ") should be non-negative and aligned to page size ("
+ << page_size << ")";
+ PostNotifyError(task_id, INVALID_ARGUMENT);
+ return;
+ }
+
+ // Validate output video frame.
+ if (!dst_frame->IsMappable() && !dst_frame->HasDmaBufs()) {
+ VLOGF(1) << "Unsupported output frame storage type";
+ PostNotifyError(task_id, INVALID_ARGUMENT);
+ return;
+ }
+ if ((dst_frame->visible_rect().width() & 1) ||
+ (dst_frame->visible_rect().height() & 1)) {
+ VLOGF(1) << "Output frame visible size has odd dimension";
+ PostNotifyError(task_id, PLATFORM_FAILURE);
return;
}
std::unique_ptr<JobRecord> job_record(
- new JobRecord(std::move(bitstream_buffer), std::move(video_frame)));
+ new JobRecordDmaBuf(task_id, std::move(src_dmabuf_fd), src_size,
+ src_offset, std::move(dst_frame)));
decoder_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&V4L2MjpegDecodeAccelerator::DecodeTask,
- base::Unretained(this), base::Passed(&job_record)));
+ FROM_HERE, base::BindOnce(&V4L2MjpegDecodeAccelerator::DecodeTask,
+ base::Unretained(this), std::move(job_record)));
}
// static
@@ -288,9 +463,9 @@ bool V4L2MjpegDecodeAccelerator::IsSupported() {
void V4L2MjpegDecodeAccelerator::DecodeTask(
std::unique_ptr<JobRecord> job_record) {
DCHECK(decoder_task_runner_->BelongsToCurrentThread());
- if (!job_record->shm.MapAt(job_record->offset, job_record->shm.size())) {
- VPLOGF(1) << "could not map bitstream_buffer";
- PostNotifyError(job_record->bitstream_buffer_id, UNREADABLE_INPUT);
+ if (!job_record->map()) {
+ VPLOGF(1) << "could not map input buffer";
+ PostNotifyError(job_record->task_id(), UNREADABLE_INPUT);
return;
}
input_jobs_.push(std::move(job_record));
@@ -313,8 +488,9 @@ bool V4L2MjpegDecodeAccelerator::ShouldRecreateInputBuffers() {
JobRecord* job_record = input_jobs_.front().get();
   // Check that the input buffer size is large enough.
+ // TODO(kamesan): use safe arithmetic to handle overflows.
return (input_buffer_map_.empty() ||
- (job_record->shm.size() + sizeof(kDefaultDhtSeg)) >
+ (job_record->size() + sizeof(kDefaultDhtSeg)) >
input_buffer_map_.front().length[0]);
}
@@ -359,7 +535,8 @@ bool V4L2MjpegDecodeAccelerator::CreateInputBuffers() {
   // The input image may be missing its Huffman table. We didn't parse the
   // image beforehand, so we allocate extra space to avoid running out of memory.
   // Reserve twice the size to avoid recreating the input buffer frequently.
- size_t reserve_size = (job_record->shm.size() + sizeof(kDefaultDhtSeg)) * 2;
+ // TODO(kamesan): use safe arithmetic to handle overflows.
+ size_t reserve_size = (job_record->size() + sizeof(kDefaultDhtSeg)) * 2;
struct v4l2_format format;
memset(&format, 0, sizeof(format));
format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
@@ -402,7 +579,7 @@ bool V4L2MjpegDecodeAccelerator::CreateInputBuffers() {
MAP_SHARED, planes[j].m.mem_offset);
if (address == MAP_FAILED) {
VPLOGF(1) << "mmap() failed";
- PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
+ PostNotifyError(kInvalidTaskId, PLATFORM_FAILURE);
return false;
}
input_buffer_map_[i].address[j] = address;
@@ -421,12 +598,12 @@ bool V4L2MjpegDecodeAccelerator::CreateOutputBuffers() {
JobRecord* job_record = running_jobs_.front().get();
size_t frame_size = VideoFrame::AllocationSize(
- PIXEL_FORMAT_I420, job_record->out_frame->coded_size());
+ PIXEL_FORMAT_I420, job_record->out_frame()->coded_size());
struct v4l2_format format;
memset(&format, 0, sizeof(format));
format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
- format.fmt.pix_mp.width = job_record->out_frame->coded_size().width();
- format.fmt.pix_mp.height = job_record->out_frame->coded_size().height();
+ format.fmt.pix_mp.width = job_record->out_frame()->coded_size().width();
+ format.fmt.pix_mp.height = job_record->out_frame()->coded_size().height();
format.fmt.pix_mp.num_planes = 1;
format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_YUV420;
format.fmt.pix_mp.plane_fmt[0].sizeimage = frame_size;
@@ -436,16 +613,15 @@ bool V4L2MjpegDecodeAccelerator::CreateOutputBuffers() {
output_buffer_coded_size_.SetSize(format.fmt.pix_mp.width,
format.fmt.pix_mp.height);
output_buffer_num_planes_ = format.fmt.pix_mp.num_planes;
- for (size_t i = 0; i < output_buffer_num_planes_; ++i) {
- output_bytesperlines_[i] = format.fmt.pix_mp.plane_fmt[i].bytesperline;
- }
+ for (size_t i = 0; i < output_buffer_num_planes_; ++i)
+ output_strides_[i] = format.fmt.pix_mp.plane_fmt[i].bytesperline;
VideoPixelFormat output_format =
V4L2Device::V4L2PixFmtToVideoPixelFormat(output_buffer_pixelformat_);
if (output_format == PIXEL_FORMAT_UNKNOWN) {
VLOGF(1) << "unknown V4L2 pixel format: "
<< FourccToString(output_buffer_pixelformat_);
- PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
+ PostNotifyError(kInvalidTaskId, PLATFORM_FAILURE);
return false;
}
@@ -489,7 +665,7 @@ bool V4L2MjpegDecodeAccelerator::CreateOutputBuffers() {
MAP_SHARED, planes[j].m.mem_offset);
if (address == MAP_FAILED) {
VPLOGF(1) << "mmap() failed";
- PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
+ PostNotifyError(kInvalidTaskId, PLATFORM_FAILURE);
return false;
}
output_buffer_map_[i].address[j] = address;
@@ -567,7 +743,7 @@ void V4L2MjpegDecodeAccelerator::DevicePollTask() {
bool event_pending;
if (!device_->Poll(true, &event_pending)) {
VPLOGF(1) << "Poll device error.";
- PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
+ PostNotifyError(kInvalidTaskId, PLATFORM_FAILURE);
return;
}
@@ -598,7 +774,7 @@ bool V4L2MjpegDecodeAccelerator::DequeueSourceChangeEvent() {
} else {
VLOGF(1) << "dequeue event failed.";
}
- PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
+ PostNotifyError(kInvalidTaskId, PLATFORM_FAILURE);
return false;
}
@@ -675,75 +851,153 @@ void V4L2MjpegDecodeAccelerator::EnqueueOutput() {
bool V4L2MjpegDecodeAccelerator::ConvertOutputImage(
const BufferRecord& output_buffer,
- VideoFrame* dst_frame) {
- uint8_t* dst_y = dst_frame->data(VideoFrame::kYPlane);
- uint8_t* dst_u = dst_frame->data(VideoFrame::kUPlane);
- uint8_t* dst_v = dst_frame->data(VideoFrame::kVPlane);
- size_t dst_y_stride = dst_frame->stride(VideoFrame::kYPlane);
- size_t dst_u_stride = dst_frame->stride(VideoFrame::kUPlane);
- size_t dst_v_stride = dst_frame->stride(VideoFrame::kVPlane);
-
- // It is assumed that |dst_frame| is backed by enough memory that it is safe
- // to store an I420 frame of |dst_width|x|dst_height| in it using the data
- // pointers and strides from above.
- int dst_width = dst_frame->coded_size().width();
- int dst_height = dst_frame->coded_size().height();
-
- // The video frame's coded dimensions should be even for the I420 format.
- DCHECK_EQ(0, dst_width % 2);
- DCHECK_EQ(0, dst_height % 2);
-
+ scoped_refptr<VideoFrame> dst_frame) {
// The coded size of the hardware buffer should be at least as large as the
- // video frame's coded size.
+ // video frame's visible size.
+ const int dst_width = dst_frame->visible_rect().width();
+ const int dst_height = dst_frame->visible_rect().height();
DCHECK_GE(output_buffer_coded_size_.width(), dst_width);
DCHECK_GE(output_buffer_coded_size_.height(), dst_height);
- if (output_buffer_num_planes_ == 1) {
- // Use ConvertToI420 to convert all splane buffers.
- // If the source format is I420, ConvertToI420 will simply copy the frame.
- VideoPixelFormat format =
+ // Dmabuf-backed frame needs to be mapped for SW access.
+ if (dst_frame->HasDmaBufs()) {
+ std::unique_ptr<VideoFrameMapper> frame_mapper =
+ VideoFrameMapperFactory::CreateMapper(dst_frame->format());
+ if (!frame_mapper) {
+ VLOGF(1) << "Failed to create video frame mapper";
+ return false;
+ }
+ dst_frame = frame_mapper->Map(std::move(dst_frame));
+ if (!dst_frame) {
+ VLOGF(1) << "Failed to map DMA-buf video frame";
+ return false;
+ }
+ }
+
+ // Extract destination pointers and strides.
+ std::array<uint8_t*, VideoFrame::kMaxPlanes> dst_ptrs{};
+ std::array<int, VideoFrame::kMaxPlanes> dst_strides{};
+ for (size_t i = 0; i < dst_frame->layout().num_planes(); i++) {
+ dst_ptrs[i] = dst_frame->visible_data(i);
+ dst_strides[i] = base::checked_cast<int>(dst_frame->stride(i));
+ }
+
+  // Use ConvertToI420 to convert all single-plane formats to I420.
+ if (output_buffer_num_planes_ == 1 &&
+ dst_frame->format() == PIXEL_FORMAT_I420) {
+ DCHECK_EQ(dst_frame->layout().num_planes(), 3u);
+ const VideoPixelFormat format =
V4L2Device::V4L2PixFmtToVideoPixelFormat(output_buffer_pixelformat_);
- size_t src_size =
+ if (format == PIXEL_FORMAT_UNKNOWN) {
+ VLOGF(1) << "Unknown V4L2 format: "
+ << FourccToString(output_buffer_pixelformat_);
+ return false;
+ }
+ const size_t src_size =
VideoFrame::AllocationSize(format, output_buffer_coded_size_);
if (libyuv::ConvertToI420(
- static_cast<uint8_t*>(output_buffer.address[0]), src_size, dst_y,
- dst_y_stride, dst_u, dst_u_stride, dst_v, dst_v_stride, 0, 0,
+ static_cast<uint8_t*>(output_buffer.address[0]), src_size,
+ dst_ptrs[0], dst_strides[0], dst_ptrs[1], dst_strides[1],
+ dst_ptrs[2], dst_strides[2], 0 /*x*/, 0 /*y*/,
output_buffer_coded_size_.width(),
output_buffer_coded_size_.height(), dst_width, dst_height,
libyuv::kRotate0, output_buffer_pixelformat_)) {
VLOGF(1) << "ConvertToI420 failed. Source format: "
- << output_buffer_pixelformat_;
+ << FourccToString(output_buffer_pixelformat_);
return false;
}
- } else if (output_buffer_pixelformat_ == V4L2_PIX_FMT_YUV420M ||
- output_buffer_pixelformat_ == V4L2_PIX_FMT_YUV422M) {
- DCHECK(output_buffer_num_planes_ == 3);
- uint8_t* src_y = static_cast<uint8_t*>(output_buffer.address[0]);
- uint8_t* src_u = static_cast<uint8_t*>(output_buffer.address[1]);
- uint8_t* src_v = static_cast<uint8_t*>(output_buffer.address[2]);
- size_t src_y_stride = output_bytesperlines_[0];
- size_t src_u_stride = output_bytesperlines_[1];
- size_t src_v_stride = output_bytesperlines_[2];
- if (output_buffer_pixelformat_ == V4L2_PIX_FMT_YUV420M) {
- if (libyuv::I420Copy(src_y, src_y_stride, src_u, src_u_stride, src_v,
- src_v_stride, dst_y, dst_y_stride, dst_u,
- dst_u_stride, dst_v, dst_v_stride, dst_width,
- dst_height)) {
- VLOGF(1) << "I420Copy failed";
+ return true;
+ }
+
+ // Extract source pointers and strides.
+ std::array<const uint8_t*, VideoFrame::kMaxPlanes> src_ptrs{};
+ std::array<int, VideoFrame::kMaxPlanes> src_strides{};
+ for (size_t i = 0; i < output_buffer_num_planes_; i++) {
+ src_ptrs[i] = static_cast<uint8_t*>(output_buffer.address[i]);
+ src_strides[i] = output_strides_[i];
+ }
+
+ if (output_buffer_pixelformat_ == V4L2_PIX_FMT_YUV420M) {
+ DCHECK_EQ(output_buffer_num_planes_, 3u);
+ switch (dst_frame->format()) {
+ case PIXEL_FORMAT_I420:
+ DCHECK_EQ(dst_frame->layout().num_planes(), 3u);
+ if (libyuv::I420Copy(src_ptrs[0], src_strides[0], src_ptrs[1],
+ src_strides[1], src_ptrs[2], src_strides[2],
+ dst_ptrs[0], dst_strides[0], dst_ptrs[1],
+ dst_strides[1], dst_ptrs[2], dst_strides[2],
+ dst_width, dst_height)) {
+ VLOGF(1) << "I420Copy failed";
+ return false;
+ }
+ break;
+ case PIXEL_FORMAT_YV12:
+ DCHECK_EQ(dst_frame->layout().num_planes(), 3u);
+ if (libyuv::I420Copy(src_ptrs[0], src_strides[0], src_ptrs[1],
+ src_strides[1], src_ptrs[2], src_strides[2],
+ dst_ptrs[0], dst_strides[0], dst_ptrs[2],
+ dst_strides[2], dst_ptrs[1], dst_strides[1],
+ dst_width, dst_height)) {
+ VLOGF(1) << "I420Copy failed";
+ return false;
+ }
+ break;
+ case PIXEL_FORMAT_NV12:
+ DCHECK_EQ(dst_frame->layout().num_planes(), 2u);
+ if (libyuv::I420ToNV12(src_ptrs[0], src_strides[0], src_ptrs[1],
+ src_strides[1], src_ptrs[2], src_strides[2],
+ dst_ptrs[0], dst_strides[0], dst_ptrs[1],
+ dst_strides[1], dst_width, dst_height)) {
+ VLOGF(1) << "I420ToNV12 failed";
+ return false;
+ }
+ break;
+ default:
+ VLOGF(1) << "Can't convert image from I420 to " << dst_frame->format();
return false;
- }
- } else { // output_buffer_pixelformat_ == V4L2_PIX_FMT_YUV422M
- if (libyuv::I422ToI420(src_y, src_y_stride, src_u, src_u_stride, src_v,
- src_v_stride, dst_y, dst_y_stride, dst_u,
- dst_u_stride, dst_v, dst_v_stride, dst_width,
- dst_height)) {
- VLOGF(1) << "I422ToI420 failed";
+ }
+ } else if (output_buffer_pixelformat_ == V4L2_PIX_FMT_YUV422M) {
+ DCHECK_EQ(output_buffer_num_planes_, 3u);
+ switch (dst_frame->format()) {
+ case PIXEL_FORMAT_I420:
+ DCHECK_EQ(dst_frame->layout().num_planes(), 3u);
+ if (libyuv::I422ToI420(src_ptrs[0], src_strides[0], src_ptrs[1],
+ src_strides[1], src_ptrs[2], src_strides[2],
+ dst_ptrs[0], dst_strides[0], dst_ptrs[1],
+ dst_strides[1], dst_ptrs[2], dst_strides[2],
+ dst_width, dst_height)) {
+ VLOGF(1) << "I422ToI420 failed";
+ return false;
+ }
+ break;
+ case PIXEL_FORMAT_YV12:
+ DCHECK_EQ(dst_frame->layout().num_planes(), 3u);
+ if (libyuv::I422ToI420(src_ptrs[0], src_strides[0], src_ptrs[1],
+ src_strides[1], src_ptrs[2], src_strides[2],
+ dst_ptrs[0], dst_strides[0], dst_ptrs[2],
+ dst_strides[2], dst_ptrs[1], dst_strides[1],
+ dst_width, dst_height)) {
+ VLOGF(1) << "I422ToI420 failed";
+ return false;
+ }
+ break;
+ case PIXEL_FORMAT_NV12:
+ DCHECK_EQ(dst_frame->layout().num_planes(), 2u);
+ if (libyuv::I422ToNV21(src_ptrs[0], src_strides[0], src_ptrs[2],
+ src_strides[2], src_ptrs[1], src_strides[1],
+ dst_ptrs[0], dst_strides[0], dst_ptrs[1],
+ dst_strides[1], dst_width, dst_height)) {
+ VLOGF(1) << "I422ToNV21 failed";
+ return false;
+ }
+ break;
+ default:
+ VLOGF(1) << "Can't convert image from I422 to " << dst_frame->format();
return false;
- }
}
} else {
VLOGF(1) << "Unsupported source buffer format: "
- << output_buffer_pixelformat_;
+ << FourccToString(output_buffer_pixelformat_);
return false;
}
return true;
@@ -770,7 +1024,7 @@ void V4L2MjpegDecodeAccelerator::Dequeue() {
break;
}
VPLOGF(1) << "ioctl() failed: input buffer VIDIOC_DQBUF failed.";
- PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
+ PostNotifyError(kInvalidTaskId, PLATFORM_FAILURE);
return;
}
BufferRecord& input_record = input_buffer_map_[dqbuf.index];
@@ -780,7 +1034,7 @@ void V4L2MjpegDecodeAccelerator::Dequeue() {
if (dqbuf.flags & V4L2_BUF_FLAG_ERROR) {
VLOGF(1) << "Error in dequeued input buffer.";
- PostNotifyError(kInvalidBitstreamBufferId, UNSUPPORTED_JPEG);
+ PostNotifyError(kInvalidTaskId, UNSUPPORTED_JPEG);
running_jobs_.pop();
}
}
@@ -807,7 +1061,7 @@ void V4L2MjpegDecodeAccelerator::Dequeue() {
break;
}
VPLOGF(1) << "ioctl() failed: output buffer VIDIOC_DQBUF failed.";
- PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
+ PostNotifyError(kInvalidTaskId, PLATFORM_FAILURE);
return;
}
BufferRecord& output_record = output_buffer_map_[dqbuf.index];
@@ -821,22 +1075,26 @@ void V4L2MjpegDecodeAccelerator::Dequeue() {
if (dqbuf.flags & V4L2_BUF_FLAG_ERROR) {
VLOGF(1) << "Error in dequeued output buffer.";
- PostNotifyError(kInvalidBitstreamBufferId, UNSUPPORTED_JPEG);
+ PostNotifyError(kInvalidTaskId, UNSUPPORTED_JPEG);
} else {
// Copy the decoded data from output buffer to the buffer provided by the
// client. Do format conversion when output format is not
// V4L2_PIX_FMT_YUV420.
- if (!ConvertOutputImage(output_record, job_record->out_frame.get())) {
- PostNotifyError(job_record->bitstream_buffer_id, PLATFORM_FAILURE);
+ if (!ConvertOutputImage(output_record, job_record->out_frame())) {
+ PostNotifyError(job_record->task_id(), PLATFORM_FAILURE);
return;
}
DVLOGF(4) << "Decoding finished, returning bitstream buffer, id="
- << job_record->bitstream_buffer_id;
+ << job_record->task_id();
+ // Destroy |job_record| before posting VideoFrameReady to the client to
+        // prevent a race condition on the buffers.
+ const int32_t task_id = job_record->task_id();
+ job_record.reset();
child_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&V4L2MjpegDecodeAccelerator::VideoFrameReady,
- weak_ptr_, job_record->bitstream_buffer_id));
+ weak_ptr_, task_id));
}
}
}
@@ -933,9 +1191,9 @@ bool V4L2MjpegDecodeAccelerator::EnqueueInputRecord() {
DCHECK(!input_record.at_device);
// It will add default huffman segment if it's missing.
- if (!AddHuffmanTable(job_record->shm.memory(), job_record->shm.size(),
+ if (!AddHuffmanTable(job_record->memory(), job_record->size(),
input_record.address[0], input_record.length[0])) {
- PostNotifyError(job_record->bitstream_buffer_id, PARSE_JPEG_FAILED);
+ PostNotifyError(job_record->task_id(), PARSE_JPEG_FAILED);
return false;
}
@@ -952,11 +1210,10 @@ bool V4L2MjpegDecodeAccelerator::EnqueueInputRecord() {
qbuf.m.planes = planes;
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
input_record.at_device = true;
+
+ DVLOGF(3) << "enqueued frame id=" << job_record->task_id() << " to device.";
running_jobs_.push(std::move(job_record));
free_input_buffers_.pop_back();
-
- DVLOGF(3) << "enqueued frame id=" << job_record->bitstream_buffer_id
- << " to device.";
return true;
}
@@ -990,7 +1247,7 @@ void V4L2MjpegDecodeAccelerator::StartDevicePoll() {
if (!device_poll_thread_.Start()) {
VLOGF(1) << "Device thread failed to start";
- PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
+ PostNotifyError(kInvalidTaskId, PLATFORM_FAILURE);
return;
}
device_poll_task_runner_ = device_poll_thread_.task_runner();
@@ -1001,7 +1258,7 @@ bool V4L2MjpegDecodeAccelerator::StopDevicePoll() {
// Signal the DevicePollTask() to stop, and stop the device poll thread.
if (!device_->SetDevicePollInterrupt()) {
VLOGF(1) << "SetDevicePollInterrupt failed.";
- PostNotifyError(kInvalidBitstreamBufferId, PLATFORM_FAILURE);
+ PostNotifyError(kInvalidTaskId, PLATFORM_FAILURE);
return false;
}
diff --git a/chromium/media/gpu/v4l2/v4l2_mjpeg_decode_accelerator.h b/chromium/media/gpu/v4l2/v4l2_mjpeg_decode_accelerator.h
index 3cdc97ee20b..76ee0afd9bc 100644
--- a/chromium/media/gpu/v4l2/v4l2_mjpeg_decode_accelerator.h
+++ b/chromium/media/gpu/v4l2/v4l2_mjpeg_decode_accelerator.h
@@ -18,17 +18,24 @@
#include "base/single_thread_task_runner.h"
#include "base/threading/thread.h"
#include "components/chromeos_camera/mjpeg_decode_accelerator.h"
-#include "media/base/bitstream_buffer.h"
-#include "media/base/unaligned_shared_memory.h"
-#include "media/base/video_frame.h"
#include "media/gpu/media_gpu_export.h"
#include "media/gpu/v4l2/v4l2_device.h"
namespace media {
+class VideoFrame;
+
class MEDIA_GPU_EXPORT V4L2MjpegDecodeAccelerator
: public chromeos_camera::MjpegDecodeAccelerator {
public:
+ // Job record. Jobs are processed in a FIFO order. This is separate from
+ // BufferRecord of input, because a BufferRecord of input may be returned
+ // before we dequeue the corresponding output buffer. It can't always be
+ // associated with a BufferRecord of output immediately either, because at
+ // the time of submission we may not have one available (and don't need one
+ // to submit input to the device).
+ class JobRecord;
+
V4L2MjpegDecodeAccelerator(
const scoped_refptr<V4L2Device>& device,
const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner);
@@ -39,6 +46,11 @@ class MEDIA_GPU_EXPORT V4L2MjpegDecodeAccelerator
chromeos_camera::MjpegDecodeAccelerator::Client* client) override;
void Decode(BitstreamBuffer bitstream_buffer,
scoped_refptr<VideoFrame> video_frame) override;
+ void Decode(int32_t task_id,
+ base::ScopedFD src_dmabuf_fd,
+ size_t src_size,
+ off_t src_offset,
+ scoped_refptr<media::VideoFrame> dst_frame) override;
bool IsSupported() override;
private:
@@ -53,27 +65,6 @@ class MEDIA_GPU_EXPORT V4L2MjpegDecodeAccelerator
bool at_device;
};
- // Job record. Jobs are processed in a FIFO order. This is separate from
- // BufferRecord of input, because a BufferRecord of input may be returned
- // before we dequeue the corresponding output buffer. It can't always be
- // associated with a BufferRecord of output immediately either, because at
- // the time of submission we may not have one available (and don't need one
- // to submit input to the device).
- struct JobRecord {
- JobRecord(BitstreamBuffer bitstream_buffer,
- scoped_refptr<VideoFrame> video_frame);
- ~JobRecord();
-
- // Input image buffer ID.
- int32_t bitstream_buffer_id;
- // Memory mapped from |bitstream_buffer|.
- UnalignedSharedMemory shm;
- // Offset used for shm.
- off_t offset;
- // Output frame buffer.
- scoped_refptr<VideoFrame> out_frame;
- };
-
void EnqueueInput();
void EnqueueOutput();
void Dequeue();
@@ -84,13 +75,12 @@ class MEDIA_GPU_EXPORT V4L2MjpegDecodeAccelerator
void DestroyInputBuffers();
void DestroyOutputBuffers();
- // Convert |output_buffer| to I420 and copy the result to |dst_frame|.
- // The function can convert to I420 from the following formats:
- // - All splane formats that libyuv::ConvertToI420 can handle.
- // - V4L2_PIX_FMT_YUV_420M
- // - V4L2_PIX_FMT_YUV_422M
+ // Convert |output_buffer| to |dst_frame|. The function supports the following
+ // formats:
+ // - All formats that libyuv::ConvertToI420 can handle.
+  // - V4L2_PIX_FMT_YUV420M, V4L2_PIX_FMT_YUV422M to I420, YV12, and NV12.
bool ConvertOutputImage(const BufferRecord& output_buffer,
- VideoFrame* dst_frame);
+ scoped_refptr<VideoFrame> dst_frame);
// Return the number of input/output buffers enqueued to the device.
size_t InputBufferQueuedCount();
@@ -103,9 +93,9 @@ class MEDIA_GPU_EXPORT V4L2MjpegDecodeAccelerator
// Destroy and create output buffers. Return false on error.
bool RecreateOutputBuffers();
- void VideoFrameReady(int32_t bitstream_buffer_id);
- void NotifyError(int32_t bitstream_buffer_id, Error error);
- void PostNotifyError(int32_t bitstream_buffer_id, Error error);
+ void VideoFrameReady(int32_t task_id);
+ void NotifyError(int32_t task_id, Error error);
+ void PostNotifyError(int32_t task_id, Error error);
// Run on |decoder_thread_| to enqueue the coming frame.
void DecodeTask(std::unique_ptr<JobRecord> job_record);
@@ -139,7 +129,9 @@ class MEDIA_GPU_EXPORT V4L2MjpegDecodeAccelerator
// Number of physical planes the output buffers have.
size_t output_buffer_num_planes_;
- size_t output_bytesperlines_[VIDEO_MAX_PLANES];
+
+ // Strides of the output buffers.
+ size_t output_strides_[VIDEO_MAX_PLANES];
// ChildThread's task runner.
scoped_refptr<base::SingleThreadTaskRunner> child_task_runner_;
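
For illustration, a small sketch of the plane-index swap that ConvertOutputImage relies on for YV12 output: libyuv::I420Copy takes the U plane before the V plane, while YV12 stores V at plane index 1 and U at plane index 2, so the destination chroma pointers are passed in swapped index order. The helper name and array-based signature below are assumptions for the example, not code from this patch.

    #include <cstdint>
    #include "third_party/libyuv/include/libyuv.h"

    // Copies an I420 source into destination planes laid out in YV12 order
    // (plane 0 = Y, plane 1 = V, plane 2 = U). Plane arrays are indexed in
    // the planes' memory order.
    bool CopyI420ToYV12Planes(const uint8_t* const src[3],
                              const int src_stride[3],
                              uint8_t* const dst[3],
                              const int dst_stride[3],
                              int width, int height) {
      return libyuv::I420Copy(src[0], src_stride[0], src[1], src_stride[1],
                              src[2], src_stride[2],
                              dst[0], dst_stride[0],
                              dst[2], dst_stride[2],  // U samples -> plane 2.
                              dst[1], dst_stride[1],  // V samples -> plane 1.
                              width, height) == 0;
    }
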
diff --git a/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc
index 070a5dc3b7e..3651fd89b81 100644
--- a/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc
@@ -32,12 +32,17 @@
#include "base/trace_event/trace_event.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/media_switches.h"
+#include "media/base/scopedfd_helper.h"
#include "media/base/unaligned_shared_memory.h"
#include "media/base/video_types.h"
#include "media/gpu/macros.h"
#include "media/gpu/v4l2/v4l2_decode_surface.h"
#include "media/gpu/v4l2/v4l2_h264_accelerator.h"
+#include "media/gpu/v4l2/v4l2_h264_accelerator_legacy.h"
+#include "media/gpu/v4l2/v4l2_image_processor.h"
+#include "media/gpu/v4l2/v4l2_vda_helpers.h"
#include "media/gpu/v4l2/v4l2_vp8_accelerator.h"
+#include "media/gpu/v4l2/v4l2_vp8_accelerator_legacy.h"
#include "media/gpu/v4l2/v4l2_vp9_accelerator.h"
#include "ui/gl/gl_context.h"
#include "ui/gl/gl_image.h"
@@ -76,20 +81,11 @@ const uint32_t V4L2SliceVideoDecodeAccelerator::supported_input_fourccs_[] = {
V4L2_PIX_FMT_H264_SLICE, V4L2_PIX_FMT_VP8_FRAME, V4L2_PIX_FMT_VP9_FRAME,
};
-V4L2SliceVideoDecodeAccelerator::InputRecord::InputRecord()
- : input_id(-1),
- address(nullptr),
- length(0),
- bytes_used(0),
- at_device(false) {}
-
V4L2SliceVideoDecodeAccelerator::OutputRecord::OutputRecord()
- : at_device(false),
- at_client(false),
- num_times_sent_to_client(0),
- picture_id(-1),
+ : picture_id(-1),
texture_id(0),
- cleared(false) {}
+ cleared(false),
+ num_times_sent_to_client(0) {}
V4L2SliceVideoDecodeAccelerator::OutputRecord::OutputRecord(OutputRecord&&) =
default;
@@ -151,10 +147,6 @@ V4L2SliceVideoDecodeAccelerator::V4L2SliceVideoDecodeAccelerator(
device_(device),
decoder_thread_("V4L2SliceVideoDecodeAcceleratorThread"),
device_poll_thread_("V4L2SliceVideoDecodeAcceleratorDevicePollThread"),
- input_streamon_(false),
- input_buffer_queued_count_(0),
- output_streamon_(false),
- output_buffer_queued_count_(0),
video_profile_(VIDEO_CODEC_PROFILE_UNKNOWN),
input_format_fourcc_(0),
output_format_fourcc_(0),
@@ -167,6 +159,8 @@ V4L2SliceVideoDecodeAccelerator::V4L2SliceVideoDecodeAccelerator(
egl_display_(egl_display),
bind_image_cb_(bind_image_cb),
make_context_current_cb_(make_context_current_cb),
+ gl_image_format_fourcc_(0),
+ gl_image_planes_count_(0),
weak_this_factory_(this) {
weak_this_ = weak_this_factory_.GetWeakPtr();
}
@@ -178,7 +172,7 @@ V4L2SliceVideoDecodeAccelerator::~V4L2SliceVideoDecodeAccelerator() {
DCHECK(!decoder_thread_.IsRunning());
DCHECK(!device_poll_thread_.IsRunning());
- DCHECK(input_buffer_map_.empty());
+ DCHECK(requests_.empty());
DCHECK(output_buffer_map_.empty());
}
@@ -249,14 +243,13 @@ bool V4L2SliceVideoDecodeAccelerator::Initialize(const Config& config,
video_profile_ = config.profile;
- // TODO(posciak): This needs to be queried once supported.
input_planes_count_ = 1;
- output_planes_count_ = 1;
input_format_fourcc_ =
V4L2Device::VideoCodecProfileToV4L2PixFmt(video_profile_, true);
- if (!device_->Open(V4L2Device::Type::kDecoder, input_format_fourcc_)) {
+ if (!input_format_fourcc_ ||
+ !device_->Open(V4L2Device::Type::kDecoder, input_format_fourcc_)) {
VLOGF(1) << "Failed to open device for profile: " << config.profile
<< " fourcc: " << FourccToString(input_format_fourcc_);
return false;
@@ -286,12 +279,22 @@ bool V4L2SliceVideoDecodeAccelerator::Initialize(const Config& config,
}
if (video_profile_ >= H264PROFILE_MIN && video_profile_ <= H264PROFILE_MAX) {
- decoder_.reset(new H264Decoder(
- std::make_unique<V4L2H264Accelerator>(this, device_.get())));
+ if (supports_requests_) {
+ decoder_.reset(new H264Decoder(
+ std::make_unique<V4L2H264Accelerator>(this, device_.get())));
+ } else {
+ decoder_.reset(new H264Decoder(
+ std::make_unique<V4L2LegacyH264Accelerator>(this, device_.get())));
+ }
} else if (video_profile_ >= VP8PROFILE_MIN &&
video_profile_ <= VP8PROFILE_MAX) {
- decoder_.reset(new VP8Decoder(
- std::make_unique<V4L2VP8Accelerator>(this, device_.get())));
+ if (supports_requests_) {
+ decoder_.reset(new VP8Decoder(
+ std::make_unique<V4L2VP8Accelerator>(this, device_.get())));
+ } else {
+ decoder_.reset(new VP8Decoder(
+ std::make_unique<V4L2LegacyVP8Accelerator>(this, device_.get())));
+ }
} else if (video_profile_ >= VP9PROFILE_MIN &&
video_profile_ <= VP9PROFILE_MAX) {
decoder_.reset(new VP9Decoder(
@@ -345,6 +348,13 @@ void V4L2SliceVideoDecodeAccelerator::InitializeTask() {
if (IsDestroyPending())
return;
+ input_queue_ = device_->GetQueue(V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
+ output_queue_ = device_->GetQueue(V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
+ if (!input_queue_ || !output_queue_) {
+ NOTIFY_ERROR(PLATFORM_FAILURE);
+ return;
+ }
+
if (!CreateInputBuffers())
NOTIFY_ERROR(PLATFORM_FAILURE);
@@ -361,10 +371,27 @@ void V4L2SliceVideoDecodeAccelerator::Destroy() {
// avoid waiting too long for the decoder_thread_ to Stop().
destroy_pending_.Signal();
+ weak_this_factory_.InvalidateWeakPtrs();
+
if (decoder_thread_.IsRunning()) {
decoder_thread_task_runner_->PostTask(
- FROM_HERE, base::BindOnce(&V4L2SliceVideoDecodeAccelerator::DestroyTask,
- base::Unretained(this)));
+ FROM_HERE,
+ // The image processor's destructor may post new tasks to
+ // |decoder_thread_task_runner_|. In order to make sure that
+ // DestroyTask() runs last, we perform shutdown in two stages:
+          // 1) Destroy the image processor so that no new task is posted by it
+ // 2) Post DestroyTask to |decoder_thread_task_runner_| so that it
+ // executes after all the tasks potentially posted by the IP.
+ base::BindOnce(
+ [](V4L2SliceVideoDecodeAccelerator* vda) {
+ vda->image_processor_ = nullptr;
+ vda->surfaces_at_ip_ = {};
+ vda->decoder_thread_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&V4L2SliceVideoDecodeAccelerator::DestroyTask,
+ base::Unretained(vda)));
+ },
+ base::Unretained(this)));
// Wait for tasks to finish/early-exit.
decoder_thread_.Stop();
@@ -395,6 +422,9 @@ void V4L2SliceVideoDecodeAccelerator::DestroyTask() {
media_fd_.reset();
+ input_queue_ = nullptr;
+ output_queue_ = nullptr;
+
base::trace_event::MemoryDumpManager::GetInstance()->UnregisterDumpProvider(
this);
@@ -456,9 +486,40 @@ bool V4L2SliceVideoDecodeAccelerator::SetupFormats() {
++fmtdesc.index;
}
+ DCHECK(!image_processor_device_);
if (output_format_fourcc_ == 0) {
- VLOGF(1) << "Could not find a usable output format";
- return false;
+ VLOGF(2) << "Could not find a usable output format. Trying image processor";
+ if (!V4L2ImageProcessor::IsSupported()) {
+ VLOGF(1) << "Image processor not available";
+ return false;
+ }
+ image_processor_device_ = V4L2Device::Create();
+ if (!image_processor_device_) {
+ VLOGF(1) << "Could not create a V4L2Device for image processor";
+ return false;
+ }
+ output_format_fourcc_ =
+ v4l2_vda_helpers::FindImageProcessorInputFormat(device_.get());
+ if (output_format_fourcc_ == 0) {
+ VLOGF(1) << "Can't find a usable input format from image processor";
+ return false;
+ }
+ gl_image_format_fourcc_ = v4l2_vda_helpers::FindImageProcessorOutputFormat(
+ image_processor_device_.get());
+ if (gl_image_format_fourcc_ == 0) {
+ VLOGF(1) << "Can't find a usable output format from image processor";
+ return false;
+ }
+ gl_image_planes_count_ =
+ V4L2Device::GetNumPlanesOfV4L2PixFmt(gl_image_format_fourcc_);
+ output_planes_count_ =
+ V4L2Device::GetNumPlanesOfV4L2PixFmt(output_format_fourcc_);
+ gl_image_device_ = image_processor_device_;
+ } else {
+ gl_image_format_fourcc_ = output_format_fourcc_;
+ output_planes_count_ = gl_image_planes_count_ =
+ V4L2Device::GetNumPlanesOfV4L2PixFmt(output_format_fourcc_);
+ gl_image_device_ = device_;
}
// Only set fourcc for output; resolution, etc., will come from the
@@ -466,70 +527,91 @@ bool V4L2SliceVideoDecodeAccelerator::SetupFormats() {
memset(&format, 0, sizeof(format));
format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
format.fmt.pix_mp.pixelformat = output_format_fourcc_;
- format.fmt.pix_mp.num_planes = output_planes_count_;
+ format.fmt.pix_mp.num_planes =
+ V4L2Device::GetNumPlanesOfV4L2PixFmt(output_format_fourcc_);
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
DCHECK_EQ(format.fmt.pix_mp.pixelformat, output_format_fourcc_);
+ DCHECK_EQ(static_cast<size_t>(format.fmt.pix_mp.num_planes),
+ output_planes_count_);
+
return true;
}
+bool V4L2SliceVideoDecodeAccelerator::ResetImageProcessor() {
+ VLOGF(2);
+ DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
+ DCHECK(image_processor_);
+
+ if (!image_processor_->Reset())
+ return false;
+
+ surfaces_at_ip_ = {};
+
+ return true;
+}
+
+bool V4L2SliceVideoDecodeAccelerator::CreateImageProcessor() {
+ VLOGF(2);
+ DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
+ DCHECK(!image_processor_);
+ const ImageProcessor::OutputMode image_processor_output_mode =
+ (output_mode_ == Config::OutputMode::ALLOCATE
+ ? ImageProcessor::OutputMode::ALLOCATE
+ : ImageProcessor::OutputMode::IMPORT);
+
+ image_processor_ = v4l2_vda_helpers::CreateImageProcessor(
+ output_format_fourcc_, gl_image_format_fourcc_, coded_size_,
+ gl_image_size_, decoder_->GetVisibleRect().size(),
+ output_buffer_map_.size(), image_processor_device_,
+ image_processor_output_mode,
+ // Unretained(this) is safe for ErrorCB because |decoder_thread_| is owned
+      // by this V4L2SliceVideoDecodeAccelerator and |this| must be valid when
+ // ErrorCB is executed.
+ base::BindRepeating(&V4L2SliceVideoDecodeAccelerator::ImageProcessorError,
+ base::Unretained(this)));
+
+ if (!image_processor_) {
+ VLOGF(1) << "Error creating image processor";
+ NOTIFY_ERROR(PLATFORM_FAILURE);
+ return false;
+ }
+
+ DCHECK_EQ(gl_image_size_, image_processor_->output_layout().coded_size());
+
+ return true;
+}
bool V4L2SliceVideoDecodeAccelerator::CreateInputBuffers() {
VLOGF(2);
DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
- DCHECK(!input_streamon_);
- DCHECK(input_buffer_map_.empty());
+ DCHECK(!input_queue_->IsStreaming());
- struct v4l2_requestbuffers reqbufs;
- memset(&reqbufs, 0, sizeof(reqbufs));
- reqbufs.count = kNumInputBuffers;
- reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
- reqbufs.memory = V4L2_MEMORY_MMAP;
- IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
- if (reqbufs.count < kNumInputBuffers) {
- VLOGF(1) << "Could not allocate enough output buffers";
+ if (input_queue_->AllocateBuffers(kNumInputBuffers, V4L2_MEMORY_MMAP) <
+ kNumInputBuffers) {
+ NOTIFY_ERROR(PLATFORM_FAILURE);
return false;
}
- input_buffer_map_.resize(reqbufs.count);
- for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
- free_input_buffers_.push_back(i);
-
- // Query for the MEMORY_MMAP pointer.
- struct v4l2_plane planes[VIDEO_MAX_PLANES];
- struct v4l2_buffer buffer;
- memset(&buffer, 0, sizeof(buffer));
- memset(planes, 0, sizeof(planes));
- buffer.index = i;
- buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
- buffer.memory = V4L2_MEMORY_MMAP;
- buffer.m.planes = planes;
- buffer.length = input_planes_count_;
- IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
- void* address = device_->Mmap(nullptr,
- buffer.m.planes[0].length,
- PROT_READ | PROT_WRITE,
- MAP_SHARED,
- buffer.m.planes[0].m.mem_offset);
- if (address == MAP_FAILED) {
- VPLOGF(1) << "mmap() failed";
+ // The remainder of this method only applies if requests are used.
+ if (!supports_requests_)
+ return true;
+
+ DCHECK(requests_.empty());
+
+ DCHECK(media_fd_.is_valid());
+ for (size_t i = 0; i < input_queue_->AllocatedBuffersCount(); i++) {
+ int request_fd;
+
+ int ret = HANDLE_EINTR(
+ ioctl(media_fd_.get(), MEDIA_IOC_REQUEST_ALLOC, &request_fd));
+ if (ret < 0) {
+ VPLOGF(1) << "Failed to create request: ";
return false;
}
- input_buffer_map_[i].address = address;
- input_buffer_map_[i].length = buffer.m.planes[0].length;
- if (supports_requests_) {
- int request_fd;
-
- DCHECK(media_fd_.is_valid());
- int ret = HANDLE_EINTR(
- ioctl(media_fd_.get(), MEDIA_IOC_REQUEST_ALLOC, &request_fd));
- if (ret < 0) {
- VPLOGF(1) << "Failed to create request: ";
- return false;
- }
- input_buffer_map_[i].request_fd = base::ScopedFD(request_fd);
- }
+ requests_.push(base::ScopedFD(request_fd));
}
+ DCHECK_EQ(requests_.size(), input_queue_->AllocatedBuffersCount());
return true;
}
@@ -537,9 +619,10 @@ bool V4L2SliceVideoDecodeAccelerator::CreateInputBuffers() {
bool V4L2SliceVideoDecodeAccelerator::CreateOutputBuffers() {
VLOGF(2);
DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
- DCHECK(!output_streamon_);
+ DCHECK(!output_queue_->IsStreaming());
DCHECK(output_buffer_map_.empty());
DCHECK(surfaces_at_display_.empty());
+ DCHECK(surfaces_at_ip_.empty());
DCHECK(surfaces_at_device_.empty());
gfx::Size pic_size = decoder_->GetPicSize();
@@ -548,18 +631,32 @@ bool V4L2SliceVideoDecodeAccelerator::CreateOutputBuffers() {
DCHECK_GT(num_pictures, 0u);
DCHECK(!pic_size.IsEmpty());
- // Since VdaVideoDeecoder doesn't allocate PictureBuffer with size adjusted by
+ // Since VdaVideoDecoder doesn't allocate PictureBuffer with size adjusted by
// itself, we have to adjust here.
struct v4l2_format format;
memset(&format, 0, sizeof(format));
- format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
- format.fmt.pix_mp.pixelformat = output_format_fourcc_;
+ format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
+
+ if (device_->Ioctl(VIDIOC_G_FMT, &format) != 0) {
+ VPLOGF(1) << "Failed getting OUTPUT format";
+ NOTIFY_ERROR(PLATFORM_FAILURE);
+ return false;
+ }
+
format.fmt.pix_mp.width = pic_size.width();
format.fmt.pix_mp.height = pic_size.height();
- format.fmt.pix_mp.num_planes = output_planes_count_;
if (device_->Ioctl(VIDIOC_S_FMT, &format) != 0) {
- VPLOGF(1) << "Failed setting format to: " << output_format_fourcc_;
+ VPLOGF(1) << "Failed setting OUTPUT format to: " << input_format_fourcc_;
+ NOTIFY_ERROR(PLATFORM_FAILURE);
+ return false;
+ }
+
+ // Get the coded size from the CAPTURE queue
+ memset(&format, 0, sizeof(format));
+ format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ if (device_->Ioctl(VIDIOC_G_FMT, &format) != 0) {
+ VPLOGF(1) << "Failed getting CAPTURE format";
NOTIFY_ERROR(PLATFORM_FAILURE);
return false;
}
@@ -569,6 +666,29 @@ bool V4L2SliceVideoDecodeAccelerator::CreateOutputBuffers() {
DCHECK_EQ(coded_size_.width() % 16, 0);
DCHECK_EQ(coded_size_.height() % 16, 0);
+ // Now that we know the desired buffers resolution, ask the image processor
+ // what it supports so we can request the correct picture buffers.
+ if (image_processor_device_) {
+ // Try to get an image size as close as possible to the final one (i.e.
+ // coded_size_ may include padding required by the decoder).
+ gl_image_size_ = pic_size;
+ size_t planes_count;
+ if (!V4L2ImageProcessor::TryOutputFormat(output_format_fourcc_,
+ gl_image_format_fourcc_,
+ &gl_image_size_, &planes_count)) {
+ VLOGF(1) << "Failed to get output size and plane count of IP";
+ return false;
+ }
+ if (gl_image_planes_count_ != planes_count) {
+ VLOGF(1) << "IP buffers planes count returned by V4L2 (" << planes_count
+ << ") doesn't match the computed number ("
+ << gl_image_planes_count_ << ")";
+ return false;
+ }
+ } else {
+ gl_image_size_ = coded_size_;
+ }
+
if (!gfx::Rect(coded_size_).Contains(gfx::Rect(pic_size))) {
VLOGF(1) << "Got invalid adjusted coded size: " << coded_size_.ToString();
return false;
@@ -579,12 +699,12 @@ bool V4L2SliceVideoDecodeAccelerator::CreateOutputBuffers() {
<< ", coded size=" << coded_size_.ToString();
VideoPixelFormat pixel_format =
- V4L2Device::V4L2PixFmtToVideoPixelFormat(output_format_fourcc_);
+ V4L2Device::V4L2PixFmtToVideoPixelFormat(gl_image_format_fourcc_);
child_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(
&VideoDecodeAccelerator::Client::ProvidePictureBuffersWithVisibleRect,
- client_, num_pictures, pixel_format, 1, coded_size_,
+ client_, num_pictures, pixel_format, 1, gl_image_size_,
decoder_->GetVisibleRect(), device_->GetTextureTarget()));
// Go into kAwaitingPictureBuffers to prevent us from doing any more decoding
@@ -603,25 +723,16 @@ void V4L2SliceVideoDecodeAccelerator::DestroyInputBuffers() {
VLOGF(2);
DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread() ||
!decoder_thread_.IsRunning());
- DCHECK(!input_streamon_);
- if (input_buffer_map_.empty())
+ if (!input_queue_)
return;
- for (auto& input_record : input_buffer_map_) {
- if (input_record.address != nullptr)
- device_->Munmap(input_record.address, input_record.length);
- }
+ DCHECK(!input_queue_->IsStreaming());
- struct v4l2_requestbuffers reqbufs;
- memset(&reqbufs, 0, sizeof(reqbufs));
- reqbufs.count = 0;
- reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
- reqbufs.memory = V4L2_MEMORY_MMAP;
- IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
+ input_queue_->DeallocateBuffers();
- input_buffer_map_.clear();
- free_input_buffers_.clear();
+ if (supports_requests_)
+ requests_ = {};
}
void V4L2SliceVideoDecodeAccelerator::DismissPictures(
@@ -677,9 +788,11 @@ void V4L2SliceVideoDecodeAccelerator::SchedulePollIfNeeded() {
return;
}
- DCHECK(input_streamon_ || output_streamon_);
+ DCHECK(input_queue_->IsStreaming() || output_queue_->IsStreaming());
- if (input_buffer_queued_count_ + output_buffer_queued_count_ == 0) {
+ if (input_queue_->QueuedBuffersCount() +
+ output_queue_->QueuedBuffersCount() ==
+ 0) {
DVLOGF(4) << "No buffers queued, will not schedule poll";
return;
}
@@ -693,12 +806,11 @@ void V4L2SliceVideoDecodeAccelerator::SchedulePollIfNeeded() {
DVLOGF(3) << "buffer counts: "
<< "INPUT[" << decoder_input_queue_.size() << "]"
- << " => DEVICE["
- << free_input_buffers_.size() << "+"
- << input_buffer_queued_count_ << "/"
- << input_buffer_map_.size() << "]->["
- << free_output_buffers_.size() << "+"
- << output_buffer_queued_count_ << "/"
+ << " => DEVICE[" << input_queue_->FreeBuffersCount() << "+"
+ << input_queue_->QueuedBuffersCount() << "/"
+ << input_queue_->AllocatedBuffersCount() << "]->["
+ << output_queue_->FreeBuffersCount() << "+"
+ << output_queue_->QueuedBuffersCount() << "/"
<< output_buffer_map_.size() << "]"
<< " => DISPLAYQ[" << decoder_display_queue_.size() << "]"
<< " => CLIENT[" << surfaces_at_display_.size() << "]";
@@ -708,8 +820,8 @@ void V4L2SliceVideoDecodeAccelerator::Enqueue(
const scoped_refptr<V4L2DecodeSurface>& dec_surface) {
DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
- const int old_inputs_queued = input_buffer_queued_count_;
- const int old_outputs_queued = output_buffer_queued_count_;
+ const int old_inputs_queued = input_queue_->QueuedBuffersCount();
+ const int old_outputs_queued = output_queue_->QueuedBuffersCount();
if (!EnqueueInputRecord(dec_surface.get())) {
VLOGF(1) << "Failed queueing an input buffer";
@@ -717,17 +829,13 @@ void V4L2SliceVideoDecodeAccelerator::Enqueue(
return;
}
- if (!EnqueueOutputRecord(dec_surface->output_record())) {
+ if (!EnqueueOutputRecord(dec_surface.get())) {
VLOGF(1) << "Failed queueing an output buffer";
NOTIFY_ERROR(PLATFORM_FAILURE);
return;
}
- bool inserted =
- surfaces_at_device_
- .insert(std::make_pair(dec_surface->output_record(), dec_surface))
- .second;
- DCHECK(inserted);
+ surfaces_at_device_.push(dec_surface);
if (old_inputs_queued == 0 && old_outputs_queued == 0)
SchedulePollIfNeeded();
@@ -737,70 +845,59 @@ void V4L2SliceVideoDecodeAccelerator::Dequeue() {
DVLOGF(4);
DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
- struct v4l2_buffer dqbuf;
- struct v4l2_plane planes[VIDEO_MAX_PLANES];
- while (input_buffer_queued_count_ > 0) {
- DCHECK(input_streamon_);
- memset(&dqbuf, 0, sizeof(dqbuf));
- memset(&planes, 0, sizeof(planes));
- dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
- dqbuf.memory = V4L2_MEMORY_MMAP;
- dqbuf.m.planes = planes;
- dqbuf.length = input_planes_count_;
- if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
- if (errno == EAGAIN) {
- // EAGAIN if we're just out of buffers to dequeue.
- break;
- }
- VPLOGF(1) << "ioctl() failed: VIDIOC_DQBUF";
- NOTIFY_ERROR(PLATFORM_FAILURE);
- return;
- }
- InputRecord& input_record = input_buffer_map_[dqbuf.index];
- DCHECK(input_record.at_device);
- input_record.at_device = false;
- ReuseInputBuffer(dqbuf.index);
- input_buffer_queued_count_--;
- DVLOGF(4) << "Dequeued input=" << dqbuf.index
- << " count: " << input_buffer_queued_count_;
- }
-
- while (output_buffer_queued_count_ > 0) {
- DCHECK(output_streamon_);
- memset(&dqbuf, 0, sizeof(dqbuf));
- memset(&planes, 0, sizeof(planes));
- dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
- dqbuf.memory =
- (output_mode_ == Config::OutputMode::ALLOCATE ? V4L2_MEMORY_MMAP
- : V4L2_MEMORY_DMABUF);
- dqbuf.m.planes = planes;
- dqbuf.length = output_planes_count_;
- if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
- if (errno == EAGAIN) {
- // EAGAIN if we're just out of buffers to dequeue.
- break;
- }
- VPLOGF(1) << "ioctl() failed: VIDIOC_DQBUF";
+ while (input_queue_->QueuedBuffersCount() > 0) {
+ DCHECK(input_queue_->IsStreaming());
+ auto ret = input_queue_->DequeueBuffer();
+
+ if (ret.first == false) {
NOTIFY_ERROR(PLATFORM_FAILURE);
return;
+ } else if (!ret.second) {
+      // We're just out of buffers to dequeue.
+ break;
}
- OutputRecord& output_record = output_buffer_map_[dqbuf.index];
- DCHECK(output_record.at_device);
- output_record.at_device = false;
- output_buffer_queued_count_--;
- DVLOGF(4) << "Dequeued output=" << dqbuf.index << " count "
- << output_buffer_queued_count_;
-
- V4L2DecodeSurfaceByOutputId::iterator it =
- surfaces_at_device_.find(dqbuf.index);
- if (it == surfaces_at_device_.end()) {
- VLOGF(1) << "Got invalid surface from device.";
+
+ DVLOGF(4) << "Dequeued input=" << ret.second->BufferId()
+ << " count: " << input_queue_->QueuedBuffersCount();
+ }
+
+ while (output_queue_->QueuedBuffersCount() > 0) {
+ DCHECK(output_queue_->IsStreaming());
+ auto ret = output_queue_->DequeueBuffer();
+ if (ret.first == false) {
NOTIFY_ERROR(PLATFORM_FAILURE);
return;
+ } else if (!ret.second) {
+      // We're just out of buffers to dequeue.
+ break;
}
- it->second->SetDecoded();
- surfaces_at_device_.erase(it);
+ const size_t buffer_id = ret.second->BufferId();
+
+ DVLOGF(4) << "Dequeued output=" << buffer_id << " count "
+ << output_queue_->QueuedBuffersCount();
+
+ DCHECK(!surfaces_at_device_.empty());
+ auto surface = std::move(surfaces_at_device_.front());
+ surfaces_at_device_.pop();
+ DCHECK_EQ(static_cast<size_t>(surface->output_record()), buffer_id);
+
+ // If using an image processor, process the image before considering it
+ // decoded.
+ if (image_processor_) {
+ if (!ProcessFrame(std::move(ret.second), std::move(surface))) {
+ VLOGF(1) << "Processing frame failed";
+ NOTIFY_ERROR(PLATFORM_FAILURE);
+ }
+ } else {
+ DCHECK_EQ(decoded_buffer_map_.count(buffer_id), 0u);
+ decoded_buffer_map_.emplace(buffer_id, buffer_id);
+ surface->SetDecoded();
+
+ surface->SetReleaseCallback(
+ base::BindOnce(&V4L2SliceVideoDecodeAccelerator::ReuseOutputBuffer,
+ base::Unretained(this), std::move(ret.second)));
+ }
}
// A frame was decoded, see if we can output it.
@@ -852,134 +949,71 @@ void V4L2SliceVideoDecodeAccelerator::ProcessPendingEventsIfNeeded() {
}
}
-void V4L2SliceVideoDecodeAccelerator::ReuseInputBuffer(int index) {
- DVLOGF(4) << "Reusing input buffer, index=" << index;
+void V4L2SliceVideoDecodeAccelerator::ReuseOutputBuffer(
+ V4L2ReadableBufferRef buffer) {
+ DVLOGF(4) << "Reusing output buffer, index=" << buffer->BufferId();
DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
- DCHECK_LT(index, static_cast<int>(input_buffer_map_.size()));
- InputRecord& input_record = input_buffer_map_[index];
-
- DCHECK(!input_record.at_device);
- input_record.input_id = -1;
- input_record.bytes_used = 0;
-
- DCHECK_EQ(
- std::count(free_input_buffers_.begin(), free_input_buffers_.end(), index),
- 0);
- free_input_buffers_.push_back(index);
-}
-
-void V4L2SliceVideoDecodeAccelerator::ReuseOutputBuffer(int index) {
- DVLOGF(4) << "Reusing output buffer, index=" << index;
- DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
-
- DCHECK_LT(index, static_cast<int>(output_buffer_map_.size()));
- OutputRecord& output_record = output_buffer_map_[index];
- DCHECK(!output_record.at_device);
- DCHECK(!output_record.at_client);
-
- DCHECK_EQ(std::count(free_output_buffers_.begin(), free_output_buffers_.end(),
- index),
- 0);
- free_output_buffers_.push_back(index);
+ DCHECK_EQ(decoded_buffer_map_.count(buffer->BufferId()), 1u);
+ decoded_buffer_map_.erase(buffer->BufferId());
ScheduleDecodeBufferTaskIfNeeded();
}
bool V4L2SliceVideoDecodeAccelerator::EnqueueInputRecord(
- const V4L2DecodeSurface* dec_surface) {
+ V4L2DecodeSurface* dec_surface) {
DVLOGF(4);
DCHECK_NE(dec_surface, nullptr);
- const int index = dec_surface->input_record();
- DCHECK_LT(index, static_cast<int>(input_buffer_map_.size()));
// Enqueue an input (VIDEO_OUTPUT) buffer for an input video frame.
- InputRecord& input_record = input_buffer_map_[index];
- DCHECK(!input_record.at_device);
- struct v4l2_buffer qbuf;
- struct v4l2_plane qbuf_planes[VIDEO_MAX_PLANES];
- memset(&qbuf, 0, sizeof(qbuf));
- memset(qbuf_planes, 0, sizeof(qbuf_planes));
- qbuf.index = index;
- qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
- qbuf.memory = V4L2_MEMORY_MMAP;
- qbuf.m.planes = qbuf_planes;
- qbuf.m.planes[0].bytesused = input_record.bytes_used;
- qbuf.length = input_planes_count_;
- dec_surface->PrepareQueueBuffer(&qbuf);
- IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
- input_record.at_device = true;
- input_buffer_queued_count_++;
- DVLOGF(4) << "Enqueued input=" << qbuf.index
- << " count: " << input_buffer_queued_count_;
+ V4L2WritableBufferRef input_buffer = std::move(dec_surface->input_buffer());
+ DCHECK(input_buffer.IsValid());
+ const int index = input_buffer.BufferId();
+ input_buffer.PrepareQueueBuffer(*dec_surface);
+ if (!std::move(input_buffer).QueueMMap()) {
+ NOTIFY_ERROR(PLATFORM_FAILURE);
+ return false;
+ }
+
+ DVLOGF(4) << "Enqueued input=" << index
+ << " count: " << input_queue_->QueuedBuffersCount();
return true;
}
-bool V4L2SliceVideoDecodeAccelerator::EnqueueOutputRecord(int index) {
+bool V4L2SliceVideoDecodeAccelerator::EnqueueOutputRecord(
+ V4L2DecodeSurface* dec_surface) {
DVLOGF(4);
- DCHECK_LT(index, static_cast<int>(output_buffer_map_.size()));
// Enqueue an output (VIDEO_CAPTURE) buffer.
+ V4L2WritableBufferRef output_buffer = std::move(dec_surface->output_buffer());
+ DCHECK(output_buffer.IsValid());
+ size_t index = output_buffer.BufferId();
OutputRecord& output_record = output_buffer_map_[index];
- DCHECK(!output_record.at_device);
- DCHECK(!output_record.at_client);
DCHECK_NE(output_record.picture_id, -1);
- if (output_record.egl_fence) {
- TRACE_EVENT0("media,gpu",
- "V4L2SVDA::EnqueueOutputRecord: "
- "GLFenceEGL::ClientWaitWithTimeoutNanos");
-
- // If we have to wait for completion, wait. Note that free_output_buffers_
- // is a FIFO queue, so we always wait on the buffer that has been in the
- // queue the longest. Every 100ms we check whether the decoder is shutting
- // down, or we might get stuck waiting on a fence that will never come:
- // https://crbug.com/845645
- while (!IsDestroyPending()) {
- const EGLTimeKHR wait_ns =
- base::TimeDelta::FromMilliseconds(100).InNanoseconds();
- EGLint result =
- output_record.egl_fence->ClientWaitWithTimeoutNanos(wait_ns);
- if (result == EGL_CONDITION_SATISFIED_KHR) {
- break;
- } else if (result == EGL_FALSE) {
- // This will cause tearing, but is safe otherwise.
- DVLOGF(1) << "GLFenceEGL::ClientWaitWithTimeoutNanos failed!";
- break;
- }
- DCHECK_EQ(result, EGL_TIMEOUT_EXPIRED_KHR);
+ bool ret = false;
+ switch (output_buffer.Memory()) {
+ case V4L2_MEMORY_MMAP:
+ ret = std::move(output_buffer).QueueMMap();
+ break;
+ case V4L2_MEMORY_DMABUF: {
+ const auto& fds = output_record.output_frame->DmabufFds();
+ DCHECK_EQ(output_planes_count_, fds.size());
+ ret = std::move(output_buffer).QueueDMABuf(fds);
+ break;
}
-
- if (IsDestroyPending())
- return false;
-
- output_record.egl_fence.reset();
+ default:
+ NOTREACHED();
}
- struct v4l2_buffer qbuf;
- struct v4l2_plane qbuf_planes[VIDEO_MAX_PLANES];
- memset(&qbuf, 0, sizeof(qbuf));
- memset(qbuf_planes, 0, sizeof(qbuf_planes));
- qbuf.index = index;
- qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
- if (output_mode_ == Config::OutputMode::ALLOCATE) {
- qbuf.memory = V4L2_MEMORY_MMAP;
- } else {
- qbuf.memory = V4L2_MEMORY_DMABUF;
- DCHECK_EQ(output_planes_count_, output_record.dmabuf_fds.size());
- for (size_t i = 0; i < output_record.dmabuf_fds.size(); ++i) {
- DCHECK(output_record.dmabuf_fds[i].is_valid());
- qbuf_planes[i].m.fd = output_record.dmabuf_fds[i].get();
- }
+ if (!ret) {
+ NOTIFY_ERROR(PLATFORM_FAILURE);
+ return false;
}
- qbuf.m.planes = qbuf_planes;
- qbuf.length = output_planes_count_;
- IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
- output_record.at_device = true;
- output_buffer_queued_count_++;
- DVLOGF(4) << "Enqueued output=" << qbuf.index
- << " count: " << output_buffer_queued_count_;
+
+ DVLOGF(4) << "Enqueued output=" << index
+ << " count: " << output_queue_->QueuedBuffersCount();
return true;
}
@@ -995,17 +1029,12 @@ bool V4L2SliceVideoDecodeAccelerator::StartDevicePoll() {
NOTIFY_ERROR(PLATFORM_FAILURE);
return false;
}
- if (!input_streamon_) {
- __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
- IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMON, &type);
- input_streamon_ = true;
- }
- if (!output_streamon_) {
- __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
- IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMON, &type);
- output_streamon_ = true;
- }
+ if (!input_queue_->Streamon())
+ return false;
+
+ if (!output_queue_->Streamon())
+ return false;
device_poll_thread_.task_runner()->PostTask(
FROM_HERE,
@@ -1035,43 +1064,25 @@ bool V4L2SliceVideoDecodeAccelerator::StopDevicePoll() {
return false;
}
- if (input_streamon_) {
- __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
- IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
- }
- input_streamon_ = false;
+ // We may be called before the queue is acquired.
+ if (input_queue_) {
+ if (!input_queue_->Streamoff())
+ return false;
- if (output_streamon_) {
- __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
- IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
+ DCHECK_EQ(input_queue_->QueuedBuffersCount(), 0u);
}
- output_streamon_ = false;
- for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
- InputRecord& input_record = input_buffer_map_[i];
- if (input_record.at_device) {
- input_record.at_device = false;
- ReuseInputBuffer(i);
- input_buffer_queued_count_--;
- }
- }
- DCHECK_EQ(input_buffer_queued_count_, 0);
+ // We may be called before the queue is acquired.
+ if (output_queue_) {
+ if (!output_queue_->Streamoff())
+ return false;
- // STREAMOFF makes the driver drop all buffers without decoding and DQBUFing,
- // so we mark them all as at_device = false and clear surfaces_at_device_.
- for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
- OutputRecord& output_record = output_buffer_map_[i];
- if (output_record.at_device) {
- output_record.at_device = false;
- output_buffer_queued_count_--;
- }
+ DCHECK_EQ(output_queue_->QueuedBuffersCount(), 0u);
}
+
// Mark as decoded to allow reuse.
- for (auto kv : surfaces_at_device_) {
- kv.second->SetDecoded();
- }
- surfaces_at_device_.clear();
- DCHECK_EQ(output_buffer_queued_count_, 0);
+ while (!surfaces_at_device_.empty())
+ surfaces_at_device_.pop();
// Drop all surfaces that were awaiting decode before being displayed,
// since we've just cancelled all outstanding decodes.
@@ -1240,7 +1251,7 @@ bool V4L2SliceVideoDecodeAccelerator::FinishSurfaceSetChange() {
// All output buffers should've been returned from decoder and device by now.
// The only remaining owner of surfaces may be display (client), and we will
// dismiss them when destroying output buffers below.
- DCHECK_EQ(free_output_buffers_.size() + surfaces_at_display_.size(),
+ DCHECK_EQ(output_queue_->FreeBuffersCount() + surfaces_at_display_.size(),
output_buffer_map_.size());
if (!StopDevicePoll()) {
@@ -1248,6 +1259,9 @@ bool V4L2SliceVideoDecodeAccelerator::FinishSurfaceSetChange() {
return false;
}
+ image_processor_ = nullptr;
+ surfaces_at_ip_ = {};
+
// Dequeued decoded surfaces may be pended in pending_picture_ready_ if they
// are waiting for some pictures to be cleared. We should post them right away
// because they are about to be dismissed and destroyed for surface set
@@ -1283,10 +1297,6 @@ bool V4L2SliceVideoDecodeAccelerator::DestroyOutputs(bool dismiss) {
return true;
for (auto& output_record : output_buffer_map_) {
- DCHECK(!output_record.at_device);
-
- output_record.egl_fence.reset();
-
picture_buffers_to_dismiss.push_back(output_record.picture_id);
}
@@ -1310,40 +1320,32 @@ bool V4L2SliceVideoDecodeAccelerator::DestroyOutputBuffers() {
VLOGF(2);
DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread() ||
!decoder_thread_.IsRunning());
- DCHECK(!output_streamon_);
DCHECK(surfaces_at_device_.empty());
DCHECK(decoder_display_queue_.empty());
- DCHECK_EQ(surfaces_at_display_.size() + free_output_buffers_.size(),
- output_buffer_map_.size());
- if (output_buffer_map_.empty())
+ if (!output_queue_ || output_buffer_map_.empty())
return true;
+ DCHECK(!output_queue_->IsStreaming());
+ DCHECK_EQ(output_queue_->QueuedBuffersCount(), 0u);
+
+ // Release all buffers waiting for an import buffer event.
+ output_wait_map_.clear();
+
+ // Release all buffers awaiting a fence since we are about to destroy them.
+ surfaces_awaiting_fence_ = {};
+
// It's ok to do this, client will retain references to textures, but we are
// not interested in reusing the surfaces anymore.
// This will prevent us from reusing old surfaces in case we have some
// ReusePictureBuffer() pending on ChildThread already. It's ok to ignore
// them, because we have already dismissed them (in DestroyOutputs()).
- for (const auto& surface_at_display : surfaces_at_display_) {
- size_t index = surface_at_display.second->output_record();
- DCHECK_LT(index, output_buffer_map_.size());
- OutputRecord& output_record = output_buffer_map_[index];
- DCHECK(output_record.at_client);
- output_record.at_client = false;
- output_record.num_times_sent_to_client = 0;
- }
surfaces_at_display_.clear();
- DCHECK_EQ(free_output_buffers_.size(), output_buffer_map_.size());
+ DCHECK_EQ(output_queue_->FreeBuffersCount(), output_buffer_map_.size());
- free_output_buffers_.clear();
output_buffer_map_.clear();
- struct v4l2_requestbuffers reqbufs;
- memset(&reqbufs, 0, sizeof(reqbufs));
- reqbufs.count = 0;
- reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
- reqbufs.memory = V4L2_MEMORY_MMAP;
- IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
+ output_queue_->DeallocateBuffers();
return true;
}
@@ -1362,7 +1364,7 @@ void V4L2SliceVideoDecodeAccelerator::AssignPictureBuffers(
void V4L2SliceVideoDecodeAccelerator::AssignPictureBuffersTask(
const std::vector<PictureBuffer>& buffers) {
VLOGF(2);
- DCHECK(!output_streamon_);
+ DCHECK(!output_queue_->IsStreaming());
DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
DCHECK_EQ(state_, kAwaitingPictureBuffers);
TRACE_EVENT1("media,gpu", "V4L2SVDA::AssignPictureBuffersTask",
@@ -1383,7 +1385,7 @@ void V4L2SliceVideoDecodeAccelerator::AssignPictureBuffersTask(
   // If a client allocates a different frame size, S_FMT should be called with
// the size.
- if (coded_size_ != buffers[0].size()) {
+ if (!image_processor_device_ && coded_size_ != buffers[0].size()) {
const auto& new_frame_size = buffers[0].size();
v4l2_format format = {};
format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
@@ -1414,34 +1416,47 @@ void V4L2SliceVideoDecodeAccelerator::AssignPictureBuffersTask(
NOTIFY_ERROR(INVALID_ARGUMENT);
return;
}
+
+ gl_image_size_ = coded_size_;
}
- // Allocate the output buffers.
- struct v4l2_requestbuffers reqbufs;
- memset(&reqbufs, 0, sizeof(reqbufs));
- reqbufs.count = buffers.size();
- reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
- reqbufs.memory =
- (output_mode_ == Config::OutputMode::ALLOCATE ? V4L2_MEMORY_MMAP
- : V4L2_MEMORY_DMABUF);
- IOCTL_OR_ERROR_RETURN(VIDIOC_REQBUFS, &reqbufs);
-
- if (reqbufs.count != buffers.size()) {
+ const v4l2_memory memory =
+ (image_processor_device_ || output_mode_ == Config::OutputMode::ALLOCATE
+ ? V4L2_MEMORY_MMAP
+ : V4L2_MEMORY_DMABUF);
+ if (output_queue_->AllocateBuffers(buffers.size(), memory) !=
+ buffers.size()) {
VLOGF(1) << "Could not allocate enough output buffers";
NOTIFY_ERROR(PLATFORM_FAILURE);
return;
}
- DCHECK(free_output_buffers_.empty());
DCHECK(output_buffer_map_.empty());
+ DCHECK(output_wait_map_.empty());
output_buffer_map_.resize(buffers.size());
- for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
+
+ if (image_processor_device_ && !CreateImageProcessor()) {
+ NOTIFY_ERROR(PLATFORM_FAILURE);
+ return;
+ }
+
+ // Reserve all buffers until ImportBufferForPictureTask() is called
+ while (output_queue_->FreeBuffersCount() > 0) {
+ V4L2WritableBufferRef buffer = output_queue_->GetFreeBuffer();
+ DCHECK(buffer.IsValid());
+ int i = buffer.BufferId();
+
+ DCHECK_EQ(output_wait_map_.count(buffers[i].id()), 0u);
+ // The buffer will remain here until ImportBufferForPicture is called,
+ // either by the client, or by ourselves, if we are allocating.
+ output_wait_map_.emplace(buffers[i].id(), std::move(buffer));
+ }
+ // All available buffers should be in the wait map now.
+ DCHECK_EQ(output_buffer_map_.size(), output_wait_map_.size());
+
+ for (size_t i = 0; i < buffers.size(); i++) {
OutputRecord& output_record = output_buffer_map_[i];
- DCHECK(!output_record.at_device);
- DCHECK(!output_record.at_client);
- DCHECK(!output_record.egl_fence);
DCHECK_EQ(output_record.picture_id, -1);
- DCHECK(output_record.dmabuf_fds.empty());
DCHECK_EQ(output_record.cleared, false);
output_record.picture_id = buffers[i].id();
@@ -1453,16 +1468,22 @@ void V4L2SliceVideoDecodeAccelerator::AssignPictureBuffersTask(
? 0
: buffers[i].client_texture_ids()[0];
- // This will remain true until ImportBufferForPicture is called, either by
- // the client, or by ourselves, if we are allocating.
- output_record.at_client = true;
+ // If we are in allocate mode, then we can already call
+ // ImportBufferForPictureTask().
if (output_mode_ == Config::OutputMode::ALLOCATE) {
- std::vector<base::ScopedFD> passed_dmabuf_fds =
- device_->GetDmabufsForV4L2Buffer(i, output_planes_count_,
- V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
- if (passed_dmabuf_fds.empty()) {
- NOTIFY_ERROR(PLATFORM_FAILURE);
- return;
+ std::vector<base::ScopedFD> passed_dmabuf_fds;
+
+ // If we are using an image processor, the DMABufs that we need to import
+      // are those of the image processor's buffers, not the decoder's. So
+ // pass an empty FDs array in that case.
+
+ if (!image_processor_) {
+ passed_dmabuf_fds = gl_image_device_->GetDmabufsForV4L2Buffer(
+ i, gl_image_planes_count_, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
+ if (passed_dmabuf_fds.empty()) {
+ NOTIFY_ERROR(PLATFORM_FAILURE);
+ return;
+ }
}
ImportBufferForPictureTask(output_record.picture_id,
@@ -1509,63 +1530,19 @@ void V4L2SliceVideoDecodeAccelerator::CreateGLImageFor(
}
scoped_refptr<gl::GLImage> gl_image =
- device_->CreateGLImage(size, fourcc, passed_dmabuf_fds);
+ gl_image_device_->CreateGLImage(size, fourcc, passed_dmabuf_fds);
if (!gl_image) {
VLOGF(1) << "Could not create GLImage,"
<< " index=" << buffer_index << " texture_id=" << texture_id;
NOTIFY_ERROR(PLATFORM_FAILURE);
return;
}
- gl::ScopedTextureBinder bind_restore(device_->GetTextureTarget(), texture_id);
- bool ret = gl_image->BindTexImage(device_->GetTextureTarget());
+ gl::ScopedTextureBinder bind_restore(gl_image_device_->GetTextureTarget(),
+ texture_id);
+ bool ret = gl_image->BindTexImage(gl_image_device_->GetTextureTarget());
DCHECK(ret);
- bind_image_cb_.Run(client_texture_id, device_->GetTextureTarget(), gl_image,
- true);
- decoder_thread_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&V4L2SliceVideoDecodeAccelerator::AssignDmaBufs,
- base::Unretained(this), buffer_index, picture_buffer_id,
- std::move(passed_dmabuf_fds)));
-}
-
-void V4L2SliceVideoDecodeAccelerator::AssignDmaBufs(
- size_t buffer_index,
- int32_t picture_buffer_id,
- std::vector<base::ScopedFD> passed_dmabuf_fds) {
- DVLOGF(3) << "index=" << buffer_index;
- DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
-
- if (IsDestroyPending())
- return;
-
- // It's possible that while waiting for the EGLImages to be allocated and
- // assigned, we have already decoded more of the stream and saw another
- // resolution change. This is a normal situation, in such a case either there
- // is no output record with this index awaiting an EGLImage to be assigned to
- // it, or the record is already updated to use a newer PictureBuffer and is
- // awaiting an EGLImage associated with a different picture_buffer_id. If so,
- // just discard this image, we will get the one we are waiting for later.
- if (buffer_index >= output_buffer_map_.size() ||
- output_buffer_map_[buffer_index].picture_id != picture_buffer_id) {
- DVLOGF(4) << "Picture set already changed, dropping EGLImage";
- return;
- }
-
- OutputRecord& output_record = output_buffer_map_[buffer_index];
- DCHECK(!output_record.egl_fence);
- DCHECK(!output_record.at_client);
- DCHECK(!output_record.at_device);
-
- if (output_mode_ == Config::OutputMode::IMPORT) {
- DCHECK(output_record.dmabuf_fds.empty());
- output_record.dmabuf_fds = std::move(passed_dmabuf_fds);
- }
-
- DCHECK_EQ(std::count(free_output_buffers_.begin(), free_output_buffers_.end(),
- buffer_index),
- 0);
- free_output_buffers_.push_back(buffer_index);
- ScheduleDecodeBufferTaskIfNeeded();
+ bind_image_cb_.Run(client_texture_id, gl_image_device_->GetTextureTarget(),
+ gl_image, true);
}
void V4L2SliceVideoDecodeAccelerator::ImportBufferForPicture(
@@ -1587,7 +1564,7 @@ void V4L2SliceVideoDecodeAccelerator::ImportBufferForPicture(
for (auto& plane : gpu_memory_buffer_handle.native_pixmap_handle.planes) {
dmabuf_fds.push_back(std::move(plane.fd));
}
- for (size_t i = dmabuf_fds.size() - 1; i >= output_planes_count_; i--) {
+ for (size_t i = dmabuf_fds.size() - 1; i >= gl_image_planes_count_; i--) {
if (gpu_memory_buffer_handle.native_pixmap_handle.planes[i].offset == 0) {
VLOGF(1) << "The dmabuf fd points to a new buffer, ";
NOTIFY_ERROR(INVALID_ARGUMENT);
@@ -1606,7 +1583,7 @@ void V4L2SliceVideoDecodeAccelerator::ImportBufferForPicture(
}
if (pixel_format !=
- V4L2Device::V4L2PixFmtToVideoPixelFormat(output_format_fourcc_)) {
+ V4L2Device::V4L2PixFmtToVideoPixelFormat(gl_image_format_fourcc_)) {
VLOGF(1) << "Unsupported import format: "
<< VideoPixelFormatToString(pixel_format);
NOTIFY_ERROR(INVALID_ARGUMENT);
@@ -1644,33 +1621,54 @@ void V4L2SliceVideoDecodeAccelerator::ImportBufferForPictureTask(
return;
}
- if (!iter->at_client) {
- VLOGF(1) << "Cannot import buffer that not owned by client";
+ if (!output_wait_map_.count(iter->picture_id)) {
+ VLOGF(1) << "Passed buffer is not waiting to be imported";
NOTIFY_ERROR(INVALID_ARGUMENT);
return;
}
- size_t index = iter - output_buffer_map_.begin();
- DCHECK_EQ(std::count(free_output_buffers_.begin(), free_output_buffers_.end(),
- index),
- 0);
+ // If in import mode, build output_frame from the passed DMABUF FDs.
+ if (output_mode_ == Config::OutputMode::IMPORT) {
+ DCHECK_EQ(gl_image_planes_count_, passed_dmabuf_fds.size());
+ DCHECK(!iter->output_frame);
+
+ // TODO(acourbot): Create a more accurate layout from the GMBhandle instead
+ // of assuming the image size will be enough (we may have extra information
+ // between planes).
+ auto layout = VideoFrameLayout::Create(
+ V4L2Device::V4L2PixFmtToVideoPixelFormat(gl_image_format_fourcc_),
+ gl_image_size_);
+ if (!layout) {
+ VLOGF(1) << "Cannot create layout!";
+ NOTIFY_ERROR(INVALID_ARGUMENT);
+ return;
+ }
+ const gfx::Rect visible_rect = decoder_->GetVisibleRect();
+ iter->output_frame = VideoFrame::WrapExternalDmabufs(
+ *layout, visible_rect, visible_rect.size(),
+ DuplicateFDs(passed_dmabuf_fds), base::TimeDelta());
+ }
+
+ // We should only create the GL image if rendering is enabled
+ // (texture_id !=0). Moreover, if an image processor is in use, we will
+ // create the GL image when its buffer becomes visible in FrameProcessed().
+ if (iter->texture_id != 0 && !image_processor_) {
+ DCHECK_EQ(gl_image_planes_count_, passed_dmabuf_fds.size());
+ size_t index = iter - output_buffer_map_.begin();
- DCHECK(!iter->at_device);
- iter->at_client = false;
- if (iter->texture_id != 0) {
child_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&V4L2SliceVideoDecodeAccelerator::CreateGLImageFor,
weak_this_, index, picture_buffer_id,
std::move(passed_dmabuf_fds), iter->client_texture_id,
- iter->texture_id, coded_size_, output_format_fourcc_));
- } else {
- // No need for a GLImage, start using this buffer now.
- DCHECK_EQ(output_planes_count_, passed_dmabuf_fds.size());
- iter->dmabuf_fds = std::move(passed_dmabuf_fds);
- free_output_buffers_.push_back(index);
- ScheduleDecodeBufferTaskIfNeeded();
+ iter->texture_id, gl_image_size_,
+ gl_image_format_fourcc_));
}
+
+ // Buffer is now ready to be used.
+ DCHECK_EQ(output_wait_map_.count(picture_buffer_id), 1u);
+ output_wait_map_.erase(picture_buffer_id);
+ ScheduleDecodeBufferTaskIfNeeded();
}
void V4L2SliceVideoDecodeAccelerator::ReusePictureBuffer(
@@ -1724,21 +1722,25 @@ void V4L2SliceVideoDecodeAccelerator::ReusePictureBufferTask(
return;
}
- OutputRecord& output_record = output_buffer_map_[it->second->output_record()];
- if (output_record.at_device || !output_record.at_client) {
+ DCHECK_EQ(decoded_buffer_map_.count(it->second->output_record()), 1u);
+ const size_t output_map_index =
+ decoded_buffer_map_[it->second->output_record()];
+ DCHECK_LT(output_map_index, output_buffer_map_.size());
+ OutputRecord& output_record = output_buffer_map_[output_map_index];
+ if (!output_record.at_client()) {
VLOGF(1) << "picture_buffer_id not reusable";
NOTIFY_ERROR(INVALID_ARGUMENT);
return;
}
- DCHECK(!output_record.at_device);
--output_record.num_times_sent_to_client;
   // An output buffer might be sent multiple times. We only use the last fence.
// When the last fence is signaled, all the previous fences must be executed.
- if (output_record.num_times_sent_to_client == 0) {
- output_record.at_client = false;
+ if (!output_record.at_client()) {
// Take ownership of the EGL fence.
- output_record.egl_fence = std::move(egl_fence);
+ if (egl_fence)
+ surfaces_awaiting_fence_.push(
+ std::make_pair(std::move(egl_fence), std::move(it->second)));
surfaces_at_display_.erase(it);
}
@@ -1799,6 +1801,15 @@ bool V4L2SliceVideoDecodeAccelerator::FinishFlush() {
if (!surfaces_at_device_.empty())
return false;
+ // Even if all output buffers have been returned, the decoder may still
+ // be holding on an input device. Wait until the queue is actually drained.
+ if (input_queue_->QueuedBuffersCount() != 0)
+ return false;
+
+ // Wait until all pending image processor tasks are completed.
+ if (image_processor_ && !surfaces_at_ip_.empty())
+ return false;
+
DCHECK_EQ(state_, kIdle);
// At this point, all remaining surfaces are decoded and dequeued, and since
@@ -1811,7 +1822,8 @@ bool V4L2SliceVideoDecodeAccelerator::FinishFlush() {
// Decoder should have already returned all surfaces and all surfaces are
// out of hardware. There can be no other owners of input buffers.
- DCHECK_EQ(free_input_buffers_.size(), input_buffer_map_.size());
+ DCHECK_EQ(input_queue_->FreeBuffersCount(),
+ input_queue_->AllocatedBuffersCount());
SendPictureReady();
@@ -1871,6 +1883,13 @@ bool V4L2SliceVideoDecodeAccelerator::FinishReset() {
if (!surfaces_at_device_.empty())
return false;
+ // Drop all buffers in image processor.
+ if (image_processor_ && !ResetImageProcessor()) {
+ VLOGF(1) << "Fail to reset image processor";
+ NOTIFY_ERROR(PLATFORM_FAILURE);
+ return false;
+ }
+
DCHECK_EQ(state_, kIdle);
DCHECK(!decoder_flushing_);
SendPictureReady();
@@ -1885,12 +1904,7 @@ bool V4L2SliceVideoDecodeAccelerator::FinishReset() {
// we just checked that surfaces_at_device_.empty(), and inputs are tied
// to surfaces. Since there can be no other owners of input buffers, we can
// simply mark them all as available.
- DCHECK_EQ(input_buffer_queued_count_, 0);
- free_input_buffers_.clear();
- for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
- DCHECK(!input_buffer_map_[i].at_device);
- ReuseInputBuffer(i);
- }
+ DCHECK_EQ(input_queue_->QueuedBuffersCount(), 0u);
decoder_resetting_ = false;
VLOGF(2) << "Reset finished";
@@ -1933,16 +1947,21 @@ bool V4L2SliceVideoDecodeAccelerator::SubmitSlice(
size_t size) {
DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
- InputRecord& input_record = input_buffer_map_[dec_surface->input_record()];
+ V4L2WritableBufferRef& input_buffer = dec_surface->input_buffer();
+ DCHECK(input_buffer.IsValid());
- if (input_record.bytes_used + size > input_record.length) {
+ const size_t plane_size = input_buffer.GetPlaneSize(0);
+ const size_t bytes_used = input_buffer.GetPlaneBytesUsed(0);
+
+ if (bytes_used + size > plane_size) {
VLOGF(1) << "Input buffer too small";
return false;
}
- memcpy(static_cast<uint8_t*>(input_record.address) + input_record.bytes_used,
- data, size);
- input_record.bytes_used += size;
+ uint8_t* mapping = static_cast<uint8_t*>(input_buffer.GetPlaneMapping(0));
+ DCHECK_NE(mapping, nullptr);
+ memcpy(mapping + bytes_used, data, size);
+ input_buffer.SetPlaneBytesUsed(0, bytes_used + size);
return true;
}
@@ -1992,12 +2011,13 @@ void V4L2SliceVideoDecodeAccelerator::OutputSurface(
const scoped_refptr<V4L2DecodeSurface>& dec_surface) {
DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
- OutputRecord& output_record =
- output_buffer_map_[dec_surface->output_record()];
+ DCHECK_EQ(decoded_buffer_map_.count(dec_surface->output_record()), 1u);
+ const size_t output_map_index =
+ decoded_buffer_map_[dec_surface->output_record()];
+ DCHECK_LT(output_map_index, output_buffer_map_.size());
+ OutputRecord& output_record = output_buffer_map_[output_map_index];
- if (output_record.num_times_sent_to_client == 0) {
- DCHECK(!output_record.at_client);
- output_record.at_client = true;
+ if (!output_record.at_client()) {
bool inserted =
surfaces_at_display_
.insert(std::make_pair(output_record.picture_id, dec_surface))
@@ -2005,14 +2025,12 @@ void V4L2SliceVideoDecodeAccelerator::OutputSurface(
DCHECK(inserted);
} else {
// The surface is already sent to client, and not returned back yet.
- DCHECK(output_record.at_client);
DCHECK(surfaces_at_display_.find(output_record.picture_id) !=
surfaces_at_display_.end());
CHECK(surfaces_at_display_[output_record.picture_id].get() ==
dec_surface.get());
}
- DCHECK(!output_record.at_device);
DCHECK_NE(output_record.picture_id, -1);
++output_record.num_times_sent_to_client;
@@ -2029,51 +2047,89 @@ void V4L2SliceVideoDecodeAccelerator::OutputSurface(
output_record.cleared = true;
}
+void V4L2SliceVideoDecodeAccelerator::CheckGLFences() {
+ DVLOGF(4);
+ DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
+
+ while (!surfaces_awaiting_fence_.empty() &&
+ surfaces_awaiting_fence_.front().first->HasCompleted()) {
+ // Buffer at the front of the queue goes back to V4L2Queue's free list
+ // and can be reused.
+ surfaces_awaiting_fence_.pop();
+ }
+
+ // If we have no free buffers available, then preemptively schedule a
+  // call to DecodeBufferTask() in a short time, otherwise we may run out
+ // of buffers because fences will not call back into us once they are
+ // signaled. The delay chosen roughly corresponds to the time a frame is
+ // displayed, which should be optimal in most cases.
+ if (output_queue_->FreeBuffersCount() == 0) {
+ constexpr int64_t kRescheduleDelayMs = 17;
+
+ decoder_thread_.task_runner()->PostDelayedTask(
+ FROM_HERE,
+ base::BindOnce(&V4L2SliceVideoDecodeAccelerator::DecodeBufferTask,
+ base::Unretained(this)),
+ base::TimeDelta::FromMilliseconds(kRescheduleDelayMs));
+ }
+}
+
scoped_refptr<V4L2DecodeSurface>
V4L2SliceVideoDecodeAccelerator::CreateSurface() {
DCHECK(decoder_thread_task_runner_->BelongsToCurrentThread());
DCHECK_EQ(state_, kDecoding);
- TRACE_COUNTER_ID2("media,gpu", "V4L2 input buffers", this, "free",
- free_input_buffers_.size(), "in use",
- input_buffer_map_.size() - free_input_buffers_.size());
+ TRACE_COUNTER_ID2(
+ "media,gpu", "V4L2 input buffers", this, "free",
+ input_queue_->FreeBuffersCount(), "in use",
+ input_queue_->AllocatedBuffersCount() - input_queue_->FreeBuffersCount());
TRACE_COUNTER_ID2("media,gpu", "V4L2 output buffers", this, "free",
- free_output_buffers_.size(), "in use",
- output_buffer_map_.size() - free_output_buffers_.size());
+ output_queue_->FreeBuffersCount(), "in use",
+ output_queue_->AllocatedBuffersCount() -
+                        output_queue_->FreeBuffersCount());
TRACE_COUNTER_ID2("media,gpu", "V4L2 output buffers", this, "at client",
GetNumOfOutputRecordsAtClient(), "at device",
GetNumOfOutputRecordsAtDevice());
- if (free_input_buffers_.empty() || free_output_buffers_.empty())
+ // Release some output buffers if their fence has been signaled.
+ CheckGLFences();
+
+ if (input_queue_->FreeBuffersCount() == 0 ||
+ output_queue_->FreeBuffersCount() == 0)
return nullptr;
- int input = free_input_buffers_.front();
- free_input_buffers_.pop_front();
- int output = free_output_buffers_.front();
- free_output_buffers_.pop_front();
+ V4L2WritableBufferRef input_buffer = input_queue_->GetFreeBuffer();
+ DCHECK(input_buffer.IsValid());
+ // All buffers that are returned to the output free queue have their GL
+ // fence signaled, so we can use them directly.
+ V4L2WritableBufferRef output_buffer = output_queue_->GetFreeBuffer();
+ DCHECK(output_buffer.IsValid());
- InputRecord& input_record = input_buffer_map_[input];
- DCHECK_EQ(input_record.bytes_used, 0u);
- DCHECK_EQ(input_record.input_id, -1);
- DCHECK(decoder_current_bitstream_buffer_ != nullptr);
- input_record.input_id = decoder_current_bitstream_buffer_->input_id;
+ int input = input_buffer.BufferId();
+ int output = output_buffer.BufferId();
scoped_refptr<V4L2DecodeSurface> dec_surface;
if (supports_requests_) {
- auto ret = V4L2RequestDecodeSurface::Create(
- input, output, input_record.request_fd.get(),
- base::BindOnce(&V4L2SliceVideoDecodeAccelerator::ReuseOutputBuffer,
- base::Unretained(this), output));
-
- if (!ret)
+    // Here we just borrow the oldest request to use it, and immediately
+    // put it back at the end of the queue.
+ base::ScopedFD request = std::move(requests_.front());
+ requests_.pop();
+ auto ret = V4L2RequestDecodeSurface::Create(std::move(input_buffer),
+ std::move(output_buffer),
+ nullptr, request.get());
+ requests_.push(std::move(request));
+
+ // Not being able to create the decode surface at this stage is a
+ // fatal error.
+ if (!ret) {
+ NOTIFY_ERROR(PLATFORM_FAILURE);
return nullptr;
+ }
dec_surface = std::move(ret).value();
} else {
dec_surface = new V4L2ConfigStoreDecodeSurface(
- input, output,
- base::BindOnce(&V4L2SliceVideoDecodeAccelerator::ReuseOutputBuffer,
- base::Unretained(this), output));
+ std::move(input_buffer), std::move(output_buffer), nullptr);
}
DVLOGF(4) << "Created surface " << input << " -> " << output;
@@ -2158,14 +2214,138 @@ V4L2SliceVideoDecodeAccelerator::GetSupportedProfiles() {
size_t V4L2SliceVideoDecodeAccelerator::GetNumOfOutputRecordsAtDevice() const {
DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
- return std::count_if(output_buffer_map_.begin(), output_buffer_map_.end(),
- [](const auto& r) { return r.at_device; });
+ return output_queue_->QueuedBuffersCount();
}
size_t V4L2SliceVideoDecodeAccelerator::GetNumOfOutputRecordsAtClient() const {
DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
return std::count_if(output_buffer_map_.begin(), output_buffer_map_.end(),
- [](const auto& r) { return r.at_client; });
+ [](const auto& r) { return r.at_client(); });
+}
+
+void V4L2SliceVideoDecodeAccelerator::ImageProcessorError() {
+ DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
+ VLOGF(1) << "Image processor error";
+ NOTIFY_ERROR(PLATFORM_FAILURE);
+}
+
+bool V4L2SliceVideoDecodeAccelerator::ProcessFrame(
+ V4L2ReadableBufferRef buffer,
+ scoped_refptr<V4L2DecodeSurface> surface) {
+ DVLOGF(4);
+ DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
+
+ scoped_refptr<VideoFrame> input_frame = buffer->GetVideoFrame();
+ DCHECK(input_frame);
+
+ if (image_processor_->output_mode() == ImageProcessor::OutputMode::IMPORT) {
+ // In IMPORT mode we can decide ourselves which IP buffer to use, so choose
+ // the one with the same index number as our decoded buffer.
+ const OutputRecord& output_record = output_buffer_map_[buffer->BufferId()];
+ const scoped_refptr<VideoFrame>& output_frame = output_record.output_frame;
+
+    // A destruction observer will be set on the output frame, so wrap the
+    // imported frame into a new one whose destruction we control.
+ scoped_refptr<VideoFrame> wrapped_frame = VideoFrame::WrapVideoFrame(
+ *output_frame.get(), output_frame->format(),
+ output_frame->visible_rect(), output_frame->coded_size());
+ DCHECK(output_frame != nullptr);
+
+ image_processor_->Process(
+ std::move(input_frame), std::move(wrapped_frame),
+ base::BindOnce(&V4L2SliceVideoDecodeAccelerator::FrameProcessed,
+ base::Unretained(this), surface, buffer->BufferId()));
+ } else {
+    // In ALLOCATE mode we cannot choose which IP buffer will be used; we
+    // only find out once FrameProcessed() is invoked.
+ if (!image_processor_->Process(
+ std::move(input_frame),
+ base::BindOnce(&V4L2SliceVideoDecodeAccelerator::FrameProcessed,
+ base::Unretained(this), surface)))
+ return false;
+ }
+
+ surfaces_at_ip_.push(std::make_pair(std::move(surface), std::move(buffer)));
+
+ return true;
+}
+
+void V4L2SliceVideoDecodeAccelerator::FrameProcessed(
+ scoped_refptr<V4L2DecodeSurface> surface,
+ size_t ip_buffer_index,
+ scoped_refptr<VideoFrame> frame) {
+ DVLOGF(4);
+ DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
+
+ if (IsDestroyPending())
+ return;
+
+ // TODO(crbug.com/921825): Remove this workaround once reset callback is
+ // implemented.
+ if (surfaces_at_ip_.empty() || surfaces_at_ip_.front().first != surface ||
+ output_buffer_map_.empty()) {
+    // This can happen if the image processor is reset.
+    // V4L2SliceVideoDecodeAccelerator::Reset() empties |surfaces_at_ip_|.
+    // During ImageProcessor::Reset(), some FrameProcessed() calls may already
+    // have been posted to |decoder_thread_|. Surfaces are pushed to
+    // |surfaces_at_ip_| in ProcessFrame(). Although we are not sure whether a
+    // new surface can be pushed between Reset() and this FrameProcessed(), we
+    // skip any mismatching surface for safety.
+    // |output_buffer_map_| is cleared in Destroy(). Destroy() destroys the
+    // ImageProcessor, which may call FrameProcessed() in parallel, similarly
+    // to the Reset() case.
+ DVLOGF(4) << "Ignore processed frame after reset";
+ return;
+ }
+
+ DCHECK_LT(ip_buffer_index, output_buffer_map_.size());
+ OutputRecord& ip_output_record = output_buffer_map_[ip_buffer_index];
+
+ // If the picture has not been cleared yet, this means it is the first time
+ // we are seeing this buffer from the image processor. Schedule a call to
+ // CreateGLImageFor before the picture is sent to the client. It is
+ // guaranteed that CreateGLImageFor will complete before the picture is sent
+ // to the client as both events happen on the child thread due to the picture
+ // uncleared status.
+ if (ip_output_record.texture_id != 0 && !ip_output_record.cleared) {
+ DCHECK(frame->HasDmaBufs());
+ child_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&V4L2SliceVideoDecodeAccelerator::CreateGLImageFor,
+ weak_this_, ip_buffer_index, ip_output_record.picture_id,
+ media::DuplicateFDs(frame->DmabufFds()),
+ ip_output_record.client_texture_id,
+ ip_output_record.texture_id, gl_image_size_,
+ gl_image_format_fourcc_));
+ }
+
+ DCHECK(!surfaces_at_ip_.empty());
+ DCHECK_EQ(surfaces_at_ip_.front().first, surface);
+ V4L2ReadableBufferRef decoded_buffer =
+ std::move(surfaces_at_ip_.front().second);
+ surfaces_at_ip_.pop();
+ DCHECK_EQ(decoded_buffer->BufferId(),
+ static_cast<size_t>(surface->output_record()));
+
+ // Keep the decoder buffer until the IP frame is itself released.
+ // We need to keep this V4L2 frame because the decode surface still references
+ // its index and we will use its OutputRecord to reference the IP buffer.
+ frame->AddDestructionObserver(
+ base::BindOnce(&V4L2SliceVideoDecodeAccelerator::ReuseOutputBuffer,
+ base::Unretained(this), decoded_buffer));
+
+ // This holds the IP video frame until everyone is done with it
+ surface->SetReleaseCallback(
+ base::BindOnce([](scoped_refptr<VideoFrame> frame) {}, frame));
+ DCHECK_EQ(decoded_buffer_map_.count(decoded_buffer->BufferId()), 0u);
+ decoded_buffer_map_.emplace(decoded_buffer->BufferId(), ip_buffer_index);
+ surface->SetDecoded();
+
+ TryOutputSurfaces();
+ ProcessPendingEventsIfNeeded();
+ ScheduleDecodeBufferTaskIfNeeded();
}
// base::trace_event::MemoryDumpProvider implementation.
@@ -2176,21 +2356,18 @@ bool V4L2SliceVideoDecodeAccelerator::OnMemoryDump(
DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
// VIDEO_OUTPUT queue's memory usage.
- const size_t input_queue_buffers_count = input_buffer_map_.size();
+ const size_t input_queue_buffers_count =
+ input_queue_->AllocatedBuffersCount();
size_t input_queue_memory_usage = 0;
std::string input_queue_buffers_memory_type =
- V4L2Device::V4L2MemoryToString(V4L2_MEMORY_MMAP);
- for (const auto& input_record : input_buffer_map_) {
- input_queue_memory_usage += input_record.length;
- }
+ V4L2Device::V4L2MemoryToString(input_queue_->GetMemoryType());
+ input_queue_memory_usage += input_queue_->GetMemoryUsage();
// VIDEO_CAPTURE queue's memory usage.
const size_t output_queue_buffers_count = output_buffer_map_.size();
size_t output_queue_memory_usage = 0;
std::string output_queue_buffers_memory_type =
- output_mode_ == Config::OutputMode::ALLOCATE
- ? V4L2Device::V4L2MemoryToString(V4L2_MEMORY_MMAP)
- : V4L2Device::V4L2MemoryToString(V4L2_MEMORY_DMABUF);
+ V4L2Device::V4L2MemoryToString(output_queue_->GetMemoryType());
if (output_mode_ == Config::OutputMode::ALLOCATE) {
     // Call QUERY_BUF here because the lengths of buffers on the VIDEO_CAPTURE
     // queue are not recorded anywhere in V4L2VideoDecodeAccelerator.
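The new CheckGLFences() path keeps surfaces returned by the client in a FIFO (|surfaces_awaiting_fence_|) and only lets them back into the V4L2Queue free list once their GL fence has completed, rescheduling DecodeBufferTask() when no free buffer is left. A minimal standalone sketch of that FIFO bookkeeping follows; it is illustrative only and not part of the patch, with Fence and Surface standing in for gl::GLFenceEGL and V4L2DecodeSurface:

    #include <cstdio>
    #include <memory>
    #include <queue>
    #include <utility>

    // Stand-in for gl::GLFenceEGL: reports whether the GPU is done with the
    // buffer protected by this fence.
    struct Fence {
      bool completed = false;
      bool HasCompleted() const { return completed; }
    };

    struct Surface {
      int id;
    };

    // Mirrors the accelerator's bookkeeping: surfaces returned by the client
    // wait in a FIFO until their fence signals, then become reusable.
    class FenceGatedPool {
     public:
      void Return(std::unique_ptr<Fence> fence, Surface s) {
        awaiting_fence_.push({std::move(fence), s});
      }
      // Equivalent of CheckGLFences(): pop every leading entry whose fence has
      // completed, making the surface available again.
      int Recycle() {
        int recycled = 0;
        while (!awaiting_fence_.empty() &&
               awaiting_fence_.front().first->HasCompleted()) {
          awaiting_fence_.pop();
          ++recycled;
        }
        return recycled;
      }

     private:
      std::queue<std::pair<std::unique_ptr<Fence>, Surface>> awaiting_fence_;
    };

    int main() {
      FenceGatedPool pool;
      auto f1 = std::make_unique<Fence>();
      auto f2 = std::make_unique<Fence>();
      Fence* f1_raw = f1.get();
      pool.Return(std::move(f1), {0});
      pool.Return(std::move(f2), {1});
      std::printf("recycled=%d\n", pool.Recycle());  // 0: nothing signaled yet.
      f1_raw->completed = true;
      std::printf("recycled=%d\n", pool.Recycle());  // 1: only the front entry.
    }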
diff --git a/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.h b/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.h
index f26c553bcb5..3149d59d579 100644
--- a/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.h
+++ b/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.h
@@ -34,6 +34,7 @@
namespace media {
class V4L2DecodeSurface;
+class ImageProcessor;
// An implementation of VideoDecodeAccelerator that utilizes the V4L2 slice
// level codec API for decoding. The slice level API provides only a low-level
@@ -77,32 +78,27 @@ class MEDIA_GPU_EXPORT V4L2SliceVideoDecodeAccelerator
base::trace_event::ProcessMemoryDump* pmd) override;
private:
- // Record for input buffers.
- struct InputRecord {
- InputRecord();
- int32_t input_id;
- void* address;
- size_t length;
- size_t bytes_used;
- bool at_device;
- // Request fd used for this input buffer if request API is used.
- base::ScopedFD request_fd;
- };
// Record for output buffers.
struct OutputRecord {
OutputRecord();
OutputRecord(OutputRecord&&);
~OutputRecord();
- bool at_device;
- bool at_client;
- size_t num_times_sent_to_client;
+
+ // Final output frame (i.e. processed if an image processor is used).
+ // Used only when OutputMode is IMPORT.
+ scoped_refptr<VideoFrame> output_frame;
+
+    // The members below refer to the displayed buffer - this may
+ // be the decoder buffer, or the IP buffer if an IP is in use. In this case,
+ // ip_buffer_index contains the entry number of the IP buffer.
int32_t picture_id;
GLuint client_texture_id;
GLuint texture_id;
- std::unique_ptr<gl::GLFenceEGL> egl_fence;
- std::vector<base::ScopedFD> dmabuf_fds;
bool cleared;
+ size_t num_times_sent_to_client;
+
+ bool at_client() const { return num_times_sent_to_client > 0; }
};
// Decoder state enum.
@@ -140,6 +136,10 @@ class MEDIA_GPU_EXPORT V4L2SliceVideoDecodeAccelerator
// Below methods are used by accelerator implementations.
//
// V4L2DecodeSurfaceHandler implementation.
+
+  // Release surfaces waiting for their fence to be signaled.
+ void CheckGLFences();
+
scoped_refptr<V4L2DecodeSurface> CreateSurface() override;
// SurfaceReady() uses |decoder_display_queue_| to guarantee that decoding
// of |dec_surface| happens in order.
@@ -156,11 +156,8 @@ class MEDIA_GPU_EXPORT V4L2SliceVideoDecodeAccelerator
//
// Internal methods of this class.
//
- // Recycle a V4L2 input buffer with |index| after dequeuing from device.
- void ReuseInputBuffer(int index);
-
// Recycle V4L2 output buffer with |index|. Used as surface release callback.
- void ReuseOutputBuffer(int index);
+ void ReuseOutputBuffer(V4L2ReadableBufferRef buffer);
// Queue a |dec_surface| to device for decoding.
void Enqueue(const scoped_refptr<V4L2DecodeSurface>& dec_surface);
@@ -169,11 +166,15 @@ class MEDIA_GPU_EXPORT V4L2SliceVideoDecodeAccelerator
void Dequeue();
// V4L2 QBUF helpers.
- bool EnqueueInputRecord(const V4L2DecodeSurface* dec_surface);
- bool EnqueueOutputRecord(int index);
+ bool EnqueueInputRecord(V4L2DecodeSurface* dec_surface);
+ bool EnqueueOutputRecord(V4L2DecodeSurface* dec_surface);
// Set input and output formats in hardware.
bool SetupFormats();
+ // Reset image processor and drop all processing frames.
+ bool ResetImageProcessor();
+
+ bool CreateImageProcessor();
// Create input and output buffers.
bool CreateInputBuffers();
@@ -281,13 +282,6 @@ class MEDIA_GPU_EXPORT V4L2SliceVideoDecodeAccelerator
const gfx::Size& size,
uint32_t fourcc);
- // Take the dmabuf |passed_dmabuf_fds|, for |picture_buffer_id|, and use it
- // for OutputRecord at |buffer_index|. The buffer is backed by
- // |passed_dmabuf_fds|, and the OutputRecord takes ownership of them.
- void AssignDmaBufs(size_t buffer_index,
- int32_t picture_buffer_id,
- std::vector<base::ScopedFD> passed_dmabuf_fds);
-
// Performed on decoder_thread_ as a consequence of poll() on decoder_thread_
// returning an event.
void ServiceDeviceTask();
@@ -351,6 +345,15 @@ class MEDIA_GPU_EXPORT V4L2SliceVideoDecodeAccelerator
size_t GetNumOfOutputRecordsAtClient() const;
size_t GetNumOfOutputRecordsAtDevice() const;
+ // Image processor notifies an error.
+ void ImageProcessorError();
+
+ bool ProcessFrame(V4L2ReadableBufferRef buffer,
+ scoped_refptr<V4L2DecodeSurface>);
+ void FrameProcessed(scoped_refptr<V4L2DecodeSurface> surface,
+ size_t ip_buffer_index,
+ scoped_refptr<VideoFrame> frame);
+
size_t input_planes_count_;
size_t output_planes_count_;
@@ -383,27 +386,25 @@ class MEDIA_GPU_EXPORT V4L2SliceVideoDecodeAccelerator
// Thread used to poll the device for events.
base::Thread device_poll_thread_;
- // Input queue state.
- bool input_streamon_;
- // Number of input buffers enqueued to the device.
- int input_buffer_queued_count_;
- // Input buffers ready to use; LIFO since we don't care about ordering.
- std::list<int> free_input_buffers_;
- // Mapping of int index to an input buffer record.
- std::vector<InputRecord> input_buffer_map_;
+ scoped_refptr<V4L2Queue> input_queue_;
// Set to true by CreateInputBuffers() if the codec driver supports requests
bool supports_requests_ = false;
// Stores the media file descriptor if request API is used
base::ScopedFD media_fd_;
- // Output queue state.
- bool output_streamon_;
- // Number of output buffers enqueued to the device.
- int output_buffer_queued_count_;
- // Output buffers ready to use.
- std::list<int> free_output_buffers_;
+ scoped_refptr<V4L2Queue> output_queue_;
+ // Buffers that have been allocated but are awaiting an ImportBuffer
+ // or AssignDmabufs event.
+ std::map<int32_t, V4L2WritableBufferRef> output_wait_map_;
// Mapping of int index to an output buffer record.
std::vector<OutputRecord> output_buffer_map_;
+  // Maps a decoded buffer index to the output record of the buffer to be
+  // displayed. Both indices are the same in most cases, except when we use
+  // an image processor in ALLOCATE mode, in which case the index of the IP
+  // buffer may not match the decoder's.
+ std::map<int32_t, int32_t> decoded_buffer_map_;
+ // FIFO queue of requests, only used if supports_requests_ == true.
+ std::queue<base::ScopedFD> requests_;
VideoCodecProfile video_profile_;
uint32_t input_format_fourcc_;
@@ -445,15 +446,23 @@ class MEDIA_GPU_EXPORT V4L2SliceVideoDecodeAccelerator
std::unique_ptr<AcceleratedVideoDecoder> decoder_;
// Surfaces queued to device to keep references to them while decoded.
- using V4L2DecodeSurfaceByOutputId =
- std::map<int, scoped_refptr<V4L2DecodeSurface>>;
- V4L2DecodeSurfaceByOutputId surfaces_at_device_;
+ std::queue<scoped_refptr<V4L2DecodeSurface>> surfaces_at_device_;
+
+ // Surfaces currently being processed by IP.
+ std::queue<std::pair<scoped_refptr<V4L2DecodeSurface>, V4L2ReadableBufferRef>>
+ surfaces_at_ip_;
// Surfaces sent to client to keep references to them while displayed.
using V4L2DecodeSurfaceByPictureBufferId =
std::map<int32_t, scoped_refptr<V4L2DecodeSurface>>;
V4L2DecodeSurfaceByPictureBufferId surfaces_at_display_;
+  // Queue of surfaces that have been returned by the client, but whose fence
+  // hasn't been signaled yet.
+ std::queue<std::pair<std::unique_ptr<gl::GLFenceEGL>,
+ scoped_refptr<V4L2DecodeSurface>>>
+ surfaces_awaiting_fence_;
+
// Record for decoded pictures that can be sent to PictureReady.
struct PictureRecord {
PictureRecord(bool cleared, const Picture& picture);
@@ -476,6 +485,20 @@ class MEDIA_GPU_EXPORT V4L2SliceVideoDecodeAccelerator
// Callback to set the correct gl context.
MakeGLContextCurrentCallback make_context_current_cb_;
+ // Image processor device, if one is in use.
+ scoped_refptr<V4L2Device> image_processor_device_;
+ // Image processor. Accessed on |decoder_thread_|.
+ std::unique_ptr<ImageProcessor> image_processor_;
+
+ // The V4L2Device GLImage is created from.
+ scoped_refptr<V4L2Device> gl_image_device_;
+ // The format of GLImage.
+ uint32_t gl_image_format_fourcc_;
+ // The logical dimensions of GLImage buffer in pixels.
+ gfx::Size gl_image_size_;
+ // Number of planes for GLImage.
+ size_t gl_image_planes_count_;
+
// The WeakPtrFactory for |weak_this_|.
base::WeakPtrFactory<V4L2SliceVideoDecodeAccelerator> weak_this_factory_;
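The reworked OutputRecord above replaces the at_device/at_client flags with a num_times_sent_to_client counter and a derived at_client() accessor, since the same picture can be sent to the client several times and must not be recycled until the last ReusePictureBuffer(). A small self-contained sketch of that accounting, simplified from the real struct and illustrative only:

    #include <cassert>

    // Simplified view of the new OutputRecord: "at client" is derived from a
    // send counter, so a buffer sent to the client several times only becomes
    // reusable after its last return.
    struct OutputRecord {
      int num_times_sent_to_client = 0;
      bool at_client() const { return num_times_sent_to_client > 0; }
    };

    int main() {
      OutputRecord r;
      r.num_times_sent_to_client++;  // OutputSurface(): first send.
      r.num_times_sent_to_client++;  // OutputSurface(): same buffer sent again.
      assert(r.at_client());
      r.num_times_sent_to_client--;  // ReusePictureBufferTask(): first return.
      assert(r.at_client());         // Still displayed once, not reusable yet.
      r.num_times_sent_to_client--;  // Last return; fence handling can start.
      assert(!r.at_client());
    }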
diff --git a/chromium/media/gpu/v4l2/v4l2_slice_video_decoder.cc b/chromium/media/gpu/v4l2/v4l2_slice_video_decoder.cc
index 57048f19601..0b5286e6ee8 100644
--- a/chromium/media/gpu/v4l2/v4l2_slice_video_decoder.cc
+++ b/chromium/media/gpu/v4l2/v4l2_slice_video_decoder.cc
@@ -17,8 +17,8 @@
#include "media/gpu/gpu_video_decode_accelerator_helpers.h"
#include "media/gpu/linux/dmabuf_video_frame_pool.h"
#include "media/gpu/macros.h"
-#include "media/gpu/v4l2/v4l2_h264_accelerator.h"
-#include "media/gpu/v4l2/v4l2_vp8_accelerator.h"
+#include "media/gpu/v4l2/v4l2_h264_accelerator_legacy.h"
+#include "media/gpu/v4l2/v4l2_vp8_accelerator_legacy.h"
#include "media/gpu/v4l2/v4l2_vp9_accelerator.h"
namespace media {
@@ -68,6 +68,18 @@ bool IsValidFrameForQueueDMABuf(const VideoFrame* frame,
} // namespace
+V4L2SliceVideoDecoder::DecodeRequest::DecodeRequest(
+ scoped_refptr<DecoderBuffer> buf,
+ DecodeCB cb,
+ int32_t id)
+ : buffer(std::move(buf)), decode_cb(std::move(cb)), bitstream_id(id) {}
+
+V4L2SliceVideoDecoder::DecodeRequest::DecodeRequest(DecodeRequest&&) = default;
+V4L2SliceVideoDecoder::DecodeRequest& V4L2SliceVideoDecoder::DecodeRequest::
+operator=(DecodeRequest&&) = default;
+
+V4L2SliceVideoDecoder::DecodeRequest::~DecodeRequest() = default;
+
struct V4L2SliceVideoDecoder::OutputRequest {
enum OutputRequestType {
// The surface to be outputted.
@@ -116,9 +128,10 @@ struct V4L2SliceVideoDecoder::OutputRequest {
// static
std::unique_ptr<VideoDecoder> V4L2SliceVideoDecoder::Create(
scoped_refptr<base::SequencedTaskRunner> client_task_runner,
- std::unique_ptr<DmabufVideoFramePool> frame_pool) {
+ scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
+ GetFramePoolCB get_pool_cb) {
DCHECK(client_task_runner->RunsTasksInCurrentSequence());
- DCHECK(frame_pool);
+ DCHECK(get_pool_cb);
scoped_refptr<V4L2Device> device = V4L2Device::Create();
if (!device) {
@@ -127,7 +140,8 @@ std::unique_ptr<VideoDecoder> V4L2SliceVideoDecoder::Create(
}
return base::WrapUnique<VideoDecoder>(new V4L2SliceVideoDecoder(
- std::move(client_task_runner), std::move(device), std::move(frame_pool)));
+ std::move(client_task_runner), std::move(decoder_task_runner),
+ std::move(device), std::move(get_pool_cb)));
}
// static
@@ -144,21 +158,19 @@ SupportedVideoDecoderConfigs V4L2SliceVideoDecoder::GetSupportedConfigs() {
V4L2SliceVideoDecoder::V4L2SliceVideoDecoder(
scoped_refptr<base::SequencedTaskRunner> client_task_runner,
+ scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
scoped_refptr<V4L2Device> device,
- std::unique_ptr<DmabufVideoFramePool> frame_pool)
+ GetFramePoolCB get_pool_cb)
: device_(std::move(device)),
- frame_pool_(std::move(frame_pool)),
+ get_pool_cb_(std::move(get_pool_cb)),
client_task_runner_(std::move(client_task_runner)),
- decoder_task_runner_(base::CreateSequencedTaskRunnerWithTraits(
- {base::WithBaseSyncPrimitives(), base::TaskPriority::USER_VISIBLE})),
+ decoder_task_runner_(std::move(decoder_task_runner)),
device_poll_thread_("V4L2SliceVideoDecoderDevicePollThread"),
weak_this_factory_(this) {
DETACH_FROM_SEQUENCE(client_sequence_checker_);
DETACH_FROM_SEQUENCE(decoder_sequence_checker_);
VLOGF(2);
weak_this_ = weak_this_factory_.GetWeakPtr();
-
- frame_pool_->set_parent_task_runner(decoder_task_runner_);
}
V4L2SliceVideoDecoder::~V4L2SliceVideoDecoder() {
@@ -221,11 +233,18 @@ void V4L2SliceVideoDecoder::DestroyTask() {
// Stop and Destroy device.
StopStreamV4L2Queue();
- input_queue_->DeallocateBuffers();
- output_queue_->DeallocateBuffers();
+ if (input_queue_) {
+ input_queue_->DeallocateBuffers();
+ input_queue_ = nullptr;
+ }
+ if (output_queue_) {
+ output_queue_->DeallocateBuffers();
+ output_queue_ = nullptr;
+ }
DCHECK(surfaces_at_device_.empty());
weak_this_factory_.InvalidateWeakPtrs();
+
delete this;
VLOGF(2) << "Destroyed";
}
@@ -299,11 +318,15 @@ void V4L2SliceVideoDecoder::InitializeTask(const VideoDecoderConfig& config,
SetState(State::kUninitialized);
}
+ // Setup frame pool.
+ frame_pool_ = get_pool_cb_.Run();
+
// Open V4L2 device.
VideoCodecProfile profile = config.profile();
uint32_t input_format_fourcc =
V4L2Device::VideoCodecProfileToV4L2PixFmt(profile, true);
- if (!device_->Open(V4L2Device::Type::kDecoder, input_format_fourcc)) {
+ if (!input_format_fourcc ||
+ !device_->Open(V4L2Device::Type::kDecoder, input_format_fourcc)) {
VLOGF(1) << "Failed to open device for profile: " << profile
<< " fourcc: " << FourccToString(input_format_fourcc);
client_task_runner_->PostTask(FROM_HERE,
@@ -326,10 +349,10 @@ void V4L2SliceVideoDecoder::InitializeTask(const VideoDecoderConfig& config,
// TODO(akahuang): Check the profile is supported.
if (profile >= H264PROFILE_MIN && profile <= H264PROFILE_MAX) {
avd_.reset(new H264Decoder(
- std::make_unique<V4L2H264Accelerator>(this, device_.get())));
+ std::make_unique<V4L2LegacyH264Accelerator>(this, device_.get())));
} else if (profile >= VP8PROFILE_MIN && profile <= VP8PROFILE_MAX) {
avd_.reset(new VP8Decoder(
- std::make_unique<V4L2VP8Accelerator>(this, device_.get())));
+ std::make_unique<V4L2LegacyVP8Accelerator>(this, device_.get())));
} else if (profile >= VP9PROFILE_MIN && profile <= VP9PROFILE_MAX) {
avd_.reset(new VP9Decoder(
std::make_unique<V4L2VP9Accelerator>(this, device_.get())));
@@ -420,8 +443,8 @@ bool V4L2SliceVideoDecoder::SetupInputFormat(uint32_t input_format_fourcc) {
}
base::Optional<struct v4l2_format>
-V4L2SliceVideoDecoder::SetFormatOnOutputQueue(uint32_t format_fourcc,
- const gfx::Size& size) {
+V4L2SliceVideoDecoder::SetV4L2FormatOnOutputQueue(uint32_t format_fourcc,
+ const gfx::Size& size) {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
struct v4l2_format format = {};
@@ -451,19 +474,26 @@ base::Optional<VideoFrameLayout> V4L2SliceVideoDecoder::SetupOutputFormat(
if (!device_->CanCreateEGLImageFrom(format_fourcc))
continue;
+ base::Optional<struct v4l2_format> format =
+ SetV4L2FormatOnOutputQueue(format_fourcc, size);
+ if (!format)
+ continue;
+
+ // S_FMT is successful. Next make sure VFPool can allocate video frames with
+ // width and height adjusted by a video driver.
+ gfx::Size adjusted_size(format->fmt.pix_mp.width,
+ format->fmt.pix_mp.height);
+
// Make sure VFPool can allocate video frames with width and height.
auto frame_layout =
- UpdateVideoFramePoolFormat(format_fourcc, size, visible_rect);
- if (!frame_layout) {
- continue;
- }
+ UpdateVideoFramePoolFormat(format_fourcc, adjusted_size, visible_rect);
+ if (frame_layout) {
+ if (frame_layout->coded_size() != adjusted_size) {
+ VLOGF(1) << "The size adjusted by VFPool is different from one "
+ << "adjusted by a video driver";
+ continue;
+ }
- // Next S_FMT with the size adjusted by VFPool.
- gfx::Size adjusted_size(frame_layout->planes()[0].stride,
- frame_layout->coded_size().height());
- base::Optional<struct v4l2_format> format =
- SetFormatOnOutputQueue(format_fourcc, adjusted_size);
- if (!format) {
num_output_planes_ = format->fmt.pix_mp.num_planes;
return frame_layout;
}
@@ -795,8 +825,7 @@ scoped_refptr<V4L2DecodeSurface> V4L2SliceVideoDecoder::CreateSurface() {
}
return scoped_refptr<V4L2DecodeSurface>(new V4L2ConfigStoreDecodeSurface(
- std::move(input_buf), std::move(output_buf), std::move(frame),
- base::DoNothing()));
+ std::move(input_buf), std::move(output_buf), std::move(frame)));
}
void V4L2SliceVideoDecoder::ReuseOutputBuffer(V4L2ReadableBufferRef buffer) {
@@ -840,7 +869,7 @@ void V4L2SliceVideoDecoder::DecodeSurface(
DVLOGF(3);
// Enqueue input_buf and output_buf
- dec_surface->input_buffer().PrepareQueueBuffer(dec_surface);
+ dec_surface->input_buffer().PrepareQueueBuffer(*dec_surface);
if (!std::move(dec_surface->input_buffer()).QueueMMap()) {
SetState(State::kError);
return;
@@ -911,7 +940,6 @@ bool V4L2SliceVideoDecoder::StartStreamV4L2Queue() {
bool V4L2SliceVideoDecoder::StopStreamV4L2Queue() {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
- DCHECK_NE(state_, State::kUninitialized);
DVLOGF(3);
if (!device_poll_thread_.IsRunning())
@@ -931,13 +959,12 @@ bool V4L2SliceVideoDecoder::StopStreamV4L2Queue() {
return false;
}
- // Streamoff input queue.
- if (input_queue_->IsStreaming())
+ // Streamoff input and output queue.
+ if (input_queue_)
input_queue_->Streamoff();
-
- // Streamoff output queue.
- if (output_queue_->IsStreaming())
+ if (output_queue_)
output_queue_->Streamoff();
+
while (!surfaces_at_device_.empty())
surfaces_at_device_.pop();
diff --git a/chromium/media/gpu/v4l2/v4l2_slice_video_decoder.h b/chromium/media/gpu/v4l2/v4l2_slice_video_decoder.h
index a552e4b163f..8d5d4fd4c27 100644
--- a/chromium/media/gpu/v4l2/v4l2_slice_video_decoder.h
+++ b/chromium/media/gpu/v4l2/v4l2_slice_video_decoder.h
@@ -38,12 +38,15 @@ class V4L2DecodeSurface;
class MEDIA_GPU_EXPORT V4L2SliceVideoDecoder : public VideoDecoder,
public V4L2DecodeSurfaceHandler {
public:
+ using GetFramePoolCB = base::RepeatingCallback<DmabufVideoFramePool*()>;
+
// Create V4L2SliceVideoDecoder instance. The success of the creation doesn't
// ensure V4L2SliceVideoDecoder is available on the device. It will be
// determined in Initialize().
static std::unique_ptr<VideoDecoder> Create(
scoped_refptr<base::SequencedTaskRunner> client_task_runner,
- std::unique_ptr<DmabufVideoFramePool> frame_pool);
+ scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
+ GetFramePoolCB get_pool_cb);
static SupportedVideoDecoderConfigs GetSupportedConfigs();
@@ -80,8 +83,9 @@ class MEDIA_GPU_EXPORT V4L2SliceVideoDecoder : public VideoDecoder,
V4L2SliceVideoDecoder(
scoped_refptr<base::SequencedTaskRunner> client_task_runner,
+ scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
scoped_refptr<V4L2Device> device,
- std::unique_ptr<DmabufVideoFramePool> frame_pool);
+ GetFramePoolCB get_pool_cb);
~V4L2SliceVideoDecoder() override;
void Destroy() override;
@@ -94,12 +98,13 @@ class MEDIA_GPU_EXPORT V4L2SliceVideoDecoder : public VideoDecoder,
// The identifier for the decoder buffer.
int32_t bitstream_id;
- DecodeRequest(scoped_refptr<DecoderBuffer> buf, DecodeCB cb, int32_t id)
- : buffer(std::move(buf)), decode_cb(std::move(cb)), bitstream_id(id) {}
+ DecodeRequest(scoped_refptr<DecoderBuffer> buf, DecodeCB cb, int32_t id);
// Allow move, but not copy
- DecodeRequest(DecodeRequest&&) = default;
- DecodeRequest& operator=(DecodeRequest&&) = default;
+ DecodeRequest(DecodeRequest&&);
+ DecodeRequest& operator=(DecodeRequest&&);
+
+ ~DecodeRequest();
DISALLOW_COPY_AND_ASSIGN(DecodeRequest);
};
@@ -143,7 +148,7 @@ class MEDIA_GPU_EXPORT V4L2SliceVideoDecoder : public VideoDecoder,
// Call VIDIOC_S_FMT with |format_fourcc| and |size|. Returns v4l2_format
// returned by VIDIOC_S_FMT on success, otherwise returns base::nullopt.
// This should be called only from SetupOutputFormat().
- base::Optional<struct v4l2_format> SetFormatOnOutputQueue(
+ base::Optional<struct v4l2_format> SetV4L2FormatOnOutputQueue(
uint32_t format_fourcc,
const gfx::Size& size);
// Setup format for output queue. This function sets output format on output
@@ -217,7 +222,8 @@ class MEDIA_GPU_EXPORT V4L2SliceVideoDecoder : public VideoDecoder,
// V4L2 device in use.
scoped_refptr<V4L2Device> device_;
// VideoFrame manager used to allocate and recycle video frame.
- std::unique_ptr<DmabufVideoFramePool> frame_pool_;
+ GetFramePoolCB get_pool_cb_;
+ DmabufVideoFramePool* frame_pool_ = nullptr;
// Video decoder used to parse stream headers by software.
std::unique_ptr<AcceleratedVideoDecoder> avd_;
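With this change the decoder no longer owns a DmabufVideoFramePool; it stores a GetFramePoolCB and fetches the pool lazily during initialization. The sketch below shows the same deferred-acquisition pattern using std::function as a stand-in for base::RepeatingCallback; the types are simplified placeholders, not the actual Chromium classes:

    #include <cstdio>
    #include <functional>

    // Stand-ins for DmabufVideoFramePool and GetFramePoolCB.
    struct FramePool {
      const char* name;
    };

    using GetFramePoolCB = std::function<FramePool*()>;

    class Decoder {
     public:
      explicit Decoder(GetFramePoolCB get_pool_cb)
          : get_pool_cb_(std::move(get_pool_cb)) {}

      // The pool is no longer injected at construction; it is fetched when
      // initialization runs, mirroring frame_pool_ = get_pool_cb_.Run().
      void Initialize() { frame_pool_ = get_pool_cb_(); }

      void Decode() const { std::printf("using pool: %s\n", frame_pool_->name); }

     private:
      GetFramePoolCB get_pool_cb_;
      FramePool* frame_pool_ = nullptr;  // Not owned.
    };

    int main() {
      static FramePool pool{"dmabuf-pool"};
      Decoder decoder([] { return &pool; });
      decoder.Initialize();
      decoder.Decode();
    }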
diff --git a/chromium/media/gpu/v4l2/v4l2_stateful_workaround.cc b/chromium/media/gpu/v4l2/v4l2_stateful_workaround.cc
index 8ce8798497a..ceab3a8f450 100644
--- a/chromium/media/gpu/v4l2/v4l2_stateful_workaround.cc
+++ b/chromium/media/gpu/v4l2/v4l2_stateful_workaround.cc
@@ -76,6 +76,10 @@ SupportResolutionChecker::CreateIfNeeded(V4L2Device::Type device_type,
constexpr uint32_t supported_input_fourccs[] = {
V4L2_PIX_FMT_VP8,
};
+
+ // Recreate the V4L2 device in order to close the opened decoder, since
+ // we are about to query the supported decode profiles.
+ device = V4L2Device::Create();
auto supported_profiles = device->GetSupportedDecodeProfiles(
base::size(supported_input_fourccs), supported_input_fourccs);
SupportedProfileMap supported_profile_map;
diff --git a/chromium/media/gpu/v4l2/v4l2_vda_helpers.cc b/chromium/media/gpu/v4l2/v4l2_vda_helpers.cc
new file mode 100644
index 00000000000..5d9743f9100
--- /dev/null
+++ b/chromium/media/gpu/v4l2/v4l2_vda_helpers.cc
@@ -0,0 +1,152 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/v4l2/v4l2_vda_helpers.h"
+
+#include "media/gpu/macros.h"
+#include "media/gpu/v4l2/v4l2_device.h"
+#include "media/gpu/v4l2/v4l2_image_processor.h"
+
+namespace media {
+namespace v4l2_vda_helpers {
+
+namespace {
+base::Optional<VideoFrameLayout> CreateLayout(uint32_t fourcc,
+ const gfx::Size& size) {
+  // V4L2-specific format hack:
+  // If the VDA's output format is V4L2_PIX_FMT_MT21C, a platform-specific
+  // format currently used only for MT8173 VDA output and its image processor
+  // input, we create the image processor's input VideoFrameLayout with
+  // PIXEL_FORMAT_NV12, since NV12's layout is the same as MT21's.
+ size_t num_planes;
+ switch (fourcc) {
+ case V4L2_PIX_FMT_MT21C:
+ case V4L2_PIX_FMT_MM21:
+ num_planes = 2;
+ return VideoFrameLayout::CreateMultiPlanar(
+ PIXEL_FORMAT_NV12, size,
+ std::vector<VideoFrameLayout::Plane>(num_planes));
+
+ default:
+ VideoPixelFormat pixel_format =
+ V4L2Device::V4L2PixFmtToVideoPixelFormat(fourcc);
+ if (pixel_format == PIXEL_FORMAT_UNKNOWN)
+ return base::nullopt;
+ num_planes = V4L2Device::GetNumPlanesOfV4L2PixFmt(fourcc);
+ if (num_planes == 1)
+ return VideoFrameLayout::Create(pixel_format, size);
+ else
+ return VideoFrameLayout::CreateMultiPlanar(
+ pixel_format, size,
+ std::vector<VideoFrameLayout::Plane>(num_planes));
+ break;
+ }
+}
+} // namespace
+
+uint32_t FindImageProcessorInputFormat(V4L2Device* vda_device) {
+ std::vector<uint32_t> processor_input_formats =
+ V4L2ImageProcessor::GetSupportedInputFormats();
+
+ struct v4l2_fmtdesc fmtdesc = {};
+ fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ while (vda_device->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0) {
+ if (std::find(processor_input_formats.begin(),
+ processor_input_formats.end(),
+ fmtdesc.pixelformat) != processor_input_formats.end()) {
+ DVLOGF(3) << "Image processor input format=" << fmtdesc.description;
+ return fmtdesc.pixelformat;
+ }
+ ++fmtdesc.index;
+ }
+ return 0;
+}
+
+uint32_t FindImageProcessorOutputFormat(V4L2Device* ip_device) {
+  // Prefer YVU420 and NV12 because ArcGpuVideoDecodeAccelerator only supports
+  // formats with a single physical plane.
+ static constexpr uint32_t kPreferredFormats[] = {V4L2_PIX_FMT_NV12,
+ V4L2_PIX_FMT_YVU420};
+ auto preferred_formats_first = [](uint32_t a, uint32_t b) -> bool {
+ auto* iter_a = std::find(std::begin(kPreferredFormats),
+ std::end(kPreferredFormats), a);
+ auto* iter_b = std::find(std::begin(kPreferredFormats),
+ std::end(kPreferredFormats), b);
+ return iter_a < iter_b;
+ };
+
+ std::vector<uint32_t> processor_output_formats =
+ V4L2ImageProcessor::GetSupportedOutputFormats();
+
+ // Move the preferred formats to the front.
+ std::sort(processor_output_formats.begin(), processor_output_formats.end(),
+ preferred_formats_first);
+
+ for (uint32_t processor_output_format : processor_output_formats) {
+ if (ip_device->CanCreateEGLImageFrom(processor_output_format)) {
+ DVLOGF(3) << "Image processor output format=" << processor_output_format;
+ return processor_output_format;
+ }
+ }
+
+ return 0;
+}
+
+std::unique_ptr<ImageProcessor> CreateImageProcessor(
+ uint32_t vda_output_format,
+ uint32_t ip_output_format,
+ const gfx::Size& vda_output_coded_size,
+ const gfx::Size& ip_output_coded_size,
+ const gfx::Size& visible_size,
+ size_t nb_buffers,
+ scoped_refptr<V4L2Device> image_processor_device,
+ ImageProcessor::OutputMode image_processor_output_mode,
+ ImageProcessor::ErrorCB error_cb) {
+ base::Optional<VideoFrameLayout> input_layout =
+ CreateLayout(vda_output_format, vda_output_coded_size);
+ if (!input_layout) {
+ VLOGF(1) << "Invalid input layout";
+ return nullptr;
+ }
+
+ base::Optional<VideoFrameLayout> output_layout =
+ CreateLayout(ip_output_format, ip_output_coded_size);
+ if (!output_layout) {
+ VLOGF(1) << "Invalid output layout";
+ return nullptr;
+ }
+
+ // TODO(crbug.com/917798): Use ImageProcessorFactory::Create() once we remove
+ // |image_processor_device_| from V4L2VideoDecodeAccelerator.
+ auto image_processor = V4L2ImageProcessor::Create(
+ image_processor_device,
+ ImageProcessor::PortConfig(*input_layout, vda_output_format, visible_size,
+ {VideoFrame::STORAGE_DMABUFS}),
+ ImageProcessor::PortConfig(*output_layout, visible_size,
+ {VideoFrame::STORAGE_DMABUFS}),
+ image_processor_output_mode, nb_buffers, std::move(error_cb));
+ if (!image_processor)
+ return nullptr;
+
+ if (image_processor->output_layout().coded_size() != ip_output_coded_size) {
+ VLOGF(1) << "Image processor should be able to use the requested output "
+ << "coded size " << ip_output_coded_size.ToString()
+ << " without adjusting to "
+ << image_processor->output_layout().coded_size().ToString();
+ return nullptr;
+ }
+
+ if (image_processor->input_layout().coded_size() != vda_output_coded_size) {
+ VLOGF(1) << "Image processor should be able to take the output coded "
+ << "size of decoder " << vda_output_coded_size.ToString()
+ << " without adjusting to "
+ << image_processor->input_layout().coded_size().ToString();
+ return nullptr;
+ }
+
+ return image_processor;
+}
+
+} // namespace v4l2_vda_helpers
+} // namespace media
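
The preferred_formats_first comparator in FindImageProcessorOutputFormat() above works because std::find returns the end iterator for any fourcc not listed in kPreferredFormats, and that iterator compares greater than every in-array position, so unlisted formats sort behind the preferred ones. A standalone sketch of the same trick, using made-up integer values in place of the V4L2 fourccs:

#include <algorithm>
#include <iterator>
#include <vector>

// Illustrative stand-ins for the preferred fourccs.
static constexpr unsigned kPreferred[] = {1, 2};

static bool PreferredFirst(unsigned a, unsigned b) {
  // Values missing from kPreferred map to std::end(kPreferred), which is
  // greater than any in-array position, so they sort to the back.
  auto* iter_a = std::find(std::begin(kPreferred), std::end(kPreferred), a);
  auto* iter_b = std::find(std::begin(kPreferred), std::end(kPreferred), b);
  return iter_a < iter_b;
}

int main() {
  std::vector<unsigned> formats = {7, 2, 9, 1};
  std::sort(formats.begin(), formats.end(), PreferredFirst);
  // formats now starts with 1, 2 (in kPreferred order); 7 and 9 follow in
  // unspecified relative order.
  return 0;
}
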
diff --git a/chromium/media/gpu/v4l2/v4l2_vda_helpers.h b/chromium/media/gpu/v4l2/v4l2_vda_helpers.h
new file mode 100644
index 00000000000..f63be23732e
--- /dev/null
+++ b/chromium/media/gpu/v4l2/v4l2_vda_helpers.h
@@ -0,0 +1,58 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_V4L2_V4L2_VDA_HELPERS_H_
+#define MEDIA_GPU_V4L2_V4L2_VDA_HELPERS_H_
+
+#include <memory>
+
+#include "base/memory/scoped_refptr.h"
+#include "media/gpu/image_processor.h"
+#include "ui/gfx/geometry/size.h"
+
+namespace media {
+
+class V4L2Device;
+
+// Helper static methods to be shared between V4L2VideoDecodeAccelerator and
+// V4L2SliceVideoDecodeAccelerator. This avoids some code duplication between
+// these very similar classes.
+// Note: this namespace can be removed once the V4L2VDA is deprecated.
+namespace v4l2_vda_helpers {
+
+// Returns a usable input format of image processor. Return 0 if not found.
+uint32_t FindImageProcessorInputFormat(V4L2Device* vda_device);
+// Return a usable output format of image processor. Return 0 if not found.
+uint32_t FindImageProcessorOutputFormat(V4L2Device* ip_device);
+
+// Create and return an image processor for the given parameters, or nullptr
+// if it cannot be created.
+//
+// |vda_output_format| is the output format of the VDA, i.e. the IP's input
+// format.
+// |ip_output_format| is the output format that the IP must produce.
+// |vda_output_coded_size| is the coded size of the VDA output buffers (i.e.
+// the input coded size for the IP).
+// |ip_output_coded_size| is the coded size of the output buffers that the IP
+// must produce.
+// |visible_size| is the visible size of both the input and output buffers.
+// |nb_buffers| is the exact number of output buffers that the IP must create.
+// |image_processor_output_mode| specifies whether the IP must allocate its
+// own buffers or rely on imported ones.
+// |error_cb| is the error callback passed to V4L2ImageProcessor::Create().
+std::unique_ptr<ImageProcessor> CreateImageProcessor(
+ uint32_t vda_output_format,
+ uint32_t ip_output_format,
+ const gfx::Size& vda_output_coded_size,
+ const gfx::Size& ip_output_coded_size,
+ const gfx::Size& visible_size,
+ size_t nb_buffers,
+ scoped_refptr<V4L2Device> image_processor_device,
+ ImageProcessor::OutputMode image_processor_output_mode,
+ ImageProcessor::ErrorCB error_cb);
+
+} // namespace v4l2_vda_helpers
+} // namespace media
+
+#endif // MEDIA_GPU_V4L2_V4L2_VDA_HELPERS_H_
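
For orientation, a hypothetical call site wiring the documented parameters together; the fourccs, sizes, buffer count and error handler below are placeholders chosen for illustration, not values taken from the decoders:

// Sketch only; every concrete value here is an assumption for illustration.
auto image_processor = v4l2_vda_helpers::CreateImageProcessor(
    V4L2_PIX_FMT_MT21C,      // vda_output_format (the IP's input format)
    V4L2_PIX_FMT_NV12,       // ip_output_format
    gfx::Size(1920, 1088),   // vda_output_coded_size
    gfx::Size(1920, 1088),   // ip_output_coded_size
    gfx::Size(1920, 1080),   // visible_size
    /*nb_buffers=*/8,
    image_processor_device,  // scoped_refptr<V4L2Device>, assumed already created
    ImageProcessor::OutputMode::IMPORT,
    base::BindRepeating(&OnImageProcessorError));  // hypothetical error handler
if (!image_processor) {
  // nullptr means a layout could not be built or the image processor adjusted
  // the requested coded sizes.
}
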
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.cc
index 65e967639d4..e5c7e942803 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.cc
@@ -35,6 +35,7 @@
#include "media/gpu/macros.h"
#include "media/gpu/v4l2/v4l2_image_processor.h"
#include "media/gpu/v4l2/v4l2_stateful_workaround.h"
+#include "media/gpu/v4l2/v4l2_vda_helpers.h"
#include "media/video/h264_parser.h"
#include "ui/gfx/geometry/rect.h"
#include "ui/gl/gl_context.h"
@@ -290,7 +291,8 @@ bool V4L2VideoDecodeAccelerator::CheckConfig(const Config& config) {
input_format_fourcc_ =
V4L2Device::VideoCodecProfileToV4L2PixFmt(video_profile_, false);
- if (!device_->Open(V4L2Device::Type::kDecoder, input_format_fourcc_)) {
+ if (!input_format_fourcc_ ||
+ !device_->Open(V4L2Device::Type::kDecoder, input_format_fourcc_)) {
VLOGF(1) << "Failed to open device for profile: " << config.profile
<< " fourcc: " << FourccToString(input_format_fourcc_);
return false;
@@ -2328,12 +2330,14 @@ bool V4L2VideoDecodeAccelerator::SetupFormats() {
VLOGF(1) << "Image processor not available";
return false;
}
- output_format_fourcc_ = FindImageProcessorInputFormat();
+ output_format_fourcc_ =
+ v4l2_vda_helpers::FindImageProcessorInputFormat(device_.get());
if (output_format_fourcc_ == 0) {
VLOGF(1) << "Can't find a usable input format from image processor";
return false;
}
- egl_image_format_fourcc_ = FindImageProcessorOutputFormat();
+ egl_image_format_fourcc_ =
+ v4l2_vda_helpers::FindImageProcessorOutputFormat(device_.get());
if (egl_image_format_fourcc_ == 0) {
VLOGF(1) << "Can't find a usable output format from image processor";
return false;
@@ -2361,56 +2365,6 @@ bool V4L2VideoDecodeAccelerator::SetupFormats() {
return true;
}
-uint32_t V4L2VideoDecodeAccelerator::FindImageProcessorInputFormat() {
- std::vector<uint32_t> processor_input_formats =
- V4L2ImageProcessor::GetSupportedInputFormats();
-
- struct v4l2_fmtdesc fmtdesc;
- memset(&fmtdesc, 0, sizeof(fmtdesc));
- fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
- while (device_->Ioctl(VIDIOC_ENUM_FMT, &fmtdesc) == 0) {
- if (std::find(processor_input_formats.begin(),
- processor_input_formats.end(),
- fmtdesc.pixelformat) != processor_input_formats.end()) {
- DVLOGF(3) << "Image processor input format=" << fmtdesc.description;
- return fmtdesc.pixelformat;
- }
- ++fmtdesc.index;
- }
- return 0;
-}
-
-uint32_t V4L2VideoDecodeAccelerator::FindImageProcessorOutputFormat() {
- // Prefer YVU420 and NV12 because ArcGpuVideoDecodeAccelerator only supports
- // single physical plane. Prefer YVU420 over NV12 because chrome rendering
- // supports YV12 only.
- static const uint32_t kPreferredFormats[] = {V4L2_PIX_FMT_YVU420,
- V4L2_PIX_FMT_NV12};
- auto preferred_formats_first = [](uint32_t a, uint32_t b) -> bool {
- auto* iter_a = std::find(std::begin(kPreferredFormats),
- std::end(kPreferredFormats), a);
- auto* iter_b = std::find(std::begin(kPreferredFormats),
- std::end(kPreferredFormats), b);
- return iter_a < iter_b;
- };
-
- std::vector<uint32_t> processor_output_formats =
- V4L2ImageProcessor::GetSupportedOutputFormats();
-
- // Move the preferred formats to the front.
- std::sort(processor_output_formats.begin(), processor_output_formats.end(),
- preferred_formats_first);
-
- for (uint32_t processor_output_format : processor_output_formats) {
- if (device_->CanCreateEGLImageFrom(processor_output_format)) {
- DVLOGF(3) << "Image processor output format=" << processor_output_format;
- return processor_output_format;
- }
- }
-
- return 0;
-}
-
bool V4L2VideoDecodeAccelerator::ResetImageProcessor() {
VLOGF(2);
DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread());
@@ -2432,68 +2386,23 @@ bool V4L2VideoDecodeAccelerator::CreateImageProcessor() {
(output_mode_ == Config::OutputMode::ALLOCATE
? ImageProcessor::OutputMode::ALLOCATE
: ImageProcessor::OutputMode::IMPORT);
- size_t num_planes =
- V4L2Device::GetNumPlanesOfV4L2PixFmt(output_format_fourcc_);
- base::Optional<VideoFrameLayout> input_layout;
- if (num_planes == 1) {
- input_layout = VideoFrameLayout::Create(
- V4L2Device::V4L2PixFmtToVideoPixelFormat(output_format_fourcc_),
- coded_size_);
- } else {
- input_layout = VideoFrameLayout::CreateMultiPlanar(
- V4L2Device::V4L2PixFmtToVideoPixelFormat(output_format_fourcc_),
- coded_size_, std::vector<VideoFrameLayout::Plane>(num_planes));
- }
- if (!input_layout) {
- VLOGF(1) << "Invalid input layout";
- return false;
- }
-
- base::Optional<VideoFrameLayout> output_layout;
- num_planes = V4L2Device::GetNumPlanesOfV4L2PixFmt(egl_image_format_fourcc_);
- if (num_planes == 1) {
- output_layout = VideoFrameLayout::Create(
- V4L2Device::V4L2PixFmtToVideoPixelFormat(egl_image_format_fourcc_),
- egl_image_size_);
- } else {
- output_layout = VideoFrameLayout::CreateMultiPlanar(
- V4L2Device::V4L2PixFmtToVideoPixelFormat(egl_image_format_fourcc_),
- egl_image_size_, std::vector<VideoFrameLayout::Plane>(num_planes));
- }
- if (!output_layout) {
- VLOGF(1) << "Invalid output layout";
- return false;
- }
- // Unretained(this) is safe for ErrorCB because |decoder_thread_| is owned by
- // this V4L2VideoDecodeAccelerator and |this| must be valid when ErrorCB is
- // executed.
- // TODO(crbug.com/917798): Use ImageProcessorFactory::Create() once we remove
- // |image_processor_device_| from V4L2VideoDecodeAccelerator.
- image_processor_ = V4L2ImageProcessor::Create(
- image_processor_device_,
- ImageProcessor::PortConfig(*input_layout, visible_size_,
- {VideoFrame::STORAGE_DMABUFS}),
- ImageProcessor::PortConfig(*output_layout, visible_size_,
- {VideoFrame::STORAGE_DMABUFS}),
- image_processor_output_mode, output_buffer_map_.size(),
+ image_processor_ = v4l2_vda_helpers::CreateImageProcessor(
+ output_format_fourcc_, egl_image_format_fourcc_, coded_size_,
+ egl_image_size_, visible_size_, output_buffer_map_.size(),
+ image_processor_device_, image_processor_output_mode,
+ // Unretained(this) is safe for ErrorCB because |decoder_thread_| is owned
+ // by this V4L2VideoDecodeAccelerator and |this| must be valid when
+ // ErrorCB is executed.
base::BindRepeating(&V4L2VideoDecodeAccelerator::ImageProcessorError,
base::Unretained(this)));
if (!image_processor_) {
- VLOGF(1) << "Initialize image processor failed";
- NOTIFY_ERROR(PLATFORM_FAILURE);
- return false;
- }
- DCHECK(image_processor_->output_layout().coded_size() == egl_image_size_);
- if (image_processor_->input_layout().coded_size() != coded_size_) {
- VLOGF(1) << "Image processor should be able to take the output coded "
- << "size of decoder " << coded_size_.ToString()
- << " without adjusting to "
- << image_processor_->input_layout().coded_size().ToString();
+ VLOGF(1) << "Error creating image processor";
NOTIFY_ERROR(PLATFORM_FAILURE);
return false;
}
+
return true;
}
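
The Unretained(this) comment carried over above relies on a common Chromium lifetime argument: a raw pointer captured in a callback is safe when the callback can only run on a thread that the object owns and stops before it is destroyed. A reduced sketch of that relationship, with illustrative class and member names:

#include "base/bind.h"
#include "base/location.h"
#include "base/threading/thread.h"

class Owner {
 public:
  Owner() { worker_.Start(); }
  ~Owner() {
    // Stopping the owned thread first guarantees no queued callback can still
    // dereference |this| after destruction.
    worker_.Stop();
  }

  void Post() {
    worker_.task_runner()->PostTask(
        FROM_HERE, base::BindOnce(&Owner::OnWork, base::Unretained(this)));
  }

 private:
  void OnWork() {}  // Runs on |worker_|, which |this| outlives.

  base::Thread worker_{"owner_worker"};
};
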
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.h b/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.h
index e42749a1fa1..f99d0d69507 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.h
+++ b/chromium/media/gpu/v4l2/v4l2_video_decode_accelerator.h
@@ -391,10 +391,6 @@ class MEDIA_GPU_EXPORT V4L2VideoDecodeAccelerator
// Set input and output formats before starting decode.
bool SetupFormats();
- // Return a usable input format of image processor. Return 0 if not found.
- uint32_t FindImageProcessorInputFormat();
- // Return a usable output format of image processor. Return 0 if not found.
- uint32_t FindImageProcessorOutputFormat();
// Reset image processor and drop all processing frames.
bool ResetImageProcessor();
diff --git a/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc
index efe7fc71acb..87998bc892a 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc
@@ -16,7 +16,9 @@
#include <utility>
#include "base/bind.h"
+#include "base/bits.h"
#include "base/callback.h"
+#include "base/callback_helpers.h"
#include "base/command_line.h"
#include "base/numerics/safe_conversions.h"
#include "base/single_thread_task_runner.h"
@@ -32,6 +34,7 @@
#include "media/gpu/gpu_video_encode_accelerator_helpers.h"
#include "media/gpu/image_processor_factory.h"
#include "media/gpu/macros.h"
+#include "media/video/h264_level_limits.h"
#include "media/video/h264_parser.h"
#define NOTIFY_ERROR(x) \
@@ -97,17 +100,16 @@ struct V4L2VideoEncodeAccelerator::BitstreamBufferRef {
const std::unique_ptr<UnalignedSharedMemory> shm;
};
-V4L2VideoEncodeAccelerator::InputRecord::InputRecord() : at_device(false) {}
+V4L2VideoEncodeAccelerator::InputRecord::InputRecord() = default;
V4L2VideoEncodeAccelerator::InputRecord::InputRecord(const InputRecord&) =
default;
-V4L2VideoEncodeAccelerator::InputRecord::~InputRecord() {}
+V4L2VideoEncodeAccelerator::InputRecord::~InputRecord() = default;
-V4L2VideoEncodeAccelerator::OutputRecord::OutputRecord()
- : at_device(false), address(nullptr), length(0) {}
+V4L2VideoEncodeAccelerator::OutputRecord::OutputRecord() = default;
-V4L2VideoEncodeAccelerator::OutputRecord::~OutputRecord() {}
+V4L2VideoEncodeAccelerator::OutputRecord::~OutputRecord() = default;
V4L2VideoEncodeAccelerator::InputFrameInfo::InputFrameInfo()
: InputFrameInfo(nullptr, false) {}
@@ -137,11 +139,7 @@ V4L2VideoEncodeAccelerator::V4L2VideoEncodeAccelerator(
output_format_fourcc_(0),
encoder_state_(kUninitialized),
device_(device),
- input_streamon_(false),
- input_buffer_queued_count_(0),
input_memory_type_(V4L2_MEMORY_USERPTR),
- output_streamon_(false),
- output_buffer_queued_count_(0),
is_flush_supported_(false),
encoder_thread_("V4L2EncoderThread"),
device_poll_thread_("V4L2EncoderDevicePollThread"),
@@ -217,10 +215,22 @@ void V4L2VideoEncodeAccelerator::InitializeTask(const Config& config,
bool* result,
base::WaitableEvent* done) {
DCHECK(encoder_thread_.task_runner()->BelongsToCurrentThread());
+
+ // Signal the event when leaving the method.
+ base::ScopedClosureRunner signal_event(
+ base::BindOnce(&base::WaitableEvent::Signal, base::Unretained(done)));
*result = false;
+
+ input_queue_ = device_->GetQueue(V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
+ output_queue_ = device_->GetQueue(V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
+ if (!input_queue_ || !output_queue_) {
+ VLOGF(1) << "Failed to get V4L2Queue.";
+ NOTIFY_ERROR(kPlatformFailureError);
+ return;
+ }
+
if (!SetFormats(config.input_format, config.output_profile)) {
VLOGF(1) << "Failed setting up formats";
- done->Signal();
return;
}
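
The base::ScopedClosureRunner added above signals |done| from its destructor, which is why the explicit done->Signal() calls can be dropped from every early-return path in InitializeTask(). The idiom in isolation, with a hypothetical setup step standing in for the real initialization:

#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/synchronization/waitable_event.h"

bool DoOneSetupStep();  // Hypothetical helper, declared elsewhere.

void InitOnWorkerThread(bool* result, base::WaitableEvent* done) {
  // The runner fires when it goes out of scope, so |done| is signalled on
  // every path out of this function, including all early returns.
  base::ScopedClosureRunner signal_event(
      base::BindOnce(&base::WaitableEvent::Signal, base::Unretained(done)));
  *result = false;

  if (!DoOneSetupStep())
    return;  // |done| is still signalled here.

  *result = true;
}
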
@@ -236,30 +246,22 @@ void V4L2VideoEncodeAccelerator::InitializeTask(const Config& config,
VideoFrame::NumPlanes(config.input_format)));
if (!input_layout) {
VLOGF(1) << "Invalid image processor input layout";
- done->Signal();
return;
}
if (!CreateImageProcessor(*input_layout, *device_input_layout_,
visible_size_)) {
VLOGF(1) << "Failed to create image processor";
- done->Signal();
return;
}
}
- if (!InitInputMemoryType(config)) {
- done->Signal();
+ if (!InitInputMemoryType(config))
return;
- }
- if (!InitControls(config)) {
- done->Signal();
+ if (!InitControls(config))
return;
- }
- if (!CreateOutputBuffers()) {
- done->Signal();
+ if (!CreateOutputBuffers())
return;
- }
encoder_state_ = kInitialized;
RequestEncodingParametersChangeTask(
@@ -275,7 +277,6 @@ void V4L2VideoEncodeAccelerator::InitializeTask(const Config& config,
// Finish initialization.
*result = true;
- done->Signal();
}
bool V4L2VideoEncodeAccelerator::CreateImageProcessor(
@@ -613,6 +614,18 @@ void V4L2VideoEncodeAccelerator::EncodeTask(scoped_refptr<VideoFrame> frame,
return;
}
+ if (frame &&
+ !ReconfigureFormatIfNeeded(frame->format(), frame->coded_size())) {
+ NOTIFY_ERROR(kInvalidArgumentError);
+ encoder_state_ = kError;
+ return;
+ }
+
+ // If a video frame to be encoded is fed, then call VIDIOC_REQBUFS if it has
+ // not been called yet.
+ if (frame && input_buffer_map_.empty() && !CreateInputBuffers())
+ return;
+
if (image_processor_) {
image_processor_input_queue_.emplace(std::move(frame), force_keyframe);
InputImageProcessorTask();
@@ -622,6 +635,80 @@ void V4L2VideoEncodeAccelerator::EncodeTask(scoped_refptr<VideoFrame> frame,
}
}
+bool V4L2VideoEncodeAccelerator::ReconfigureFormatIfNeeded(
+ VideoPixelFormat format,
+ const gfx::Size& new_frame_size) {
+ // We should apply the frame size change to ImageProcessor if there is.
+ if (image_processor_) {
+ // The stride is unchanged, so there is no need to execute S_FMT again.
+ if (image_processor_->input_layout().coded_size() == new_frame_size) {
+ return true;
+ }
+
+ VLOGF(2) << "Call S_FMT with a new size=" << new_frame_size.ToString()
+ << ", the previous size ="
+ << device_input_layout_->coded_size().ToString();
+ if (!input_buffer_map_.empty()) {
+ VLOGF(1) << "Input frame size is changed during encoding";
+ NOTIFY_ERROR(kInvalidArgumentError);
+ return false;
+ }
+
+ // TODO(hiroh): Decide the appropriate planar layout in some way.
+ auto input_layout = VideoFrameLayout::CreateMultiPlanar(
+ format, new_frame_size,
+ std::vector<VideoFrameLayout::Plane>(VideoFrame::NumPlanes(format)));
+ if (!input_layout) {
+ VLOGF(1) << "Invalid image processor input layout";
+ return false;
+ }
+
+ if (!CreateImageProcessor(*input_layout, *device_input_layout_,
+ visible_size_)) {
+ NOTIFY_ERROR(kPlatformFailureError);
+ return false;
+ }
+ if (image_processor_->input_layout().coded_size().width() !=
+ new_frame_size.width()) {
+ NOTIFY_ERROR(kPlatformFailureError);
+ return false;
+ }
+
+ return true;
+ }
+
+ // Ideally we would compare against |device_input_layout_->coded_size()|.
+ // However, the VEA asks the client to allocate |input_allocated_size_|, which
+ // may be larger than |device_input_layout_->coded_size()| when the planes
+ // carry extra data, as happens on MediaTek.
+ // Comparing against |input_allocated_size_| still works because the VEAClient
+ // within Chrome provides buffers whose frame size is |input_allocated_size_|.
+ // The VEAClient for ARC++ may provide a different frame size, but there
+ // |input_allocated_size_| is always equal to
+ // |device_input_layout_->coded_size()|.
+ if (new_frame_size != input_allocated_size_) {
+ VLOGF(2) << "Call S_FMT with a new size=" << new_frame_size.ToString()
+          << ", the previous size="
+          << device_input_layout_->coded_size().ToString()
+          << " (the size requested to client="
+          << input_allocated_size_.ToString() << ")";
+ if (!input_buffer_map_.empty()) {
+ VLOGF(1) << "Input frame size is changed during encoding";
+ NOTIFY_ERROR(kInvalidArgumentError);
+ return false;
+ }
+ if (!NegotiateInputFormat(device_input_layout_->format(), new_frame_size)) {
+ NOTIFY_ERROR(kPlatformFailureError);
+ return false;
+ }
+ if (device_input_layout_->coded_size().width() != new_frame_size.width()) {
+ NOTIFY_ERROR(kPlatformFailureError);
+ return false;
+ }
+ }
+
+ return true;
+}
+
void V4L2VideoEncodeAccelerator::InputImageProcessorTask() {
if (free_image_processor_output_buffer_indices_.empty())
return;
@@ -683,10 +770,6 @@ void V4L2VideoEncodeAccelerator::UseOutputBitstreamBufferTask(
Enqueue();
if (encoder_state_ == kInitialized) {
- // Finish setting up our OUTPUT queue. See: Initialize().
- // VIDIOC_REQBUFS on OUTPUT queue.
- if (!CreateInputBuffers())
- return;
if (!StartDevicePoll())
return;
encoder_state_ = kEncoding;
@@ -708,6 +791,8 @@ void V4L2VideoEncodeAccelerator::DestroyTask() {
encoder_thread_.task_runner()->BelongsToCurrentThread()) {
DestroyInputBuffers();
DestroyOutputBuffers();
+ input_queue_ = nullptr;
+ output_queue_ = nullptr;
image_processor_ = nullptr;
}
}
@@ -731,8 +816,9 @@ void V4L2VideoEncodeAccelerator::ServiceDeviceTask() {
return;
// Device can be polled as soon as either input or output buffers are queued.
- bool poll_device =
- (input_buffer_queued_count_ + output_buffer_queued_count_ > 0);
+ bool poll_device = (input_queue_->QueuedBuffersCount() +
+ output_queue_->QueuedBuffersCount() >
+ 0);
// ServiceDeviceTask() should only ever be scheduled from DevicePollTask(),
// so either:
@@ -747,12 +833,12 @@ void V4L2VideoEncodeAccelerator::ServiceDeviceTask() {
base::Unretained(this), poll_device));
DVLOGF(3) << encoder_input_queue_.size() << "] => DEVICE["
- << free_input_buffers_.size() << "+"
- << input_buffer_queued_count_ << "/"
+ << input_queue_->FreeBuffersCount() << "+"
+ << input_queue_->QueuedBuffersCount() << "/"
<< input_buffer_map_.size() << "->"
- << free_output_buffers_.size() << "+"
- << output_buffer_queued_count_ << "/"
- << output_buffer_map_.size() << "] => OUT["
+ << output_queue_->FreeBuffersCount() << "+"
+ << output_queue_->QueuedBuffersCount() << "/"
+ << output_queue_->AllocatedBuffersCount() << "] => OUT["
<< encoder_output_queue_.size() << "]";
}
@@ -760,12 +846,14 @@ void V4L2VideoEncodeAccelerator::Enqueue() {
DCHECK(encoder_thread_.task_runner()->BelongsToCurrentThread());
TRACE_EVENT0("media,gpu", "V4L2VEA::Enqueue");
- DVLOGF(4) << "free_input_buffers: " << free_input_buffers_.size()
+ DVLOGF(4) << "free_input_buffers: " << input_queue_->FreeBuffersCount()
<< "input_queue: " << encoder_input_queue_.size();
+ bool do_streamon = false;
// Enqueue all the inputs we can.
- const int old_inputs_queued = input_buffer_queued_count_;
- while (!encoder_input_queue_.empty() && !free_input_buffers_.empty()) {
+ const size_t old_inputs_queued = input_queue_->QueuedBuffersCount();
+ while (!encoder_input_queue_.empty() &&
+ input_queue_->FreeBuffersCount() > 0) {
// A null frame indicates a flush.
if (encoder_input_queue_.front().frame == nullptr) {
DVLOGF(3) << "All input frames needed to be flushed are enqueued.";
@@ -775,7 +863,7 @@ void V4L2VideoEncodeAccelerator::Enqueue() {
// to call V4L2_ENC_CMD_STOP to request a flush. This also means there is
// nothing left to process, so we can return flush success back to the
// client.
- if (!input_streamon_) {
+ if (!input_queue_->IsStreaming()) {
child_task_runner_->PostTask(
FROM_HERE, base::BindOnce(std::move(flush_callback_), true));
return;
@@ -795,36 +883,48 @@ void V4L2VideoEncodeAccelerator::Enqueue() {
if (!EnqueueInputRecord())
return;
}
- if (old_inputs_queued == 0 && input_buffer_queued_count_ != 0) {
+ if (old_inputs_queued == 0 && input_queue_->QueuedBuffersCount() != 0) {
// We just started up a previously empty queue.
// Queue state changed; signal interrupt.
if (!device_->SetDevicePollInterrupt())
return;
- // Start VIDIOC_STREAMON if we haven't yet.
- if (!input_streamon_) {
- __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
- IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
- input_streamon_ = true;
- }
+ // Remember to call VIDIOC_STREAMON later if we are not streaming yet.
+ do_streamon = !input_queue_->IsStreaming();
+ }
+
+ if (!input_queue_->IsStreaming() && !do_streamon) {
+ // We don't have to enqueue any buffers in the output queue until buffers are
+ // enqueued in the input queue. This makes it possible to call S_FMT in
+ // Encode() on the first frame.
+ return;
}
// Enqueue all the outputs we can.
- const int old_outputs_queued = output_buffer_queued_count_;
- while (!free_output_buffers_.empty() && !encoder_output_queue_.empty()) {
+ const size_t old_outputs_queued = output_queue_->QueuedBuffersCount();
+ while (output_queue_->FreeBuffersCount() > 0 &&
+ !encoder_output_queue_.empty()) {
if (!EnqueueOutputRecord())
return;
}
- if (old_outputs_queued == 0 && output_buffer_queued_count_ != 0) {
+ if (old_outputs_queued == 0 && output_queue_->QueuedBuffersCount() != 0) {
// We just started up a previously empty queue.
// Queue state changed; signal interrupt.
if (!device_->SetDevicePollInterrupt())
return;
- // Start VIDIOC_STREAMON if we haven't yet.
- if (!output_streamon_) {
- __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
- IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
- output_streamon_ = true;
- }
+ }
+
+ // STREAMON the CAPTURE queue first and then the OUTPUT queue.
+ // This is a workaround for a Tegra driver bug: STREAMON on the CAPTURE queue
+ // never returns (i.e. blocks |encoder_thread_| forever) if it is issued after
+ // STREAMON on the OUTPUT queue.
+ // Once nyan_kitty, which uses the Tegra driver, reaches EOL,
+ // crrev.com/c/1753982 should be reverted.
+ if (do_streamon) {
+ DCHECK(!output_queue_->IsStreaming() && !input_queue_->IsStreaming());
+ // When VIDIOC_STREAMON can be executed in OUTPUT queue, it is fine to call
+ // STREAMON in CAPTURE queue.
+ output_queue_->Streamon();
+ input_queue_->Streamon();
}
}
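
Stripped of the surrounding enqueue logic, the ordering this workaround enforces is: with both queues known to be off, start the CAPTURE (bitstream) queue first and the OUTPUT (raw frame) queue second, so the Tegra driver never sees a CAPTURE STREAMON issued after an OUTPUT STREAMON. A minimal sketch using the V4L2Queue calls from the hunk above (error handling omitted):

void StreamOnInWorkaroundOrder(V4L2Queue* capture_queue,   // output_queue_
                               V4L2Queue* output_queue) {  // input_queue_
  DCHECK(!capture_queue->IsStreaming() && !output_queue->IsStreaming());
  capture_queue->Streamon();  // CAPTURE first: safe even on the Tegra driver.
  output_queue->Streamon();   // OUTPUT second.
}
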
@@ -835,85 +935,67 @@ void V4L2VideoEncodeAccelerator::Dequeue() {
// Dequeue completed input (VIDEO_OUTPUT) buffers, and recycle to the free
// list.
- struct v4l2_buffer dqbuf;
- struct v4l2_plane planes[VIDEO_MAX_PLANES];
- while (input_buffer_queued_count_ > 0) {
- DVLOGF(4) << "inputs queued: " << input_buffer_queued_count_;
- DCHECK(input_streamon_);
- memset(&dqbuf, 0, sizeof(dqbuf));
- memset(&planes, 0, sizeof(planes));
- dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
- dqbuf.memory = input_memory_type_;
- dqbuf.m.planes = planes;
- dqbuf.length = V4L2Device::GetNumPlanesOfV4L2PixFmt(
- V4L2Device::VideoFrameLayoutToV4L2PixFmt(*device_input_layout_));
- if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
- if (errno == EAGAIN) {
- // EAGAIN if we're just out of buffers to dequeue.
- break;
- }
- VPLOGF(1) << "ioctl() failed: VIDIOC_DQBUF";
+ while (input_queue_->QueuedBuffersCount() > 0) {
+ DVLOGF(4) << "inputs queued: " << input_queue_->QueuedBuffersCount();
+ DCHECK(input_queue_->IsStreaming());
+
+ auto ret = input_queue_->DequeueBuffer();
+ if (!ret.first) {
NOTIFY_ERROR(kPlatformFailureError);
return;
}
- InputRecord& input_record = input_buffer_map_[dqbuf.index];
- DCHECK(input_record.at_device);
- input_record.at_device = false;
+ if (!ret.second) {
+ // We're just out of buffers to dequeue.
+ break;
+ }
- input_record.frame = NULL;
+ InputRecord& input_record = input_buffer_map_[ret.second->BufferId()];
+ input_record.frame = nullptr;
if (input_record.ip_output_buffer_index)
ReuseImageProcessorOutputBuffer(*input_record.ip_output_buffer_index);
- free_input_buffers_.push_back(dqbuf.index);
- input_buffer_queued_count_--;
}
// Dequeue completed output (VIDEO_CAPTURE) buffers, and recycle to the
// free list. Notify the client that an output buffer is complete.
- while (output_buffer_queued_count_ > 0) {
- DCHECK(output_streamon_);
- memset(&dqbuf, 0, sizeof(dqbuf));
- memset(planes, 0, sizeof(planes));
- dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
- dqbuf.memory = V4L2_MEMORY_MMAP;
- dqbuf.m.planes = planes;
- dqbuf.length = 1;
- if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
- if (errno == EAGAIN) {
- // EAGAIN if we're just out of buffers to dequeue.
- break;
- }
- VPLOGF(1) << "ioctl() failed: VIDIOC_DQBUF";
+ while (output_queue_->QueuedBuffersCount() > 0) {
+ DCHECK(output_queue_->IsStreaming());
+
+ auto ret = output_queue_->DequeueBuffer();
+ if (!ret.first) {
NOTIFY_ERROR(kPlatformFailureError);
return;
}
- const bool key_frame = ((dqbuf.flags & V4L2_BUF_FLAG_KEYFRAME) != 0);
- OutputRecord& output_record = output_buffer_map_[dqbuf.index];
- DCHECK(output_record.at_device);
- DCHECK(output_record.buffer_ref);
+ if (!ret.second) {
+ // We're just out of buffers to dequeue.
+ break;
+ }
+ V4L2ReadableBufferRef output_buf = std::move(ret.second);
+ OutputRecord& output_record = output_buffer_map_[output_buf->BufferId()];
+ DCHECK(output_record.buffer_ref);
int32_t bitstream_buffer_id = output_record.buffer_ref->id;
size_t output_data_size = CopyIntoOutputBuffer(
- static_cast<uint8_t*>(output_record.address) +
- dqbuf.m.planes[0].data_offset,
- base::checked_cast<size_t>(dqbuf.m.planes[0].bytesused -
- dqbuf.m.planes[0].data_offset),
+ static_cast<const uint8_t*>(output_buf->GetPlaneMapping(0)) +
+ output_buf->GetPlaneDataOffset(0),
+ base::checked_cast<size_t>(output_buf->GetPlaneBytesUsed(0) -
+ output_buf->GetPlaneDataOffset(0)),
std::move(output_record.buffer_ref));
- DVLOGF(4) << "returning "
- << "bitstream_buffer_id=" << bitstream_buffer_id
- << ", size=" << output_data_size << ", key_frame=" << key_frame;
-
+ DVLOGF(4) << "returning bitstream_buffer_id=" << bitstream_buffer_id
+ << ", size=" << output_data_size
+ << ", key_frame=" << output_buf->IsKeyframe();
child_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&Client::BitstreamBufferReady, client_,
bitstream_buffer_id,
BitstreamBufferMetadata(
- output_data_size, key_frame,
+ output_data_size, output_buf->IsKeyframe(),
base::TimeDelta::FromMicroseconds(
- dqbuf.timestamp.tv_usec +
- dqbuf.timestamp.tv_sec *
+ output_buf->GetTimeStamp().tv_usec +
+ output_buf->GetTimeStamp().tv_sec *
base::Time::kMicrosecondsPerSecond))));
- if ((encoder_state_ == kFlushing) && (dqbuf.flags & V4L2_BUF_FLAG_LAST)) {
+
+ if ((encoder_state_ == kFlushing) && output_buf->IsLast()) {
// Notify client that flush has finished successfully. The flush callback
// should be called after notifying the last buffer is ready.
DVLOGF(3) << "Flush completed. Start the encoder again.";
@@ -925,17 +1007,13 @@ void V4L2VideoEncodeAccelerator::Dequeue() {
cmd.cmd = V4L2_ENC_CMD_START;
IOCTL_OR_ERROR_RETURN(VIDIOC_ENCODER_CMD, &cmd);
}
-
- output_record.at_device = false;
- free_output_buffers_.push_back(dqbuf.index);
- output_buffer_queued_count_--;
}
}
bool V4L2VideoEncodeAccelerator::EnqueueInputRecord() {
DVLOGF(4);
DCHECK(encoder_thread_.task_runner()->BelongsToCurrentThread());
- DCHECK(!free_input_buffers_.empty());
+ DCHECK_GT(input_queue_->FreeBuffersCount(), 0u);
DCHECK(!encoder_input_queue_.empty());
TRACE_EVENT0("media,gpu", "V4L2VEA::EnqueueInputRecord");
@@ -954,106 +1032,106 @@ bool V4L2VideoEncodeAccelerator::EnqueueInputRecord() {
}
scoped_refptr<VideoFrame> frame = frame_info.frame;
- const int index = free_input_buffers_.back();
- InputRecord& input_record = input_buffer_map_[index];
- DCHECK(!input_record.at_device);
- struct v4l2_buffer qbuf{};
- struct v4l2_plane qbuf_planes[VIDEO_MAX_PLANES] = {};
- qbuf.index = index;
- qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
- qbuf.m.planes = qbuf_planes;
- qbuf.timestamp.tv_sec = static_cast<time_t>(frame->timestamp().InSeconds());
- qbuf.timestamp.tv_usec =
+
+ V4L2WritableBufferRef input_buf = input_queue_->GetFreeBuffer();
+ DCHECK(input_buf.IsValid());
+ size_t buffer_id = input_buf.BufferId();
+
+ struct timeval timestamp;
+ timestamp.tv_sec = static_cast<time_t>(frame->timestamp().InSeconds());
+ timestamp.tv_usec =
frame->timestamp().InMicroseconds() -
frame->timestamp().InSeconds() * base::Time::kMicrosecondsPerSecond;
+ input_buf.SetTimeStamp(timestamp);
+
DCHECK_EQ(device_input_layout_->format(), frame->format());
size_t num_planes = V4L2Device::GetNumPlanesOfV4L2PixFmt(
V4L2Device::VideoFrameLayoutToV4L2PixFmt(*device_input_layout_));
for (size_t i = 0; i < num_planes; ++i) {
- // Single-buffer input format may have multiple color planes, so bytesused
+ // Single-buffer input format may have multiple color planes, so bytesused
// of the single buffer should be sum of each color planes' size.
+ size_t bytesused = 0;
if (num_planes == 1) {
- qbuf.m.planes[i].bytesused = VideoFrame::AllocationSize(
+ bytesused = VideoFrame::AllocationSize(
frame->format(), device_input_layout_->coded_size());
} else {
- qbuf.m.planes[i].bytesused = base::checked_cast<__u32>(
+ bytesused = base::checked_cast<size_t>(
VideoFrame::PlaneSize(frame->format(), i,
device_input_layout_->coded_size())
.GetArea());
}
- switch (input_memory_type_) {
+ switch (input_buf.Memory()) {
case V4L2_MEMORY_USERPTR:
// Use buffer_size VideoEncodeAccelerator HW requested by S_FMT.
- qbuf.m.planes[i].length = device_input_layout_->planes()[i].size;
- qbuf.m.planes[i].m.userptr =
- reinterpret_cast<unsigned long>(frame->data(i));
- DCHECK(qbuf.m.planes[i].m.userptr);
+ input_buf.SetPlaneSize(i, device_input_layout_->planes()[i].size);
break;
case V4L2_MEMORY_DMABUF: {
- const auto& fds = frame->DmabufFds();
const auto& planes = frame->layout().planes();
- qbuf.m.planes[i].m.fd =
- (i < fds.size()) ? fds[i].get() : fds.back().get();
// TODO(crbug.com/901264): The way to pass an offset within a DMA-buf is
// not defined in V4L2 specification, so we abuse data_offset for now.
// Fix it when we have the right interface, including any necessary
// validation and potential alignment
- qbuf.m.planes[i].data_offset = planes[i].offset;
- qbuf.m.planes[i].bytesused += qbuf.m.planes[i].data_offset;
+ input_buf.SetPlaneDataOffset(i, planes[i].offset);
+ bytesused += planes[i].offset;
// Workaround: filling length should not be needed. This is a bug of
// videobuf2 library.
- qbuf.m.planes[i].length = device_input_layout_->planes()[i].size +
- qbuf.m.planes[i].data_offset;
- DCHECK_NE(qbuf.m.planes[i].m.fd, -1);
+ input_buf.SetPlaneSize(
+ i, device_input_layout_->planes()[i].size + planes[i].offset);
break;
}
default:
NOTREACHED();
return false;
}
+
+ input_buf.SetPlaneBytesUsed(i, bytesused);
}
- qbuf.memory = input_memory_type_;
- qbuf.length = num_planes;
+ switch (input_buf.Memory()) {
+ case V4L2_MEMORY_USERPTR: {
+ std::vector<void*> user_ptrs;
+ for (size_t i = 0; i < num_planes; ++i)
+ user_ptrs.push_back(frame->data(i));
+ std::move(input_buf).QueueUserPtr(std::move(user_ptrs));
+ break;
+ }
+ case V4L2_MEMORY_DMABUF: {
+ std::move(input_buf).QueueDMABuf(frame->DmabufFds());
+ break;
+ }
+ default:
+ NOTREACHED() << "Unknown input memory type: "
+ << static_cast<int>(input_buf.Memory());
+ return false;
+ }
- DVLOGF(4) << "Calling VIDIOC_QBUF: " << V4L2Device::V4L2BufferToString(qbuf);
- IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
- input_record.at_device = true;
+ InputRecord& input_record = input_buffer_map_[buffer_id];
input_record.frame = frame;
input_record.ip_output_buffer_index = frame_info.ip_output_buffer_index;
encoder_input_queue_.pop();
- free_input_buffers_.pop_back();
- input_buffer_queued_count_++;
return true;
}
bool V4L2VideoEncodeAccelerator::EnqueueOutputRecord() {
DVLOGF(4);
DCHECK(encoder_thread_.task_runner()->BelongsToCurrentThread());
- DCHECK(!free_output_buffers_.empty());
+ DCHECK_GT(output_queue_->FreeBuffersCount(), 0u);
DCHECK(!encoder_output_queue_.empty());
TRACE_EVENT0("media,gpu", "V4L2VEA::EnqueueOutputRecord");
// Enqueue an output (VIDEO_CAPTURE) buffer.
- const int index = free_output_buffers_.back();
- OutputRecord& output_record = output_buffer_map_[index];
- DCHECK(!output_record.at_device);
+ V4L2WritableBufferRef output_buf = output_queue_->GetFreeBuffer();
+ DCHECK(output_buf.IsValid());
+ OutputRecord& output_record = output_buffer_map_[output_buf.BufferId()];
DCHECK(!output_record.buffer_ref);
- struct v4l2_buffer qbuf{};
- struct v4l2_plane qbuf_planes[1] = {};
- qbuf.index = index;
- qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
- qbuf.memory = V4L2_MEMORY_MMAP;
- qbuf.m.planes = qbuf_planes;
- qbuf.length = 1;
- IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
- output_record.at_device = true;
+ if (!std::move(output_buf).QueueMMap()) {
+ VLOGF(1) << "Failed to QueueMMap.";
+ return false;
+ }
output_record.buffer_ref = std::move(encoder_output_queue_.back());
encoder_output_queue_.pop_back();
- free_output_buffers_.pop_back();
- output_buffer_queued_count_++;
return true;
}
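
The timestamp handling above is a round trip through struct timeval: EnqueueInputRecord() splits the frame's base::TimeDelta into whole seconds plus the microsecond remainder, and Dequeue() reassembles it as tv_usec + tv_sec * 1,000,000. A small worked check of that arithmetic with an assumed 1.25 s timestamp:

#include <sys/time.h>

#include "base/logging.h"
#include "base/time/time.h"

void TimevalRoundTripExample() {
  // Assumed example value: 1.25 s == 1,250,000 us.
  const base::TimeDelta ts = base::TimeDelta::FromMicroseconds(1250000);

  struct timeval tv;
  tv.tv_sec = static_cast<time_t>(ts.InSeconds());  // 1
  tv.tv_usec = ts.InMicroseconds() -
               ts.InSeconds() * base::Time::kMicrosecondsPerSecond;  // 250000

  // Reverse direction, as in Dequeue():
  const base::TimeDelta back = base::TimeDelta::FromMicroseconds(
      tv.tv_usec + tv.tv_sec * base::Time::kMicrosecondsPerSecond);
  DCHECK_EQ(back, ts);  // Both represent 1,250,000 microseconds.
}
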
@@ -1088,38 +1166,25 @@ bool V4L2VideoEncodeAccelerator::StopDevicePoll() {
if (!device_->ClearDevicePollInterrupt())
return false;
- if (input_streamon_) {
- __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
- IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
- }
- input_streamon_ = false;
+ // The Tegra driver fails if STREAMOFF is issued while the queue is not
+ // streaming, so we check IsStreaming() before calling Streamoff().
+ if (input_queue_ && input_queue_->IsStreaming() && !input_queue_->Streamoff())
+ return false;
- if (output_streamon_) {
- __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
- IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
- }
- output_streamon_ = false;
+ if (output_queue_ && output_queue_->IsStreaming() &&
+ !output_queue_->Streamoff())
+ return false;
// Reset all our accounting info.
while (!encoder_input_queue_.empty())
encoder_input_queue_.pop();
- free_input_buffers_.clear();
for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
InputRecord& input_record = input_buffer_map_[i];
- input_record.at_device = false;
- input_record.frame = NULL;
- free_input_buffers_.push_back(i);
+ input_record.frame = nullptr;
}
- input_buffer_queued_count_ = 0;
- free_output_buffers_.clear();
- for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
- OutputRecord& output_record = output_buffer_map_[i];
- output_record.at_device = false;
+ for (auto& output_record : output_buffer_map_)
output_record.buffer_ref.reset();
- free_output_buffers_.push_back(i);
- }
- output_buffer_queued_count_ = 0;
encoder_output_queue_.clear();
@@ -1214,8 +1279,8 @@ void V4L2VideoEncodeAccelerator::RequestEncodingParametersChangeTask(
bool V4L2VideoEncodeAccelerator::SetOutputFormat(
VideoCodecProfile output_profile) {
DCHECK(encoder_thread_.task_runner()->BelongsToCurrentThread());
- DCHECK(!input_streamon_);
- DCHECK(!output_streamon_);
+ DCHECK(!input_queue_->IsStreaming());
+ DCHECK(!output_queue_->IsStreaming());
DCHECK(!visible_size_.IsEmpty());
output_buffer_byte_size_ = GetEncodeBitstreamBufferSize(visible_size_);
@@ -1240,15 +1305,19 @@ bool V4L2VideoEncodeAccelerator::SetOutputFormat(
}
bool V4L2VideoEncodeAccelerator::NegotiateInputFormat(
- VideoPixelFormat input_format) {
+ VideoPixelFormat input_format,
+ const gfx::Size& size) {
VLOGF(2);
DCHECK(encoder_thread_.task_runner()->BelongsToCurrentThread());
- DCHECK(!input_streamon_);
- DCHECK(!output_streamon_);
+ DCHECK(!input_queue_->IsStreaming());
+ DCHECK(!output_queue_->IsStreaming());
// First see if the device can use the provided format directly.
- std::vector<uint32_t> pix_fmt_candidates = {
- V4L2Device::VideoPixelFormatToV4L2PixFmt(input_format, false)};
+ std::vector<uint32_t> pix_fmt_candidates;
+ uint32_t pix_fmt =
+ V4L2Device::VideoPixelFormatToV4L2PixFmt(input_format, false);
+ if (pix_fmt)
+ pix_fmt_candidates.push_back(pix_fmt);
// Second try preferred input formats for both single-planar and
// multi-planar.
for (auto preferred_format :
@@ -1257,30 +1326,29 @@ bool V4L2VideoEncodeAccelerator::NegotiateInputFormat(
}
for (const auto pix_fmt : pix_fmt_candidates) {
- auto trying_format = V4L2Device::V4L2PixFmtToVideoPixelFormat(pix_fmt);
- DCHECK_NE(trying_format, PIXEL_FORMAT_UNKNOWN);
- size_t planes_count = VideoFrame::NumPlanes(trying_format);
+ size_t planes_count = V4L2Device::GetNumPlanesOfV4L2PixFmt(pix_fmt);
+ DCHECK_GT(planes_count, 0u);
DCHECK_LE(planes_count, static_cast<size_t>(VIDEO_MAX_PLANES));
- VLOGF(2) << "Trying S_FMT with " << FourccToString(pix_fmt) << " ("
- << trying_format << ").";
+ DVLOGF(3) << "Trying S_FMT with " << FourccToString(pix_fmt);
+
struct v4l2_format format{};
format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
- format.fmt.pix_mp.width = visible_size_.width();
- format.fmt.pix_mp.height = visible_size_.height();
+ format.fmt.pix_mp.width = size.width();
+ format.fmt.pix_mp.height = size.height();
format.fmt.pix_mp.pixelformat = pix_fmt;
format.fmt.pix_mp.num_planes = planes_count;
if (device_->Ioctl(VIDIOC_S_FMT, &format) == 0 &&
format.fmt.pix_mp.pixelformat == pix_fmt) {
- VLOGF(2) << "Success: S_FMT with " << FourccToString(pix_fmt);
+ DVLOGF(3) << "Success: S_FMT with " << FourccToString(pix_fmt);
device_input_layout_ = V4L2Device::V4L2FormatToVideoFrameLayout(format);
if (!device_input_layout_) {
VLOGF(1) << "Invalid device_input_layout_";
return false;
}
- VLOG(2) << "Negotiated device_input_layout_: " << *device_input_layout_;
+ DVLOG(3) << "Negotiated device_input_layout_: " << *device_input_layout_;
if (!gfx::Rect(device_input_layout_->coded_size())
- .Contains(gfx::Rect(visible_size_))) {
- VLOGF(1) << "Input size " << visible_size_.ToString()
+ .Contains(gfx::Rect(size))) {
+ VLOGF(1) << "Input size " << size.ToString()
<< " exceeds encoder capability. Size encoder can handle: "
<< device_input_layout_->coded_size().ToString();
return false;
@@ -1299,13 +1367,13 @@ bool V4L2VideoEncodeAccelerator::SetFormats(VideoPixelFormat input_format,
VideoCodecProfile output_profile) {
VLOGF(2);
DCHECK(encoder_thread_.task_runner()->BelongsToCurrentThread());
- DCHECK(!input_streamon_);
- DCHECK(!output_streamon_);
+ DCHECK(!input_queue_->IsStreaming());
+ DCHECK(!output_queue_->IsStreaming());
if (!SetOutputFormat(output_profile))
return false;
- if (!NegotiateInputFormat(input_format))
+ if (!NegotiateInputFormat(input_format, visible_size_))
return false;
struct v4l2_rect visible_rect;
@@ -1424,13 +1492,42 @@ bool V4L2VideoEncodeAccelerator::InitControls(const Config& config) {
ctrls.push_back(ctrl);
// Set H.264 output level from config. Use Level 4.0 as fallback default.
- int32_t level_value = V4L2Device::H264LevelIdcToV4L2H264Level(
- config.h264_output_level.value_or(
- VideoEncodeAccelerator::kDefaultH264Level));
- if (level_value < 0) {
- NOTIFY_ERROR(kInvalidArgumentError);
- return false;
+ uint8_t h264_level =
+ config.h264_output_level.value_or(H264SPS::kLevelIDC4p0);
+ constexpr size_t kH264MacroblockSizeInPixels = 16;
+ const uint32_t framerate = config.initial_framerate.value_or(
+ VideoEncodeAccelerator::kDefaultFramerate);
+ const uint32_t mb_width =
+ base::bits::Align(config.input_visible_size.width(),
+ kH264MacroblockSizeInPixels) /
+ kH264MacroblockSizeInPixels;
+ const uint32_t mb_height =
+ base::bits::Align(config.input_visible_size.height(),
+ kH264MacroblockSizeInPixels) /
+ kH264MacroblockSizeInPixels;
+ const uint32_t framesize_in_mbs = mb_width * mb_height;
+
+ // Check whether the h264 level is valid.
+ if (!CheckH264LevelLimits(config.output_profile, h264_level,
+ config.initial_bitrate, framerate,
+ framesize_in_mbs)) {
+ base::Optional<uint8_t> valid_level =
+ FindValidH264Level(config.output_profile, config.initial_bitrate,
+ framerate, framesize_in_mbs);
+ if (!valid_level) {
+ VLOGF(1) << "Could not find a valid h264 level for"
+ << " profile=" << config.output_profile
+ << " bitrate=" << config.initial_bitrate
+ << " framerate=" << framerate
+ << " size=" << config.input_visible_size.ToString();
+ NOTIFY_ERROR(kInvalidArgumentError);
+ return false;
+ }
+
+ h264_level = *valid_level;
}
+
+ int32_t level_value = V4L2Device::H264LevelIdcToV4L2H264Level(h264_level);
memset(&ctrl, 0, sizeof(ctrl));
ctrl.id = V4L2_CID_MPEG_VIDEO_H264_LEVEL;
ctrl.value = level_value;
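
The level validation above works in macroblock units: the visible size is rounded up to 16-pixel macroblocks, and the per-frame macroblock count, framerate and bitrate are checked against the limits of the requested level, falling back to FindValidH264Level() only when they do not fit. A worked example with an assumed 1920x1080 input at 30 fps:

#include <cstdint>

#include "base/bits.h"

// Assumed example: 1920x1080 visible size, 30 fps.
constexpr uint32_t kMB = 16;  // kH264MacroblockSizeInPixels
const uint32_t mb_width = base::bits::Align(1920, kMB) / kMB;   // 1920/16 = 120
const uint32_t mb_height = base::bits::Align(1080, kMB) / kMB;  // 1088/16 = 68
const uint32_t framesize_in_mbs = mb_width * mb_height;         // 8160
// 8160 MBs per frame at 30 fps is 244,800 MBs per second. Both figures sit
// under the H.264 Level 4.0 limits (8192 MBs per frame, 245,760 MBs per
// second), so for a moderate bitrate the default level passes
// CheckH264LevelLimits() and no fallback level is needed.
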
@@ -1483,103 +1580,57 @@ bool V4L2VideoEncodeAccelerator::InitControls(const Config& config) {
bool V4L2VideoEncodeAccelerator::CreateInputBuffers() {
VLOGF(2);
DCHECK(encoder_thread_.task_runner()->BelongsToCurrentThread());
- DCHECK(!input_streamon_);
+ DCHECK(!input_queue_->IsStreaming());
- struct v4l2_requestbuffers reqbufs{};
- // Driver will modify to the appropriate number of buffers.
- reqbufs.count = kInputBufferCount;
- reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
- reqbufs.memory = input_memory_type_;
- IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
+ if (input_queue_->AllocateBuffers(kInputBufferCount, input_memory_type_) <
+ kInputBufferCount) {
+ VLOGF(1) << "Failed to allocate V4L2 input buffers.";
+ return false;
+ }
DCHECK(input_buffer_map_.empty());
- input_buffer_map_.resize(reqbufs.count);
- for (size_t i = 0; i < input_buffer_map_.size(); ++i)
- free_input_buffers_.push_back(i);
-
+ input_buffer_map_.resize(input_queue_->AllocatedBuffersCount());
return true;
}
bool V4L2VideoEncodeAccelerator::CreateOutputBuffers() {
VLOGF(2);
DCHECK(encoder_thread_.task_runner()->BelongsToCurrentThread());
- DCHECK(!output_streamon_);
+ DCHECK(!output_queue_->IsStreaming());
- struct v4l2_requestbuffers reqbufs{};
- reqbufs.count = kOutputBufferCount;
- reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
- reqbufs.memory = V4L2_MEMORY_MMAP;
- IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
-
- DCHECK(output_buffer_map_.empty());
- output_buffer_map_ = std::vector<OutputRecord>(reqbufs.count);
- for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
- struct v4l2_plane planes[1] = {};
- struct v4l2_buffer buffer{};
- buffer.index = i;
- buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
- buffer.memory = V4L2_MEMORY_MMAP;
- buffer.m.planes = planes;
- buffer.length = base::size(planes);
- IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
- void* address = device_->Mmap(NULL,
- buffer.m.planes[0].length,
- PROT_READ | PROT_WRITE,
- MAP_SHARED,
- buffer.m.planes[0].m.mem_offset);
- if (address == MAP_FAILED) {
- VPLOGF(1) << "mmap() failed";
- return false;
- }
- output_buffer_map_[i].address = address;
- output_buffer_map_[i].length = buffer.m.planes[0].length;
- free_output_buffers_.push_back(i);
+ if (output_queue_->AllocateBuffers(kOutputBufferCount, V4L2_MEMORY_MMAP) <
+ kOutputBufferCount) {
+ VLOGF(1) << "Failed to allocate V4L2 output buffers.";
+ return false;
}
+ DCHECK(output_buffer_map_.empty());
+ output_buffer_map_ =
+ std::vector<OutputRecord>(output_queue_->AllocatedBuffersCount());
return true;
}
void V4L2VideoEncodeAccelerator::DestroyInputBuffers() {
VLOGF(2);
DCHECK(encoder_thread_.task_runner()->BelongsToCurrentThread());
- DCHECK(!input_streamon_);
-
- free_input_buffers_.clear();
- if (input_buffer_map_.empty())
+ if (!input_queue_ || input_queue_->AllocatedBuffersCount() == 0)
return;
- struct v4l2_requestbuffers reqbufs{};
- reqbufs.count = 0;
- reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
- reqbufs.memory = input_memory_type_;
- IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
-
+ DCHECK(!input_queue_->IsStreaming());
+ input_queue_->DeallocateBuffers();
input_buffer_map_.clear();
}
void V4L2VideoEncodeAccelerator::DestroyOutputBuffers() {
VLOGF(2);
DCHECK(encoder_thread_.task_runner()->BelongsToCurrentThread());
- DCHECK(!output_streamon_);
-
- free_output_buffers_.clear();
- if (output_buffer_map_.empty())
+ if (!output_queue_ || output_queue_->AllocatedBuffersCount() == 0)
return;
- for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
- if (output_buffer_map_[i].address != NULL)
- device_->Munmap(output_buffer_map_[i].address,
- output_buffer_map_[i].length);
- }
-
- struct v4l2_requestbuffers reqbufs{};
- reqbufs.count = 0;
- reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
- reqbufs.memory = V4L2_MEMORY_MMAP;
- IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);
-
+ DCHECK(!output_queue_->IsStreaming());
+ output_queue_->DeallocateBuffers();
output_buffer_map_.clear();
}
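
Taken together, these hunks replace the hand-rolled v4l2_buffer bookkeeping (free lists, streamon flags, queued-buffer counters) with the V4L2Queue interface. Condensed to the calls that appear in this file, the queue lifecycle the encoder now follows is roughly the sketch below (error handling omitted):

// Sketch of the V4L2Queue usage pattern from the hunks above.
void QueueLifecycleSketch(V4L2Device* device) {
  // Setup (InitializeTask / CreateOutputBuffers):
  scoped_refptr<V4L2Queue> queue =
      device->GetQueue(V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
  queue->AllocateBuffers(/*count=*/8, V4L2_MEMORY_MMAP);

  // Enqueue side (Enqueue / EnqueueOutputRecord):
  V4L2WritableBufferRef buf = queue->GetFreeBuffer();
  if (buf.IsValid())
    std::move(buf).QueueMMap();  // or QueueDMABuf(fds) / QueueUserPtr(ptrs)
  if (!queue->IsStreaming())
    queue->Streamon();

  // Dequeue side (Dequeue):
  while (queue->QueuedBuffersCount() > 0) {
    auto ret = queue->DequeueBuffer();  // {success, V4L2ReadableBufferRef}
    if (!ret.first || !ret.second)
      break;  // Hard error, or simply nothing left to dequeue.
    // ret.second->BufferId(), GetPlaneMapping(0), GetPlaneBytesUsed(0), ...
  }

  // Teardown (StopDevicePoll / DestroyOutputBuffers):
  if (queue->IsStreaming())
    queue->Streamoff();
  queue->DeallocateBuffers();
}
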
diff --git a/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.h b/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.h
index f52ba96890b..54928920b35 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.h
+++ b/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.h
@@ -68,7 +68,6 @@ class MEDIA_GPU_EXPORT V4L2VideoEncodeAccelerator
InputRecord();
InputRecord(const InputRecord&);
~InputRecord();
- bool at_device;
scoped_refptr<VideoFrame> frame;
// This is valid only if image processor is used. The buffer associated with
@@ -80,10 +79,8 @@ class MEDIA_GPU_EXPORT V4L2VideoEncodeAccelerator
struct OutputRecord {
OutputRecord();
~OutputRecord();
- bool at_device;
+
std::unique_ptr<BitstreamBufferRef> buffer_ref;
- void* address;
- size_t length;
};
// Store all the information of input frame passed to Encode().
@@ -210,10 +207,16 @@ class MEDIA_GPU_EXPORT V4L2VideoEncodeAccelerator
bool SetFormats(VideoPixelFormat input_format,
VideoCodecProfile output_profile);
+ // Reconfigure format of input buffers and image processor if frame size
+ // given by client is different from one set in input buffers.
+ bool ReconfigureFormatIfNeeded(VideoPixelFormat format,
+ const gfx::Size& new_frame_size);
+
// Try to set up the device to the input format we were Initialized() with,
// or if the device doesn't support it, use one it can support, so that we
// can later instantiate an ImageProcessor to convert to it.
- bool NegotiateInputFormat(VideoPixelFormat input_format);
+ bool NegotiateInputFormat(VideoPixelFormat input_format,
+ const gfx::Size& frame_size);
// Set up the device to the output format requested in Initialize().
bool SetOutputFormat(VideoCodecProfile output_profile);
@@ -299,22 +302,13 @@ class MEDIA_GPU_EXPORT V4L2VideoEncodeAccelerator
// Encoder device.
scoped_refptr<V4L2Device> device_;
- // Input queue state.
- bool input_streamon_;
- // Input buffers enqueued to device.
- int input_buffer_queued_count_;
- // Input buffers ready to use; LIFO since we don't care about ordering.
- std::vector<int> free_input_buffers_;
// Mapping of int index to input buffer record.
std::vector<InputRecord> input_buffer_map_;
v4l2_memory input_memory_type_;
- // Output queue state.
- bool output_streamon_;
- // Output buffers enqueued to device.
- int output_buffer_queued_count_;
- // Output buffers ready to use; LIFO since we don't care about ordering.
- std::vector<int> free_output_buffers_;
+ scoped_refptr<V4L2Queue> input_queue_;
+ scoped_refptr<V4L2Queue> output_queue_;
+
// Mapping of int index to output buffer record.
std::vector<OutputRecord> output_buffer_map_;
diff --git a/chromium/media/gpu/v4l2/v4l2_vp8_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_vp8_accelerator.cc
index 4cf4b3ee67d..8935337608e 100644
--- a/chromium/media/gpu/v4l2/v4l2_vp8_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_vp8_accelerator.cc
@@ -4,9 +4,11 @@
#include "media/gpu/v4l2/v4l2_vp8_accelerator.h"
-#include <type_traits>
-
+#define __LINUX_MEDIA_VP8_CTRLS_LEGACY_H
#include <linux/videodev2.h>
+#include <linux/media/vp8-ctrls.h>
+
+#include <type_traits>
#include "base/logging.h"
#include "base/numerics/safe_conversions.h"
@@ -21,18 +23,24 @@
namespace media {
namespace {
-void FillV4L2SegmentationHeader(const Vp8SegmentationHeader& vp8_sgmnt_hdr,
- struct v4l2_vp8_sgmnt_hdr* v4l2_sgmnt_hdr) {
+void FillV4L2SegmentationHeader(
+ const Vp8SegmentationHeader& vp8_sgmnt_hdr,
+ struct v4l2_vp8_segment_header* v4l2_sgmnt_hdr) {
#define SET_V4L2_SGMNT_HDR_FLAG_IF(cond, flag) \
v4l2_sgmnt_hdr->flags |= ((vp8_sgmnt_hdr.cond) ? (flag) : 0)
SET_V4L2_SGMNT_HDR_FLAG_IF(segmentation_enabled,
- V4L2_VP8_SEGMNT_HDR_FLAG_ENABLED);
+ V4L2_VP8_SEGMENT_HEADER_FLAG_ENABLED);
SET_V4L2_SGMNT_HDR_FLAG_IF(update_mb_segmentation_map,
- V4L2_VP8_SEGMNT_HDR_FLAG_UPDATE_MAP);
+ V4L2_VP8_SEGMENT_HEADER_FLAG_UPDATE_MAP);
SET_V4L2_SGMNT_HDR_FLAG_IF(update_segment_feature_data,
- V4L2_VP8_SEGMNT_HDR_FLAG_UPDATE_FEATURE_DATA);
-#undef SET_V4L2_SPARM_FLAG_IF
- v4l2_sgmnt_hdr->segment_feature_mode = vp8_sgmnt_hdr.segment_feature_mode;
+ V4L2_VP8_SEGMENT_HEADER_FLAG_UPDATE_FEATURE_DATA);
+ // TODO not sure about this one?
+ SET_V4L2_SGMNT_HDR_FLAG_IF(
+ segment_feature_mode == Vp8SegmentationHeader::FEATURE_MODE_DELTA,
+ V4L2_VP8_SEGMENT_HEADER_FLAG_DELTA_VALUE_MODE);
+ SET_V4L2_SGMNT_HDR_FLAG_IF(segment_feature_mode,
+ V4L2_VP8_SEGMENT_HEADER_FLAG_UPDATE_FEATURE_DATA);
+#undef SET_V4L2_SGMNT_HDR_FLAG_IF
SafeArrayMemcpy(v4l2_sgmnt_hdr->quant_update,
vp8_sgmnt_hdr.quantizer_update_value);
@@ -40,30 +48,31 @@ void FillV4L2SegmentationHeader(const Vp8SegmentationHeader& vp8_sgmnt_hdr,
SafeArrayMemcpy(v4l2_sgmnt_hdr->segment_probs, vp8_sgmnt_hdr.segment_prob);
}
-void FillV4L2LoopfilterHeader(const Vp8LoopFilterHeader& vp8_loopfilter_hdr,
- struct v4l2_vp8_loopfilter_hdr* v4l2_lf_hdr) {
+void FillV4L2LoopFilterHeader(const Vp8LoopFilterHeader& vp8_loopfilter_hdr,
+ struct v4l2_vp8_loopfilter_header* v4l2_lf_hdr) {
#define SET_V4L2_LF_HDR_FLAG_IF(cond, flag) \
v4l2_lf_hdr->flags |= ((vp8_loopfilter_hdr.cond) ? (flag) : 0)
- SET_V4L2_LF_HDR_FLAG_IF(loop_filter_adj_enable, V4L2_VP8_LF_HDR_ADJ_ENABLE);
+ SET_V4L2_LF_HDR_FLAG_IF(loop_filter_adj_enable,
+ V4L2_VP8_LF_HEADER_ADJ_ENABLE);
SET_V4L2_LF_HDR_FLAG_IF(mode_ref_lf_delta_update,
- V4L2_VP8_LF_HDR_DELTA_UPDATE);
-#undef SET_V4L2_SGMNT_HDR_FLAG_IF
+ V4L2_VP8_LF_HEADER_DELTA_UPDATE);
+ SET_V4L2_LF_HDR_FLAG_IF(type == Vp8LoopFilterHeader::LOOP_FILTER_TYPE_SIMPLE,
+ V4L2_VP8_LF_FILTER_TYPE_SIMPLE);
+#undef SET_V4L2_LF_HDR_FLAG_IF
#define LF_HDR_TO_V4L2_LF_HDR(a) v4l2_lf_hdr->a = vp8_loopfilter_hdr.a;
- LF_HDR_TO_V4L2_LF_HDR(type);
LF_HDR_TO_V4L2_LF_HDR(level);
LF_HDR_TO_V4L2_LF_HDR(sharpness_level);
#undef LF_HDR_TO_V4L2_LF_HDR
- SafeArrayMemcpy(v4l2_lf_hdr->ref_frm_delta_magnitude,
+ SafeArrayMemcpy(v4l2_lf_hdr->ref_frm_delta,
vp8_loopfilter_hdr.ref_frame_delta);
- SafeArrayMemcpy(v4l2_lf_hdr->mb_mode_delta_magnitude,
- vp8_loopfilter_hdr.mb_mode_delta);
+ SafeArrayMemcpy(v4l2_lf_hdr->mb_mode_delta, vp8_loopfilter_hdr.mb_mode_delta);
}
void FillV4L2QuantizationHeader(
const Vp8QuantizationHeader& vp8_quant_hdr,
- struct v4l2_vp8_quantization_hdr* v4l2_quant_hdr) {
+ struct v4l2_vp8_quantization_header* v4l2_quant_hdr) {
v4l2_quant_hdr->y_ac_qi = vp8_quant_hdr.y_ac_qi;
v4l2_quant_hdr->y_dc_delta = vp8_quant_hdr.y_dc_delta;
v4l2_quant_hdr->y2_dc_delta = vp8_quant_hdr.y2_dc_delta;
@@ -72,8 +81,9 @@ void FillV4L2QuantizationHeader(
v4l2_quant_hdr->uv_ac_delta = vp8_quant_hdr.uv_ac_delta;
}
-void FillV4L2Vp8EntropyHeader(const Vp8EntropyHeader& vp8_entropy_hdr,
- struct v4l2_vp8_entropy_hdr* v4l2_entropy_hdr) {
+void FillV4L2Vp8EntropyHeader(
+ const Vp8EntropyHeader& vp8_entropy_hdr,
+ struct v4l2_vp8_entropy_header* v4l2_entropy_hdr) {
SafeArrayMemcpy(v4l2_entropy_hdr->coeff_probs, vp8_entropy_hdr.coeff_probs);
SafeArrayMemcpy(v4l2_entropy_hdr->y_mode_probs, vp8_entropy_hdr.y_mode_probs);
SafeArrayMemcpy(v4l2_entropy_hdr->uv_mode_probs,
@@ -120,52 +130,55 @@ scoped_refptr<VP8Picture> V4L2VP8Accelerator::CreateVP8Picture() {
bool V4L2VP8Accelerator::SubmitDecode(
scoped_refptr<VP8Picture> pic,
const Vp8ReferenceFrameVector& reference_frames) {
- struct v4l2_ctrl_vp8_frame_hdr v4l2_frame_hdr;
+ struct v4l2_ctrl_vp8_frame_header v4l2_frame_hdr;
memset(&v4l2_frame_hdr, 0, sizeof(v4l2_frame_hdr));
const auto& frame_hdr = pic->frame_hdr;
- v4l2_frame_hdr.key_frame = frame_hdr->frame_type;
#define FHDR_TO_V4L2_FHDR(a) v4l2_frame_hdr.a = frame_hdr->a
FHDR_TO_V4L2_FHDR(version);
FHDR_TO_V4L2_FHDR(width);
FHDR_TO_V4L2_FHDR(horizontal_scale);
FHDR_TO_V4L2_FHDR(height);
FHDR_TO_V4L2_FHDR(vertical_scale);
- FHDR_TO_V4L2_FHDR(sign_bias_golden);
- FHDR_TO_V4L2_FHDR(sign_bias_alternate);
FHDR_TO_V4L2_FHDR(prob_skip_false);
FHDR_TO_V4L2_FHDR(prob_intra);
FHDR_TO_V4L2_FHDR(prob_last);
FHDR_TO_V4L2_FHDR(prob_gf);
- FHDR_TO_V4L2_FHDR(bool_dec_range);
- FHDR_TO_V4L2_FHDR(bool_dec_value);
- FHDR_TO_V4L2_FHDR(bool_dec_count);
#undef FHDR_TO_V4L2_FHDR
+ v4l2_frame_hdr.coder_state.range = frame_hdr->bool_dec_range;
+ v4l2_frame_hdr.coder_state.value = frame_hdr->bool_dec_value;
+ v4l2_frame_hdr.coder_state.bit_count = frame_hdr->bool_dec_count;
#define SET_V4L2_FRM_HDR_FLAG_IF(cond, flag) \
v4l2_frame_hdr.flags |= ((frame_hdr->cond) ? (flag) : 0)
+ SET_V4L2_FRM_HDR_FLAG_IF(frame_type == Vp8FrameHeader::KEYFRAME,
+ V4L2_VP8_FRAME_HEADER_FLAG_KEY_FRAME);
+ SET_V4L2_FRM_HDR_FLAG_IF(sign_bias_golden,
+ V4L2_VP8_FRAME_HEADER_FLAG_SIGN_BIAS_GOLDEN);
+ SET_V4L2_FRM_HDR_FLAG_IF(sign_bias_alternate,
+ V4L2_VP8_FRAME_HEADER_FLAG_SIGN_BIAS_ALT);
SET_V4L2_FRM_HDR_FLAG_IF(is_experimental,
- V4L2_VP8_FRAME_HDR_FLAG_EXPERIMENTAL);
- SET_V4L2_FRM_HDR_FLAG_IF(show_frame, V4L2_VP8_FRAME_HDR_FLAG_SHOW_FRAME);
+ V4L2_VP8_FRAME_HEADER_FLAG_EXPERIMENTAL);
+ SET_V4L2_FRM_HDR_FLAG_IF(show_frame, V4L2_VP8_FRAME_HEADER_FLAG_SHOW_FRAME);
SET_V4L2_FRM_HDR_FLAG_IF(mb_no_skip_coeff,
- V4L2_VP8_FRAME_HDR_FLAG_MB_NO_SKIP_COEFF);
+ V4L2_VP8_FRAME_HEADER_FLAG_MB_NO_SKIP_COEFF);
#undef SET_V4L2_FRM_HDR_FLAG_IF
FillV4L2SegmentationHeader(frame_hdr->segmentation_hdr,
- &v4l2_frame_hdr.sgmnt_hdr);
+ &v4l2_frame_hdr.segment_header);
- FillV4L2LoopfilterHeader(frame_hdr->loopfilter_hdr, &v4l2_frame_hdr.lf_hdr);
+ FillV4L2LoopFilterHeader(frame_hdr->loopfilter_hdr,
+ &v4l2_frame_hdr.lf_header);
FillV4L2QuantizationHeader(frame_hdr->quantization_hdr,
- &v4l2_frame_hdr.quant_hdr);
+ &v4l2_frame_hdr.quant_header);
- FillV4L2Vp8EntropyHeader(frame_hdr->entropy_hdr, &v4l2_frame_hdr.entropy_hdr);
+ FillV4L2Vp8EntropyHeader(frame_hdr->entropy_hdr,
+ &v4l2_frame_hdr.entropy_header);
v4l2_frame_hdr.first_part_size =
base::checked_cast<__u32>(frame_hdr->first_part_size);
- v4l2_frame_hdr.first_part_offset =
- base::checked_cast<__u32>(frame_hdr->first_part_offset);
- v4l2_frame_hdr.macroblock_bit_offset =
+ v4l2_frame_hdr.first_part_header_bits =
base::checked_cast<__u32>(frame_hdr->macroblock_bit_offset);
v4l2_frame_hdr.num_dct_parts = frame_hdr->num_of_dct_partitions;
@@ -185,10 +198,8 @@ bool V4L2VP8Accelerator::SubmitDecode(
if (last_frame) {
scoped_refptr<V4L2DecodeSurface> last_frame_surface =
VP8PictureToV4L2DecodeSurface(last_frame);
- v4l2_frame_hdr.last_frame = last_frame_surface->GetReferenceID();
+ v4l2_frame_hdr.last_frame_ts = last_frame_surface->GetReferenceID();
ref_surfaces.push_back(last_frame_surface);
- } else {
- v4l2_frame_hdr.last_frame = VIDEO_MAX_FRAME;
}
const auto golden_frame =
@@ -196,10 +207,8 @@ bool V4L2VP8Accelerator::SubmitDecode(
if (golden_frame) {
scoped_refptr<V4L2DecodeSurface> golden_frame_surface =
VP8PictureToV4L2DecodeSurface(golden_frame);
- v4l2_frame_hdr.golden_frame = golden_frame_surface->GetReferenceID();
+ v4l2_frame_hdr.golden_frame_ts = golden_frame_surface->GetReferenceID();
ref_surfaces.push_back(golden_frame_surface);
- } else {
- v4l2_frame_hdr.golden_frame = VIDEO_MAX_FRAME;
}
const auto alt_frame =
@@ -207,15 +216,13 @@ bool V4L2VP8Accelerator::SubmitDecode(
if (alt_frame) {
scoped_refptr<V4L2DecodeSurface> alt_frame_surface =
VP8PictureToV4L2DecodeSurface(alt_frame);
- v4l2_frame_hdr.alt_frame = alt_frame_surface->GetReferenceID();
+ v4l2_frame_hdr.alt_frame_ts = alt_frame_surface->GetReferenceID();
ref_surfaces.push_back(alt_frame_surface);
- } else {
- v4l2_frame_hdr.alt_frame = VIDEO_MAX_FRAME;
}
struct v4l2_ext_control ctrl;
memset(&ctrl, 0, sizeof(ctrl));
- ctrl.id = V4L2_CID_MPEG_VIDEO_VP8_FRAME_HDR;
+ ctrl.id = V4L2_CID_MPEG_VIDEO_VP8_FRAME_HEADER;
ctrl.size = sizeof(v4l2_frame_hdr);
ctrl.ptr = &v4l2_frame_hdr;
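
A minimal standalone sketch of the SET_V4L2_*_FLAG_IF pattern used above, which folds parsed header booleans into a V4L2 flags bitmask; the struct and flag names here are illustrative and are not taken from the Chromium or kernel headers:

#include <cstdint>

struct ParsedHeader {  // Hypothetical stand-in for a parsed bitstream header.
  bool show_frame = false;
  bool mb_no_skip_coeff = false;
};

constexpr uint32_t kFlagShowFrame = 1u << 0;      // Illustrative flag values.
constexpr uint32_t kFlagMbNoSkipCoeff = 1u << 1;

uint32_t BuildFlags(const ParsedHeader& hdr) {
  uint32_t flags = 0;
// ORs |flag| into |flags| only when the named header field is set.
#define SET_FLAG_IF(field, flag) flags |= ((hdr.field) ? (flag) : 0)
  SET_FLAG_IF(show_frame, kFlagShowFrame);
  SET_FLAG_IF(mb_no_skip_coeff, kFlagMbNoSkipCoeff);
#undef SET_FLAG_IF
  return flags;
}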
diff --git a/chromium/media/gpu/v4l2/v4l2_vp8_accelerator_legacy.cc b/chromium/media/gpu/v4l2/v4l2_vp8_accelerator_legacy.cc
new file mode 100644
index 00000000000..0fec58cd948
--- /dev/null
+++ b/chromium/media/gpu/v4l2/v4l2_vp8_accelerator_legacy.cc
@@ -0,0 +1,261 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/v4l2/v4l2_vp8_accelerator_legacy.h"
+
+#include <type_traits>
+
+#include <linux/videodev2.h>
+#include <linux/media/vp8-ctrls-legacy.h>
+
+#include "base/logging.h"
+#include "base/numerics/safe_conversions.h"
+#include "base/stl_util.h"
+#include "media/gpu/macros.h"
+#include "media/gpu/v4l2/v4l2_decode_surface.h"
+#include "media/gpu/v4l2/v4l2_decode_surface_handler.h"
+#include "media/gpu/v4l2/v4l2_device.h"
+#include "media/gpu/vp8_picture.h"
+#include "media/parsers/vp8_parser.h"
+
+namespace media {
+namespace {
+
+void FillV4L2SegmentationHeader(const Vp8SegmentationHeader& vp8_sgmnt_hdr,
+ struct v4l2_vp8_sgmnt_hdr* v4l2_sgmnt_hdr) {
+#define SET_V4L2_SGMNT_HDR_FLAG_IF(cond, flag) \
+ v4l2_sgmnt_hdr->flags |= ((vp8_sgmnt_hdr.cond) ? (flag) : 0)
+ SET_V4L2_SGMNT_HDR_FLAG_IF(segmentation_enabled,
+ V4L2_VP8_SEGMNT_HDR_FLAG_ENABLED);
+ SET_V4L2_SGMNT_HDR_FLAG_IF(update_mb_segmentation_map,
+ V4L2_VP8_SEGMNT_HDR_FLAG_UPDATE_MAP);
+ SET_V4L2_SGMNT_HDR_FLAG_IF(update_segment_feature_data,
+ V4L2_VP8_SEGMNT_HDR_FLAG_UPDATE_FEATURE_DATA);
+#undef SET_V4L2_SGMNT_HDR_FLAG_IF
+ v4l2_sgmnt_hdr->segment_feature_mode = vp8_sgmnt_hdr.segment_feature_mode;
+
+ SafeArrayMemcpy(v4l2_sgmnt_hdr->quant_update,
+ vp8_sgmnt_hdr.quantizer_update_value);
+ SafeArrayMemcpy(v4l2_sgmnt_hdr->lf_update, vp8_sgmnt_hdr.lf_update_value);
+ SafeArrayMemcpy(v4l2_sgmnt_hdr->segment_probs, vp8_sgmnt_hdr.segment_prob);
+}
+
+void FillV4L2LoopfilterHeader(const Vp8LoopFilterHeader& vp8_loopfilter_hdr,
+ struct v4l2_vp8_loopfilter_hdr* v4l2_lf_hdr) {
+#define SET_V4L2_LF_HDR_FLAG_IF(cond, flag) \
+ v4l2_lf_hdr->flags |= ((vp8_loopfilter_hdr.cond) ? (flag) : 0)
+ SET_V4L2_LF_HDR_FLAG_IF(loop_filter_adj_enable, V4L2_VP8_LF_HDR_ADJ_ENABLE);
+ SET_V4L2_LF_HDR_FLAG_IF(mode_ref_lf_delta_update,
+ V4L2_VP8_LF_HDR_DELTA_UPDATE);
+#undef SET_V4L2_LF_HDR_FLAG_IF
+
+#define LF_HDR_TO_V4L2_LF_HDR(a) v4l2_lf_hdr->a = vp8_loopfilter_hdr.a;
+ LF_HDR_TO_V4L2_LF_HDR(type);
+ LF_HDR_TO_V4L2_LF_HDR(level);
+ LF_HDR_TO_V4L2_LF_HDR(sharpness_level);
+#undef LF_HDR_TO_V4L2_LF_HDR
+
+ SafeArrayMemcpy(v4l2_lf_hdr->ref_frm_delta_magnitude,
+ vp8_loopfilter_hdr.ref_frame_delta);
+ SafeArrayMemcpy(v4l2_lf_hdr->mb_mode_delta_magnitude,
+ vp8_loopfilter_hdr.mb_mode_delta);
+}
+
+void FillV4L2QuantizationHeader(
+ const Vp8QuantizationHeader& vp8_quant_hdr,
+ struct v4l2_vp8_quantization_hdr* v4l2_quant_hdr) {
+ v4l2_quant_hdr->y_ac_qi = vp8_quant_hdr.y_ac_qi;
+ v4l2_quant_hdr->y_dc_delta = vp8_quant_hdr.y_dc_delta;
+ v4l2_quant_hdr->y2_dc_delta = vp8_quant_hdr.y2_dc_delta;
+ v4l2_quant_hdr->y2_ac_delta = vp8_quant_hdr.y2_ac_delta;
+ v4l2_quant_hdr->uv_dc_delta = vp8_quant_hdr.uv_dc_delta;
+ v4l2_quant_hdr->uv_ac_delta = vp8_quant_hdr.uv_ac_delta;
+}
+
+void FillV4L2Vp8EntropyHeader(const Vp8EntropyHeader& vp8_entropy_hdr,
+ struct v4l2_vp8_entropy_hdr* v4l2_entropy_hdr) {
+ SafeArrayMemcpy(v4l2_entropy_hdr->coeff_probs, vp8_entropy_hdr.coeff_probs);
+ SafeArrayMemcpy(v4l2_entropy_hdr->y_mode_probs, vp8_entropy_hdr.y_mode_probs);
+ SafeArrayMemcpy(v4l2_entropy_hdr->uv_mode_probs,
+ vp8_entropy_hdr.uv_mode_probs);
+ SafeArrayMemcpy(v4l2_entropy_hdr->mv_probs, vp8_entropy_hdr.mv_probs);
+}
+
+} // namespace
+
+class V4L2VP8Picture : public VP8Picture {
+ public:
+ explicit V4L2VP8Picture(const scoped_refptr<V4L2DecodeSurface>& dec_surface)
+ : dec_surface_(dec_surface) {}
+
+ V4L2VP8Picture* AsV4L2VP8Picture() override { return this; }
+ scoped_refptr<V4L2DecodeSurface> dec_surface() { return dec_surface_; }
+
+ private:
+ ~V4L2VP8Picture() override {}
+
+ scoped_refptr<V4L2DecodeSurface> dec_surface_;
+
+ DISALLOW_COPY_AND_ASSIGN(V4L2VP8Picture);
+};
+
+V4L2LegacyVP8Accelerator::V4L2LegacyVP8Accelerator(
+ V4L2DecodeSurfaceHandler* surface_handler,
+ V4L2Device* device)
+ : surface_handler_(surface_handler), device_(device) {
+ DCHECK(surface_handler_);
+}
+
+V4L2LegacyVP8Accelerator::~V4L2LegacyVP8Accelerator() {}
+
+scoped_refptr<VP8Picture> V4L2LegacyVP8Accelerator::CreateVP8Picture() {
+ scoped_refptr<V4L2DecodeSurface> dec_surface =
+ surface_handler_->CreateSurface();
+ if (!dec_surface)
+ return nullptr;
+
+ return new V4L2VP8Picture(dec_surface);
+}
+
+bool V4L2LegacyVP8Accelerator::SubmitDecode(
+ scoped_refptr<VP8Picture> pic,
+ const Vp8ReferenceFrameVector& reference_frames) {
+ struct v4l2_ctrl_vp8_frame_hdr v4l2_frame_hdr;
+ memset(&v4l2_frame_hdr, 0, sizeof(v4l2_frame_hdr));
+
+ const auto& frame_hdr = pic->frame_hdr;
+ v4l2_frame_hdr.key_frame = frame_hdr->frame_type;
+#define FHDR_TO_V4L2_FHDR(a) v4l2_frame_hdr.a = frame_hdr->a
+ FHDR_TO_V4L2_FHDR(version);
+ FHDR_TO_V4L2_FHDR(width);
+ FHDR_TO_V4L2_FHDR(horizontal_scale);
+ FHDR_TO_V4L2_FHDR(height);
+ FHDR_TO_V4L2_FHDR(vertical_scale);
+ FHDR_TO_V4L2_FHDR(sign_bias_golden);
+ FHDR_TO_V4L2_FHDR(sign_bias_alternate);
+ FHDR_TO_V4L2_FHDR(prob_skip_false);
+ FHDR_TO_V4L2_FHDR(prob_intra);
+ FHDR_TO_V4L2_FHDR(prob_last);
+ FHDR_TO_V4L2_FHDR(prob_gf);
+ FHDR_TO_V4L2_FHDR(bool_dec_range);
+ FHDR_TO_V4L2_FHDR(bool_dec_value);
+ FHDR_TO_V4L2_FHDR(bool_dec_count);
+#undef FHDR_TO_V4L2_FHDR
+
+#define SET_V4L2_FRM_HDR_FLAG_IF(cond, flag) \
+ v4l2_frame_hdr.flags |= ((frame_hdr->cond) ? (flag) : 0)
+ SET_V4L2_FRM_HDR_FLAG_IF(is_experimental,
+ V4L2_VP8_FRAME_HDR_FLAG_EXPERIMENTAL);
+ SET_V4L2_FRM_HDR_FLAG_IF(show_frame, V4L2_VP8_FRAME_HDR_FLAG_SHOW_FRAME);
+ SET_V4L2_FRM_HDR_FLAG_IF(mb_no_skip_coeff,
+ V4L2_VP8_FRAME_HDR_FLAG_MB_NO_SKIP_COEFF);
+#undef SET_V4L2_FRM_HDR_FLAG_IF
+
+ FillV4L2SegmentationHeader(frame_hdr->segmentation_hdr,
+ &v4l2_frame_hdr.sgmnt_hdr);
+
+ FillV4L2LoopfilterHeader(frame_hdr->loopfilter_hdr, &v4l2_frame_hdr.lf_hdr);
+
+ FillV4L2QuantizationHeader(frame_hdr->quantization_hdr,
+ &v4l2_frame_hdr.quant_hdr);
+
+ FillV4L2Vp8EntropyHeader(frame_hdr->entropy_hdr, &v4l2_frame_hdr.entropy_hdr);
+
+ v4l2_frame_hdr.first_part_size =
+ base::checked_cast<__u32>(frame_hdr->first_part_size);
+ v4l2_frame_hdr.first_part_offset =
+ base::checked_cast<__u32>(frame_hdr->first_part_offset);
+ v4l2_frame_hdr.macroblock_bit_offset =
+ base::checked_cast<__u32>(frame_hdr->macroblock_bit_offset);
+ v4l2_frame_hdr.num_dct_parts = frame_hdr->num_of_dct_partitions;
+
+ static_assert(std::extent<decltype(v4l2_frame_hdr.dct_part_sizes)>() ==
+ std::extent<decltype(frame_hdr->dct_partition_sizes)>(),
+ "DCT partition size arrays must have equal number of elements");
+ for (size_t i = 0; i < frame_hdr->num_of_dct_partitions &&
+ i < base::size(v4l2_frame_hdr.dct_part_sizes);
+ ++i)
+ v4l2_frame_hdr.dct_part_sizes[i] = frame_hdr->dct_partition_sizes[i];
+
+ scoped_refptr<V4L2DecodeSurface> dec_surface =
+ VP8PictureToV4L2DecodeSurface(pic);
+ std::vector<scoped_refptr<V4L2DecodeSurface>> ref_surfaces;
+
+ const auto last_frame = reference_frames.GetFrame(Vp8RefType::VP8_FRAME_LAST);
+ if (last_frame) {
+ scoped_refptr<V4L2DecodeSurface> last_frame_surface =
+ VP8PictureToV4L2DecodeSurface(last_frame);
+ v4l2_frame_hdr.last_frame = last_frame_surface->GetReferenceID();
+ ref_surfaces.push_back(last_frame_surface);
+ } else {
+ v4l2_frame_hdr.last_frame = VIDEO_MAX_FRAME;
+ }
+
+ const auto golden_frame =
+ reference_frames.GetFrame(Vp8RefType::VP8_FRAME_GOLDEN);
+ if (golden_frame) {
+ scoped_refptr<V4L2DecodeSurface> golden_frame_surface =
+ VP8PictureToV4L2DecodeSurface(golden_frame);
+ v4l2_frame_hdr.golden_frame = golden_frame_surface->GetReferenceID();
+ ref_surfaces.push_back(golden_frame_surface);
+ } else {
+ v4l2_frame_hdr.golden_frame = VIDEO_MAX_FRAME;
+ }
+
+ const auto alt_frame =
+ reference_frames.GetFrame(Vp8RefType::VP8_FRAME_ALTREF);
+ if (alt_frame) {
+ scoped_refptr<V4L2DecodeSurface> alt_frame_surface =
+ VP8PictureToV4L2DecodeSurface(alt_frame);
+ v4l2_frame_hdr.alt_frame = alt_frame_surface->GetReferenceID();
+ ref_surfaces.push_back(alt_frame_surface);
+ } else {
+ v4l2_frame_hdr.alt_frame = VIDEO_MAX_FRAME;
+ }
+
+ struct v4l2_ext_control ctrl;
+ memset(&ctrl, 0, sizeof(ctrl));
+ ctrl.id = V4L2_CID_MPEG_VIDEO_VP8_FRAME_HDR;
+ ctrl.size = sizeof(v4l2_frame_hdr);
+ ctrl.ptr = &v4l2_frame_hdr;
+
+ struct v4l2_ext_controls ext_ctrls;
+ memset(&ext_ctrls, 0, sizeof(ext_ctrls));
+ ext_ctrls.count = 1;
+ ext_ctrls.controls = &ctrl;
+ dec_surface->PrepareSetCtrls(&ext_ctrls);
+ if (device_->Ioctl(VIDIOC_S_EXT_CTRLS, &ext_ctrls) != 0) {
+ VPLOGF(1) << "ioctl() failed: VIDIOC_S_EXT_CTRLS";
+ return false;
+ }
+
+ dec_surface->SetReferenceSurfaces(ref_surfaces);
+
+ if (!surface_handler_->SubmitSlice(dec_surface, frame_hdr->data,
+ frame_hdr->frame_size))
+ return false;
+
+ DVLOGF(4) << "Submitting decode for surface: " << dec_surface->ToString();
+ surface_handler_->DecodeSurface(dec_surface);
+ return true;
+}
+
+bool V4L2LegacyVP8Accelerator::OutputPicture(
+ const scoped_refptr<VP8Picture>& pic) {
+ // TODO(crbug.com/647725): Insert correct color space.
+ surface_handler_->SurfaceReady(VP8PictureToV4L2DecodeSurface(pic),
+ pic->bitstream_id(), pic->visible_rect(),
+ VideoColorSpace());
+ return true;
+}
+
+scoped_refptr<V4L2DecodeSurface>
+V4L2LegacyVP8Accelerator::VP8PictureToV4L2DecodeSurface(
+ const scoped_refptr<VP8Picture>& pic) {
+ V4L2VP8Picture* v4l2_pic = pic->AsV4L2VP8Picture();
+ CHECK(v4l2_pic);
+ return v4l2_pic->dec_surface();
+}
+
+} // namespace media
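
Both the legacy and the non-legacy accelerator hand the filled-in frame-header struct to the driver through a single extended control. A bare sketch of that VIDIOC_S_EXT_CTRLS call against a plain file descriptor is shown below; the Chromium code instead goes through V4L2Device::Ioctl() and V4L2DecodeSurface::PrepareSetCtrls(), so this is only an illustration of the underlying kernel API, with the helper name and error handling invented here:

#include <cstring>
#include <linux/videodev2.h>
#include <sys/ioctl.h>

// Submits one payload-carrying control (for example a VP8 frame header blob)
// to a V4L2 decoder. Returns true when the ioctl succeeds.
bool SubmitExtControl(int fd, uint32_t ctrl_id, void* payload, uint32_t size) {
  struct v4l2_ext_control ctrl;
  memset(&ctrl, 0, sizeof(ctrl));
  ctrl.id = ctrl_id;
  ctrl.size = size;
  ctrl.ptr = payload;

  struct v4l2_ext_controls ext_ctrls;
  memset(&ext_ctrls, 0, sizeof(ext_ctrls));
  ext_ctrls.count = 1;
  ext_ctrls.controls = &ctrl;
  return ioctl(fd, VIDIOC_S_EXT_CTRLS, &ext_ctrls) == 0;
}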
diff --git a/chromium/media/gpu/v4l2/v4l2_vp8_accelerator_legacy.h b/chromium/media/gpu/v4l2/v4l2_vp8_accelerator_legacy.h
new file mode 100644
index 00000000000..983b3fec577
--- /dev/null
+++ b/chromium/media/gpu/v4l2/v4l2_vp8_accelerator_legacy.h
@@ -0,0 +1,44 @@
+// Copyright 2018 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_V4L2_V4L2_VP8_ACCELERATOR_LEGACY_H_
+#define MEDIA_GPU_V4L2_V4L2_VP8_ACCELERATOR_LEGACY_H_
+
+#include <vector>
+
+#include "base/macros.h"
+#include "base/memory/scoped_refptr.h"
+#include "media/gpu/vp8_decoder.h"
+
+namespace media {
+
+class V4L2Device;
+class V4L2DecodeSurface;
+class V4L2DecodeSurfaceHandler;
+
+class V4L2LegacyVP8Accelerator : public VP8Decoder::VP8Accelerator {
+ public:
+ explicit V4L2LegacyVP8Accelerator(V4L2DecodeSurfaceHandler* surface_handler,
+ V4L2Device* device);
+ ~V4L2LegacyVP8Accelerator() override;
+
+ // VP8Decoder::VP8Accelerator implementation.
+ scoped_refptr<VP8Picture> CreateVP8Picture() override;
+ bool SubmitDecode(scoped_refptr<VP8Picture> pic,
+ const Vp8ReferenceFrameVector& reference_frames) override;
+ bool OutputPicture(const scoped_refptr<VP8Picture>& pic) override;
+
+ private:
+ scoped_refptr<V4L2DecodeSurface> VP8PictureToV4L2DecodeSurface(
+ const scoped_refptr<VP8Picture>& pic);
+
+ V4L2DecodeSurfaceHandler* const surface_handler_;
+ V4L2Device* const device_;
+
+ DISALLOW_COPY_AND_ASSIGN(V4L2LegacyVP8Accelerator);
+};
+
+} // namespace media
+
+#endif // MEDIA_GPU_V4L2_V4L2_VP8_ACCELERATOR_LEGACY_H_
diff --git a/chromium/media/gpu/vaapi/BUILD.gn b/chromium/media/gpu/vaapi/BUILD.gn
index 842a0800d96..2d19903f944 100644
--- a/chromium/media/gpu/vaapi/BUILD.gn
+++ b/chromium/media/gpu/vaapi/BUILD.gn
@@ -41,10 +41,10 @@ source_set("vaapi") {
"vaapi_dmabuf_video_frame_mapper.h",
"vaapi_h264_accelerator.cc",
"vaapi_h264_accelerator.h",
+ "vaapi_image_decode_accelerator_worker.cc",
+ "vaapi_image_decode_accelerator_worker.h",
"vaapi_image_decoder.cc",
"vaapi_image_decoder.h",
- "vaapi_jpeg_decode_accelerator_worker.cc",
- "vaapi_jpeg_decode_accelerator_worker.h",
"vaapi_jpeg_decoder.cc",
"vaapi_jpeg_decoder.h",
"vaapi_jpeg_encoder.cc",
@@ -97,6 +97,10 @@ source_set("vaapi") {
"//ui/gfx/geometry",
]
+ public_deps = [
+ "//skia",
+ ]
+
if (is_chromeos) {
sources += [
"vaapi_jpeg_encode_accelerator.cc",
@@ -162,12 +166,16 @@ source_set("vaapi_test_utils") {
source_set("unit_test") {
testonly = true
sources = [
+ "h264_encoder_unittest.cc",
+ "vaapi_image_decode_accelerator_worker_unittest.cc",
"vaapi_video_decode_accelerator_unittest.cc",
]
deps = [
":vaapi",
+ "//base",
"//base/test:test_support",
"//gpu:test_support",
+ "//gpu/ipc/service",
"//media/gpu:common",
"//mojo/core/embedder",
"//testing/gmock",
diff --git a/chromium/media/gpu/vaapi/OWNERS b/chromium/media/gpu/vaapi/OWNERS
index 7671f222908..96c1ecee549 100644
--- a/chromium/media/gpu/vaapi/OWNERS
+++ b/chromium/media/gpu/vaapi/OWNERS
@@ -1,3 +1,4 @@
+dstaessens@chromium.org
kcwu@chromium.org
mcasas@chromium.org
posciak@chromium.org
diff --git a/chromium/media/gpu/vaapi/h264_encoder.cc b/chromium/media/gpu/vaapi/h264_encoder.cc
index ddde6ee49a9..8e15bc5a477 100644
--- a/chromium/media/gpu/vaapi/h264_encoder.cc
+++ b/chromium/media/gpu/vaapi/h264_encoder.cc
@@ -93,13 +93,28 @@ bool H264Encoder::Initialize(
mb_height_ = coded_size_.height() / kH264MacroblockSizeInPixels;
profile_ = config.output_profile;
- level_ = config.h264_output_level.value_or(
- VideoEncodeAccelerator::kDefaultH264Level);
+ level_ = config.h264_output_level.value_or(H264SPS::kLevelIDC4p0);
uint32_t initial_framerate = config.initial_framerate.value_or(
VideoEncodeAccelerator::kDefaultFramerate);
+
+  // Checks whether |level_| is valid. If it is invalid, sets |level_| to the
+  // minimum level that conforms to Table A-1 of the H.264 spec for the
+  // specified bitrate, framerate and dimensions.
if (!CheckH264LevelLimits(profile_, level_, config.initial_bitrate,
- initial_framerate, mb_width_ * mb_height_))
- return false;
+ initial_framerate, mb_width_ * mb_height_)) {
+ base::Optional<uint8_t> valid_level =
+ FindValidH264Level(profile_, config.initial_bitrate, initial_framerate,
+ mb_width_ * mb_height_);
+ if (!valid_level) {
+ VLOGF(1) << "Could not find a valid h264 level for"
+ << " profile=" << profile_
+ << " bitrate=" << config.initial_bitrate
+ << " framerate=" << initial_framerate
+ << " size=" << config.input_visible_size.ToString();
+ return false;
+ }
+ level_ = *valid_level;
+ }
curr_params_.max_ref_pic_list0_size =
std::min(kMaxRefIdxL0Size, ave_config.max_num_ref_frames & 0xffff);
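
The new Initialize() path keeps the requested (or default) level only when CheckH264LevelLimits() accepts it, and otherwise asks FindValidH264Level() for the smallest level that can carry the requested bitrate, framerate and frame size. A sketch of that "first level whose limits fit" search is below; the LevelLimit values and helper name are placeholders, since the real numbers come from Table A-1 of the H.264 spec via Chromium's level-limit helpers:

#include <cstdint>
#include <optional>
#include <vector>

struct LevelLimit {  // Illustrative shape only; not the real Table A-1 data.
  uint8_t level_idc;
  uint32_t max_macroblocks_per_frame;
  uint64_t max_macroblocks_per_sec;
  uint32_t max_bitrate_bps;
};

// |limits| is assumed sorted from the lowest to the highest level.
std::optional<uint8_t> FindSmallestFittingLevel(
    const std::vector<LevelLimit>& limits,
    uint32_t bitrate_bps,
    uint32_t framerate,
    uint32_t macroblocks_per_frame) {
  const uint64_t mbs_per_sec =
      static_cast<uint64_t>(macroblocks_per_frame) * framerate;
  for (const LevelLimit& limit : limits) {
    if (macroblocks_per_frame <= limit.max_macroblocks_per_frame &&
        mbs_per_sec <= limit.max_macroblocks_per_sec &&
        bitrate_bps <= limit.max_bitrate_bps) {
      return limit.level_idc;
    }
  }
  return std::nullopt;  // No defined level covers this configuration.
}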
diff --git a/chromium/media/gpu/vaapi/h264_encoder.h b/chromium/media/gpu/vaapi/h264_encoder.h
index 3b6c855e85f..ea65c7c966f 100644
--- a/chromium/media/gpu/vaapi/h264_encoder.h
+++ b/chromium/media/gpu/vaapi/h264_encoder.h
@@ -113,6 +113,8 @@ class H264Encoder : public AcceleratedVideoEncoder {
bool PrepareEncodeJob(EncodeJob* encode_job) override;
private:
+ friend class H264EncoderTest;
+
// Fill current_sps_ and current_pps_ with current encoding state parameters.
void UpdateSPS();
void UpdatePPS();
diff --git a/chromium/media/gpu/vaapi/h264_encoder_unittest.cc b/chromium/media/gpu/vaapi/h264_encoder_unittest.cc
new file mode 100644
index 00000000000..b0c0f4cd105
--- /dev/null
+++ b/chromium/media/gpu/vaapi/h264_encoder_unittest.cc
@@ -0,0 +1,95 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/vaapi/h264_encoder.h"
+
+#include <memory>
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ::testing::_;
+using ::testing::Invoke;
+using ::testing::Return;
+
+namespace media {
+namespace {
+
+AcceleratedVideoEncoder::Config kDefaultAVEConfig{10};
+
+VideoEncodeAccelerator::Config kDefaultVEAConfig(
+ PIXEL_FORMAT_I420,
+ gfx::Size(1280, 720),
+ H264PROFILE_BASELINE,
+ 14000000 /* = maximum bitrate in bits per second for level 3.1 */,
+ VideoEncodeAccelerator::kDefaultFramerate,
+ base::nullopt /* gop_length */,
+ base::nullopt /* h264 output level*/,
+ VideoEncodeAccelerator::Config::StorageType::kShmem,
+ VideoEncodeAccelerator::Config::ContentType::kCamera);
+
+class MockH264Accelerator : public H264Encoder::Accelerator {
+ public:
+ MockH264Accelerator() = default;
+ MOCK_METHOD1(
+ GetPicture,
+ scoped_refptr<H264Picture>(AcceleratedVideoEncoder::EncodeJob* job));
+ MOCK_METHOD3(SubmitPackedHeaders,
+ bool(AcceleratedVideoEncoder::EncodeJob*,
+ scoped_refptr<H264BitstreamBuffer>,
+ scoped_refptr<H264BitstreamBuffer>));
+ MOCK_METHOD7(SubmitFrameParameters,
+ bool(AcceleratedVideoEncoder::EncodeJob*,
+ const H264Encoder::EncodeParams&,
+ const H264SPS&,
+ const H264PPS&,
+ scoped_refptr<H264Picture>,
+ const std::list<scoped_refptr<H264Picture>>&,
+ const std::list<scoped_refptr<H264Picture>>&));
+};
+} // namespace
+
+class H264EncoderTest : public ::testing::Test {
+ public:
+ H264EncoderTest() = default;
+ void SetUp() override;
+
+ void ExpectLevel(uint8_t level) { EXPECT_EQ(encoder_->level_, level); }
+
+ protected:
+ std::unique_ptr<H264Encoder> encoder_;
+ MockH264Accelerator* accelerator_;
+};
+
+void H264EncoderTest::SetUp() {
+ auto mock_accelerator = std::make_unique<MockH264Accelerator>();
+ accelerator_ = mock_accelerator.get();
+ encoder_ = std::make_unique<H264Encoder>(std::move(mock_accelerator));
+
+ // Set default behaviors for mock methods for convenience.
+ ON_CALL(*accelerator_, GetPicture(_))
+ .WillByDefault(Invoke([](AcceleratedVideoEncoder::EncodeJob*) {
+ return new H264Picture();
+ }));
+ ON_CALL(*accelerator_, SubmitPackedHeaders(_, _, _))
+ .WillByDefault(Return(true));
+ ON_CALL(*accelerator_, SubmitFrameParameters(_, _, _, _, _, _, _))
+ .WillByDefault(Return(true));
+}
+
+TEST_F(H264EncoderTest, Initialize) {
+ VideoEncodeAccelerator::Config vea_config = kDefaultVEAConfig;
+ AcceleratedVideoEncoder::Config ave_config = kDefaultAVEConfig;
+ EXPECT_TRUE(encoder_->Initialize(vea_config, ave_config));
+  // The output level is unspecified, so H264Encoder selects the default
+  // level, 4.0, which is sufficient for |vea_config|'s values.
+ ExpectLevel(H264SPS::kLevelIDC4p0);
+
+ // Initialize with 4k size. The level will be adjusted to 5.1 by H264Encoder.
+ vea_config.input_visible_size.SetSize(3840, 2160);
+ EXPECT_TRUE(encoder_->Initialize(vea_config, ave_config));
+ ExpectLevel(H264SPS::kLevelIDC5p1);
+}
+
+} // namespace media
diff --git a/chromium/media/gpu/vaapi/test_utils.cc b/chromium/media/gpu/vaapi/test_utils.cc
index 2bc0f770275..b534b297bd5 100644
--- a/chromium/media/gpu/vaapi/test_utils.cc
+++ b/chromium/media/gpu/vaapi/test_utils.cc
@@ -52,13 +52,8 @@ bool CompareImages(const DecodedImage& reference_image,
return false;
// Uses the reference image's size as the ground truth.
- // Note the use of > instead of !=. This is to handle the case in the Intel
- // iHD driver where, for example, the size of an image is 1280x720 while the
- // size of the VAAPI surface is 1280x736 because of additional alignment. See
- // https://git.io/fj6nA.
const gfx::Size image_size = reference_image.size;
- if (image_size.width() > hw_decoded_image.size.width() ||
- image_size.height() > hw_decoded_image.size.height()) {
+ if (image_size != hw_decoded_image.size) {
DLOG(ERROR) << "Wrong expected software decoded image size, "
<< image_size.ToString() << " versus VaAPI provided "
<< hw_decoded_image.size.ToString();
diff --git a/chromium/media/gpu/vaapi/vaapi_image_decode_accelerator_worker.cc b/chromium/media/gpu/vaapi/vaapi_image_decode_accelerator_worker.cc
new file mode 100644
index 00000000000..2c08aa0a44f
--- /dev/null
+++ b/chromium/media/gpu/vaapi/vaapi_image_decode_accelerator_worker.cc
@@ -0,0 +1,211 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/vaapi/vaapi_image_decode_accelerator_worker.h"
+
+#include <string.h>
+
+#include <utility>
+
+#include "base/bind.h"
+#include "base/containers/span.h"
+#include "base/feature_list.h"
+#include "base/location.h"
+#include "base/logging.h"
+#include "base/memory/ptr_util.h"
+#include "base/metrics/histogram_macros.h"
+#include "base/sequenced_task_runner.h"
+#include "base/task/post_task.h"
+#include "base/trace_event/trace_event.h"
+#include "gpu/config/gpu_finch_features.h"
+#include "media/gpu/macros.h"
+#include "media/gpu/vaapi/va_surface.h"
+#include "media/gpu/vaapi/vaapi_image_decoder.h"
+#include "media/gpu/vaapi/vaapi_jpeg_decoder.h"
+#include "media/gpu/vaapi/vaapi_webp_decoder.h"
+#include "media/gpu/vaapi/vaapi_wrapper.h"
+#include "media/parsers/webp_parser.h"
+#include "mojo/public/cpp/bindings/callback_helpers.h"
+#include "ui/gfx/geometry/size.h"
+#include "ui/gfx/gpu_memory_buffer.h"
+#include "ui/gfx/linux/native_pixmap_dmabuf.h"
+#include "ui/gfx/native_pixmap_handle.h"
+
+namespace media {
+
+namespace {
+
+bool IsJpegImage(base::span<const uint8_t> encoded_data) {
+ if (encoded_data.size() < 3u)
+ return false;
+ return memcmp("\xFF\xD8\xFF", encoded_data.data(), 3u) == 0;
+}
+
+// These values are persisted to logs. Entries should not be renumbered and
+// numeric values should never be reused.
+enum class VAJDAWorkerDecoderFailure {
+ kVaapiError = 0,
+ kMaxValue = kVaapiError,
+};
+
+void ReportToVAJDAWorkerDecoderFailureUMA(VAJDAWorkerDecoderFailure failure) {
+ UMA_HISTOGRAM_ENUMERATION("Media.VAJDAWorker.DecoderFailure", failure);
+}
+
+// Uses |decoder| to decode the image corresponding to |encoded_data|.
+// |decode_cb| is called when finished or when an error is encountered. We don't
+// support decoding to scale, so |output_size| is only used for tracing.
+void DecodeTask(
+ VaapiImageDecoder* decoder,
+ std::vector<uint8_t> encoded_data,
+ const gfx::Size& output_size,
+ gpu::ImageDecodeAcceleratorWorker::CompletedDecodeCB decode_cb) {
+ TRACE_EVENT2("jpeg", "VaapiImageDecodeAcceleratorWorker::DecodeTask",
+ "encoded_bytes", encoded_data.size(), "output_size",
+ output_size.ToString());
+ gpu::ImageDecodeAcceleratorWorker::CompletedDecodeCB scoped_decode_callback =
+ mojo::WrapCallbackWithDefaultInvokeIfNotRun(std::move(decode_cb),
+ nullptr);
+
+ // Decode into a VAAPI surface.
+ if (!decoder) {
+ DVLOGF(1) << "No decoder is available for supplied image";
+ return;
+ }
+ VaapiImageDecodeStatus status = decoder->Decode(
+ base::make_span<const uint8_t>(encoded_data.data(), encoded_data.size()));
+ if (status != VaapiImageDecodeStatus::kSuccess) {
+ DVLOGF(1) << "Failed to decode - status = "
+ << static_cast<uint32_t>(status);
+ return;
+ }
+
+ // Export the decode result as a NativePixmap.
+ std::unique_ptr<NativePixmapAndSizeInfo> exported_pixmap =
+ decoder->ExportAsNativePixmapDmaBuf(&status);
+ if (status != VaapiImageDecodeStatus::kSuccess) {
+ DVLOGF(1) << "Failed to export surface - status = "
+ << static_cast<uint32_t>(status);
+ return;
+ }
+ DCHECK(exported_pixmap);
+ DCHECK(exported_pixmap->pixmap);
+ if (exported_pixmap->pixmap->GetBufferSize() != output_size) {
+ DVLOGF(1) << "Scaling is not supported";
+ return;
+ }
+
+ // Output the decoded data.
+ gfx::NativePixmapHandle pixmap_handle =
+ exported_pixmap->pixmap->ExportHandle();
+ // If a dup() failed while exporting the handle, we would get no planes.
+ if (pixmap_handle.planes.empty()) {
+ DVLOGF(1) << "Could not export the NativePixmapHandle";
+ return;
+ }
+ auto result =
+ std::make_unique<gpu::ImageDecodeAcceleratorWorker::DecodeResult>();
+ result->handle.type = gfx::GpuMemoryBufferType::NATIVE_PIXMAP;
+ result->handle.native_pixmap_handle = std::move(pixmap_handle);
+ result->visible_size = exported_pixmap->pixmap->GetBufferSize();
+ result->buffer_format = exported_pixmap->pixmap->GetBufferFormat();
+ result->buffer_byte_size = exported_pixmap->byte_size;
+ result->yuv_color_space = decoder->GetYUVColorSpace();
+ std::move(scoped_decode_callback).Run(std::move(result));
+}
+
+} // namespace
+
+// static
+std::unique_ptr<VaapiImageDecodeAcceleratorWorker>
+VaapiImageDecodeAcceleratorWorker::Create() {
+ // TODO(crbug.com/988123): revisit the Media.VAJDAWorker.DecoderFailure UMA
+ // to be able to record WebP and JPEG failures separately.
+ const auto uma_cb =
+ base::BindRepeating(&ReportToVAJDAWorkerDecoderFailureUMA,
+ VAJDAWorkerDecoderFailure::kVaapiError);
+ VaapiImageDecoderVector decoders;
+
+ if (base::FeatureList::IsEnabled(
+ features::kVaapiJpegImageDecodeAcceleration)) {
+ auto jpeg_decoder = std::make_unique<VaapiJpegDecoder>();
+ if (jpeg_decoder->Initialize(uma_cb))
+ decoders.push_back(std::move(jpeg_decoder));
+ }
+
+ if (base::FeatureList::IsEnabled(
+ features::kVaapiWebPImageDecodeAcceleration)) {
+ auto webp_decoder = std::make_unique<VaapiWebPDecoder>();
+ if (webp_decoder->Initialize(uma_cb))
+ decoders.push_back(std::move(webp_decoder));
+ }
+
+ // If there are no decoders due to disabled flags or initialization failure,
+ // return nullptr.
+ if (decoders.empty())
+ return nullptr;
+
+ return base::WrapUnique(
+ new VaapiImageDecodeAcceleratorWorker(std::move(decoders)));
+}
+
+VaapiImageDecodeAcceleratorWorker::VaapiImageDecodeAcceleratorWorker(
+ VaapiImageDecoderVector decoders) {
+ DETACH_FROM_SEQUENCE(io_sequence_checker_);
+ decoder_task_runner_ =
+ base::CreateSequencedTaskRunnerWithTraits({base::ThreadPool()});
+ DCHECK(decoder_task_runner_);
+
+ DCHECK(!decoders.empty());
+ for (auto& decoder : decoders) {
+ supported_profiles_.push_back(decoder->GetSupportedProfile());
+ const gpu::ImageDecodeAcceleratorType type = decoder->GetType();
+ decoders_[type] = std::move(decoder);
+ }
+}
+
+VaapiImageDecodeAcceleratorWorker::~VaapiImageDecodeAcceleratorWorker() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(main_sequence_checker_);
+ DCHECK(decoder_task_runner_);
+ for (auto& decoder : decoders_)
+ decoder_task_runner_->DeleteSoon(FROM_HERE, std::move(decoder.second));
+}
+
+gpu::ImageDecodeAcceleratorSupportedProfiles
+VaapiImageDecodeAcceleratorWorker::GetSupportedProfiles() {
+ return supported_profiles_;
+}
+
+VaapiImageDecoder* VaapiImageDecodeAcceleratorWorker::GetDecoderForImage(
+ const std::vector<uint8_t>& encoded_data) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(io_sequence_checker_);
+ auto encoded_data_span =
+ base::make_span<const uint8_t>(encoded_data.data(), encoded_data.size());
+ auto result = decoders_.end();
+
+ if (IsJpegImage(encoded_data_span))
+ result = decoders_.find(gpu::ImageDecodeAcceleratorType::kJpeg);
+ else if (IsLossyWebPImage(encoded_data_span))
+ result = decoders_.find(gpu::ImageDecodeAcceleratorType::kWebP);
+
+ return result == decoders_.end() ? nullptr : result->second.get();
+}
+
+void VaapiImageDecodeAcceleratorWorker::Decode(
+ std::vector<uint8_t> encoded_data,
+ const gfx::Size& output_size,
+ CompletedDecodeCB decode_cb) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(io_sequence_checker_);
+ DCHECK(decoder_task_runner_);
+
+ // We defer checking for a null |decoder| until DecodeTask() because the
+ // gpu::ImageDecodeAcceleratorWorker interface mandates that the callback be
+ // called asynchronously.
+ VaapiImageDecoder* decoder = GetDecoderForImage(encoded_data);
+ decoder_task_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&DecodeTask, decoder, std::move(encoded_data),
+ output_size, std::move(decode_cb)));
+}
+
+} // namespace media
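
GetDecoderForImage() dispatches purely by sniffing the encoded bytes: IsJpegImage() above checks for the FF D8 FF prefix, and IsLossyWebPImage() (from media/parsers/webp_parser.h) checks the RIFF/"WEBP"/"VP8 " container layout, as also exercised by the unit test fixtures further down. A self-contained version of that dispatch, with an invented ImageKind enum standing in for gpu::ImageDecodeAcceleratorType, might look like:

#include <cstdint>
#include <cstring>
#include <vector>

enum class ImageKind { kJpeg, kLossyWebP, kUnknown };  // Hypothetical enum.

ImageKind SniffImage(const std::vector<uint8_t>& data) {
  // JPEG streams begin with the bytes FF D8 FF.
  if (data.size() >= 3u && memcmp(data.data(), "\xFF\xD8\xFF", 3u) == 0)
    return ImageKind::kJpeg;
  // A simple lossy WebP is a RIFF container whose form type is "WEBP" and
  // whose first chunk fourcc is "VP8 " (bytes 8..15 spell "WEBPVP8 ").
  if (data.size() >= 16u && memcmp(data.data(), "RIFF", 4u) == 0 &&
      memcmp(data.data() + 8, "WEBPVP8 ", 8u) == 0) {
    return ImageKind::kLossyWebP;
  }
  return ImageKind::kUnknown;
}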
diff --git a/chromium/media/gpu/vaapi/vaapi_image_decode_accelerator_worker.h b/chromium/media/gpu/vaapi/vaapi_image_decode_accelerator_worker.h
new file mode 100644
index 00000000000..af69a710d1f
--- /dev/null
+++ b/chromium/media/gpu/vaapi/vaapi_image_decode_accelerator_worker.h
@@ -0,0 +1,78 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_VAAPI_VAAPI_IMAGE_DECODE_ACCELERATOR_WORKER_H_
+#define MEDIA_GPU_VAAPI_VAAPI_IMAGE_DECODE_ACCELERATOR_WORKER_H_
+
+#include <stdint.h>
+
+#include <memory>
+#include <unordered_map>
+#include <vector>
+
+#include "base/containers/small_map.h"
+#include "base/macros.h"
+#include "base/memory/scoped_refptr.h"
+#include "base/sequence_checker.h"
+#include "gpu/config/gpu_info.h"
+#include "gpu/ipc/service/image_decode_accelerator_worker.h"
+
+namespace base {
+class SequencedTaskRunner;
+}
+
+namespace gfx {
+class Size;
+}
+
+namespace media {
+
+class VaapiImageDecoder;
+
+using VaapiImageDecoderVector = std::vector<std::unique_ptr<VaapiImageDecoder>>;
+
+using VaapiImageDecoderMap =
+ base::small_map<std::unordered_map<gpu::ImageDecodeAcceleratorType,
+ std::unique_ptr<VaapiImageDecoder>>>;
+
+// This class uses the VAAPI to provide image decode acceleration. The
+// interaction with the VAAPI is done on |decoder_task_runner_|.
+class VaapiImageDecodeAcceleratorWorker
+ : public gpu::ImageDecodeAcceleratorWorker {
+ public:
+ // Creates a VaapiImageDecodeAcceleratorWorker and attempts to initialize the
+ // internal state. Returns nullptr if initialization fails.
+ static std::unique_ptr<VaapiImageDecodeAcceleratorWorker> Create();
+
+ ~VaapiImageDecodeAcceleratorWorker() override;
+
+ // gpu::ImageDecodeAcceleratorWorker implementation.
+ gpu::ImageDecodeAcceleratorSupportedProfiles GetSupportedProfiles() override;
+ void Decode(std::vector<uint8_t> encoded_data,
+ const gfx::Size& output_size,
+ CompletedDecodeCB decode_cb) override;
+
+ private:
+ friend class VaapiImageDecodeAcceleratorWorkerTest;
+
+ explicit VaapiImageDecodeAcceleratorWorker(VaapiImageDecoderVector decoders);
+
+ VaapiImageDecoder* GetDecoderForImage(
+ const std::vector<uint8_t>& encoded_data);
+
+  // We delegate decoding to the appropriate decoder in |decoders_|; the
+  // decoders are used and destroyed on |decoder_task_runner_|.
+ VaapiImageDecoderMap decoders_;
+ gpu::ImageDecodeAcceleratorSupportedProfiles supported_profiles_;
+ scoped_refptr<base::SequencedTaskRunner> decoder_task_runner_;
+
+ SEQUENCE_CHECKER(main_sequence_checker_);
+ SEQUENCE_CHECKER(io_sequence_checker_);
+
+ DISALLOW_COPY_AND_ASSIGN(VaapiImageDecodeAcceleratorWorker);
+};
+
+} // namespace media
+
+#endif // MEDIA_GPU_VAAPI_VAAPI_IMAGE_DECODE_ACCELERATOR_WORKER_H_
diff --git a/chromium/media/gpu/vaapi/vaapi_image_decode_accelerator_worker_unittest.cc b/chromium/media/gpu/vaapi/vaapi_image_decode_accelerator_worker_unittest.cc
new file mode 100644
index 00000000000..ceafd0b2830
--- /dev/null
+++ b/chromium/media/gpu/vaapi/vaapi_image_decode_accelerator_worker_unittest.cc
@@ -0,0 +1,282 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stddef.h>
+#include <stdint.h>
+#include <va/va.h>
+
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+#include "base/bind.h"
+#include "base/containers/span.h"
+#include "base/logging.h"
+#include "base/macros.h"
+#include "base/memory/ptr_util.h"
+#include "base/memory/scoped_refptr.h"
+#include "base/test/task_environment.h"
+#include "gpu/config/gpu_info.h"
+#include "gpu/ipc/service/image_decode_accelerator_worker.h"
+#include "media/gpu/vaapi/vaapi_image_decode_accelerator_worker.h"
+#include "media/gpu/vaapi/vaapi_image_decoder.h"
+#include "media/gpu/vaapi/vaapi_wrapper.h"
+#include "third_party/skia/include/core/SkImageInfo.h"
+#include "ui/gfx/buffer_types.h"
+#include "ui/gfx/geometry/size.h"
+#include "ui/gfx/linux/native_pixmap_dmabuf.h"
+#include "ui/gfx/native_pixmap_handle.h"
+
+using testing::_;
+using testing::AllOf;
+using testing::InSequence;
+using testing::IsNull;
+using testing::NotNull;
+using testing::Property;
+using testing::Return;
+using testing::StrictMock;
+
+namespace media {
+namespace {
+
+constexpr gfx::BufferFormat kFormatForDecodes = gfx::BufferFormat::YVU_420;
+
+constexpr gfx::Size kVaSurfaceResolution(128, 256);
+
+constexpr gfx::Size kVisibleSize(120, 250);
+
+constexpr size_t kWebPFileAndVp8ChunkHeaderSizeInBytes = 20u;
+
+// clang-format off
+constexpr uint8_t kJpegPFileHeader[] = {0xFF, 0xD8, 0xFF};
+
+constexpr uint8_t kLossyWebPFileHeader[] = {
+ 'R', 'I', 'F', 'F',
+ 0x0c, 0x00, 0x00, 0x00, // == 12 (little endian)
+ 'W', 'E', 'B', 'P',
+ 'V', 'P', '8', ' ',
+ 0x00, 0x00, 0x00, 0x00 // == 0
+};
+// clang-format on
+
+constexpr base::span<const uint8_t> kJpegEncodedData(kJpegPFileHeader, 3u);
+
+constexpr base::span<const uint8_t> kLossyWebPEncodedData(
+ kLossyWebPFileHeader,
+ kWebPFileAndVp8ChunkHeaderSizeInBytes);
+
+class MockNativePixmapDmaBuf : public gfx::NativePixmapDmaBuf {
+ public:
+ MockNativePixmapDmaBuf(const gfx::Size& size)
+ : gfx::NativePixmapDmaBuf(size,
+ kFormatForDecodes,
+ gfx::NativePixmapHandle()) {}
+
+ gfx::NativePixmapHandle ExportHandle() override {
+ gfx::NativePixmapHandle handle{};
+ DCHECK_EQ(gfx::BufferFormat::YVU_420, GetBufferFormat());
+ handle.planes = std::vector<gfx::NativePixmapPlane>(3u);
+ return handle;
+ }
+
+ protected:
+ ~MockNativePixmapDmaBuf() override = default;
+};
+
+class MockVaapiImageDecoder : public VaapiImageDecoder {
+ public:
+ MockVaapiImageDecoder(gpu::ImageDecodeAcceleratorType type)
+ : VaapiImageDecoder(VAProfileNone), type_(type) {}
+ ~MockVaapiImageDecoder() override = default;
+
+ gpu::ImageDecodeAcceleratorType GetType() const override { return type_; }
+ SkYUVColorSpace GetYUVColorSpace() const override {
+ switch (type_) {
+ case gpu::ImageDecodeAcceleratorType::kJpeg:
+ return SkYUVColorSpace::kJPEG_SkYUVColorSpace;
+ case gpu::ImageDecodeAcceleratorType::kWebP:
+ return SkYUVColorSpace::kRec601_SkYUVColorSpace;
+ case gpu::ImageDecodeAcceleratorType::kUnknown:
+ NOTREACHED();
+ return SkYUVColorSpace::kIdentity_SkYUVColorSpace;
+ }
+ }
+
+ gpu::ImageDecodeAcceleratorSupportedProfile GetSupportedProfile()
+ const override {
+ return gpu::ImageDecodeAcceleratorSupportedProfile();
+ }
+
+ MOCK_METHOD1(Initialize, bool(const base::RepeatingClosure&));
+ MOCK_METHOD1(Decode, VaapiImageDecodeStatus(base::span<const uint8_t>));
+ MOCK_CONST_METHOD0(GetScopedVASurface, const ScopedVASurface*());
+ MOCK_METHOD1(
+ ExportAsNativePixmapDmaBuf,
+ std::unique_ptr<NativePixmapAndSizeInfo>(VaapiImageDecodeStatus*));
+ MOCK_METHOD1(AllocateVASurfaceAndSubmitVABuffers,
+ VaapiImageDecodeStatus(base::span<const uint8_t>));
+
+ private:
+ const gpu::ImageDecodeAcceleratorType type_;
+};
+
+} // namespace
+
+class VaapiImageDecodeAcceleratorWorkerTest : public testing::Test {
+ public:
+ VaapiImageDecodeAcceleratorWorkerTest() {
+ VaapiImageDecoderVector decoders;
+ decoders.push_back(std::make_unique<StrictMock<MockVaapiImageDecoder>>(
+ gpu::ImageDecodeAcceleratorType::kJpeg));
+ decoders.push_back(std::make_unique<StrictMock<MockVaapiImageDecoder>>(
+ gpu::ImageDecodeAcceleratorType::kWebP));
+ worker_ = base::WrapUnique(
+ new VaapiImageDecodeAcceleratorWorker(std::move(decoders)));
+ }
+
+ MockVaapiImageDecoder* GetJpegDecoder() const {
+ auto result =
+ worker_->decoders_.find(gpu::ImageDecodeAcceleratorType::kJpeg);
+ return result == worker_->decoders_.end()
+ ? nullptr
+ : static_cast<MockVaapiImageDecoder*>(result->second.get());
+ }
+
+ MockVaapiImageDecoder* GetWebPDecoder() const {
+ auto result =
+ worker_->decoders_.find(gpu::ImageDecodeAcceleratorType::kWebP);
+ return result == worker_->decoders_.end()
+ ? nullptr
+ : static_cast<MockVaapiImageDecoder*>(result->second.get());
+ }
+
+ MOCK_METHOD1(
+ OnDecodeCompleted,
+ void(std::unique_ptr<gpu::ImageDecodeAcceleratorWorker::DecodeResult>));
+
+ protected:
+ base::test::TaskEnvironment task_environment_;
+ std::unique_ptr<VaapiImageDecodeAcceleratorWorker> worker_;
+
+ DISALLOW_COPY_AND_ASSIGN(VaapiImageDecodeAcceleratorWorkerTest);
+};
+
+ACTION_P2(ExportAsNativePixmapDmaBufSuccessfully,
+ va_surface_resolution,
+ visible_size) {
+ *arg0 = VaapiImageDecodeStatus::kSuccess;
+ auto exported_pixmap = std::make_unique<NativePixmapAndSizeInfo>();
+ exported_pixmap->va_surface_resolution = va_surface_resolution;
+ exported_pixmap->byte_size = 1u;
+ exported_pixmap->pixmap =
+ base::MakeRefCounted<MockNativePixmapDmaBuf>(visible_size);
+ return exported_pixmap;
+}
+
+TEST_F(VaapiImageDecodeAcceleratorWorkerTest, ImageDecodeSucceeds) {
+ std::vector<uint8_t> jpeg_encoded_data(kJpegEncodedData.cbegin(),
+ kJpegEncodedData.cend());
+ std::vector<uint8_t> webp_encoded_data(kLossyWebPEncodedData.cbegin(),
+ kLossyWebPEncodedData.cend());
+ {
+ InSequence sequence;
+ MockVaapiImageDecoder* jpeg_decoder = GetJpegDecoder();
+ ASSERT_TRUE(jpeg_decoder);
+ EXPECT_CALL(
+ *jpeg_decoder,
+ Decode(AllOf(Property(&base::span<const uint8_t>::data,
+ jpeg_encoded_data.data()),
+ Property(&base::span<const uint8_t>::size,
+ jpeg_encoded_data.size())) /* encoded_data */))
+ .WillOnce(Return(VaapiImageDecodeStatus::kSuccess));
+ EXPECT_CALL(*jpeg_decoder,
+ ExportAsNativePixmapDmaBuf(NotNull() /* status */))
+ .WillOnce(ExportAsNativePixmapDmaBufSuccessfully(kVaSurfaceResolution,
+ kVisibleSize));
+ EXPECT_CALL(*this, OnDecodeCompleted(NotNull()));
+
+ MockVaapiImageDecoder* webp_decoder = GetWebPDecoder();
+ ASSERT_TRUE(webp_decoder);
+ EXPECT_CALL(
+ *webp_decoder,
+ Decode(AllOf(Property(&base::span<const uint8_t>::data,
+ webp_encoded_data.data()),
+ Property(&base::span<const uint8_t>::size,
+ webp_encoded_data.size())) /* encoded_data */))
+ .WillOnce(Return(VaapiImageDecodeStatus::kSuccess));
+ EXPECT_CALL(*webp_decoder,
+ ExportAsNativePixmapDmaBuf(NotNull() /* status */))
+ .WillOnce(ExportAsNativePixmapDmaBufSuccessfully(kVaSurfaceResolution,
+ kVisibleSize));
+ EXPECT_CALL(*this, OnDecodeCompleted(NotNull()));
+ }
+
+ worker_->Decode(
+ std::move(jpeg_encoded_data), kVisibleSize,
+ base::BindOnce(&VaapiImageDecodeAcceleratorWorkerTest::OnDecodeCompleted,
+ base::Unretained(this)));
+
+ worker_->Decode(
+ std::move(webp_encoded_data), kVisibleSize,
+ base::BindOnce(&VaapiImageDecodeAcceleratorWorkerTest::OnDecodeCompleted,
+ base::Unretained(this)));
+ task_environment_.RunUntilIdle();
+}
+
+TEST_F(VaapiImageDecodeAcceleratorWorkerTest, ImageDecodeFails) {
+ std::vector<uint8_t> jpeg_encoded_data(kJpegEncodedData.cbegin(),
+ kJpegEncodedData.cend());
+ std::vector<uint8_t> webp_encoded_data(kLossyWebPEncodedData.cbegin(),
+ kLossyWebPEncodedData.cend());
+ {
+ InSequence sequence;
+ MockVaapiImageDecoder* jpeg_decoder = GetJpegDecoder();
+ ASSERT_TRUE(jpeg_decoder);
+ EXPECT_CALL(
+ *jpeg_decoder,
+ Decode(AllOf(Property(&base::span<const uint8_t>::data,
+ jpeg_encoded_data.data()),
+ Property(&base::span<const uint8_t>::size,
+ jpeg_encoded_data.size())) /* encoded_data */))
+ .WillOnce(Return(VaapiImageDecodeStatus::kExecuteDecodeFailed));
+ EXPECT_CALL(*this, OnDecodeCompleted(IsNull()));
+
+ MockVaapiImageDecoder* webp_decoder = GetWebPDecoder();
+ ASSERT_TRUE(webp_decoder);
+ EXPECT_CALL(
+ *webp_decoder,
+ Decode(AllOf(Property(&base::span<const uint8_t>::data,
+ webp_encoded_data.data()),
+ Property(&base::span<const uint8_t>::size,
+ webp_encoded_data.size())) /* encoded_data */))
+ .WillOnce(Return(VaapiImageDecodeStatus::kExecuteDecodeFailed));
+ EXPECT_CALL(*this, OnDecodeCompleted(IsNull()));
+ }
+
+ worker_->Decode(
+ std::move(jpeg_encoded_data), kVisibleSize,
+ base::BindOnce(&VaapiImageDecodeAcceleratorWorkerTest::OnDecodeCompleted,
+ base::Unretained(this)));
+
+ worker_->Decode(
+ std::move(webp_encoded_data), kVisibleSize,
+ base::BindOnce(&VaapiImageDecodeAcceleratorWorkerTest::OnDecodeCompleted,
+ base::Unretained(this)));
+ task_environment_.RunUntilIdle();
+}
+
+TEST_F(VaapiImageDecodeAcceleratorWorkerTest, UnknownImageDecodeFails) {
+ std::vector<uint8_t> encoded_data = {1u, 2u, 3u};
+ EXPECT_CALL(*this, OnDecodeCompleted(IsNull()));
+ worker_->Decode(
+ std::move(encoded_data), kVisibleSize,
+ base::BindOnce(&VaapiImageDecodeAcceleratorWorkerTest::OnDecodeCompleted,
+ base::Unretained(this)));
+ task_environment_.RunUntilIdle();
+}
+
+} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_image_decoder.cc b/chromium/media/gpu/vaapi/vaapi_image_decoder.cc
index 733b1de1baf..6ba0c3e56ae 100644
--- a/chromium/media/gpu/vaapi/vaapi_image_decoder.cc
+++ b/chromium/media/gpu/vaapi/vaapi_image_decoder.cc
@@ -89,7 +89,7 @@ VaapiImageDecoder::GetSupportedProfile() const {
return profile;
}
-scoped_refptr<gfx::NativePixmapDmaBuf>
+std::unique_ptr<NativePixmapAndSizeInfo>
VaapiImageDecoder::ExportAsNativePixmapDmaBuf(VaapiImageDecodeStatus* status) {
DCHECK(status);
@@ -108,24 +108,18 @@ VaapiImageDecoder::ExportAsNativePixmapDmaBuf(VaapiImageDecodeStatus* status) {
}
DCHECK(temp_scoped_va_surface->IsValid());
- scoped_refptr<gfx::NativePixmapDmaBuf> pixmap =
+ std::unique_ptr<NativePixmapAndSizeInfo> exported_pixmap =
vaapi_wrapper_->ExportVASurfaceAsNativePixmapDmaBuf(
- temp_scoped_va_surface->id());
- if (!pixmap) {
+ *temp_scoped_va_surface);
+ if (!exported_pixmap) {
*status = VaapiImageDecodeStatus::kCannotExportSurface;
return nullptr;
}
- // In Intel's iHD driver the size requested for the surface may be different
- // than the buffer size of the NativePixmap because of additional alignment.
- // See https://git.io/fj6nA.
- DCHECK_LE(temp_scoped_va_surface->size().width(),
- pixmap->GetBufferSize().width());
- DCHECK_LE(temp_scoped_va_surface->size().height(),
- pixmap->GetBufferSize().height());
-
+ DCHECK_EQ(temp_scoped_va_surface->size(),
+ exported_pixmap->pixmap->GetBufferSize());
*status = VaapiImageDecodeStatus::kSuccess;
- return pixmap;
+ return exported_pixmap;
}
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_image_decoder.h b/chromium/media/gpu/vaapi/vaapi_image_decoder.h
index 68d64eea018..9b8e9065fb8 100644
--- a/chromium/media/gpu/vaapi/vaapi_image_decoder.h
+++ b/chromium/media/gpu/vaapi/vaapi_image_decoder.h
@@ -6,16 +6,16 @@
#define MEDIA_GPU_VAAPI_VAAPI_IMAGE_DECODER_H_
#include <stdint.h>
+#include <va/va.h>
#include <memory>
-#include <va/va.h>
-
#include "base/callback_forward.h"
#include "base/containers/span.h"
#include "base/macros.h"
#include "base/memory/scoped_refptr.h"
#include "gpu/config/gpu_info.h"
+#include "third_party/skia/include/core/SkImageInfo.h"
namespace gfx {
class NativePixmapDmaBuf;
@@ -23,6 +23,7 @@ class NativePixmapDmaBuf;
namespace media {
+struct NativePixmapAndSizeInfo;
class ScopedVASurface;
class VaapiWrapper;
@@ -59,7 +60,7 @@ class VaapiImageDecoder {
// Initializes |vaapi_wrapper_| in kDecode mode with the
// appropriate VAAPI profile and |error_uma_cb| for error reporting.
- bool Initialize(const base::RepeatingClosure& error_uma_cb);
+ virtual bool Initialize(const base::RepeatingClosure& error_uma_cb);
// Decodes a picture. It will fill VA-API parameters and call the
// corresponding VA-API methods according to the image in |encoded_image|.
@@ -68,22 +69,28 @@ class VaapiImageDecoder {
// destruction of this class. Returns a VaapiImageDecodeStatus that will
// indicate whether the decode succeeded or the reason it failed. Note that
// the internal ScopedVASurface is destroyed on failure.
- VaapiImageDecodeStatus Decode(base::span<const uint8_t> encoded_image);
+ virtual VaapiImageDecodeStatus Decode(
+ base::span<const uint8_t> encoded_image);
// Returns a pointer to the internally managed ScopedVASurface.
- const ScopedVASurface* GetScopedVASurface() const;
+ virtual const ScopedVASurface* GetScopedVASurface() const;
// Returns the type of image supported by this decoder.
virtual gpu::ImageDecodeAcceleratorType GetType() const = 0;
+ // Returns the type of mapping needed to convert the NativePixmapDmaBuf
+ // returned by ExportAsNativePixmapDmaBuf() from YUV to RGB.
+ virtual SkYUVColorSpace GetYUVColorSpace() const = 0;
+
// Returns the image profile supported by this decoder.
- gpu::ImageDecodeAcceleratorSupportedProfile GetSupportedProfile() const;
+ virtual gpu::ImageDecodeAcceleratorSupportedProfile GetSupportedProfile()
+ const;
// Exports the decoded data from the last Decode() call as a
// gfx::NativePixmapDmaBuf. Returns nullptr on failure and sets *|status| to
// the reason for failure. On success, the image decoder gives up ownership of
// the buffer underlying the NativePixmapDmaBuf.
- scoped_refptr<gfx::NativePixmapDmaBuf> ExportAsNativePixmapDmaBuf(
+ virtual std::unique_ptr<NativePixmapAndSizeInfo> ExportAsNativePixmapDmaBuf(
VaapiImageDecodeStatus* status);
protected:
diff --git a/chromium/media/gpu/vaapi/vaapi_jpeg_decode_accelerator_worker.cc b/chromium/media/gpu/vaapi/vaapi_jpeg_decode_accelerator_worker.cc
deleted file mode 100644
index 4ed5851f785..00000000000
--- a/chromium/media/gpu/vaapi/vaapi_jpeg_decode_accelerator_worker.cc
+++ /dev/null
@@ -1,127 +0,0 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/gpu/vaapi/vaapi_jpeg_decode_accelerator_worker.h"
-
-#include <va/va.h>
-
-#include <utility>
-
-#include "base/bind.h"
-#include "base/containers/span.h"
-#include "base/location.h"
-#include "base/logging.h"
-#include "base/memory/ptr_util.h"
-#include "base/metrics/histogram_macros.h"
-#include "base/sequenced_task_runner.h"
-#include "base/task/post_task.h"
-#include "base/trace_event/trace_event.h"
-#include "media/gpu/macros.h"
-#include "media/gpu/vaapi/va_surface.h"
-#include "media/gpu/vaapi/vaapi_image_decoder.h"
-#include "media/gpu/vaapi/vaapi_jpeg_decoder.h"
-#include "media/gpu/vaapi/vaapi_utils.h"
-#include "mojo/public/cpp/bindings/callback_helpers.h"
-#include "ui/gfx/geometry/size.h"
-
-namespace media {
-
-namespace {
-
-// These values are persisted to logs. Entries should not be renumbered and
-// numeric values should never be reused.
-enum class VAJDAWorkerDecoderFailure {
- kVaapiError = 0,
- kMaxValue = kVaapiError,
-};
-
-void ReportToVAJDAWorkerDecoderFailureUMA(VAJDAWorkerDecoderFailure failure) {
- UMA_HISTOGRAM_ENUMERATION("Media.VAJDAWorker.DecoderFailure", failure);
-}
-
-// Uses |decoder| to decode the JPEG corresponding to |encoded_data|.
-// |decode_cb| is called when finished or when an error is encountered. We don't
-// support decoding to scale, so |output_size| is only used for tracing.
-void DecodeTask(
- VaapiJpegDecoder* decoder,
- std::vector<uint8_t> encoded_data,
- const gfx::Size& output_size,
- gpu::ImageDecodeAcceleratorWorker::CompletedDecodeCB decode_cb) {
- TRACE_EVENT2("jpeg", "VaapiJpegDecodeAcceleratorWorker::DecodeTask",
- "encoded_bytes", encoded_data.size(), "output_size",
- output_size.ToString());
- gpu::ImageDecodeAcceleratorWorker::CompletedDecodeCB scoped_decode_callback =
- mojo::WrapCallbackWithDefaultInvokeIfNotRun(std::move(decode_cb),
- nullptr);
- DCHECK(decoder);
- VaapiImageDecodeStatus status = decoder->Decode(
- base::make_span<const uint8_t>(encoded_data.data(), encoded_data.size()));
- if (status != VaapiImageDecodeStatus::kSuccess) {
- DVLOGF(1) << "Failed to decode - status = "
- << static_cast<uint32_t>(status);
- return;
- }
- std::unique_ptr<ScopedVAImage> scoped_image =
- decoder->GetImage(VA_FOURCC_RGBX /* preferred_image_fourcc */, &status);
- if (status != VaapiImageDecodeStatus::kSuccess) {
- DVLOGF(1) << "Failed to get image - status = "
- << static_cast<uint32_t>(status);
- return;
- }
-
- // TODO(crbug.com/868400): output the decoded data.
- DCHECK(scoped_image);
- std::move(scoped_decode_callback).Run(nullptr);
-}
-
-} // namespace
-
-// static
-std::unique_ptr<VaapiJpegDecodeAcceleratorWorker>
-VaapiJpegDecodeAcceleratorWorker::Create() {
- auto decoder = std::make_unique<VaapiJpegDecoder>();
- if (!decoder->Initialize(
- base::BindRepeating(&ReportToVAJDAWorkerDecoderFailureUMA,
- VAJDAWorkerDecoderFailure::kVaapiError))) {
- return nullptr;
- }
- return base::WrapUnique(
- new VaapiJpegDecodeAcceleratorWorker(std::move(decoder)));
-}
-
-VaapiJpegDecodeAcceleratorWorker::VaapiJpegDecodeAcceleratorWorker(
- std::unique_ptr<VaapiJpegDecoder> decoder)
- : decoder_(std::move(decoder)) {
- DCHECK(decoder_);
- decoder_task_runner_ = base::CreateSequencedTaskRunnerWithTraits({});
- DCHECK(decoder_task_runner_);
-}
-
-VaapiJpegDecodeAcceleratorWorker::~VaapiJpegDecodeAcceleratorWorker() {
- if (decoder_task_runner_)
- decoder_task_runner_->DeleteSoon(FROM_HERE, std::move(decoder_));
-}
-
-std::vector<gpu::ImageDecodeAcceleratorSupportedProfile>
-VaapiJpegDecodeAcceleratorWorker::GetSupportedProfiles() {
- DCHECK(decoder_);
- const gpu::ImageDecodeAcceleratorSupportedProfile supported_profile =
- decoder_->GetSupportedProfile();
- DCHECK_EQ(gpu::ImageDecodeAcceleratorType::kJpeg,
- supported_profile.image_type);
- return {supported_profile};
-}
-
-void VaapiJpegDecodeAcceleratorWorker::Decode(std::vector<uint8_t> encoded_data,
- const gfx::Size& output_size,
- CompletedDecodeCB decode_cb) {
- DCHECK(decoder_task_runner_);
- DCHECK(!decoder_task_runner_->RunsTasksInCurrentSequence());
- decoder_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&DecodeTask, decoder_.get(), std::move(encoded_data),
- output_size, std::move(decode_cb)));
-}
-
-} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_jpeg_decode_accelerator_worker.h b/chromium/media/gpu/vaapi/vaapi_jpeg_decode_accelerator_worker.h
deleted file mode 100644
index 5d0eb2aad9c..00000000000
--- a/chromium/media/gpu/vaapi/vaapi_jpeg_decode_accelerator_worker.h
+++ /dev/null
@@ -1,65 +0,0 @@
-// Copyright 2019 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_GPU_VAAPI_VAAPI_JPEG_DECODE_ACCELERATOR_WORKER_H_
-#define MEDIA_GPU_VAAPI_VAAPI_JPEG_DECODE_ACCELERATOR_WORKER_H_
-
-#include <stdint.h>
-
-#include <memory>
-#include <vector>
-
-#include "base/macros.h"
-#include "base/memory/scoped_refptr.h"
-#include "gpu/ipc/service/image_decode_accelerator_worker.h"
-
-namespace base {
-class SequencedTaskRunner;
-}
-
-namespace gfx {
-class Size;
-}
-
-namespace media {
-
-class VaapiJpegDecoder;
-
-// This class uses the VAAPI to provide JPEG decode acceleration. The
-// interaction with the VAAPI is done on |decoder_task_runner_|. Objects of this
-// class can be created/destroyed on any thread, and the public interface of
-// this class is thread-safe.
-class VaapiJpegDecodeAcceleratorWorker
- : public gpu::ImageDecodeAcceleratorWorker {
- public:
- // Creates a VaapiJpegDecodeAcceleratorWorker and attempts to initialize the
- // internal state. Returns nullptr if initialization fails.
- static std::unique_ptr<VaapiJpegDecodeAcceleratorWorker> Create();
-
- ~VaapiJpegDecodeAcceleratorWorker() override;
-
- // gpu::ImageDecodeAcceleratorWorker implementation.
- std::vector<gpu::ImageDecodeAcceleratorSupportedProfile>
- GetSupportedProfiles() override;
- void Decode(std::vector<uint8_t> encoded_data,
- const gfx::Size& output_size,
- CompletedDecodeCB decode_cb) override;
-
- private:
- explicit VaapiJpegDecodeAcceleratorWorker(
- std::unique_ptr<VaapiJpegDecoder> decoder);
-
- // We delegate the decoding to |decoder_| which is constructed on the ctor and
- // then used and destroyed on |decoder_task_runner_| (unless initialization
- // failed, in which case it doesn't matter where it's destroyed since no tasks
- // using |decoder_| should have been posted to |decoder_task_runner_|).
- std::unique_ptr<VaapiJpegDecoder> decoder_;
- scoped_refptr<base::SequencedTaskRunner> decoder_task_runner_;
-
- DISALLOW_COPY_AND_ASSIGN(VaapiJpegDecodeAcceleratorWorker);
-};
-
-} // namespace media
-
-#endif // MEDIA_GPU_VAAPI_VAAPI_JPEG_DECODE_ACCELERATOR_WORKER_H_
diff --git a/chromium/media/gpu/vaapi/vaapi_jpeg_decoder.cc b/chromium/media/gpu/vaapi/vaapi_jpeg_decoder.cc
index 8658551b51b..76711fafe83 100644
--- a/chromium/media/gpu/vaapi/vaapi_jpeg_decoder.cc
+++ b/chromium/media/gpu/vaapi/vaapi_jpeg_decoder.cc
@@ -241,20 +241,28 @@ VaapiImageDecodeStatus VaapiJpegDecoder::AllocateVASurfaceAndSubmitVABuffers(
}
// Prepare the VaSurface for decoding.
- const gfx::Size new_coded_size(
- base::strict_cast<int>(parse_result.frame_header.coded_width),
- base::strict_cast<int>(parse_result.frame_header.coded_height));
+ const gfx::Size new_visible_size(
+ base::strict_cast<int>(parse_result.frame_header.visible_width),
+ base::strict_cast<int>(parse_result.frame_header.visible_height));
DCHECK(!scoped_va_context_and_surface_ ||
scoped_va_context_and_surface_->IsValid());
if (!scoped_va_context_and_surface_ ||
- new_coded_size != scoped_va_context_and_surface_->size() ||
+ new_visible_size != scoped_va_context_and_surface_->size() ||
picture_va_rt_format != scoped_va_context_and_surface_->format()) {
scoped_va_context_and_surface_.reset();
- scoped_va_context_and_surface_ =
- ScopedVAContextAndSurface(vaapi_wrapper_
- ->CreateContextAndScopedVASurface(
- picture_va_rt_format, new_coded_size)
- .release());
+
+ // We'll request a surface of |new_coded_size| from the VAAPI, but we will
+ // keep track of the |new_visible_size| inside the ScopedVASurface so that
+ // when we create a VAImage or export the surface as a NativePixmapDmaBuf,
+ // we can report the size that clients should be using to read the contents.
+ const gfx::Size new_coded_size(
+ base::strict_cast<int>(parse_result.frame_header.coded_width),
+ base::strict_cast<int>(parse_result.frame_header.coded_height));
+ scoped_va_context_and_surface_.reset(
+ vaapi_wrapper_
+ ->CreateContextAndScopedVASurface(picture_va_rt_format,
+ new_coded_size, new_visible_size)
+ .release());
if (!scoped_va_context_and_surface_) {
VLOGF(1) << "CreateContextAndScopedVASurface() failed";
return VaapiImageDecodeStatus::kSurfaceCreationFailed;
@@ -310,6 +318,10 @@ gpu::ImageDecodeAcceleratorType VaapiJpegDecoder::GetType() const {
return gpu::ImageDecodeAcceleratorType::kJpeg;
}
+SkYUVColorSpace VaapiJpegDecoder::GetYUVColorSpace() const {
+ return SkYUVColorSpace::kJPEG_SkYUVColorSpace;
+}
+
std::unique_ptr<ScopedVAImage> VaapiJpegDecoder::GetImage(
uint32_t preferred_image_fourcc,
VaapiImageDecodeStatus* status) {
@@ -330,6 +342,18 @@ std::unique_ptr<ScopedVAImage> VaapiJpegDecoder::GetImage(
return nullptr;
}
VAImageFormat image_format{.fourcc = image_fourcc};
+ // In at least one driver, the VPP seems to have problems if we request a
+ // VAImage with odd dimensions. Rather than debugging the issue in depth, we
+ // disable support for odd dimensions since the VAImage path is only expected
+ // to be used in camera captures (and we don't expect JPEGs with odd
+ // dimensions in that path).
+ if ((scoped_va_context_and_surface_->size().width() & 1) ||
+ (scoped_va_context_and_surface_->size().height() & 1)) {
+ VLOGF(1) << "Getting images with odd dimensions is not supported";
+ *status = VaapiImageDecodeStatus::kCannotGetImage;
+ NOTREACHED();
+ return nullptr;
+ }
auto scoped_image = vaapi_wrapper_->CreateVaImage(
scoped_va_context_and_surface_->id(), &image_format,
scoped_va_context_and_surface_->size());
diff --git a/chromium/media/gpu/vaapi/vaapi_jpeg_decoder.h b/chromium/media/gpu/vaapi/vaapi_jpeg_decoder.h
index 1c74235a1a3..c7b15888168 100644
--- a/chromium/media/gpu/vaapi/vaapi_jpeg_decoder.h
+++ b/chromium/media/gpu/vaapi/vaapi_jpeg_decoder.h
@@ -29,6 +29,7 @@ class VaapiJpegDecoder : public VaapiImageDecoder {
// VaapiImageDecoder implementation.
gpu::ImageDecodeAcceleratorType GetType() const override;
+ SkYUVColorSpace GetYUVColorSpace() const override;
// Get the decoded data from the last Decode() call as a ScopedVAImage. The
// VAImage's format will be either |preferred_image_fourcc| if the conversion
diff --git a/chromium/media/gpu/vaapi/vaapi_jpeg_decoder_unittest.cc b/chromium/media/gpu/vaapi/vaapi_jpeg_decoder_unittest.cc
index 5ec03fef37c..803bab8985f 100644
--- a/chromium/media/gpu/vaapi/vaapi_jpeg_decoder_unittest.cc
+++ b/chromium/media/gpu/vaapi/vaapi_jpeg_decoder_unittest.cc
@@ -42,6 +42,7 @@
#include "ui/gfx/buffer_format_util.h"
#include "ui/gfx/buffer_types.h"
#include "ui/gfx/codec/jpeg_codec.h"
+#include "ui/gfx/geometry/rect.h"
#include "ui/gfx/geometry/size.h"
#include "ui/gfx/gpu_memory_buffer.h"
#include "ui/gfx/linux/native_pixmap_dmabuf.h"
@@ -55,14 +56,18 @@ using DecodedImagePtr = std::unique_ptr<vaapi_test_utils::DecodedImage>;
constexpr const char* kYuv422Filename = "pixel-1280x720.jpg";
constexpr const char* kYuv420Filename = "pixel-1280x720-yuv420.jpg";
constexpr const char* kYuv444Filename = "pixel-1280x720-yuv444.jpg";
-constexpr const char* kOddHeightImageFilename = "peach_pi-40x23.jpg";
-constexpr const char* kOddWidthImageFilename = "peach_pi-41x22.jpg";
-constexpr const char* kOddDimensionsImageFilename = "peach_pi-41x23.jpg";
+constexpr const char* kOddHeightImageFilename = "pixel-40x23-yuv420.jpg";
+constexpr const char* kOddWidthImageFilename = "pixel-41x22-yuv420.jpg";
+constexpr const char* kOddDimensionsImageFilename = "pixel-41x23-yuv420.jpg";
-const vaapi_test_utils::TestParam kTestCases[] = {
+const vaapi_test_utils::TestParam kVAImageTestCases[] = {
{"YUV422", kYuv422Filename},
{"YUV420", kYuv420Filename},
{"YUV444", kYuv444Filename},
+};
+
+const vaapi_test_utils::TestParam kDmaBufTestCases[] = {
+ {"YUV420", kYuv420Filename},
{"OddHeightImage40x23", kOddHeightImageFilename},
{"OddWidthImage41x22", kOddWidthImageFilename},
{"OddDimensionsImage41x23", kOddDimensionsImageFilename},
@@ -266,10 +271,6 @@ class VaapiJpegDecoderTest
base::span<const uint8_t> encoded_image,
VaapiImageDecodeStatus* status = nullptr);
- scoped_refptr<gfx::NativePixmapDmaBuf> DecodeToNativePixmapDmaBuf(
- base::span<const uint8_t> encoded_image,
- VaapiImageDecodeStatus* status = nullptr);
-
protected:
std::string test_data_path_;
VaapiJpegDecoder decoder_;
@@ -317,28 +318,6 @@ std::unique_ptr<ScopedVAImage> VaapiJpegDecoderTest::Decode(
return Decode(encoded_image, VA_FOURCC_I420, status);
}
-scoped_refptr<gfx::NativePixmapDmaBuf>
-VaapiJpegDecoderTest::DecodeToNativePixmapDmaBuf(
- base::span<const uint8_t> encoded_image,
- VaapiImageDecodeStatus* status) {
- const VaapiImageDecodeStatus decode_status = decoder_.Decode(encoded_image);
- EXPECT_EQ(!!decoder_.GetScopedVASurface(),
- decode_status == VaapiImageDecodeStatus::kSuccess);
-
- // Still try to get the pixmap when decode fails.
- VaapiImageDecodeStatus pixmap_status;
- scoped_refptr<gfx::NativePixmapDmaBuf> pixmap =
- decoder_.ExportAsNativePixmapDmaBuf(&pixmap_status);
- EXPECT_EQ(!!pixmap, pixmap_status == VaapiImageDecodeStatus::kSuccess);
-
- // Return the first fail status.
- if (status) {
- *status = decode_status != VaapiImageDecodeStatus::kSuccess ? decode_status
- : pixmap_status;
- }
- return pixmap;
-}
-
// The intention of this test is to ensure that the workarounds added in
// VaapiWrapper::GetJpegDecodeSuitableImageFourCC() don't result in an
// unsupported image format.
@@ -431,9 +410,9 @@ TEST_P(VaapiJpegDecoderTest, DecodeSucceeds) {
ASSERT_TRUE(decoder_.GetScopedVASurface());
EXPECT_TRUE(decoder_.GetScopedVASurface()->IsValid());
EXPECT_EQ(decoder_.GetScopedVASurface()->size().width(),
- base::strict_cast<int>(parse_result.frame_header.coded_width));
+ base::strict_cast<int>(parse_result.frame_header.visible_width));
EXPECT_EQ(decoder_.GetScopedVASurface()->size().height(),
- base::strict_cast<int>(parse_result.frame_header.coded_height));
+ base::strict_cast<int>(parse_result.frame_header.visible_height));
EXPECT_EQ(rt_format, decoder_.GetScopedVASurface()->format());
const uint32_t actual_fourcc = scoped_image->image()->format.fourcc;
// TODO(andrescj): CompareImages() only supports I420, NV12, YUY2, and YUYV.
@@ -515,41 +494,79 @@ TEST_F(VaapiJpegDecoderTest, DecodeSucceedsForSupportedSizes) {
}
}
+class VaapiJpegDecoderWithDmaBufsTest : public VaapiJpegDecoderTest {
+ public:
+ VaapiJpegDecoderWithDmaBufsTest() = default;
+ ~VaapiJpegDecoderWithDmaBufsTest() override = default;
+};
+
// TODO(andrescj): test other JPEG formats besides YUV 4:2:0.
-TEST_F(VaapiJpegDecoderTest, DecodeAndExportAsNativePixmapDmaBuf) {
+TEST_P(VaapiJpegDecoderWithDmaBufsTest, DecodeSucceeds) {
if (base::StartsWith(VaapiWrapper::GetVendorStringForTesting(),
"Mesa Gallium driver", base::CompareCase::SENSITIVE)) {
// TODO(crbug.com/974438): until we support surfaces with multiple buffer
// objects, the AMD driver fails this test.
GTEST_SKIP();
}
- if (base::StartsWith(VaapiWrapper::GetVendorStringForTesting(),
- "Intel i965 driver", base::CompareCase::SENSITIVE)) {
- // TODO(b/135705575): until the correct offsets are exported, the Intel i965
- // driver fails this test.
- GTEST_SKIP();
- }
- base::FilePath input_file = FindTestDataFilePath(kYuv420Filename);
+ base::FilePath input_file = FindTestDataFilePath(GetParam().filename);
std::string jpeg_data;
ASSERT_TRUE(base::ReadFileToString(input_file, &jpeg_data))
<< "failed to read input data from " << input_file.value();
const auto encoded_image = base::make_span<const uint8_t>(
reinterpret_cast<const uint8_t*>(jpeg_data.data()), jpeg_data.size());
- VaapiImageDecodeStatus status;
- scoped_refptr<gfx::NativePixmapDmaBuf> pixmap =
- DecodeToNativePixmapDmaBuf(encoded_image, &status);
- ASSERT_EQ(VaapiImageDecodeStatus::kSuccess, status);
+
+ // Decode into a VAAPI-allocated surface.
+ const VaapiImageDecodeStatus decode_status = decoder_.Decode(encoded_image);
+ EXPECT_EQ(VaapiImageDecodeStatus::kSuccess, decode_status);
+ ASSERT_TRUE(decoder_.GetScopedVASurface());
+ const gfx::Size va_surface_visible_size =
+ decoder_.GetScopedVASurface()->size();
+
+ // The size stored in the ScopedVASurface should be the visible size of the
+ // JPEG.
+ JpegParseResult parse_result;
+ ASSERT_TRUE(ParseJpegPicture(encoded_image.data(), encoded_image.size(),
+ &parse_result));
+ EXPECT_EQ(gfx::Size(parse_result.frame_header.visible_width,
+ parse_result.frame_header.visible_height),
+ va_surface_visible_size);
+
+ // Export the surface.
+ VaapiImageDecodeStatus export_status = VaapiImageDecodeStatus::kInvalidState;
+ std::unique_ptr<NativePixmapAndSizeInfo> exported_pixmap =
+ decoder_.ExportAsNativePixmapDmaBuf(&export_status);
+ EXPECT_EQ(VaapiImageDecodeStatus::kSuccess, export_status);
+ ASSERT_TRUE(exported_pixmap);
+ ASSERT_TRUE(exported_pixmap->pixmap);
EXPECT_FALSE(decoder_.GetScopedVASurface());
- ASSERT_TRUE(pixmap);
+
+ // For JPEG decoding, the size of the surface we request is the coded size of
+ // the JPEG. Make sure the surface contains that coded area.
+ EXPECT_TRUE(gfx::Rect(exported_pixmap->va_surface_resolution)
+ .Contains(gfx::Rect(parse_result.frame_header.coded_width,
+ parse_result.frame_header.coded_height)));
+
+ // Make sure the visible area is contained by the surface.
+ EXPECT_EQ(va_surface_visible_size, exported_pixmap->pixmap->GetBufferSize());
+ EXPECT_FALSE(exported_pixmap->va_surface_resolution.IsEmpty());
+ EXPECT_FALSE(exported_pixmap->pixmap->GetBufferSize().IsEmpty());
+ ASSERT_TRUE(
+ gfx::Rect(exported_pixmap->va_surface_resolution)
+ .Contains(gfx::Rect(exported_pixmap->pixmap->GetBufferSize())));
+
+ // TODO(andrescj): we could get a better lower bound based on the dimensions
+ // and the format.
+ ASSERT_GT(exported_pixmap->byte_size, 0u);
// After exporting the surface, we should not be able to obtain a VAImage with
// the decoded data.
VAImageFormat i420_format{};
i420_format.fourcc = VA_FOURCC_I420;
EXPECT_TRUE(VaapiWrapper::IsImageFormatSupported(i420_format));
- EXPECT_FALSE(decoder_.GetImage(i420_format.fourcc, &status));
- EXPECT_EQ(VaapiImageDecodeStatus::kInvalidState, status);
+ VaapiImageDecodeStatus image_status = VaapiImageDecodeStatus::kSuccess;
+ EXPECT_FALSE(decoder_.GetImage(i420_format.fourcc, &image_status));
+ EXPECT_EQ(VaapiImageDecodeStatus::kInvalidState, image_status);
// Workaround: in order to import and map the pixmap using minigbm when the
// format is gfx::BufferFormat::YVU_420, we need to reorder the planes so that
@@ -558,14 +575,18 @@ TEST_F(VaapiJpegDecoderTest, DecodeAndExportAsNativePixmapDmaBuf) {
// think that it is mapping a YVU_420, but it's actually mapping a YUV_420.
//
// TODO(andrescj): revisit this once crrev.com/c/1573718 lands.
- gfx::NativePixmapHandle handle = pixmap->ExportHandle();
- if (pixmap->GetBufferFormat() == gfx::BufferFormat::YVU_420)
+ gfx::NativePixmapHandle handle = exported_pixmap->pixmap->ExportHandle();
+ ASSERT_EQ(gfx::NumberOfPlanesForLinearBufferFormat(
+ exported_pixmap->pixmap->GetBufferFormat()),
+ handle.planes.size());
+ if (exported_pixmap->pixmap->GetBufferFormat() == gfx::BufferFormat::YVU_420)
std::swap(handle.planes[1], handle.planes[2]);
LocalGpuMemoryBufferManager gpu_memory_buffer_manager;
std::unique_ptr<gfx::GpuMemoryBuffer> gpu_memory_buffer =
- gpu_memory_buffer_manager.ImportDmaBuf(handle, pixmap->GetBufferSize(),
- pixmap->GetBufferFormat());
+ gpu_memory_buffer_manager.ImportDmaBuf(
+ handle, exported_pixmap->pixmap->GetBufferSize(),
+ exported_pixmap->pixmap->GetBufferFormat());
ASSERT_TRUE(gpu_memory_buffer);
ASSERT_TRUE(gpu_memory_buffer->Map());
vaapi_test_utils::DecodedImage decoded_image{};
@@ -723,7 +744,12 @@ TEST_F(VaapiJpegDecoderTest, DecodeFails) {
INSTANTIATE_TEST_SUITE_P(,
VaapiJpegDecoderTest,
- testing::ValuesIn(kTestCases),
+ testing::ValuesIn(kVAImageTestCases),
+ vaapi_test_utils::TestParamToString);
+
+INSTANTIATE_TEST_SUITE_P(,
+ VaapiJpegDecoderWithDmaBufsTest,
+ testing::ValuesIn(kDmaBufTestCases),
vaapi_test_utils::TestParamToString);
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc
index 751f9e3de87..434e9237bb1 100644
--- a/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc
@@ -13,6 +13,7 @@
#include "base/bind_helpers.h"
#include "base/logging.h"
#include "base/memory/writable_shared_memory_region.h"
+#include "base/metrics/histogram_functions.h"
#include "base/metrics/histogram_macros.h"
#include "base/numerics/safe_conversions.h"
#include "base/sequence_checker.h"
@@ -36,14 +37,17 @@ namespace {
// These values are persisted to logs, and should therefore never be renumbered
// nor reused.
enum VAJEAEncoderResult {
- VAAPI_SUCCESS = 0,
- VAAPI_ERROR,
- VAJEA_ENCODER_RESULT_MAX = VAAPI_ERROR,
+ kSuccess = 0,
+ kError,
+ kMaxValue = kError,
};
-static void ReportToUMA(VAJEAEncoderResult result) {
- UMA_HISTOGRAM_ENUMERATION("Media.VAJEA.EncoderResult", result,
- VAJEAEncoderResult::VAJEA_ENCODER_RESULT_MAX + 1);
+static void ReportToVAJEAEncodeResultUMA(VAJEAEncoderResult result) {
+ UMA_HISTOGRAM_ENUMERATION("Media.VAJEA.EncoderResult", result);
+}
+
+static void ReportToVAJEAVppFailureUMA(VAJEAEncoderResult result) {
+ base::UmaHistogramEnumeration("Media.VAJEA.VppFailure", result);
}
} // namespace
@@ -64,6 +68,7 @@ VaapiJpegEncodeAccelerator::EncodeRequest::~EncodeRequest() {}
class VaapiJpegEncodeAccelerator::Encoder {
public:
Encoder(scoped_refptr<VaapiWrapper> vaapi_wrapper,
+ scoped_refptr<VaapiWrapper> vpp_vaapi_wrapper,
base::RepeatingCallback<void(int32_t, size_t)> video_frame_ready_cb,
base::RepeatingCallback<void(int32_t, Status)> notify_error_cb);
~Encoder();
@@ -89,6 +94,7 @@ class VaapiJpegEncodeAccelerator::Encoder {
std::unique_ptr<VaapiJpegEncoder> jpeg_encoder_;
scoped_refptr<VaapiWrapper> vaapi_wrapper_;
+ scoped_refptr<VaapiWrapper> vpp_vaapi_wrapper_;
std::unique_ptr<gpu::GpuMemoryBufferSupport> gpu_memory_buffer_support_;
base::RepeatingCallback<void(int32_t, size_t)> video_frame_ready_cb_;
@@ -109,11 +115,13 @@ class VaapiJpegEncodeAccelerator::Encoder {
VaapiJpegEncodeAccelerator::Encoder::Encoder(
scoped_refptr<VaapiWrapper> vaapi_wrapper,
+ scoped_refptr<VaapiWrapper> vpp_vaapi_wrapper,
base::RepeatingCallback<void(int32_t, size_t)> video_frame_ready_cb,
base::RepeatingCallback<void(int32_t, Status)> notify_error_cb)
: cached_output_buffer_size_(0),
jpeg_encoder_(new VaapiJpegEncoder(vaapi_wrapper)),
vaapi_wrapper_(std::move(vaapi_wrapper)),
+ vpp_vaapi_wrapper_(std::move(vpp_vaapi_wrapper)),
gpu_memory_buffer_support_(new gpu::GpuMemoryBufferSupport()),
video_frame_ready_cb_(std::move(video_frame_ready_cb)),
notify_error_cb_(std::move(notify_error_cb)),
@@ -138,45 +146,60 @@ void VaapiJpegEncodeAccelerator::Encoder::EncodeWithDmaBufTask(
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
gfx::Size input_size = input_frame->coded_size();
-
- // Construct GBM Handle from VideoFrame.
- gfx::GpuMemoryBufferHandle input_gmb_handle =
- CreateGpuMemoryBufferHandle(input_frame.get());
- DCHECK(!input_gmb_handle.is_null());
-
- // Create pixmap for input handle and create VA surface.
- auto num_planes_input = VideoFrame::NumPlanes(input_frame->format());
-
- // We only support NV12 format currently. Modify the check and |buffer_format|
- // once we support other formats.
- DCHECK(num_planes_input == 2);
gfx::BufferFormat buffer_format = gfx::BufferFormat::YUV_420_BIPLANAR;
uint32_t va_format = VaapiWrapper::BufferFormatToVARTFormat(buffer_format);
bool context_changed = input_size != input_size_ || va_format != va_format_;
if (context_changed) {
- vaapi_wrapper_->DestroyContext();
+ vaapi_wrapper_->DestroyContextAndSurfaces(
+ std::vector<VASurfaceID>({va_surface_id_}));
+ va_surface_id_ = VA_INVALID_SURFACE;
va_format_ = 0;
input_size_ = gfx::Size();
- const bool success = vaapi_wrapper_->CreateContext(input_size);
- if (!success) {
- VLOGF(1) << "Failed to create context";
- vaapi_wrapper_->DestroyContext();
+
+ std::vector<VASurfaceID> va_surfaces;
+ if (!vaapi_wrapper_->CreateContextAndSurfaces(va_format, input_size, 1,
+ &va_surfaces)) {
+ VLOGF(1) << "Failed to create VA surface";
notify_error_cb_.Run(task_id, PLATFORM_FAILURE);
return;
}
+ va_surface_id_ = va_surfaces[0];
va_format_ = va_format;
input_size_ = input_size;
}
- auto va_surface = vaapi_wrapper_->CreateVASurfaceForPixmap(
- base::WrapRefCounted(new gfx::NativePixmapDmaBuf(
- input_size, buffer_format,
- std::move(input_gmb_handle.native_pixmap_handle))));
- if (!va_surface) {
+  // We need to explicitly blit the bound input surface here to make sure the
+  // input we send to the VAAPI encoder is in tiled NV12 format, since not
+  // every driver contains the implicit tiling logic.
+ auto input_pixmap = CreateNativePixmapDmaBuf(input_frame.get());
+ if (!input_pixmap) {
+ VLOGF(1) << "Cannot create native pixmap for input frame";
+ notify_error_cb_.Run(task_id, PLATFORM_FAILURE);
+ return;
+ }
+ auto input_surface =
+ vpp_vaapi_wrapper_->CreateVASurfaceForPixmap(input_pixmap);
+ if (!input_surface) {
VLOGF(1) << "Failed to create input va surface";
notify_error_cb_.Run(task_id, PLATFORM_FAILURE);
return;
}
+ auto blit_surface =
+ base::MakeRefCounted<VASurface>(va_surface_id_, input_size, va_format,
+ base::DoNothing() /* release_cb */);
+ if (!vpp_vaapi_wrapper_->BlitSurface(input_surface, blit_surface)) {
+ VLOGF(1) << "Failed to blit surfaces";
+ notify_error_cb_.Run(task_id, PLATFORM_FAILURE);
+ return;
+ }
+ // We should call vaSyncSurface() when passing surface between contexts. See:
+ // https://lists.01.org/pipermail/intel-vaapi-media/2019-June/000131.html
+  // Sync |blit_surface| since it is being passed to the JPEG encoding context.
+ if (!vpp_vaapi_wrapper_->SyncSurface(blit_surface->id())) {
+ VLOGF(1) << "Cannot sync VPP output surface";
+ notify_error_cb_.Run(task_id, PLATFORM_FAILURE);
+ return;
+ }
// Create output buffer for encoding result.
size_t max_coded_buffer_size =
@@ -214,7 +237,7 @@ void VaapiJpegEncodeAccelerator::Encoder::EncodeWithDmaBufTask(
size_t exif_offset = 0;
if (!jpeg_encoder_->Encode(input_size, exif_buffer_dummy.data(),
- exif_buffer_size, quality, va_surface->id(),
+ exif_buffer_size, quality, blit_surface->id(),
cached_output_buffer_id_, &exif_offset)) {
VLOGF(1) << "Encode JPEG failed";
notify_error_cb_.Run(task_id, PLATFORM_FAILURE);
@@ -254,7 +277,7 @@ void VaapiJpegEncodeAccelerator::Encoder::EncodeWithDmaBufTask(
// use its area as the maximum bytes we need to download to avoid buffer
// overflow.
if (!vaapi_wrapper_->DownloadFromVABuffer(
- cached_output_buffer_id_, va_surface->id(),
+ cached_output_buffer_id_, blit_surface->id(),
static_cast<uint8_t*>(output_memory),
output_gmb_buffer->GetSize().GetArea(), &encoded_size)) {
VLOGF(1) << "Failed to retrieve output image from VA coded buffer";
@@ -397,7 +420,7 @@ void VaapiJpegEncodeAccelerator::VideoFrameReady(int32_t task_id,
size_t encoded_picture_size) {
DVLOGF(4) << "task_id=" << task_id << ", size=" << encoded_picture_size;
DCHECK(task_runner_->BelongsToCurrentThread());
- ReportToUMA(VAJEAEncoderResult::VAAPI_SUCCESS);
+ ReportToVAJEAEncodeResultUMA(VAJEAEncoderResult::kSuccess);
client_->VideoFrameReady(task_id, encoded_picture_size);
}
@@ -414,24 +437,35 @@ VaapiJpegEncodeAccelerator::Initialize(
}
client_ = client;
- scoped_refptr<VaapiWrapper> vaapi_wrapper = VaapiWrapper::Create(
- VaapiWrapper::kEncode, VAProfileJPEGBaseline,
- base::Bind(&ReportToUMA, VAJEAEncoderResult::VAAPI_ERROR));
+ scoped_refptr<VaapiWrapper> vaapi_wrapper =
+ VaapiWrapper::Create(VaapiWrapper::kEncode, VAProfileJPEGBaseline,
+ base::BindRepeating(&ReportToVAJEAEncodeResultUMA,
+ VAJEAEncoderResult::kError));
if (!vaapi_wrapper) {
VLOGF(1) << "Failed initializing VAAPI";
return PLATFORM_FAILURE;
}
- encoder_task_runner_ = base::CreateSingleThreadTaskRunnerWithTraits(
- {base::MayBlock(), base::TaskPriority::USER_BLOCKING});
+ scoped_refptr<VaapiWrapper> vpp_vaapi_wrapper =
+ VaapiWrapper::Create(VaapiWrapper::kVideoProcess, VAProfileNone,
+ base::BindRepeating(&ReportToVAJEAVppFailureUMA,
+ VAJEAEncoderResult::kError));
+ if (!vpp_vaapi_wrapper) {
+ VLOGF(1) << "Failed initializing VAAPI wrapper for VPP";
+ return PLATFORM_FAILURE;
+ }
+
+ encoder_task_runner_ =
+ base::CreateSingleThreadTaskRunner({base::ThreadPool(), base::MayBlock(),
+ base::TaskPriority::USER_BLOCKING});
if (!encoder_task_runner_) {
VLOGF(1) << "Failed to create encoder task runner.";
return THREAD_CREATION_FAILED;
}
encoder_ = std::make_unique<Encoder>(
- std::move(vaapi_wrapper),
+ std::move(vaapi_wrapper), std::move(vpp_vaapi_wrapper),
BindToCurrentLoop(base::BindRepeating(
&VaapiJpegEncodeAccelerator::VideoFrameReady, weak_this_)),
BindToCurrentLoop(base::BindRepeating(
diff --git a/chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.cc
index 2f6c679e2c3..bd6381c813a 100644
--- a/chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.cc
@@ -5,44 +5,67 @@
#include "media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.h"
#include <stddef.h>
+#include <sys/mman.h>
#include <va/va.h>
+#include <array>
#include <utility>
#include "base/bind.h"
-#include "base/containers/span.h"
+#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
+#include "base/files/scoped_file.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/metrics/histogram_macros.h"
+#include "base/numerics/checked_math.h"
#include "base/numerics/safe_conversions.h"
+#include "base/optional.h"
+#include "base/process/process_metrics.h"
#include "base/single_thread_task_runner.h"
#include "base/threading/thread_task_runner_handle.h"
#include "base/trace_event/trace_event.h"
+#include "gpu/ipc/common/gpu_memory_buffer_impl.h"
+#include "gpu/ipc/common/gpu_memory_buffer_support.h"
#include "media/base/bitstream_buffer.h"
#include "media/base/unaligned_shared_memory.h"
#include "media/base/video_frame.h"
+#include "media/base/video_frame_layout.h"
#include "media/base/video_types.h"
+#include "media/gpu/format_utils.h"
+#include "media/gpu/linux/platform_video_frame_utils.h"
#include "media/gpu/macros.h"
#include "media/gpu/vaapi/va_surface.h"
#include "media/gpu/vaapi/vaapi_image_decoder.h"
#include "media/gpu/vaapi/vaapi_utils.h"
#include "media/gpu/vaapi/vaapi_wrapper.h"
#include "third_party/libyuv/include/libyuv.h"
+#include "ui/gfx/geometry/rect.h"
#include "ui/gfx/geometry/size.h"
+#include "ui/gfx/gpu_memory_buffer.h"
+#include "ui/gfx/linux/native_pixmap_dmabuf.h"
+#include "ui/gfx/native_pixmap.h"
namespace media {
namespace {
+constexpr uint32_t kInvalidVaFourcc = 0u;
+
// UMA errors that the VaapiMjpegDecodeAccelerator class reports.
-enum VAJDADecoderFailure {
+enum VAJDAFailure {
VAAPI_ERROR = 0,
- VAJDA_DECODER_FAILURES_MAX,
+ VAJDA_FAILURES_MAX,
};
-static void ReportToVAJDADecoderFailureUMA(VAJDADecoderFailure failure) {
+static void ReportToVAJDADecoderFailureUMA(VAJDAFailure failure) {
UMA_HISTOGRAM_ENUMERATION("Media.VAJDA.DecoderFailure", failure,
- VAJDA_DECODER_FAILURES_MAX + 1);
+ VAJDA_FAILURES_MAX + 1);
+}
+
+static void ReportToVAJDAVppFailureUMA(VAJDAFailure failure) {
+ UMA_HISTOGRAM_ENUMERATION("Media.VAJDA.VppFailure", failure,
+ VAJDA_FAILURES_MAX + 1);
}
static void ReportToVAJDAResponseToClientUMA(
@@ -72,6 +95,8 @@ static bool VerifyDataSize(const VAImage* image) {
size_t min_size = 0;
if (image->format.fourcc == VA_FOURCC_I420) {
min_size = VideoFrame::AllocationSize(PIXEL_FORMAT_I420, dimensions);
+ } else if (image->format.fourcc == VA_FOURCC_NV12) {
+ min_size = VideoFrame::AllocationSize(PIXEL_FORMAT_NV12, dimensions);
} else if (image->format.fourcc == VA_FOURCC_YUY2 ||
image->format.fourcc == VA_FOURCC('Y', 'U', 'Y', 'V')) {
min_size = VideoFrame::AllocationSize(PIXEL_FORMAT_YUY2, dimensions);
@@ -81,15 +106,41 @@ static bool VerifyDataSize(const VAImage* image) {
return base::strict_cast<size_t>(image->data_size) >= min_size;
}
+static uint32_t VideoPixelFormatToVAFourCC(VideoPixelFormat format) {
+ switch (format) {
+ case PIXEL_FORMAT_I420:
+ return VA_FOURCC_I420;
+ case PIXEL_FORMAT_YV12:
+ return VA_FOURCC_YV12;
+ case PIXEL_FORMAT_NV12:
+ return VA_FOURCC_NV12;
+ case PIXEL_FORMAT_NV21:
+ return VA_FOURCC_NV21;
+ case PIXEL_FORMAT_UYVY:
+ return VA_FOURCC_UYVY;
+ case PIXEL_FORMAT_YUY2:
+ return VA_FOURCC_YUY2;
+ case PIXEL_FORMAT_ARGB:
+ return VA_FOURCC_ARGB;
+ case PIXEL_FORMAT_XRGB:
+ return VA_FOURCC_XRGB;
+ case PIXEL_FORMAT_ABGR:
+ return VA_FOURCC_ABGR;
+ case PIXEL_FORMAT_XBGR:
+ return VA_FOURCC_XBGR;
+ default:
+ return kInvalidVaFourcc;
+ }
+}
+
} // namespace
-void VaapiMjpegDecodeAccelerator::NotifyError(int32_t bitstream_buffer_id,
- Error error) {
+void VaapiMjpegDecodeAccelerator::NotifyError(int32_t task_id, Error error) {
if (!task_runner_->BelongsToCurrentThread()) {
task_runner_->PostTask(
- FROM_HERE, base::BindOnce(&VaapiMjpegDecodeAccelerator::NotifyError,
- weak_this_factory_.GetWeakPtr(),
- bitstream_buffer_id, error));
+ FROM_HERE,
+ base::BindOnce(&VaapiMjpegDecodeAccelerator::NotifyError,
+ weak_this_factory_.GetWeakPtr(), task_id, error));
return;
}
VLOGF(1) << "Notifying of error " << error;
@@ -98,14 +149,14 @@ void VaapiMjpegDecodeAccelerator::NotifyError(int32_t bitstream_buffer_id,
DCHECK_NE(chromeos_camera::MjpegDecodeAccelerator::Error::NO_ERRORS, error);
ReportToVAJDAResponseToClientUMA(error);
DCHECK(client_);
- client_->NotifyError(bitstream_buffer_id, error);
+ client_->NotifyError(task_id, error);
}
-void VaapiMjpegDecodeAccelerator::VideoFrameReady(int32_t bitstream_buffer_id) {
+void VaapiMjpegDecodeAccelerator::VideoFrameReady(int32_t task_id) {
DCHECK(task_runner_->BelongsToCurrentThread());
ReportToVAJDAResponseToClientUMA(
chromeos_camera::MjpegDecodeAccelerator::Error::NO_ERRORS);
- client_->VideoFrameReady(bitstream_buffer_id);
+ client_->VideoFrameReady(task_id);
}
VaapiMjpegDecodeAccelerator::VaapiMjpegDecodeAccelerator(
@@ -136,6 +187,16 @@ bool VaapiMjpegDecodeAccelerator::Initialize(
return false;
}
+ vpp_vaapi_wrapper_ = VaapiWrapper::Create(
+ VaapiWrapper::kVideoProcess, VAProfileNone,
+ base::BindRepeating(&ReportToVAJDAVppFailureUMA, VAAPI_ERROR));
+ if (!vpp_vaapi_wrapper_) {
+ VLOGF(1) << "Failed initializing VAAPI for VPP";
+ return false;
+ }
+
+ gpu_memory_buffer_support_ = std::make_unique<gpu::GpuMemoryBufferSupport>();
+
if (!decoder_thread_.Start()) {
VLOGF(1) << "Failed to start decoding thread.";
return false;
@@ -145,68 +206,159 @@ bool VaapiMjpegDecodeAccelerator::Initialize(
return true;
}
-bool VaapiMjpegDecodeAccelerator::OutputPictureOnTaskRunner(
+bool VaapiMjpegDecodeAccelerator::OutputPictureLibYuvOnTaskRunner(
std::unique_ptr<ScopedVAImage> scoped_image,
int32_t input_buffer_id,
scoped_refptr<VideoFrame> video_frame) {
DCHECK(decoder_task_runner_->BelongsToCurrentThread());
- TRACE_EVENT1("jpeg", "VaapiMjpegDecodeAccelerator::OutputPictureOnTaskRunner",
- "input_buffer_id", input_buffer_id);
+ TRACE_EVENT1("jpeg", __func__, "input_buffer_id", input_buffer_id);
- // Copy image content from VAImage to VideoFrame. If the image is not in the
- // I420 format we'll have to convert it.
DCHECK(scoped_image);
- auto* mem = static_cast<uint8_t*>(scoped_image->va_buffer()->data());
const VAImage* image = scoped_image->image();
- DCHECK_GE(base::strict_cast<int>(image->width),
- video_frame->coded_size().width());
- DCHECK_GE(base::strict_cast<int>(image->height),
- video_frame->coded_size().height());
+
+ // For camera captures, we assume that the visible size is the same as the
+ // coded size.
+ DCHECK_EQ(video_frame->visible_rect().size(), video_frame->coded_size());
+ DCHECK_EQ(0, video_frame->visible_rect().x());
+ DCHECK_EQ(0, video_frame->visible_rect().y());
+ DCHECK(decoder_.GetScopedVASurface());
+  const gfx::Size visible_size =
+      decoder_.GetScopedVASurface()->size();
+ if (visible_size != video_frame->visible_rect().size()) {
+ VLOGF(1) << "The decoded visible size is not the same as the video frame's";
+ return false;
+ }
+
+ // The decoded image size is aligned up to JPEG MCU size, so it may be larger
+ // than |video_frame|'s visible size.
+ if (base::strict_cast<int>(image->width) < visible_size.width() ||
+ base::strict_cast<int>(image->height) < visible_size.height()) {
+ VLOGF(1) << "Decoded image size is smaller than output frame size";
+ return false;
+ }
DCHECK(VerifyDataSize(image));
- uint8_t* dst_y = video_frame->data(VideoFrame::kYPlane);
- uint8_t* dst_u = video_frame->data(VideoFrame::kUPlane);
- uint8_t* dst_v = video_frame->data(VideoFrame::kVPlane);
- size_t dst_y_stride = video_frame->stride(VideoFrame::kYPlane);
- size_t dst_u_stride = video_frame->stride(VideoFrame::kUPlane);
- size_t dst_v_stride = video_frame->stride(VideoFrame::kVPlane);
+
+ // Extract source pointers and strides.
+ auto* const mem =
+ static_cast<const uint8_t*>(scoped_image->va_buffer()->data());
+ std::array<const uint8_t*, VideoFrame::kMaxPlanes> src_ptrs{};
+ std::array<int, VideoFrame::kMaxPlanes> src_strides{};
+ for (uint32_t i = 0; i < image->num_planes; i++) {
+ src_ptrs[i] = mem + image->offsets[i];
+ if (!base::CheckedNumeric<uint32_t>(image->pitches[i])
+ .AssignIfValid(&src_strides[i])) {
+ VLOGF(1) << "Can't extract the strides";
+ return false;
+ }
+ }
+
+ // Extract destination pointers and strides.
+ std::array<uint8_t*, VideoFrame::kMaxPlanes> dst_ptrs{};
+ std::array<int, VideoFrame::kMaxPlanes> dst_strides{};
+ base::ScopedClosureRunner buffer_unmapper;
+ if (video_frame->HasDmaBufs()) {
+ // Dmabuf-backed frame needs to be mapped for SW access.
+ DCHECK(gpu_memory_buffer_support_);
+ base::Optional<gfx::BufferFormat> gfx_format =
+ VideoPixelFormatToGfxBufferFormat(video_frame->format());
+ if (!gfx_format) {
+ VLOGF(1) << "Unsupported format: " << video_frame->format();
+ return false;
+ }
+ auto gmb_handle = CreateGpuMemoryBufferHandle(video_frame.get());
+ DCHECK(!gmb_handle.is_null());
+ std::unique_ptr<gpu::GpuMemoryBufferImpl> gmb =
+ gpu_memory_buffer_support_->CreateGpuMemoryBufferImplFromHandle(
+ std::move(gmb_handle), video_frame->coded_size(), *gfx_format,
+ gfx::BufferUsage::SCANOUT_CPU_READ_WRITE, base::DoNothing());
+ if (!gmb) {
+ VLOGF(1) << "Failed to create GPU memory buffer";
+ return false;
+ }
+ if (!gmb->Map()) {
+ VLOGF(1) << "Failed to map GPU memory buffer";
+ return false;
+ }
+ for (size_t i = 0; i < video_frame->layout().num_planes(); i++) {
+ dst_ptrs[i] = static_cast<uint8_t*>(gmb->memory(i));
+ dst_strides[i] = gmb->stride(i);
+ }
+ buffer_unmapper.ReplaceClosure(
+ base::BindOnce(&gpu::GpuMemoryBufferImpl::Unmap, std::move(gmb)));
+ } else {
+ DCHECK(video_frame->IsMappable());
+ for (size_t i = 0; i < video_frame->layout().num_planes(); i++) {
+ dst_ptrs[i] = video_frame->visible_data(i);
+ dst_strides[i] = video_frame->stride(i);
+ }
+ }
switch (image->format.fourcc) {
- case VA_FOURCC_I420: {
+ case VA_FOURCC_I420:
DCHECK_EQ(image->num_planes, 3u);
- const uint8_t* src_y = mem + image->offsets[0];
- const uint8_t* src_u = mem + image->offsets[1];
- const uint8_t* src_v = mem + image->offsets[2];
- const size_t src_y_stride = image->pitches[0];
- const size_t src_u_stride = image->pitches[1];
- const size_t src_v_stride = image->pitches[2];
- if (libyuv::I420Copy(src_y, src_y_stride, src_u, src_u_stride, src_v,
- src_v_stride, dst_y, dst_y_stride, dst_u,
- dst_u_stride, dst_v, dst_v_stride,
- video_frame->coded_size().width(),
- video_frame->coded_size().height())) {
- VLOGF(1) << "I420Copy failed";
- return false;
+ switch (video_frame->format()) {
+ case PIXEL_FORMAT_I420:
+ DCHECK_EQ(video_frame->layout().num_planes(), 3u);
+ if (libyuv::I420Copy(src_ptrs[0], src_strides[0], src_ptrs[1],
+ src_strides[1], src_ptrs[2], src_strides[2],
+ dst_ptrs[0], dst_strides[0], dst_ptrs[1],
+ dst_strides[1], dst_ptrs[2], dst_strides[2],
+ visible_size.width(), visible_size.height())) {
+ VLOGF(1) << "I420Copy failed";
+ return false;
+ }
+ break;
+ case PIXEL_FORMAT_NV12:
+ DCHECK_EQ(video_frame->layout().num_planes(), 2u);
+ if (libyuv::I420ToNV12(src_ptrs[0], src_strides[0], src_ptrs[1],
+ src_strides[1], src_ptrs[2], src_strides[2],
+ dst_ptrs[0], dst_strides[0], dst_ptrs[1],
+ dst_strides[1], visible_size.width(),
+ visible_size.height())) {
+ VLOGF(1) << "I420ToNV12 failed";
+ return false;
+ }
+ break;
+ default:
+ VLOGF(1) << "Can't convert image from I420 to "
+ << video_frame->format();
+ return false;
}
break;
- }
case VA_FOURCC_YUY2:
- case VA_FOURCC('Y', 'U', 'Y', 'V'): {
+ case VA_FOURCC('Y', 'U', 'Y', 'V'):
DCHECK_EQ(image->num_planes, 1u);
- const uint8_t* src_yuy2 = mem + image->offsets[0];
- const size_t src_yuy2_stride = image->pitches[0];
- if (libyuv::YUY2ToI420(src_yuy2, src_yuy2_stride, dst_y, dst_y_stride,
- dst_u, dst_u_stride, dst_v, dst_v_stride,
- video_frame->coded_size().width(),
- video_frame->coded_size().height())) {
- VLOGF(1) << "YUY2ToI420 failed";
- return false;
+ switch (video_frame->format()) {
+ case PIXEL_FORMAT_I420:
+ DCHECK_EQ(video_frame->layout().num_planes(), 3u);
+ if (libyuv::YUY2ToI420(src_ptrs[0], src_strides[0], dst_ptrs[0],
+ dst_strides[0], dst_ptrs[1], dst_strides[1],
+ dst_ptrs[2], dst_strides[2],
+ visible_size.width(), visible_size.height())) {
+ VLOGF(1) << "YUY2ToI420 failed";
+ return false;
+ }
+ break;
+ case PIXEL_FORMAT_NV12:
+ DCHECK_EQ(video_frame->layout().num_planes(), 2u);
+ if (libyuv::YUY2ToNV12(src_ptrs[0], src_strides[0], dst_ptrs[0],
+ dst_strides[0], dst_ptrs[1], dst_strides[1],
+ visible_size.width(), visible_size.height())) {
+ VLOGF(1) << "YUY2ToNV12 failed";
+ return false;
+ }
+ break;
+ default:
+ VLOGF(1) << "Can't convert image from YUYV to "
+ << video_frame->format();
+ return false;
}
break;
- }
default:
- VLOGF(1) << "Can't convert image to I420: unsupported format "
- << FourccToString(image->format.fourcc);
+ VLOGF(1) << "Can't convert image from "
+ << FourccToString(image->format.fourcc) << " to "
+ << video_frame->format();
return false;
}
@@ -218,31 +370,167 @@ bool VaapiMjpegDecodeAccelerator::OutputPictureOnTaskRunner(
return true;
}
-void VaapiMjpegDecodeAccelerator::DecodeTask(
- int32_t bitstream_buffer_id,
- std::unique_ptr<UnalignedSharedMemory> shm,
+bool VaapiMjpegDecodeAccelerator::OutputPictureVppOnTaskRunner(
+ const ScopedVASurface* surface,
+ int32_t input_buffer_id,
scoped_refptr<VideoFrame> video_frame) {
+ DCHECK(decoder_task_runner_->BelongsToCurrentThread());
+ DCHECK(surface);
+
+ TRACE_EVENT1("jpeg", __func__, "input_buffer_id", input_buffer_id);
+
+ // Bind a VA surface to |video_frame|.
+ scoped_refptr<gfx::NativePixmap> pixmap =
+ CreateNativePixmapDmaBuf(video_frame.get());
+ if (!pixmap) {
+ VLOGF(1) << "Cannot create native pixmap for output buffer";
+ return false;
+ }
+ scoped_refptr<VASurface> output_surface =
+ vpp_vaapi_wrapper_->CreateVASurfaceForPixmap(pixmap);
+ if (!output_surface) {
+ VLOGF(1) << "Cannot create VA surface for output buffer";
+ return false;
+ }
+
+  // Use VPP to blit the visible-size region within |surface| into
+  // |output_surface|. BlitSurface() does scaling, not cropping, when the
+  // source and destination sizes don't match, so we manipulate the surface
+  // sizes to effectively do the cropping.
+ const gfx::Size& blit_size = video_frame->visible_rect().size();
+ if (surface->size().width() < blit_size.width() ||
+ surface->size().height() < blit_size.height()) {
+ VLOGF(1) << "Decoded surface size is smaller than target size";
+ return false;
+ }
+ scoped_refptr<VASurface> src_surface = base::MakeRefCounted<VASurface>(
+ surface->id(), blit_size, surface->format(),
+ base::DoNothing() /* release_cb */);
+ scoped_refptr<VASurface> dst_surface = base::MakeRefCounted<VASurface>(
+ output_surface->id(), blit_size, output_surface->format(),
+ base::DoNothing() /* release_cb */);
+
+ // We should call vaSyncSurface() when passing surface between contexts. See:
+ // https://lists.01.org/pipermail/intel-vaapi-media/2019-June/000131.html
+ if (!vpp_vaapi_wrapper_->SyncSurface(src_surface->id())) {
+ VLOGF(1) << "Cannot sync VPP input surface";
+ return false;
+ }
+ if (!vpp_vaapi_wrapper_->BlitSurface(src_surface, dst_surface)) {
+ VLOGF(1) << "Cannot convert decoded image into output buffer";
+ return false;
+ }
+
+ // Sync target surface since the buffer is returning to client.
+ if (!vpp_vaapi_wrapper_->SyncSurface(dst_surface->id())) {
+ VLOGF(1) << "Cannot sync VPP output surface";
+ return false;
+ }
+
+ task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&VaapiMjpegDecodeAccelerator::VideoFrameReady,
+ weak_this_factory_.GetWeakPtr(), input_buffer_id));
+
+ return true;
+}
+
+void VaapiMjpegDecodeAccelerator::DecodeFromShmTask(
+ int32_t task_id,
+ std::unique_ptr<UnalignedSharedMemory> shm,
+ scoped_refptr<VideoFrame> dst_frame) {
DVLOGF(4);
DCHECK(decoder_task_runner_->BelongsToCurrentThread());
- TRACE_EVENT0("jpeg", "DecodeTask");
+ TRACE_EVENT0("jpeg", __func__);
+
+ auto src_image =
+ base::make_span(static_cast<const uint8_t*>(shm->memory()), shm->size());
+ DecodeImpl(task_id, src_image, std::move(dst_frame));
+}
- VaapiImageDecodeStatus status = decoder_.Decode(
- base::make_span(static_cast<const uint8_t*>(shm->memory()), shm->size()));
+void VaapiMjpegDecodeAccelerator::DecodeFromDmaBufTask(
+ int32_t task_id,
+ base::ScopedFD src_dmabuf_fd,
+ size_t src_size,
+ off_t src_offset,
+ scoped_refptr<VideoFrame> dst_frame) {
+ DVLOGF(4);
+ DCHECK(decoder_task_runner_->BelongsToCurrentThread());
+ TRACE_EVENT0("jpeg", __func__);
+
+ // The DMA-buf FD should be mapped as read-only since it may only have read
+ // permission, e.g. when it comes from camera driver.
+ DCHECK(src_dmabuf_fd.is_valid());
+ DCHECK_GT(src_size, 0u);
+ void* src_addr = mmap(nullptr, src_size, PROT_READ, MAP_SHARED,
+ src_dmabuf_fd.get(), src_offset);
+ if (src_addr == MAP_FAILED) {
+ VPLOGF(1) << "Failed to map input DMA buffer";
+ NotifyError(task_id, UNREADABLE_INPUT);
+ return;
+ }
+ base::span<const uint8_t> src_image =
+ base::make_span(static_cast<const uint8_t*>(src_addr), src_size);
+
+ DecodeImpl(task_id, src_image, std::move(dst_frame));
+
+ const int ret = munmap(src_addr, src_size);
+ DPCHECK(ret == 0);
+}
+
+void VaapiMjpegDecodeAccelerator::DecodeImpl(
+ int32_t task_id,
+ base::span<const uint8_t> src_image,
+ scoped_refptr<VideoFrame> dst_frame) {
+ // TODO(andrescj): validate that the video frame's visible size is the same as
+ // the parsed JPEG's visible size when it is returned from Decode(), and
+ // remove the size checks in OutputPicture*().
+ VaapiImageDecodeStatus status = decoder_.Decode(src_image);
if (status != VaapiImageDecodeStatus::kSuccess) {
- NotifyError(bitstream_buffer_id, VaapiJpegDecodeStatusToError(status));
+ NotifyError(task_id, VaapiJpegDecodeStatusToError(status));
+ return;
+ }
+ const ScopedVASurface* surface = decoder_.GetScopedVASurface();
+ DCHECK(surface);
+ DCHECK(surface->IsValid());
+
+ // For DMA-buf backed |dst_frame|, we will import it as a VA surface and use
+ // VPP to convert the decoded |surface| into it, if the formats and sizes are
+ // supported.
+ const uint32_t video_frame_va_fourcc =
+ VideoPixelFormatToVAFourCC(dst_frame->format());
+ if (video_frame_va_fourcc == kInvalidVaFourcc) {
+ VLOGF(1) << "Unsupported video frame format: " << dst_frame->format();
+ NotifyError(task_id, PLATFORM_FAILURE);
return;
}
+  // TODO(kamesan): move the HasDmaBufs() check to a DCHECK when we deprecate
+  // shared-memory-backed video frames.
+ if (dst_frame->HasDmaBufs() &&
+ VaapiWrapper::IsVppResolutionAllowed(surface->size()) &&
+ VaapiWrapper::IsVppSupportedForJpegDecodedSurfaceToFourCC(
+ surface->format(), video_frame_va_fourcc)) {
+ if (!OutputPictureVppOnTaskRunner(surface, task_id, std::move(dst_frame))) {
+ VLOGF(1) << "Output picture using VPP failed";
+ NotifyError(task_id, PLATFORM_FAILURE);
+ }
+ return;
+ }
+
+  // Fall back to doing the conversion with libyuv. This happens when:
+  // 1. |dst_frame| is backed by shared memory.
+  // 2. VPP doesn't support the format conversion. This is intended for the AMD
+  //    VAAPI driver, whose VPP only supports converting decoded 4:2:0 JPEGs.
std::unique_ptr<ScopedVAImage> image =
- decoder_.GetImage(VA_FOURCC_I420 /* preferred_image_fourcc */, &status);
+ decoder_.GetImage(video_frame_va_fourcc, &status);
if (status != VaapiImageDecodeStatus::kSuccess) {
- NotifyError(bitstream_buffer_id, VaapiJpegDecodeStatusToError(status));
+ NotifyError(task_id, VaapiJpegDecodeStatusToError(status));
return;
}
-
- if (!OutputPictureOnTaskRunner(std::move(image), bitstream_buffer_id,
- std::move(video_frame))) {
- VLOGF(1) << "Output picture failed";
- NotifyError(bitstream_buffer_id, PLATFORM_FAILURE);
+ if (!OutputPictureLibYuvOnTaskRunner(std::move(image), task_id,
+ std::move(dst_frame))) {
+ VLOGF(1) << "Output picture using libyuv failed";
+ NotifyError(task_id, PLATFORM_FAILURE);
}
}
@@ -250,22 +538,35 @@ void VaapiMjpegDecodeAccelerator::Decode(
BitstreamBuffer bitstream_buffer,
scoped_refptr<VideoFrame> video_frame) {
DCHECK(io_task_runner_->BelongsToCurrentThread());
- TRACE_EVENT1("jpeg", "Decode", "input_id", bitstream_buffer.id());
+ TRACE_EVENT1("jpeg", __func__, "input_id", bitstream_buffer.id());
DVLOGF(4) << "Mapping new input buffer id: " << bitstream_buffer.id()
<< " size: " << bitstream_buffer.size();
- // UnalignedSharedMemory will take over the |bitstream_buffer.handle()|.
- auto shm = std::make_unique<UnalignedSharedMemory>(
- bitstream_buffer.TakeRegion(), bitstream_buffer.size(),
- false /* read_only */);
-
if (bitstream_buffer.id() < 0) {
VLOGF(1) << "Invalid bitstream_buffer, id: " << bitstream_buffer.id();
NotifyError(bitstream_buffer.id(), INVALID_ARGUMENT);
return;
}
+ // Validate output video frame.
+ if (!video_frame->IsMappable() && !video_frame->HasDmaBufs()) {
+ VLOGF(1) << "Unsupported output frame storage type";
+ NotifyError(bitstream_buffer.id(), INVALID_ARGUMENT);
+ return;
+ }
+ if ((video_frame->visible_rect().width() & 1) ||
+ (video_frame->visible_rect().height() & 1)) {
+ VLOGF(1) << "Video frame visible size has odd dimension";
+ NotifyError(bitstream_buffer.id(), PLATFORM_FAILURE);
+ return;
+ }
+
+ // UnalignedSharedMemory will take over the |bitstream_buffer.handle()|.
+ auto shm = std::make_unique<UnalignedSharedMemory>(
+ bitstream_buffer.TakeRegion(), bitstream_buffer.size(),
+ false /* read_only */);
+
if (!shm->MapAt(bitstream_buffer.offset(), bitstream_buffer.size())) {
VLOGF(1) << "Failed to map input buffer";
NotifyError(bitstream_buffer.id(), UNREADABLE_INPUT);
@@ -275,11 +576,67 @@ void VaapiMjpegDecodeAccelerator::Decode(
// It's safe to use base::Unretained(this) because |decoder_task_runner_| runs
// tasks on |decoder_thread_| which is stopped in the destructor of |this|.
decoder_task_runner_->PostTask(
- FROM_HERE, base::BindOnce(&VaapiMjpegDecodeAccelerator::DecodeTask,
+ FROM_HERE, base::BindOnce(&VaapiMjpegDecodeAccelerator::DecodeFromShmTask,
base::Unretained(this), bitstream_buffer.id(),
std::move(shm), std::move(video_frame)));
}
+void VaapiMjpegDecodeAccelerator::Decode(int32_t task_id,
+ base::ScopedFD src_dmabuf_fd,
+ size_t src_size,
+ off_t src_offset,
+ scoped_refptr<VideoFrame> dst_frame) {
+ DCHECK(io_task_runner_->BelongsToCurrentThread());
+ TRACE_EVENT1("jpeg", __func__, "task_id", task_id);
+
+ if (task_id < 0) {
+ VLOGF(1) << "Invalid task id: " << task_id;
+ NotifyError(task_id, INVALID_ARGUMENT);
+ return;
+ }
+
+ // Validate input arguments.
+ if (!src_dmabuf_fd.is_valid()) {
+ VLOGF(1) << "Invalid input buffer FD";
+ NotifyError(task_id, INVALID_ARGUMENT);
+ return;
+ }
+ if (src_size == 0) {
+ VLOGF(1) << "Input buffer size is zero";
+ NotifyError(task_id, INVALID_ARGUMENT);
+ return;
+ }
+ const size_t page_size = base::GetPageSize();
+ if (src_offset < 0 || src_offset % page_size != 0) {
+ VLOGF(1) << "Input buffer offset (" << src_offset
+ << ") should be non-negative and aligned to page size ("
+ << page_size << ")";
+ NotifyError(task_id, INVALID_ARGUMENT);
+ return;
+ }
+
+ // Validate output video frame.
+ if (!dst_frame->IsMappable() && !dst_frame->HasDmaBufs()) {
+ VLOGF(1) << "Unsupported output frame storage type";
+ NotifyError(task_id, INVALID_ARGUMENT);
+ return;
+ }
+ if ((dst_frame->visible_rect().width() & 1) ||
+ (dst_frame->visible_rect().height() & 1)) {
+ VLOGF(1) << "Output frame visible size has odd dimension";
+ NotifyError(task_id, PLATFORM_FAILURE);
+ return;
+ }
+
+ // It's safe to use base::Unretained(this) because |decoder_task_runner_| runs
+ // tasks on |decoder_thread_| which is stopped in the destructor of |this|.
+ decoder_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&VaapiMjpegDecodeAccelerator::DecodeFromDmaBufTask,
+ base::Unretained(this), task_id, std::move(src_dmabuf_fd),
+ src_size, src_offset, std::move(dst_frame)));
+}
+
bool VaapiMjpegDecodeAccelerator::IsSupported() {
return VaapiWrapper::IsDecodeSupported(VAProfileJPEGBaseline);
}
diff --git a/chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.h b/chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.h
index c32ceb5bc22..419ddd49639 100644
--- a/chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.h
+++ b/chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.h
@@ -9,6 +9,7 @@
#include <memory>
+#include "base/containers/span.h"
#include "base/macros.h"
#include "base/memory/scoped_refptr.h"
#include "base/memory/weak_ptr.h"
@@ -21,11 +22,16 @@ namespace base {
class SingleThreadTaskRunner;
}
+namespace gpu {
+class GpuMemoryBufferSupport;
+}
+
namespace media {
class BitstreamBuffer;
class ScopedVAImage;
class UnalignedSharedMemory;
+class VaapiWrapper;
class VideoFrame;
// Class to provide MJPEG decode acceleration for Intel systems with hardware
@@ -48,28 +54,51 @@ class MEDIA_GPU_EXPORT VaapiMjpegDecodeAccelerator
chromeos_camera::MjpegDecodeAccelerator::Client* client) override;
void Decode(BitstreamBuffer bitstream_buffer,
scoped_refptr<VideoFrame> video_frame) override;
+ void Decode(int32_t task_id,
+ base::ScopedFD src_dmabuf_fd,
+ size_t src_size,
+ off_t src_offset,
+ scoped_refptr<VideoFrame> dst_frame) override;
bool IsSupported() override;
private:
// Notifies the client that an error has occurred and decoding cannot
// continue. The client is notified on the |task_runner_|, i.e., the thread in
// which |*this| was created.
- void NotifyError(int32_t bitstream_buffer_id, Error error);
+ void NotifyError(int32_t task_id, Error error);
// Notifies the client that a decode is ready. The client is notified on the
// |task_runner_|, i.e., the thread in which |*this| was created.
- void VideoFrameReady(int32_t bitstream_buffer_id);
+ void VideoFrameReady(int32_t task_id);
// Processes one decode request.
- void DecodeTask(int32_t bitstream_buffer_id,
- std::unique_ptr<UnalignedSharedMemory> shm,
- scoped_refptr<VideoFrame> video_frame);
-
- // Puts contents of |image| into given |video_frame| and passes the
- // |input_buffer_id| of the resulting picture to client for output.
- bool OutputPictureOnTaskRunner(std::unique_ptr<ScopedVAImage> image,
- int32_t input_buffer_id,
- scoped_refptr<VideoFrame> video_frame);
+ void DecodeFromShmTask(int32_t task_id,
+ std::unique_ptr<UnalignedSharedMemory> shm,
+ scoped_refptr<VideoFrame> dst_frame);
+ void DecodeFromDmaBufTask(int32_t task_id,
+ base::ScopedFD src_dmabuf_fd,
+ size_t src_size,
+ off_t src_offset,
+ scoped_refptr<VideoFrame> dst_frame);
+
+ // Decodes the JPEG in |src_image| into |dst_frame| and notifies the client
+ // when finished or when an error occurs.
+ void DecodeImpl(int32_t task_id,
+ base::span<const uint8_t> src_image,
+ scoped_refptr<VideoFrame> dst_frame);
+
+ // Puts contents of |surface| into given |video_frame| using VA-API Video
+ // Processing Pipeline (VPP), and passes the |input_buffer_id| of the
+ // resulting picture to client for output.
+ bool OutputPictureVppOnTaskRunner(const ScopedVASurface* surface,
+ int32_t input_buffer_id,
+ scoped_refptr<VideoFrame> video_frame);
+
+ // Puts contents of |image| into given |video_frame| using libyuv, and passes
+ // the |input_buffer_id| of the resulting picture to client for output.
+ bool OutputPictureLibYuvOnTaskRunner(std::unique_ptr<ScopedVAImage> image,
+ int32_t input_buffer_id,
+ scoped_refptr<VideoFrame> video_frame);
// ChildThread's task runner.
const scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
@@ -82,8 +111,14 @@ class MEDIA_GPU_EXPORT VaapiMjpegDecodeAccelerator
VaapiJpegDecoder decoder_;
- // Comes after |decoder_| to ensure its destructor is executed before
- // |decoder_| is destroyed.
+ // VaapiWrapper for VPP context. This is used to convert decoded data into
+ // client buffer.
+ scoped_refptr<VaapiWrapper> vpp_vaapi_wrapper_;
+
+ // For creating GpuMemoryBuffer from client DMA buffer that can be mapped for
+ // software access.
+ std::unique_ptr<gpu::GpuMemoryBufferSupport> gpu_memory_buffer_support_;
+
base::Thread decoder_thread_;
// Use this to post tasks to |decoder_thread_| instead of
// |decoder_thread_.task_runner()| because the latter will be NULL once
diff --git a/chromium/media/gpu/vaapi/vaapi_utils_unittest.cc b/chromium/media/gpu/vaapi/vaapi_utils_unittest.cc
index fdf8e54ac19..d3f4a5e8286 100644
--- a/chromium/media/gpu/vaapi/vaapi_utils_unittest.cc
+++ b/chromium/media/gpu/vaapi/vaapi_utils_unittest.cc
@@ -134,6 +134,21 @@ TEST_F(VaapiUtilsTest, ScopedVASurface) {
EXPECT_EQ(coded_size, scoped_va_surface->size());
}
+// This test exercises the creation of a ScopedVASurface where the requested
+// size and the visible size are different.
+TEST_F(VaapiUtilsTest, ScopedVASurfaceWithVisibleSize) {
+ const gfx::Size coded_size(64, 64);
+ const gfx::Size visible_size(60, 60);
+ auto scoped_va_surface = vaapi_wrapper_->CreateContextAndScopedVASurface(
+ VA_RT_FORMAT_YUV420, coded_size, visible_size);
+
+ ASSERT_TRUE(scoped_va_surface);
+ EXPECT_TRUE(scoped_va_surface->IsValid());
+ EXPECT_EQ(VA_RT_FORMAT_YUV420,
+ base::checked_cast<int>(scoped_va_surface->format()));
+ EXPECT_EQ(visible_size, scoped_va_surface->size());
+}
+
// This test exercises the creation of a ScopedVASurface with an invalid
// size.
TEST_F(VaapiUtilsTest, ScopedVASurfaceInvalidSizeRequest) {
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc
index 95df4498b43..f04b60f583e 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc
@@ -625,8 +625,12 @@ void VaapiVideoDecodeAccelerator::AssignPictureBuffers(
buffers.size() >= requested_num_pics_,
"Got an invalid number of picture buffers. (Got " << buffers.size()
<< ", requested " << requested_num_pics_ << ")", INVALID_ARGUMENT, );
- // requested_pic_size_ can be adjusted by VDA client.
- requested_pic_size_ = buffers[0].size();
+  // requested_pic_size_ can be adjusted by the VDA client. We should update
+  // |requested_pic_size_| with buffers[0].size(), but the AMD driver doesn't
+  // decode frames correctly if the surface stride differs from the width of
+  // the coded size.
+  // TODO(b/139460315): Update |requested_pic_size_| with buffers[0].size()
+  // once the AMD driver issue is resolved.
va_surface_format_ = GetVaFormatForVideoCodecProfile(profile_);
std::vector<VASurfaceID> va_surface_ids;
@@ -649,11 +653,16 @@ void VaapiVideoDecodeAccelerator::AssignPictureBuffers(
// If we aren't in BufferAllocationMode::kNone, this |picture| is
// only used as a copy destination. Therefore, the VaapiWrapper used and
// owned by |picture| is |vpp_vaapi_wrapper_|.
+
+ // TODO(b/139460315): Create with buffers[i] once the AMD driver issue is
+ // resolved.
+ PictureBuffer buffer = buffers[i];
+ buffer.set_size(requested_pic_size_);
std::unique_ptr<VaapiPicture> picture = vaapi_picture_factory_->Create(
(buffer_allocation_mode_ == BufferAllocationMode::kNone)
? vaapi_wrapper_
: vpp_vaapi_wrapper_,
- make_context_current_cb_, bind_image_cb_, buffers[i]);
+ make_context_current_cb_, bind_image_cb_, buffer);
RETURN_AND_NOTIFY_ON_FAILURE(picture, "Failed creating a VaapiPicture",
PLATFORM_FAILURE, );
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc
index 397f7e28970..06b33de17cc 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc
@@ -7,7 +7,7 @@
#include "base/bind.h"
#include "base/memory/ptr_util.h"
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/gpu/accelerated_video_decoder.h"
#include "media/gpu/format_utils.h"
#include "media/gpu/vaapi/vaapi_picture.h"
@@ -363,7 +363,7 @@ class VaapiVideoDecodeAcceleratorTest : public TestWithParam<TestParams>,
MOCK_METHOD0(NotifyResetDone, void());
MOCK_METHOD1(NotifyError, void(VideoDecodeAccelerator::Error));
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
// The class under test and a worker thread for it.
VaapiVideoDecodeAccelerator vda_;
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decoder.cc b/chromium/media/gpu/vaapi/vaapi_video_decoder.cc
index 55ec03e71fc..d1f62d4505c 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decoder.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_decoder.cc
@@ -68,9 +68,11 @@ VaapiVideoDecoder::DecodeTask::DecodeTask(DecodeTask&&) = default;
// static
std::unique_ptr<VideoDecoder> VaapiVideoDecoder::Create(
scoped_refptr<base::SequencedTaskRunner> client_task_runner,
- std::unique_ptr<DmabufVideoFramePool> frame_pool) {
+ scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
+ GetFramePoolCB get_pool_cb) {
return base::WrapUnique<VideoDecoder>(new VaapiVideoDecoder(
- std::move(client_task_runner), std::move(frame_pool)));
+ std::move(client_task_runner), std::move(decoder_task_runner),
+ std::move(get_pool_cb)));
}
// static
@@ -81,10 +83,11 @@ SupportedVideoDecoderConfigs VaapiVideoDecoder::GetSupportedConfigs() {
VaapiVideoDecoder::VaapiVideoDecoder(
scoped_refptr<base::SequencedTaskRunner> client_task_runner,
- std::unique_ptr<DmabufVideoFramePool> frame_pool)
- : frame_pool_(std::move(frame_pool)),
+ scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
+ GetFramePoolCB get_pool_cb)
+ : get_pool_cb_(std::move(get_pool_cb)),
client_task_runner_(std::move(client_task_runner)),
- decoder_thread_("VaapiDecoderThread"),
+ decoder_task_runner_(std::move(decoder_task_runner)),
weak_this_factory_(this) {
DETACH_FROM_SEQUENCE(decoder_sequence_checker_);
VLOGF(2);
@@ -124,7 +127,6 @@ int VaapiVideoDecoder::GetMaxDecodeRequests() const {
return kMaxDecodeRequests;
}
-// TODO(dstaessens): Handle re-initialization.
void VaapiVideoDecoder::Initialize(const VideoDecoderConfig& config,
bool low_delay,
CdmContext* cdm_context,
@@ -150,15 +152,7 @@ void VaapiVideoDecoder::Initialize(const VideoDecoderConfig& config,
return;
}
- if (!decoder_thread_.IsRunning() && !decoder_thread_.Start()) {
- std::move(init_cb).Run(false);
- return;
- }
-
- decoder_thread_task_runner_ = decoder_thread_.task_runner();
- frame_pool_->set_parent_task_runner(decoder_thread_task_runner_);
-
- decoder_thread_task_runner_->PostTask(
+ decoder_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&VaapiVideoDecoder::InitializeTask, weak_this_, config,
std::move(init_cb), std::move(output_cb)));
@@ -226,6 +220,9 @@ void VaapiVideoDecoder::InitializeTask(const VideoDecoderConfig& config,
}
needs_bitstream_conversion_ = (config.codec() == kCodecH264);
+ // Get and initialize the frame pool.
+ frame_pool_ = get_pool_cb_.Run();
+
visible_rect_ = config.visible_rect();
pixel_aspect_ratio_ = config.GetPixelAspectRatio();
profile_ = profile;
@@ -242,12 +239,8 @@ void VaapiVideoDecoder::Destroy() {
DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
VLOGF(2);
- decoder_thread_task_runner_->PostTask(
+ decoder_task_runner_->PostTask(
FROM_HERE, base::BindOnce(&VaapiVideoDecoder::DestroyTask, weak_this_));
- decoder_thread_.Stop();
-
- delete this;
- VLOGF(2) << "Destroying VAAPI VD done";
}
void VaapiVideoDecoder::DestroyTask() {
@@ -262,18 +255,17 @@ void VaapiVideoDecoder::DestroyTask() {
decoder_ = nullptr;
}
- // Drop all video frame references. This will cause the frames to be
- // destroyed once the decoder's client is done using them.
- frame_pool_ = nullptr;
-
weak_this_factory_.InvalidateWeakPtrs();
+
+ delete this;
+ VLOGF(2) << "Destroying VAAPI VD done";
}
void VaapiVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
DecodeCB decode_cb) {
DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
- decoder_thread_task_runner_->PostTask(
+ decoder_task_runner_->PostTask(
FROM_HERE, base::BindOnce(&VaapiVideoDecoder::QueueDecodeTask, weak_this_,
std::move(buffer), std::move(decode_cb)));
}
@@ -321,7 +313,7 @@ void VaapiVideoDecoder::ScheduleNextDecodeTask() {
*current_decode_task_->buffer_);
}
- decoder_thread_task_runner_->PostTask(
+ decoder_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&VaapiVideoDecoder::HandleDecodeTask, weak_this_));
}
@@ -538,7 +530,7 @@ void VaapiVideoDecoder::ChangeFrameResolutionTask() {
vaapi_wrapper_->CreateContext(pic_size);
// Retry the current decode task.
- decoder_thread_task_runner_->PostTask(
+ decoder_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&VaapiVideoDecoder::HandleDecodeTask, weak_this_));
}
@@ -555,8 +547,6 @@ void VaapiVideoDecoder::ReleaseFrameTask(scoped_refptr<VASurface> va_surface,
// pool for reuse.
size_t num_erased = output_frames_.erase(surface_id);
DCHECK_EQ(num_erased, 1u);
-
- // Releasing the |picture| here will also destroy the associated VASurface.
}
void VaapiVideoDecoder::NotifyFrameAvailableTask() {
@@ -567,7 +557,7 @@ void VaapiVideoDecoder::NotifyFrameAvailableTask() {
if (state_ == State::kWaitingForOutput) {
DCHECK(current_decode_task_);
SetState(State::kDecoding);
- decoder_thread_task_runner_->PostTask(
+ decoder_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&VaapiVideoDecoder::HandleDecodeTask, weak_this_));
}
@@ -607,7 +597,7 @@ void VaapiVideoDecoder::Reset(base::OnceClosure reset_cb) {
DCHECK_CALLED_ON_VALID_SEQUENCE(client_sequence_checker_);
DVLOGF(2);
- decoder_thread_task_runner_->PostTask(
+ decoder_task_runner_->PostTask(
FROM_HERE, base::BindOnce(&VaapiVideoDecoder::ResetTask, weak_this_,
std::move(reset_cb)));
}
@@ -629,7 +619,7 @@ void VaapiVideoDecoder::ResetTask(base::OnceClosure reset_cb) {
SetState(State::kResetting);
// Wait until any pending decode task has been aborted.
- decoder_thread_task_runner_->PostTask(
+ decoder_task_runner_->PostTask(
FROM_HERE, base::BindOnce(&VaapiVideoDecoder::ResetDoneTask, weak_this_,
std::move(reset_cb)));
}
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decoder.h b/chromium/media/gpu/vaapi/vaapi_video_decoder.h
index 5ffd13da27c..472ee24040d 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decoder.h
+++ b/chromium/media/gpu/vaapi/vaapi_video_decoder.h
@@ -39,9 +39,12 @@ class VASurface;
class VaapiVideoDecoder : public media::VideoDecoder,
public DecodeSurfaceHandler<VASurface> {
public:
+ using GetFramePoolCB = base::RepeatingCallback<DmabufVideoFramePool*()>;
+
static std::unique_ptr<VideoDecoder> Create(
scoped_refptr<base::SequencedTaskRunner> client_task_runner,
- std::unique_ptr<DmabufVideoFramePool> frame_pool);
+ scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
+ GetFramePoolCB get_pool);
static SupportedVideoDecoderConfigs GetSupportedConfigs();
@@ -91,8 +94,10 @@ class VaapiVideoDecoder : public media::VideoDecoder,
kError, // decoder encountered an error.
};
- VaapiVideoDecoder(scoped_refptr<base::SequencedTaskRunner> client_task_runner,
- std::unique_ptr<DmabufVideoFramePool> frame_pool);
+ VaapiVideoDecoder(
+ scoped_refptr<base::SequencedTaskRunner> client_task_runner,
+ scoped_refptr<base::SequencedTaskRunner> decoder_task_runner,
+ GetFramePoolCB get_pool);
~VaapiVideoDecoder() override;
// Destroy the VAAPIVideoDecoder, aborts pending decode requests and blocks
@@ -171,7 +176,8 @@ class VaapiVideoDecoder : public media::VideoDecoder,
double pixel_aspect_ratio_ = 0.0;
// Video frame pool used to allocate and recycle video frames.
- std::unique_ptr<DmabufVideoFramePool> frame_pool_;
+ GetFramePoolCB get_pool_cb_;
+ DmabufVideoFramePool* frame_pool_ = nullptr;
// The mapping between buffer id and the timestamp.
std::map<int32_t, base::TimeDelta> buffer_id_to_timestamp_;
@@ -191,8 +197,7 @@ class VaapiVideoDecoder : public media::VideoDecoder,
scoped_refptr<VaapiWrapper> vaapi_wrapper_;
const scoped_refptr<base::SequencedTaskRunner> client_task_runner_;
- base::Thread decoder_thread_;
- scoped_refptr<base::SingleThreadTaskRunner> decoder_thread_task_runner_;
+ const scoped_refptr<base::SequencedTaskRunner> decoder_task_runner_;
SEQUENCE_CHECKER(client_sequence_checker_);
SEQUENCE_CHECKER(decoder_sequence_checker_);
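A minimal caller-side sketch of the new wiring (illustrative only; |pool|, |client_task_runner| and |decoder_task_runner| are assumed to exist and to outlive the decoder): the decoder no longer owns the DmabufVideoFramePool or its own thread, so the client passes a decoder task runner plus a repeating callback that returns a raw pointer to a pool owned elsewhere.

  // The pool is owned by the client; the decoder only borrows it via the
  // callback.
  VaapiVideoDecoder::GetFramePoolCB get_pool_cb = base::BindRepeating(
      [](DmabufVideoFramePool* pool) { return pool; }, pool.get());
  std::unique_ptr<VideoDecoder> decoder = VaapiVideoDecoder::Create(
      client_task_runner, decoder_task_runner, std::move(get_pool_cb));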
diff --git a/chromium/media/gpu/vaapi/vaapi_webp_decoder.cc b/chromium/media/gpu/vaapi/vaapi_webp_decoder.cc
index 8b11273721d..fecda191310 100644
--- a/chromium/media/gpu/vaapi/vaapi_webp_decoder.cc
+++ b/chromium/media/gpu/vaapi/vaapi_webp_decoder.cc
@@ -84,6 +84,10 @@ gpu::ImageDecodeAcceleratorType VaapiWebPDecoder::GetType() const {
return gpu::ImageDecodeAcceleratorType::kWebP;
}
+SkYUVColorSpace VaapiWebPDecoder::GetYUVColorSpace() const {
+ return SkYUVColorSpace::kRec601_SkYUVColorSpace;
+}
+
VaapiImageDecodeStatus VaapiWebPDecoder::AllocateVASurfaceAndSubmitVABuffers(
base::span<const uint8_t> encoded_image) {
DCHECK(vaapi_wrapper_);
diff --git a/chromium/media/gpu/vaapi/vaapi_webp_decoder.h b/chromium/media/gpu/vaapi/vaapi_webp_decoder.h
index ec0647b067b..9cf621e3568 100644
--- a/chromium/media/gpu/vaapi/vaapi_webp_decoder.h
+++ b/chromium/media/gpu/vaapi/vaapi_webp_decoder.h
@@ -19,6 +19,7 @@ class VaapiWebPDecoder : public VaapiImageDecoder {
// VaapiImageDecoder implementation.
gpu::ImageDecodeAcceleratorType GetType() const override;
+ SkYUVColorSpace GetYUVColorSpace() const override;
private:
// VaapiImageDecoder implementation.
diff --git a/chromium/media/gpu/vaapi/vaapi_webp_decoder_unittest.cc b/chromium/media/gpu/vaapi/vaapi_webp_decoder_unittest.cc
index 88fce45a562..623e4d66452 100644
--- a/chromium/media/gpu/vaapi/vaapi_webp_decoder_unittest.cc
+++ b/chromium/media/gpu/vaapi/vaapi_webp_decoder_unittest.cc
@@ -31,7 +31,9 @@
#include "media/parsers/vp8_parser.h"
#include "media/parsers/webp_parser.h"
#include "third_party/libwebp/src/webp/decode.h"
+#include "ui/gfx/buffer_format_util.h"
#include "ui/gfx/buffer_types.h"
+#include "ui/gfx/geometry/rect.h"
#include "ui/gfx/geometry/size.h"
#include "ui/gfx/gpu_memory_buffer.h"
#include "ui/gfx/linux/native_pixmap_dmabuf.h"
@@ -103,26 +105,27 @@ class VaapiWebPDecoderTest
return GetTestDataFilePath(file_name);
}
- scoped_refptr<gfx::NativePixmapDmaBuf> DecodeToNativePixmapDmaBuf(
+ std::unique_ptr<NativePixmapAndSizeInfo> Decode(
base::span<const uint8_t> encoded_image,
VaapiImageDecodeStatus* status = nullptr) {
const VaapiImageDecodeStatus decode_status = decoder_.Decode(encoded_image);
EXPECT_EQ(!!decoder_.GetScopedVASurface(),
decode_status == VaapiImageDecodeStatus::kSuccess);
- // Still try to get the pixmap when decode fails.
- VaapiImageDecodeStatus pixmap_status;
- scoped_refptr<gfx::NativePixmapDmaBuf> pixmap =
- decoder_.ExportAsNativePixmapDmaBuf(&pixmap_status);
- EXPECT_EQ(!!pixmap, pixmap_status == VaapiImageDecodeStatus::kSuccess);
+ // Still try to export the surface when decode fails.
+ VaapiImageDecodeStatus export_status;
+ std::unique_ptr<NativePixmapAndSizeInfo> exported_pixmap =
+ decoder_.ExportAsNativePixmapDmaBuf(&export_status);
+ EXPECT_EQ(!!exported_pixmap,
+ export_status == VaapiImageDecodeStatus::kSuccess);
// Return the first fail status.
if (status) {
*status = decode_status != VaapiImageDecodeStatus::kSuccess
? decode_status
- : pixmap_status;
+ : export_status;
}
- return pixmap;
+ return exported_pixmap;
}
protected:
@@ -144,18 +147,36 @@ TEST_P(VaapiWebPDecoderTest, DecodeAndExportAsNativePixmapDmaBuf) {
VAProfileVP8Version0_3, VA_RT_FORMAT_YUV420));
VaapiImageDecodeStatus status;
- scoped_refptr<gfx::NativePixmapDmaBuf> pixmap =
- DecodeToNativePixmapDmaBuf(encoded_image, &status);
+ std::unique_ptr<NativePixmapAndSizeInfo> exported_pixmap =
+ Decode(encoded_image, &status);
ASSERT_EQ(VaapiImageDecodeStatus::kSuccess, status);
EXPECT_FALSE(decoder_.GetScopedVASurface());
- ASSERT_TRUE(pixmap);
- ASSERT_EQ(gfx::BufferFormat::YUV_420_BIPLANAR, pixmap->GetBufferFormat());
+ ASSERT_TRUE(exported_pixmap);
+ ASSERT_TRUE(exported_pixmap->pixmap);
+ ASSERT_EQ(gfx::BufferFormat::YUV_420_BIPLANAR,
+ exported_pixmap->pixmap->GetBufferFormat());
+
+ // Make sure the visible area is contained by the surface.
+ EXPECT_FALSE(exported_pixmap->va_surface_resolution.IsEmpty());
+ EXPECT_FALSE(exported_pixmap->pixmap->GetBufferSize().IsEmpty());
+ ASSERT_TRUE(
+ gfx::Rect(exported_pixmap->va_surface_resolution)
+ .Contains(gfx::Rect(exported_pixmap->pixmap->GetBufferSize())));
+
+ // TODO(andrescj): we could get a better lower bound based on the dimensions
+ // and the format.
+ ASSERT_GT(exported_pixmap->byte_size, 0u);
+
+ gfx::NativePixmapHandle handle = exported_pixmap->pixmap->ExportHandle();
+ ASSERT_EQ(gfx::NumberOfPlanesForLinearBufferFormat(
+ exported_pixmap->pixmap->GetBufferFormat()),
+ handle.planes.size());
- gfx::NativePixmapHandle handle = pixmap->ExportHandle();
LocalGpuMemoryBufferManager gpu_memory_buffer_manager;
std::unique_ptr<gfx::GpuMemoryBuffer> gpu_memory_buffer =
- gpu_memory_buffer_manager.ImportDmaBuf(handle, pixmap->GetBufferSize(),
- pixmap->GetBufferFormat());
+ gpu_memory_buffer_manager.ImportDmaBuf(
+ handle, exported_pixmap->pixmap->GetBufferSize(),
+ exported_pixmap->pixmap->GetBufferFormat());
ASSERT_TRUE(gpu_memory_buffer);
ASSERT_TRUE(gpu_memory_buffer->Map());
ASSERT_EQ(gfx::BufferFormat::YUV_420_BIPLANAR,
diff --git a/chromium/media/gpu/vaapi/vaapi_wrapper.cc b/chromium/media/gpu/vaapi/vaapi_wrapper.cc
index 7df60e27332..82f2b3c154f 100644
--- a/chromium/media/gpu/vaapi/vaapi_wrapper.cc
+++ b/chromium/media/gpu/vaapi/vaapi_wrapper.cc
@@ -46,6 +46,7 @@
#include "third_party/libyuv/include/libyuv.h"
#include "ui/gfx/buffer_format_util.h"
#include "ui/gfx/buffer_types.h"
+#include "ui/gfx/geometry/rect.h"
#include "ui/gfx/linux/native_pixmap_dmabuf.h"
#include "ui/gfx/native_pixmap.h"
#include "ui/gfx/native_pixmap_handle.h"
@@ -105,8 +106,6 @@ uint32_t BufferFormatToVAFourCC(gfx::BufferFormat fmt) {
return VA_FOURCC_BGRA;
case gfx::BufferFormat::RGBX_8888:
return VA_FOURCC_RGBX;
- case gfx::BufferFormat::UYVY_422:
- return VA_FOURCC_UYVY;
case gfx::BufferFormat::YVU_420:
return VA_FOURCC_YV12;
case gfx::BufferFormat::YUV_420_BIPLANAR:
@@ -466,11 +465,10 @@ static bool GetRequiredAttribs(const base::Lock* va_lock,
VAProfile profile,
std::vector<VAConfigAttrib>* required_attribs) {
va_lock->AssertAcquired();
- // No attribute for kVideoProcess.
- if (mode == VaapiWrapper::kVideoProcess)
- return true;
- // VAConfigAttribRTFormat is common to both encode and decode |mode|s.
+ // Choose a suitable VAConfigAttribRTFormat for every |mode|. For video
+ // processing, the supported surface attribs may vary according to which RT
+ // format is set.
if (profile == VAProfileVP9Profile2 || profile == VAProfileVP9Profile3) {
required_attribs->push_back(
{VAConfigAttribRTFormat, VA_RT_FORMAT_YUV420_10BPP});
@@ -518,6 +516,7 @@ class VASupportedProfiles {
VAProfile va_profile;
gfx::Size min_resolution;
gfx::Size max_resolution;
+ std::vector<uint32_t> pixel_formats;
VaapiWrapper::InternalFormats supported_internal_formats;
};
static const VASupportedProfiles& Get();
@@ -640,9 +639,6 @@ VASupportedProfiles::VASupportedProfiles()
std::vector<VASupportedProfiles::ProfileInfo>
VASupportedProfiles::GetSupportedProfileInfosForCodecModeInternal(
VaapiWrapper::CodecMode mode) const {
- if (mode == VaapiWrapper::kVideoProcess)
- return {ProfileInfo{VAProfileNone, gfx::Size()}};
-
std::vector<ProfileInfo> supported_profile_infos;
std::vector<VAProfile> va_profiles;
if (!GetSupportedVAProfiles(&va_profiles))
@@ -664,7 +660,7 @@ VASupportedProfiles::GetSupportedProfileInfosForCodecModeInternal(
if (IsBlackListedDriver(va_vendor_string, mode, va_profile))
continue;
- ProfileInfo profile_info;
+ ProfileInfo profile_info{};
if (!FillProfileInfo_Locked(va_profile, entrypoint, required_attribs,
&profile_info)) {
LOG(ERROR) << "FillProfileInfo_Locked failed for va_profile "
@@ -804,6 +800,12 @@ bool VASupportedProfiles::FillProfileInfo_Locked(
} else if (attrib.type == VASurfaceAttribMinHeight) {
profile_info->min_resolution.set_height(
base::strict_cast<int>(attrib.value.value.i));
+ } else if (attrib.type == VASurfaceAttribPixelFormat) {
+ // According to va.h, VASurfaceAttribPixelFormat is meaningful as input to
+ // vaQuerySurfaceAttributes(). However, per the implementation of
+ // i965_QuerySurfaceAttributes(), our usage here should enumerate all the
+ // formats.
+ profile_info->pixel_formats.push_back(attrib.value.value.i);
}
}
if (profile_info->max_resolution.IsEmpty()) {
@@ -1036,6 +1038,10 @@ bool VASupportedImageFormats::InitSupportedImageFormats_Locked() {
} // namespace
+NativePixmapAndSizeInfo::NativePixmapAndSizeInfo() = default;
+
+NativePixmapAndSizeInfo::~NativePixmapAndSizeInfo() = default;
+
// static
const std::string& VaapiWrapper::GetVendorStringForTesting() {
return VADisplayState::Get()->va_vendor_string();
@@ -1243,6 +1249,45 @@ bool VaapiWrapper::GetJpegDecodeSuitableImageFourCC(unsigned int rt_format,
}
// static
+bool VaapiWrapper::IsVppResolutionAllowed(const gfx::Size& size) {
+ VASupportedProfiles::ProfileInfo profile_info;
+ if (!VASupportedProfiles::Get().IsProfileSupported(
+ kVideoProcess, VAProfileNone, &profile_info)) {
+ return false;
+ }
+ return gfx::Rect(profile_info.min_resolution.width(),
+ profile_info.min_resolution.height(),
+ profile_info.max_resolution.width(),
+ profile_info.max_resolution.height())
+ .Contains(size.width(), size.height());
+}
+
+// static
+bool VaapiWrapper::IsVppSupportedForJpegDecodedSurfaceToFourCC(
+ unsigned int rt_format,
+ uint32_t fourcc) {
+ if (!IsDecodingSupportedForInternalFormat(VAProfileJPEGBaseline, rt_format))
+ return false;
+
+ VASupportedProfiles::ProfileInfo profile_info;
+ if (!VASupportedProfiles::Get().IsProfileSupported(
+ kVideoProcess, VAProfileNone, &profile_info)) {
+ return false;
+ }
+
+  // Workaround: the Mesa VAAPI driver's VPP only supports the internal
+  // surface format of 4:2:0 JPEG images.
+ if (base::StartsWith(VADisplayState::Get()->va_vendor_string(),
+ kMesaGalliumDriverPrefix,
+ base::CompareCase::SENSITIVE) &&
+ rt_format != VA_RT_FORMAT_YUV420) {
+ return false;
+ }
+
+ return base::Contains(profile_info.pixel_formats, fourcc);
+}
+
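A small sketch of how a caller might combine the two new static queries before taking a VPP path (illustrative only; |coded_size| is an assumed gfx::Size, and the VA_* constants are standard libva values):

  if (VaapiWrapper::IsVppResolutionAllowed(coded_size) &&
      VaapiWrapper::IsVppSupportedForJpegDecodedSurfaceToFourCC(
          VA_RT_FORMAT_YUV420, VA_FOURCC_NV12)) {
    // The driver can handle this size and can convert the 4:2:0 JPEG-decoded
    // surface to NV12, so the VPP conversion path is safe to use.
  }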
+// static
bool VaapiWrapper::IsJpegEncodeSupported() {
return VASupportedProfiles::Get().IsProfileSupported(kEncode,
VAProfileJPEGBaseline);
@@ -1262,8 +1307,6 @@ VaapiWrapper::GetSupportedImageFormatsForTesting() {
// static
uint32_t VaapiWrapper::BufferFormatToVARTFormat(gfx::BufferFormat fmt) {
switch (fmt) {
- case gfx::BufferFormat::UYVY_422:
- return VA_RT_FORMAT_YUV422;
case gfx::BufferFormat::BGRX_8888:
case gfx::BufferFormat::BGRA_8888:
case gfx::BufferFormat::RGBX_8888:
@@ -1302,7 +1345,8 @@ bool VaapiWrapper::CreateContextAndSurfaces(
std::unique_ptr<ScopedVASurface> VaapiWrapper::CreateContextAndScopedVASurface(
unsigned int va_format,
- const gfx::Size& size) {
+ const gfx::Size& size,
+ const base::Optional<gfx::Size>& visible_size) {
if (va_context_id_ != VA_INVALID_ID) {
LOG(ERROR) << "The current context should be destroyed before creating a "
"new one";
@@ -1310,7 +1354,7 @@ std::unique_ptr<ScopedVASurface> VaapiWrapper::CreateContextAndScopedVASurface(
}
std::unique_ptr<ScopedVASurface> scoped_va_surface =
- CreateScopedVASurface(va_format, size);
+ CreateScopedVASurface(va_format, size, visible_size);
if (!scoped_va_surface)
return nullptr;
@@ -1405,15 +1449,22 @@ scoped_refptr<VASurface> VaapiWrapper::CreateVASurfaceForPixmap(
base::BindOnce(&VaapiWrapper::DestroySurface, this));
}
-scoped_refptr<gfx::NativePixmapDmaBuf>
-VaapiWrapper::ExportVASurfaceAsNativePixmapDmaBuf(VASurfaceID va_surface_id) {
+std::unique_ptr<NativePixmapAndSizeInfo>
+VaapiWrapper::ExportVASurfaceAsNativePixmapDmaBuf(
+ const ScopedVASurface& scoped_va_surface) {
+ if (!scoped_va_surface.IsValid()) {
+ LOG(ERROR) << "Cannot export an invalid surface";
+ return nullptr;
+ }
+
VADRMPRIMESurfaceDescriptor descriptor;
{
base::AutoLock auto_lock(*va_lock_);
- VAStatus va_res = vaSyncSurface(va_display_, va_surface_id);
+ VAStatus va_res = vaSyncSurface(va_display_, scoped_va_surface.id());
VA_SUCCESS_OR_RETURN(va_res, "Cannot sync VASurface", nullptr);
va_res = vaExportSurfaceHandle(
- va_display_, va_surface_id, VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME_2,
+ va_display_, scoped_va_surface.id(),
+ VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME_2,
VA_EXPORT_SURFACE_READ_ONLY | VA_EXPORT_SURFACE_SEPARATE_LAYERS,
&descriptor);
VA_SUCCESS_OR_RETURN(va_res, "Failed to export VASurface", nullptr);
@@ -1426,6 +1477,7 @@ VaapiWrapper::ExportVASurfaceAsNativePixmapDmaBuf(VASurfaceID va_surface_id) {
// work in AMD.
if (descriptor.num_objects != 1u) {
DVLOG(1) << "Only surface descriptors with one bo are supported";
+ NOTREACHED();
return nullptr;
}
base::ScopedFD bo_fd(descriptor.objects[0].fd);
@@ -1480,10 +1532,33 @@ VaapiWrapper::ExportVASurfaceAsNativePixmapDmaBuf(VASurfaceID va_surface_id) {
std::swap(handle.planes[1], handle.planes[2]);
}
- return base::MakeRefCounted<gfx::NativePixmapDmaBuf>(
+ auto exported_pixmap = std::make_unique<NativePixmapAndSizeInfo>();
+ exported_pixmap->va_surface_resolution =
gfx::Size(base::checked_cast<int>(descriptor.width),
- base::checked_cast<int>(descriptor.height)),
- buffer_format, std::move(handle));
+ base::checked_cast<int>(descriptor.height));
+ exported_pixmap->byte_size =
+ base::strict_cast<size_t>(descriptor.objects[0].size);
+ if (!gfx::Rect(exported_pixmap->va_surface_resolution)
+ .Contains(gfx::Rect(scoped_va_surface.size()))) {
+ LOG(ERROR) << "A " << scoped_va_surface.size().ToString()
+ << " ScopedVASurface cannot be contained by a "
+ << exported_pixmap->va_surface_resolution.ToString()
+ << " buffer";
+ return nullptr;
+ }
+ exported_pixmap->pixmap = base::MakeRefCounted<gfx::NativePixmapDmaBuf>(
+ scoped_va_surface.size(), buffer_format, std::move(handle));
+ return exported_pixmap;
+}
+
+bool VaapiWrapper::SyncSurface(VASurfaceID va_surface_id) {
+ DCHECK_NE(va_surface_id, VA_INVALID_ID);
+
+ base::AutoLock auto_lock(*va_lock_);
+
+ VAStatus va_res = vaSyncSurface(va_display_, va_surface_id);
+ VA_SUCCESS_OR_RETURN(va_res, "Failed syncing surface", false);
+ return true;
}
bool VaapiWrapper::SubmitBuffer(VABufferType va_buffer_type,
@@ -2002,7 +2077,8 @@ bool VaapiWrapper::CreateSurfaces(unsigned int va_format,
std::unique_ptr<ScopedVASurface> VaapiWrapper::CreateScopedVASurface(
unsigned int va_rt_format,
- const gfx::Size& size) {
+ const gfx::Size& size,
+ const base::Optional<gfx::Size>& visible_size) {
if (kInvalidVaRtFormat == va_rt_format) {
LOG(ERROR) << "Invalid VA RT format to CreateScopedVASurface";
return nullptr;
@@ -2024,8 +2100,10 @@ std::unique_ptr<ScopedVASurface> VaapiWrapper::CreateScopedVASurface(
DCHECK_NE(VA_INVALID_ID, va_surface_id)
<< "Invalid VA surface id after vaCreateSurfaces";
+ DCHECK(!visible_size.has_value() || !visible_size->IsEmpty());
auto scoped_va_surface = std::make_unique<ScopedVASurface>(
- this, va_surface_id, size, va_rt_format);
+ this, va_surface_id, visible_size.has_value() ? *visible_size : size,
+ va_rt_format);
DCHECK(scoped_va_surface);
DCHECK(scoped_va_surface->IsValid());
diff --git a/chromium/media/gpu/vaapi/vaapi_wrapper.h b/chromium/media/gpu/vaapi/vaapi_wrapper.h
index 4d6c0d0de47..5c35b20dd38 100644
--- a/chromium/media/gpu/vaapi/vaapi_wrapper.h
+++ b/chromium/media/gpu/vaapi/vaapi_wrapper.h
@@ -14,6 +14,7 @@
#include <stdint.h>
#include <va/va.h>
+#include <memory>
#include <set>
#include <string>
#include <vector>
@@ -23,6 +24,7 @@
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "base/memory/scoped_refptr.h"
+#include "base/optional.h"
#include "base/synchronization/lock.h"
#include "base/thread_annotations.h"
#include "media/gpu/media_gpu_export.h"
@@ -48,6 +50,30 @@ class ScopedVAImage;
class ScopedVASurface;
class VideoFrame;
+// This struct holds a NativePixmapDmaBuf, usually the result of exporting a VA
+// surface, and some associated size information needed to tell clients about
+// the underlying buffer.
+struct NativePixmapAndSizeInfo {
+ NativePixmapAndSizeInfo();
+ ~NativePixmapAndSizeInfo();
+
+  // The VA-API internal buffer dimensions, which may differ from the
+  // dimensions requested when the surface was created (but are always larger
+  // than or equal to them). This can be used for validation in, e.g., testing.
+ gfx::Size va_surface_resolution;
+
+ // The size of the underlying Buffer Object. A use case for this is when an
+ // image decode is requested and the caller needs to know the size of the
+ // allocated buffer for caching purposes.
+ size_t byte_size = 0u;
+
+ // Contains the information needed to use the surface in a graphics API,
+ // including the visible size (|pixmap|->GetBufferSize()) which should be no
+ // larger than |va_surface_resolution|.
+ scoped_refptr<gfx::NativePixmapDmaBuf> pixmap;
+};
+
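A small sketch of the invariants a consumer of this struct can rely on, mirroring the checks added to the WebP decoder unit test (illustrative only, not part of the API):

  void CheckExportedSurface(const NativePixmapAndSizeInfo& info) {
    // The visible size reported by the pixmap fits inside the
    // driver-allocated surface resolution.
    DCHECK(gfx::Rect(info.va_surface_resolution)
               .Contains(gfx::Rect(info.pixmap->GetBufferSize())));
    // The backing buffer object is never empty.
    DCHECK_GT(info.byte_size, 0u);
  }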
// This class handles VA-API calls and ensures proper locking of VA-API calls
// to libva, the userspace shim to the HW codec driver. libva is not
// thread-safe, so we have to perform locking ourselves. This class is fully
@@ -138,6 +164,17 @@ class MEDIA_GPU_EXPORT VaapiWrapper
uint32_t preferred_fourcc,
uint32_t* suitable_fourcc);
+ // Checks the surface size is allowed for VPP. Returns true if the size is
+ // supported, false otherwise.
+ static bool IsVppResolutionAllowed(const gfx::Size& size);
+
+ // Returns true if VPP supports the format conversion from a JPEG decoded
+ // internal surface to a FOURCC. |rt_format| corresponds to the JPEG's
+ // subsampling format. |fourcc| is the output surface's FOURCC.
+ static bool IsVppSupportedForJpegDecodedSurfaceToFourCC(
+ unsigned int rt_format,
+ uint32_t fourcc);
+
// Return true when JPEG encode is supported.
static bool IsJpegEncodeSupported();
@@ -159,13 +196,15 @@ class MEDIA_GPU_EXPORT VaapiWrapper
size_t num_surfaces,
std::vector<VASurfaceID>* va_surfaces);
- // Creates a single VASurfaceID of |va_format| and |size| and, if successful,
- // creates a |va_context_id_| of the same size. Returns a ScopedVASurface
- // containing the created VASurfaceID, the |va_format|, and |size|, or nullptr
- // if creation failed.
+ // Creates a single ScopedVASurface of |va_format| and |size| and, if
+ // successful, creates a |va_context_id_| of the same size. Returns nullptr if
+ // creation failed. If |visible_size| is supplied, the returned
+ // ScopedVASurface's size is set to it. Otherwise, it's set to |size| (refer
+ // to CreateScopedVASurface() for details).
std::unique_ptr<ScopedVASurface> CreateContextAndScopedVASurface(
unsigned int va_format,
- const gfx::Size& size);
+ const gfx::Size& size,
+ const base::Optional<gfx::Size>& visible_size = base::nullopt);
// Releases the |va_surfaces| and destroys |va_context_id_|.
virtual void DestroyContextAndSurfaces(std::vector<VASurfaceID> va_surfaces);
@@ -178,20 +217,27 @@ class MEDIA_GPU_EXPORT VaapiWrapper
// Destroys the context identified by |va_context_id_|.
void DestroyContext();
- // Tries to allocate a VA surface of size |size| and |va_rt_format|.
- // Returns a self-cleaning ScopedVASurface or nullptr if creation failed.
+ // Requests a VA surface of size |size| and |va_rt_format|. Returns a
+ // self-cleaning ScopedVASurface or nullptr if creation failed. If
+ // |visible_size| is supplied, the returned ScopedVASurface's size is set to
+  // it. For example, we may request a 16x16 surface to decode a 13x12 JPEG and
+  // keep track of the 13x12 visible size inside the ScopedVASurface to inform
+  // the surface's users that only that region has meaningful content. If
+  // |visible_size| is not supplied, we store |size|
+ // in the returned ScopedVASurface.
std::unique_ptr<ScopedVASurface> CreateScopedVASurface(
unsigned int va_rt_format,
- const gfx::Size& size);
+ const gfx::Size& size,
+ const base::Optional<gfx::Size>& visible_size = base::nullopt);
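For example, a sketch of the 13x12 JPEG case described above (|vaapi_wrapper| is an assumed, initialized VaapiWrapper instance):

  // The driver may only allocate 16x16-aligned surfaces, but the returned
  // ScopedVASurface still reports the 13x12 visible size to its users.
  std::unique_ptr<ScopedVASurface> surface =
      vaapi_wrapper->CreateScopedVASurface(VA_RT_FORMAT_YUV420,
                                           gfx::Size(16, 16),
                                           gfx::Size(13, 12) /* visible_size */);
  // On success, surface->size() is 13x12 rather than 16x16.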
// Creates a self-releasing VASurface from |pixmap|. The ownership of the
// surface is transferred to the caller.
scoped_refptr<VASurface> CreateVASurfaceForPixmap(
const scoped_refptr<gfx::NativePixmap>& pixmap);
- // Syncs and exports the VA surface identified by |va_surface_id| as a
- // gfx::NativePixmapDmaBuf. Currently, the only VAAPI surface pixel formats
- // supported are VA_FOURCC_IMC3 and VA_FOURCC_NV12.
+ // Syncs and exports |va_surface| as a gfx::NativePixmapDmaBuf. Currently, the
+ // only VAAPI surface pixel formats supported are VA_FOURCC_IMC3 and
+ // VA_FOURCC_NV12.
//
// Notes:
//
@@ -205,8 +251,12 @@ class MEDIA_GPU_EXPORT VaapiWrapper
// gfx::BufferFormat::YUV_420_BIPLANAR.
//
// Returns nullptr on failure.
- scoped_refptr<gfx::NativePixmapDmaBuf> ExportVASurfaceAsNativePixmapDmaBuf(
- VASurfaceID va_surface_id);
+ std::unique_ptr<NativePixmapAndSizeInfo> ExportVASurfaceAsNativePixmapDmaBuf(
+ const ScopedVASurface& va_surface);
+
+ // Synchronize the VASurface explicitly. This is useful when sharing a surface
+ // between contexts.
+ bool SyncSurface(VASurfaceID va_surface_id);
// Submit parameters or slice data of |va_buffer_type|, copying them from
// |buffer| of size |size|, into HW codec. The data in |buffer| is no
diff --git a/chromium/media/gpu/vaapi/vp8_encoder.cc b/chromium/media/gpu/vaapi/vp8_encoder.cc
index c2b4f03f753..d72d152f991 100644
--- a/chromium/media/gpu/vaapi/vp8_encoder.cc
+++ b/chromium/media/gpu/vaapi/vp8_encoder.cc
@@ -18,8 +18,9 @@ constexpr int kCPBWindowSizeMs = 1500;
// Based on WebRTC's defaults.
constexpr int kMinQP = 4;
-// b/110059922: Tuned 112->113 for bitrate issue in a lower resolution (180p).
-constexpr int kMaxQP = 113;
+// b/110059922, crbug.com/1001900: Tuned 112->117 for bitrate issue in a lower
+// resolution (180p).
+constexpr int kMaxQP = 117;
constexpr int kDefaultQP = (3 * kMinQP + kMaxQP) / 4;
} // namespace
@@ -153,10 +154,11 @@ void VP8Encoder::InitializeFrameHeader() {
DCHECK(!visible_size_.IsEmpty());
current_frame_hdr_.width = visible_size_.width();
current_frame_hdr_.height = visible_size_.height();
- // Since initial_qp is always kDefaultQP (=31), y_ac_qi should be 27
+ // Since initial_qp is always kDefaultQP (=32), y_ac_qi should be 28
// (the table index for kDefaultQP, see rfc 14.1. table ac_qlookup)
+  static_assert(kDefaultQP == 32, "kDefaultQP is not 32");
DCHECK_EQ(current_params_.initial_qp, kDefaultQP);
- constexpr uint8_t kDefaultQPACQIndex = 27;
+ constexpr uint8_t kDefaultQPACQIndex = 28;
current_frame_hdr_.quantization_hdr.y_ac_qi = kDefaultQPACQIndex;
current_frame_hdr_.show_frame = true;
// TODO(sprang): Make this dynamic. Value based on reference implementation
diff --git a/chromium/media/gpu/video_decode_accelerator_perf_tests.cc b/chromium/media/gpu/video_decode_accelerator_perf_tests.cc
index 12c424c78ab..68302ac9bec 100644
--- a/chromium/media/gpu/video_decode_accelerator_perf_tests.cc
+++ b/chromium/media/gpu/video_decode_accelerator_perf_tests.cc
@@ -87,8 +87,9 @@ struct PerformanceMetrics {
// The number of frames dropped because of the decoder running behind, only
// relevant for capped performance tests.
size_t frames_dropped_ = 0;
- // The rate at which frames are dropped: dropped frames / non-dropped frames.
- double dropped_frame_rate_ = 0;
+ // The percentage of frames dropped because of the decoder running behind,
+ // only relevant for capped performance tests.
+ double dropped_frame_percentage_ = 0.0;
// Statistics about the time between subsequent frame deliveries.
PerformanceTimeStats delivery_time_stats_;
// Statistics about the time between decode start and frame deliveries.
@@ -165,12 +166,12 @@ void PerformanceEvaluator::StopMeasuring() {
perf_metrics_.total_duration_.InSecondsF();
perf_metrics_.frames_dropped_ = frame_renderer_->FramesDropped();
- // Calculate the frame drop rate.
- // TODO(dstaessens@) Find a better metric for dropped frames.
- size_t frames_rendered =
- perf_metrics_.frames_decoded_ - perf_metrics_.frames_dropped_;
- perf_metrics_.dropped_frame_rate_ =
- perf_metrics_.frames_dropped_ / std::max<size_t>(frames_rendered, 1ul);
+ // Calculate the dropped frame percentage.
+ perf_metrics_.dropped_frame_percentage_ =
+ static_cast<double>(perf_metrics_.frames_dropped_) /
+ static_cast<double>(
+ std::max<size_t>(perf_metrics_.frames_decoded_, 1ul)) *
+ 100.0;
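For example, a run that decodes 300 frames and drops 6 of them reports a dropped frame percentage of 6 / 300 * 100 = 2%.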
// Calculate delivery and decode time metrics.
perf_metrics_.delivery_time_stats_ =
@@ -186,8 +187,8 @@ void PerformanceEvaluator::StopMeasuring() {
<< std::endl;
std::cout << "Frames Dropped: " << perf_metrics_.frames_dropped_
<< std::endl;
- std::cout << "Dropped frame rate: " << perf_metrics_.dropped_frame_rate_
- << std::endl;
+ std::cout << "Dropped frame percentage: "
+ << perf_metrics_.dropped_frame_percentage_ << "%" << std::endl;
std::cout << "Frame delivery time - average: "
<< perf_metrics_.delivery_time_stats_.avg_ms_ << "ms" << std::endl;
std::cout << "Frame delivery time - percentile 25: "
@@ -229,8 +230,8 @@ void PerformanceEvaluator::WriteMetricsToFile() const {
metrics.SetKey(
"FramesDropped",
base::Value(base::checked_cast<int>(perf_metrics_.frames_dropped_)));
- metrics.SetKey("DroppedFrameRate",
- base::Value(perf_metrics_.dropped_frame_rate_));
+ metrics.SetKey("DroppedFramePercentage",
+ base::Value(perf_metrics_.dropped_frame_percentage_));
metrics.SetKey("FrameDeliveryTimeAverage",
base::Value(perf_metrics_.delivery_time_stats_.avg_ms_));
metrics.SetKey(
@@ -321,8 +322,15 @@ class VideoDecoderTest : public ::testing::Test {
if (!g_env->ImportSupported())
config.allocation_mode = AllocationMode::kAllocate;
- return VideoPlayer::Create(video, std::move(frame_renderer),
- std::move(frame_processors), config);
+ auto video_player = VideoPlayer::Create(config, std::move(frame_renderer),
+ std::move(frame_processors));
+ LOG_ASSERT(video_player);
+ LOG_ASSERT(video_player->Initialize(video));
+
+ // Make sure the event timeout is at least as long as the video's duration.
+ video_player->SetEventWaitTimeout(
+ std::max(kDefaultEventWaitTimeout, g_env->Video()->GetDuration()));
+ return video_player;
}
PerformanceEvaluator* performance_evaluator_;
diff --git a/chromium/media/gpu/video_decode_accelerator_tests.cc b/chromium/media/gpu/video_decode_accelerator_tests.cc
index 261ada781c5..4532c2eb826 100644
--- a/chromium/media/gpu/video_decode_accelerator_tests.cc
+++ b/chromium/media/gpu/video_decode_accelerator_tests.cc
@@ -74,8 +74,10 @@ class VideoDecoderTest : public ::testing::Test {
media::test::VideoFrameValidator::Create(video->FrameChecksums()));
}
- // Write decoded video frames to the '<testname>' folder.
- if (g_env->IsFramesOutputEnabled()) {
+ // Write decoded video frames to the '<testname>' folder if import mode is
+ // supported and enabled.
+ if (g_env->IsFramesOutputEnabled() &&
+ config.allocation_mode == AllocationMode::kImport) {
base::FilePath output_folder =
base::FilePath(g_env->OutputFolder())
.Append(base::FilePath(g_env->GetTestName()));
@@ -86,8 +88,17 @@ class VideoDecoderTest : public ::testing::Test {
// Use the new VD-based video decoders if requested.
config.use_vd = g_env->UseVD();
- return VideoPlayer::Create(video, std::move(frame_renderer),
- std::move(frame_processors), config);
+ auto video_player = VideoPlayer::Create(config, std::move(frame_renderer),
+ std::move(frame_processors));
+ LOG_ASSERT(video_player);
+ LOG_ASSERT(video_player->Initialize(video));
+
+ // Increase event timeout when outputting video frames.
+ if (g_env->IsFramesOutputEnabled()) {
+ video_player->SetEventWaitTimeout(std::max(
+ kDefaultEventWaitTimeout, g_env->Video()->GetDuration() * 10));
+ }
+ return video_player;
}
};
@@ -310,6 +321,53 @@ TEST_F(VideoDecoderTest, FlushAtEndOfStream_Allocate) {
EXPECT_TRUE(tvp->WaitForFrameProcessors());
}
+// Test initializing the video decoder for the specified video. Initialization
+// will be successful if the video decoder is capable of decoding the test
+// video's configuration (e.g. codec and resolution). The test only verifies
+// initialization and doesn't decode the video.
+TEST_F(VideoDecoderTest, Initialize) {
+ auto tvp = CreateVideoPlayer(g_env->Video());
+ EXPECT_EQ(tvp->GetEventCount(VideoPlayerEvent::kInitialized), 1u);
+}
+
+// Test video decoder re-initialization. Re-initialization is only supported by
+// the media::VideoDecoder interface, so the test will be skipped if --use_vd
+// is not specified.
+TEST_F(VideoDecoderTest, Reinitialize) {
+ if (!g_env->UseVD())
+ GTEST_SKIP();
+
+ // Create and initialize the video decoder.
+ auto tvp = CreateVideoPlayer(g_env->Video());
+ EXPECT_EQ(tvp->GetEventCount(VideoPlayerEvent::kInitialized), 1u);
+
+ // Re-initialize the video decoder, without having played the video.
+ EXPECT_TRUE(tvp->Initialize(g_env->Video()));
+ EXPECT_EQ(tvp->GetEventCount(VideoPlayerEvent::kInitialized), 2u);
+
+ // Play the video from start to end.
+ tvp->Play();
+ EXPECT_TRUE(tvp->WaitForFlushDone());
+ EXPECT_EQ(tvp->GetFlushDoneCount(), 1u);
+ EXPECT_EQ(tvp->GetFrameDecodedCount(), g_env->Video()->NumFrames());
+ EXPECT_TRUE(tvp->WaitForFrameProcessors());
+
+ // Try re-initializing the video decoder again.
+ EXPECT_TRUE(tvp->Initialize(g_env->Video()));
+ EXPECT_EQ(tvp->GetEventCount(VideoPlayerEvent::kInitialized), 3u);
+}
+
+// Create a video decoder and immediately destroy it without initializing. The
+// video decoder will be automatically destroyed when the video player goes out
+// of scope at the end of the test. The test passes if no asserts or crashes
+// are triggered upon destruction.
+TEST_F(VideoDecoderTest, DestroyBeforeInitialize) {
+ VideoDecoderClientConfig config = VideoDecoderClientConfig();
+ config.use_vd = g_env->UseVD();
+ auto tvp = VideoPlayer::Create(config, FrameRendererDummy::Create());
+ EXPECT_NE(tvp, nullptr);
+}
+
} // namespace test
} // namespace media
diff --git a/chromium/media/gpu/video_decode_accelerator_unittest.cc b/chromium/media/gpu/video_decode_accelerator_unittest.cc
index 7db3114e664..109df8209fe 100644
--- a/chromium/media/gpu/video_decode_accelerator_unittest.cc
+++ b/chromium/media/gpu/video_decode_accelerator_unittest.cc
@@ -52,7 +52,7 @@
#include "base/synchronization/lock.h"
#include "base/synchronization/waitable_event.h"
#include "base/test/launcher/unit_test_launcher.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/test/test_suite.h"
#include "base/threading/thread.h"
#include "base/threading/thread_task_runner_handle.h"
@@ -403,7 +403,7 @@ class GLRenderingVDAClient
const std::unique_ptr<media::test::VideoFrameFileWriter> video_frame_writer_;
base::WeakPtr<GLRenderingVDAClient> weak_this_;
- base::WeakPtrFactory<GLRenderingVDAClient> weak_this_factory_;
+ base::WeakPtrFactory<GLRenderingVDAClient> weak_this_factory_{this};
DISALLOW_IMPLICIT_CONSTRUCTORS(GLRenderingVDAClient);
};
@@ -442,8 +442,7 @@ GLRenderingVDAClient::GLRenderingVDAClient(
std::move(encoded_data),
config_.profile)),
video_frame_validator_(std::move(video_frame_validator)),
- video_frame_writer_(std::move(video_frame_writer)),
- weak_this_factory_(this) {
+ video_frame_writer_(std::move(video_frame_writer)) {
DCHECK_NE(config.profile, VIDEO_CODEC_PROFILE_UNKNOWN);
LOG_ASSERT(config_.num_in_flight_decodes > 0);
LOG_ASSERT(config_.num_play_throughs > 0);
@@ -539,15 +538,22 @@ void GLRenderingVDAClient::ProvidePictureBuffers(
texture_target_, g_test_import, pixel_format, dimensions);
LOG_ASSERT(texture_ref);
int32_t picture_buffer_id = next_picture_buffer_id_++;
- int irrelevant_id = picture_buffer_id;
LOG_ASSERT(
active_textures_.insert(std::make_pair(picture_buffer_id, texture_ref))
.second);
- PictureBuffer::TextureIds texture_ids(1, texture_ref->texture_id());
- buffers.push_back(PictureBuffer(picture_buffer_id, dimensions,
- PictureBuffer::TextureIds{irrelevant_id++},
- texture_ids, texture_target, pixel_format));
+ if (g_test_import) {
+ // Texture ids are not needed in import mode. GpuArcVideoDecodeAccelerator
+ // actually doesn't pass them. This test code follows the implementation.
+ buffers.push_back(PictureBuffer(picture_buffer_id, dimensions));
+ } else {
+ int irrelevant_id = picture_buffer_id;
+ PictureBuffer::TextureIds texture_ids(1, texture_ref->texture_id());
+ buffers.push_back(
+ PictureBuffer(picture_buffer_id, dimensions,
+ PictureBuffer::TextureIds{irrelevant_id++}, texture_ids,
+ texture_target, pixel_format));
+ }
}
decoder_->AssignPictureBuffers(buffers);
@@ -1036,7 +1042,7 @@ void VideoDecodeAcceleratorTest::ParseAndReadTestVideoData(
LOG_ASSERT(base::StringToDouble(field, &video_file->min_fps_render));
}
if (!fields[6].empty()) {
- std::string field(fields[5].begin(), fields[5].end());
+ std::string field(fields[6].begin(), fields[6].end());
LOG_ASSERT(base::StringToDouble(field, &video_file->min_fps_no_render));
}
// Default to H264 baseline if no profile provided.
@@ -1229,6 +1235,14 @@ TEST_P(VideoDecodeAcceleratorParamTest, MAYBE_TestSimpleDecode) {
return;
}
+ if (render_as_thumbnails && g_test_import) {
+    // We cannot render a thumbnail in import mode because we don't assign a
+    // texture id to the PictureBuffer. Frame soundness is already ensured by
+    // the frame validator in import mode, so skipping does not reduce the test
+    // coverage.
+ GTEST_SKIP();
+ }
+
if (test_video_files_.size() > 1)
num_concurrent_decoders = test_video_files_.size();
@@ -1714,12 +1728,10 @@ class VDATestSuite : public base::TestSuite {
// which uses COM. We need the thread to be a UI thread.
// On Ozone, the backend initializes the event system using a UI
// thread.
- scoped_task_environment_ =
- std::make_unique<base::test::ScopedTaskEnvironment>(
- base::test::ScopedTaskEnvironment::MainThreadType::UI);
+ task_environment_ = std::make_unique<base::test::TaskEnvironment>(
+ base::test::TaskEnvironment::MainThreadType::UI);
#else
- scoped_task_environment_ =
- std::make_unique<base::test::ScopedTaskEnvironment>();
+ task_environment_ = std::make_unique<base::test::TaskEnvironment>();
#endif // OS_WIN || OS_CHROMEOS
media::g_env =
@@ -1742,11 +1754,11 @@ class VDATestSuite : public base::TestSuite {
}
void Shutdown() override {
- scoped_task_environment_.reset();
+ task_environment_.reset();
base::TestSuite::Shutdown();
}
- std::unique_ptr<base::test::ScopedTaskEnvironment> scoped_task_environment_;
+ std::unique_ptr<base::test::TaskEnvironment> task_environment_;
};
} // namespace
diff --git a/chromium/media/gpu/video_encode_accelerator_unittest.cc b/chromium/media/gpu/video_encode_accelerator_unittest.cc
index 67ba27c02f6..39ff09345e1 100644
--- a/chromium/media/gpu/video_encode_accelerator_unittest.cc
+++ b/chromium/media/gpu/video_encode_accelerator_unittest.cc
@@ -24,6 +24,7 @@
#include "base/memory/ref_counted.h"
#include "base/memory/unsafe_shared_memory_region.h"
#include "base/memory/weak_ptr.h"
+#include "base/message_loop/message_pump_type.h"
#include "base/numerics/safe_conversions.h"
#include "base/process/process_handle.h"
#include "base/single_thread_task_runner.h"
@@ -35,7 +36,7 @@
#include "base/system/sys_info.h"
#include "base/test/launcher/unit_test_launcher.h"
#include "base/test/scoped_feature_list.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/test/test_suite.h"
#include "base/threading/thread.h"
#include "base/threading/thread_checker.h"
@@ -63,6 +64,7 @@
#include "media/gpu/gpu_video_encode_accelerator_factory.h"
#include "media/gpu/h264_decoder.h"
#include "media/gpu/h264_dpb.h"
+#include "media/gpu/macros.h"
#include "media/gpu/test/video_accelerator_unittest_helpers.h"
#include "media/gpu/test/video_frame_helpers.h"
#include "media/parsers/vp8_parser.h"
@@ -112,10 +114,6 @@ const uint32_t kDefaultFramerate = 30;
const double kDefaultSubsequentFramerateRatio = 0.1;
// Tolerance factor for how encoded bitrate can differ from requested bitrate.
const double kBitrateTolerance = 0.1;
-// Minimum required FPS throughput for the basic performance test.
-const uint32_t kMinPerfFPS = 30;
-// The frame size for 2160p (UHD 4K) video in pixels.
-const int k2160PSizeInPixels = 3840 * 2160;
// Minimum (arbitrary) number of frames required to enforce bitrate requirements
// over. Streams shorter than this may be too short to realistically require
// an encoder to be able to converge to the requested bitrate over.
@@ -124,16 +122,12 @@ const int k2160PSizeInPixels = 3840 * 2160;
const unsigned int kMinFramesForBitrateTests = 300;
// The percentiles to measure for encode latency.
const unsigned int kLoggedLatencyPercentiles[] = {50, 75, 95};
-// Timeout for the flush is completed. The period starts from passing the last
-// frame to the encoder, to the flush callback is called. There might be many
-// pending frames in the encoder, so the timeout might be larger than a frame
-// period.
+// Timeout between each BitstreamBufferReady() call and flush callback.
// In the multiple encoder test case, the FPS might be lower than expected.
// Currently the largest resolution we run at lab is 4K. The FPS of the slowest
-// device in single encoder is about 10. In MultipleEncoders test case, the
-// measured time period on the slowest device is about 5 seconds. Here we set
-// the timeout 2x of the measured period.
-const unsigned int kFlushTimeoutMs = 10000;
+// device in the MultipleEncoders test case is 3. Here we set the timeout to
+// 10x the expected frame period as a safety margin.
+const unsigned int kBitstreamBufferReadyTimeoutMs = 3000;
// The syntax of multiple test streams is:
// test-stream1;test-stream2;test-stream3
@@ -337,10 +331,11 @@ static bool IsVP9(VideoCodecProfile profile) {
#if defined(OS_CHROMEOS)
// Determine the test is known-to-fail and should be skipped.
-bool ShouldSkipTest() {
+bool ShouldSkipTest(VideoPixelFormat format) {
struct Pattern {
const char* board_pattern;
const char* suite_name_prefix;
+ VideoPixelFormat format; // Set PIXEL_FORMAT_UNKNOWN for any format.
};
// Warning: The list should be only used as a last resort for known vendor
@@ -348,10 +343,15 @@ bool ShouldSkipTest() {
constexpr Pattern kSkipTestPatterns[] = {
// crbug.com/769722: MTK driver doesn't compute bitrate correctly.
// Disable mid_stream_bitrate_switch test cases for elm/hana.
- {"elm", "MidStreamParamSwitchBitrate"},
- {"elm", "MultipleEncoders"},
- {"hana", "MidStreamParamSwitchBitrate"},
- {"hana", "MultipleEncoders"},
+ {"elm", "MidStreamParamSwitchBitrate", PIXEL_FORMAT_UNKNOWN},
+ {"elm", "MultipleEncoders", PIXEL_FORMAT_UNKNOWN},
+ {"hana", "MidStreamParamSwitchBitrate", PIXEL_FORMAT_UNKNOWN},
+ {"hana", "MultipleEncoders", PIXEL_FORMAT_UNKNOWN},
+
+      // crbug.com/965348#c6: the Tegra driver calculates the wrong plane size
+      // for NV12, so disable all NV12 tests on the nyan family.
+      // TODO(akahuang): Remove this after the nyan family is EOL.
+ {"nyan_*", "", PIXEL_FORMAT_NV12},
};
const std::string board = base::SysInfo::GetLsbReleaseBoard();
@@ -365,10 +365,12 @@ bool ShouldSkipTest() {
->test_suite_name();
for (const auto& pattern : kSkipTestPatterns) {
if (suite_name.find(pattern.suite_name_prefix) == 0 &&
- base::MatchPattern(board, pattern.board_pattern)) {
+ base::MatchPattern(board, pattern.board_pattern) &&
+ (pattern.format == PIXEL_FORMAT_UNKNOWN || pattern.format == format)) {
return true;
}
}
+
return false;
}
#endif // defined(OS_CHROMEOS)
@@ -554,25 +556,34 @@ static void CreateAlignedInputStreamFile(const gfx::Size& coded_size,
int64_t src_file_size = 0;
LOG_ASSERT(base::GetFileSize(src_file, &src_file_size));
- size_t visible_buffer_size =
- VideoFrame::AllocationSize(pixel_format, test_stream->visible_size);
- LOG_ASSERT(src_file_size % visible_buffer_size == 0U)
+  // NOTE: VideoFrame::AllocationSize() cannot be used here because the width
+  // and height of each plane are aligned to 2 for YUV formats.
+ size_t frame_buffer_size = 0;
+ for (size_t i = 0; i < num_planes; ++i) {
+ size_t row_bytes = VideoFrame::RowBytes(i, pixel_format,
+ test_stream->visible_size.width());
+ size_t rows =
+ VideoFrame::Rows(i, pixel_format, test_stream->visible_size.height());
+ frame_buffer_size += rows * row_bytes;
+ }
+
+ LOG_ASSERT(src_file_size % frame_buffer_size == 0U)
<< "Stream byte size is not a product of calculated frame byte size";
test_stream->num_frames =
- static_cast<unsigned int>(src_file_size / visible_buffer_size);
+ static_cast<unsigned int>(src_file_size / frame_buffer_size);
LOG_ASSERT(test_stream->aligned_buffer_size > 0UL);
test_stream->aligned_in_file_data.resize(test_stream->aligned_buffer_size *
test_stream->num_frames);
base::File src(src_file, base::File::FLAG_OPEN | base::File::FLAG_READ);
- std::vector<char> src_data(visible_buffer_size);
+ std::vector<char> src_data(frame_buffer_size);
off_t src_offset = 0, dest_offset = 0;
for (size_t frame = 0; frame < test_stream->num_frames; frame++) {
LOG_ASSERT(src.Read(src_offset, &src_data[0],
- static_cast<int>(visible_buffer_size)) ==
- static_cast<int>(visible_buffer_size));
+ static_cast<int>(frame_buffer_size)) ==
+ static_cast<int>(frame_buffer_size));
const char* src_ptr = &src_data[0];
for (size_t i = 0; i < num_planes; i++) {
// Assert that each plane of frame starts at required byte boundary.
@@ -586,7 +597,7 @@ static void CreateAlignedInputStreamFile(const gfx::Size& coded_size,
}
dest_offset += static_cast<off_t>(padding_sizes[i]);
}
- src_offset += static_cast<off_t>(visible_buffer_size);
+ src_offset += static_cast<off_t>(frame_buffer_size);
}
src.Close();
@@ -734,7 +745,7 @@ class VideoEncodeAcceleratorTestEnvironment : public ::testing::Environment {
ui::OzonePlatform::InitializeForUI(params);
base::Thread::Options options;
- options.message_loop_type = base::MessageLoop::TYPE_UI;
+ options.message_pump_type = base::MessagePumpType::UI;
ASSERT_TRUE(rendering_thread_.StartWithOptions(options));
base::WaitableEvent done(base::WaitableEvent::ResetPolicy::AUTOMATIC,
base::WaitableEvent::InitialState::NOT_SIGNALED);
@@ -1046,7 +1057,7 @@ void VP9Validator::ProcessStreamBuffer(const uint8_t* stream, size_t size) {
// partition numbers/sizes. For now assume one frame per buffer.
Vp9FrameHeader header;
gfx::Size allocate_size;
- parser_.SetStream(stream, size, {}, nullptr);
+ parser_.SetStream(stream, size, nullptr);
EXPECT_TRUE(Vp9Parser::kInvalidStream !=
parser_.ParseNextFrame(&header, &allocate_size, nullptr));
if (header.IsKeyframe()) {
@@ -1523,7 +1534,6 @@ class VEAClient : public VEAClientBase {
bool save_to_file,
unsigned int keyframe_period,
bool force_bitrate,
- bool test_perf,
bool mid_stream_bitrate_switch,
bool mid_stream_framerate_switch,
bool verify_output,
@@ -1573,12 +1583,9 @@ class VEAClient : public VEAClientBase {
void FlushEncoderDone(bool success);
void FlushEncoderSuccessfully();
- // Timeout function to check the flush callback function is called in the
- // short period.
- void FlushTimeout();
-
- // Verify the minimum FPS requirement.
- void VerifyMinFPS();
+  // Timeout function that fires when neither BitstreamBufferReady() nor the
+  // flush callback has been called within the expected period.
+ void BitstreamBufferReadyTimeout(int32_t bitstream_buffer_id);
// Verify that stream bitrate has been close to current_requested_bitrate_,
// assuming current_framerate_ since the last time VerifyStreamProperties()
@@ -1622,6 +1629,9 @@ class VEAClient : public VEAClientBase {
// Verify that the output timestamp matches input timestamp.
void VerifyOutputTimestamp(base::TimeDelta timestamp);
+ // Cancel and reset |buffer_ready_timeout_|.
+ void UpdateBitstreamBufferReadyTimeout(int32_t bitstream_buffer_id);
+
ClientState state_;
TestStream* test_stream_;
@@ -1683,9 +1693,6 @@ class VEAClient : public VEAClientBase {
// time we checked bitrate.
size_t encoded_stream_size_since_last_check_;
- // If true, verify performance at the end of the test.
- bool test_perf_;
-
// Check the output frame quality of the encoder.
bool verify_output_;
@@ -1719,8 +1726,10 @@ class VEAClient : public VEAClientBase {
// The timer used to feed the encoder with the input frames.
std::unique_ptr<base::RepeatingTimer> input_timer_;
- // The FlushTimeout closure. It is cancelled when flush is finished.
- base::CancelableClosure flush_timeout_;
+  // The BitstreamBufferReadyTimeout closure. It is (re)set on each
+  // BitstreamBufferReady() call and cancelled when the next
+  // BitstreamBufferReady() or the flush callback arrives.
+ base::CancelableClosure buffer_ready_timeout_;
// The timestamps for each frame in the order of CreateFrame() invocation.
base::queue<base::TimeDelta> frame_timestamps_;
@@ -1734,7 +1743,6 @@ VEAClient::VEAClient(TestStream* test_stream,
bool save_to_file,
unsigned int keyframe_period,
bool force_bitrate,
- bool test_perf,
bool mid_stream_bitrate_switch,
bool mid_stream_framerate_switch,
bool verify_output,
@@ -1757,7 +1765,6 @@ VEAClient::VEAClient(TestStream* test_stream,
current_requested_bitrate_(0),
current_framerate_(0),
encoded_stream_size_since_last_check_(0),
- test_perf_(test_perf),
verify_output_(verify_output),
verify_output_timestamp_(verify_output_timestamp),
requested_bitrate_(0),
@@ -1995,6 +2002,8 @@ void VEAClient::BitstreamBufferReady(
DCHECK(thread_checker_.CalledOnValidThread());
ASSERT_LE(metadata.payload_size_bytes, output_buffer_size_);
+ UpdateBitstreamBufferReadyTimeout(bitstream_buffer_id);
+
IdToSHM::iterator it = output_buffers_at_client_.find(bitstream_buffer_id);
ASSERT_NE(it, output_buffers_at_client_.end());
base::UnsafeSharedMemoryRegion* shm = it->second;
@@ -2035,7 +2044,7 @@ void VEAClient::BitstreamBufferReady(
// frames are received.
if (!encoder_->IsFlushSupported() &&
num_encoded_frames_ == num_frames_to_encode_) {
- FlushEncoderSuccessfully();
+ FlushEncoderDone(true);
}
if (save_to_file_) {
@@ -2054,6 +2063,18 @@ void VEAClient::BitstreamBufferReady(
FeedEncoderWithOutput(shm);
}
+void VEAClient::UpdateBitstreamBufferReadyTimeout(int32_t bitstream_buffer_id) {
+ DCHECK(thread_checker_.CalledOnValidThread());
+ DVLOGF(4);
+
+ buffer_ready_timeout_.Reset(
+ base::BindRepeating(&VEAClient::BitstreamBufferReadyTimeout,
+ base::Unretained(this), bitstream_buffer_id));
+ base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
+ FROM_HERE, buffer_ready_timeout_.callback(),
+ base::TimeDelta::FromMilliseconds(kBitstreamBufferReadyTimeoutMs));
+}
+
void VEAClient::SetState(ClientState new_state) {
DCHECK(thread_checker_.CalledOnValidThread());
@@ -2291,7 +2312,6 @@ bool VEAClient::HandleEncodedFrame(bool keyframe,
}
} else if (num_encoded_frames_ == num_frames_to_encode_) {
LogPerf();
- VerifyMinFPS();
VerifyStreamProperties();
// We might receive the last frame before calling Flush(). In this case we
// set the state to CS_FLUSHING first to bypass the state transition check.
@@ -2344,19 +2364,16 @@ void VEAClient::FlushEncoder() {
// the state to CS_FLUSHING when receiving the last frame.
if (state_ != CS_FINISHED)
SetState(CS_FLUSHING);
-
- flush_timeout_.Reset(
- base::BindRepeating(&VEAClient::FlushTimeout, base::Unretained(this)));
- base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
- FROM_HERE, flush_timeout_.callback(),
- base::TimeDelta::FromMilliseconds(kFlushTimeoutMs));
}
void VEAClient::FlushEncoderDone(bool success) {
DCHECK(thread_checker_.CalledOnValidThread());
- flush_timeout_.Cancel();
+ DVLOGF(3);
LOG_ASSERT(num_frames_submitted_to_encoder_ == num_frames_to_encode_);
+ // Stop the timeout callback.
+ buffer_ready_timeout_.Cancel();
+
if (!success || num_encoded_frames_ != num_frames_to_encode_) {
SetState(CS_ERROR);
return;
@@ -2376,29 +2393,13 @@ void VEAClient::FlushEncoderSuccessfully() {
}
}
-void VEAClient::FlushTimeout() {
+void VEAClient::BitstreamBufferReadyTimeout(int32_t bitstream_buffer_id) {
DCHECK(thread_checker_.CalledOnValidThread());
- LOG(ERROR) << "Flush timeout.";
+ LOG(ERROR) << "Timeout getting next bitstream after BitstreamBufferReady("
+ << bitstream_buffer_id << ").";
SetState(CS_ERROR);
}
-void VEAClient::VerifyMinFPS() {
- DCHECK(thread_checker_.CalledOnValidThread());
- if (test_perf_) {
- if (input_coded_size_.GetArea() >= k2160PSizeInPixels) {
- // When |input_coded_size_| is 2160p or more, it is expected that the
- // calculated FPS might be lower than kMinPerfFPS. Log as warning instead
- // of failing the test in this case.
- if (frames_per_second() < kMinPerfFPS) {
- LOG(WARNING) << "Measured FPS: " << frames_per_second()
- << " is below min required: " << kMinPerfFPS << " FPS.";
- }
- } else {
- EXPECT_GE(frames_per_second(), kMinPerfFPS);
- }
- }
-}
-
void VEAClient::VerifyStreamProperties() {
DCHECK(thread_checker_.CalledOnValidThread());
LOG_ASSERT(num_frames_since_last_check_ > 0UL);
@@ -2707,7 +2708,6 @@ void VEACacheLineUnalignedInputClient::FeedEncoderWithOneInput(
// - Force a keyframe every n frames.
// - Force bitrate; the actual required value is provided as a property
// of the input stream, because it depends on stream type/resolution/etc.
-// - If true, measure performance.
// - If true, switch bitrate mid-stream.
// - If true, switch framerate mid-stream.
// - If true, verify the output frames of encoder.
@@ -2716,25 +2716,22 @@ void VEACacheLineUnalignedInputClient::FeedEncoderWithOneInput(
// available for H264 encoder for now.
class VideoEncodeAcceleratorTest
: public ::testing::TestWithParam<
- std::
- tuple<int, bool, int, bool, bool, bool, bool, bool, bool, bool>> {
-};
+ std::tuple<int, bool, int, bool, bool, bool, bool, bool, bool>> {};
TEST_P(VideoEncodeAcceleratorTest, TestSimpleEncode) {
size_t num_concurrent_encoders = std::get<0>(GetParam());
const bool save_to_file = std::get<1>(GetParam());
const unsigned int keyframe_period = std::get<2>(GetParam());
const bool force_bitrate = std::get<3>(GetParam());
- const bool test_perf = std::get<4>(GetParam());
- const bool mid_stream_bitrate_switch = std::get<5>(GetParam());
- const bool mid_stream_framerate_switch = std::get<6>(GetParam());
+ const bool mid_stream_bitrate_switch = std::get<4>(GetParam());
+ const bool mid_stream_framerate_switch = std::get<5>(GetParam());
const bool verify_output =
- std::get<7>(GetParam()) || g_env->verify_all_output();
- const bool verify_output_timestamp = std::get<8>(GetParam());
- const bool force_level = std::get<9>(GetParam());
+ std::get<6>(GetParam()) || g_env->verify_all_output();
+ const bool verify_output_timestamp = std::get<7>(GetParam());
+ const bool force_level = std::get<8>(GetParam());
#if defined(OS_CHROMEOS)
- if (ShouldSkipTest())
+ if (ShouldSkipTest(g_env->test_streams_[0]->pixel_format))
GTEST_SKIP();
#endif // defined(OS_CHROMEOS)
@@ -2783,7 +2780,7 @@ TEST_P(VideoEncodeAcceleratorTest, TestSimpleEncode) {
std::make_unique<media::test::ClientStateNotification<ClientState>>());
clients.push_back(std::make_unique<VEAClient>(
g_env->test_streams_[test_stream_index].get(), notes.back().get(),
- encoder_save_to_file, keyframe_period, force_bitrate, test_perf,
+ encoder_save_to_file, keyframe_period, force_bitrate,
mid_stream_bitrate_switch, mid_stream_framerate_switch, verify_output,
verify_output_timestamp, force_level));
@@ -2865,7 +2862,7 @@ TEST_P(VideoEncodeAcceleratorSimpleTest, TestSimpleEncode) {
ASSERT_LT(test_type, 2) << "Invalid test type=" << test_type;
#if defined(OS_CHROMEOS)
- if (ShouldSkipTest())
+ if (ShouldSkipTest(g_env->test_streams_[0]->pixel_format))
GTEST_SKIP();
#endif // defined(OS_CHROMEOS)
@@ -2888,7 +2885,6 @@ INSTANTIATE_TEST_SUITE_P(SimpleEncode,
false,
false,
false,
- false,
false)));
INSTANTIATE_TEST_SUITE_P(EncoderPerf,
@@ -2897,7 +2893,6 @@ INSTANTIATE_TEST_SUITE_P(EncoderPerf,
false,
0,
false,
- true,
false,
false,
false,
@@ -2914,7 +2909,6 @@ INSTANTIATE_TEST_SUITE_P(ForceKeyframes,
false,
false,
false,
- false,
false)));
INSTANTIATE_TEST_SUITE_P(ForceBitrate,
@@ -2927,7 +2921,6 @@ INSTANTIATE_TEST_SUITE_P(ForceBitrate,
false,
false,
false,
- false,
false)));
INSTANTIATE_TEST_SUITE_P(MidStreamParamSwitchBitrate,
@@ -2936,7 +2929,6 @@ INSTANTIATE_TEST_SUITE_P(MidStreamParamSwitchBitrate,
false,
0,
true,
- false,
true,
false,
false,
@@ -2951,7 +2943,6 @@ INSTANTIATE_TEST_SUITE_P(DISABLED_MidStreamParamSwitchFPS,
0,
true,
false,
- false,
true,
false,
false,
@@ -2967,13 +2958,11 @@ INSTANTIATE_TEST_SUITE_P(MultipleEncoders,
false,
false,
false,
- false,
false),
std::make_tuple(3,
false,
0,
true,
- false,
true,
false,
false,
@@ -2989,7 +2978,6 @@ INSTANTIATE_TEST_SUITE_P(VerifyTimestamp,
false,
false,
false,
- false,
true,
false)));
@@ -3003,7 +2991,6 @@ INSTANTIATE_TEST_SUITE_P(ForceLevel,
false,
false,
false,
- false,
true)));
INSTANTIATE_TEST_SUITE_P(NoInputTest,
@@ -3025,7 +3012,6 @@ INSTANTIATE_TEST_SUITE_P(SimpleEncode,
false,
false,
false,
- false,
false),
std::make_tuple(1,
true,
@@ -3033,7 +3019,6 @@ INSTANTIATE_TEST_SUITE_P(SimpleEncode,
false,
false,
false,
- false,
true,
false,
false)));
@@ -3044,7 +3029,6 @@ INSTANTIATE_TEST_SUITE_P(EncoderPerf,
false,
0,
false,
- true,
false,
false,
false,
@@ -3061,7 +3045,6 @@ INSTANTIATE_TEST_SUITE_P(MultipleEncoders,
false,
false,
false,
- false,
false)));
INSTANTIATE_TEST_SUITE_P(VerifyTimestamp,
@@ -3073,7 +3056,6 @@ INSTANTIATE_TEST_SUITE_P(VerifyTimestamp,
false,
false,
false,
- false,
true,
false)));
@@ -3088,7 +3070,6 @@ INSTANTIATE_TEST_SUITE_P(ForceBitrate,
false,
false,
false,
- false,
false)));
#endif // defined(OS_WIN)
@@ -3109,12 +3090,10 @@ class VEATestSuite : public base::TestSuite {
base::TestSuite::Initialize();
#if defined(OS_CHROMEOS)
- scoped_task_environment_ =
- std::make_unique<base::test::ScopedTaskEnvironment>(
- base::test::ScopedTaskEnvironment::MainThreadType::UI);
+ task_environment_ = std::make_unique<base::test::TaskEnvironment>(
+ base::test::TaskEnvironment::MainThreadType::UI);
#else
- scoped_task_environment_ =
- std::make_unique<base::test::ScopedTaskEnvironment>();
+ task_environment_ = std::make_unique<base::test::TaskEnvironment>();
#endif
media::g_env =
reinterpret_cast<media::VideoEncodeAcceleratorTestEnvironment*>(
@@ -3139,12 +3118,12 @@ class VEATestSuite : public base::TestSuite {
}
void Shutdown() override {
- scoped_task_environment_.reset();
+ task_environment_.reset();
base::TestSuite::Shutdown();
}
private:
- std::unique_ptr<base::test::ScopedTaskEnvironment> scoped_task_environment_;
+ std::unique_ptr<base::test::TaskEnvironment> task_environment_;
};
} // namespace
diff --git a/chromium/media/gpu/video_frame_converter.cc b/chromium/media/gpu/video_frame_converter.cc
index 8d46b05d759..30df4fa769f 100644
--- a/chromium/media/gpu/video_frame_converter.cc
+++ b/chromium/media/gpu/video_frame_converter.cc
@@ -10,16 +10,37 @@ VideoFrameConverter::VideoFrameConverter() = default;
VideoFrameConverter::~VideoFrameConverter() = default;
-void VideoFrameConverter::set_parent_task_runner(
- scoped_refptr<base::SequencedTaskRunner> task_runner) {
- parent_task_runner_ = std::move(task_runner);
+void VideoFrameConverter::Destroy() {
+ delete this;
}
-scoped_refptr<VideoFrame> VideoFrameConverter::ConvertFrame(
- scoped_refptr<VideoFrame> frame) {
+void VideoFrameConverter::Initialize(
+ scoped_refptr<base::SequencedTaskRunner> parent_task_runner,
+ OutputCB output_cb) {
+ parent_task_runner_ = std::move(parent_task_runner);
+ output_cb_ = std::move(output_cb);
+}
+
+void VideoFrameConverter::ConvertFrame(scoped_refptr<VideoFrame> frame) {
DCHECK(parent_task_runner_->RunsTasksInCurrentSequence());
+ DCHECK(output_cb_);
+
+ output_cb_.Run(std::move(frame));
+}
+
+void VideoFrameConverter::AbortPendingFrames() {}
- return frame;
+bool VideoFrameConverter::HasPendingFrames() const {
+ return false;
}
} // namespace media
+
+namespace std {
+
+void default_delete<media::VideoFrameConverter>::operator()(
+ media::VideoFrameConverter* ptr) const {
+ ptr->Destroy();
+}
+
+} // namespace std
diff --git a/chromium/media/gpu/video_frame_converter.h b/chromium/media/gpu/video_frame_converter.h
index 4139aa0f7c4..c995e0f083e 100644
--- a/chromium/media/gpu/video_frame_converter.h
+++ b/chromium/media/gpu/video_frame_converter.h
@@ -5,6 +5,9 @@
#ifndef MEDIA_GPU_VIDEO_FRAME_CONVERTER_H_
#define MEDIA_GPU_VIDEO_FRAME_CONVERTER_H_
+#include <memory>
+
+#include "base/callback_forward.h"
#include "base/memory/ref_counted.h"
#include "base/memory/scoped_refptr.h"
#include "base/sequenced_task_runner.h"
@@ -19,26 +22,58 @@ namespace media {
// can be used to convert the type of output video frame in this case.
class MEDIA_GPU_EXPORT VideoFrameConverter {
public:
+ using OutputCB = base::RepeatingCallback<void(scoped_refptr<VideoFrame>)>;
+
VideoFrameConverter();
- virtual ~VideoFrameConverter();
- // Setter method of |parent_task_runner_|. This method should be called before
- // any ConvertFrame() is called.
- void set_parent_task_runner(
- scoped_refptr<base::SequencedTaskRunner> parent_task_runner);
+ // Initialize the converter. This method must be called before any
+ // ConvertFrame() is called.
+ void Initialize(scoped_refptr<base::SequencedTaskRunner> parent_task_runner,
+ OutputCB output_cb);
+
+ // Convert the frame and return the converted frame to the client by
+ // |output_cb_|. This method must be called on |parent_task_runner_|.
+ // The default implementation calls |output_cb_| with |frame| as-is.
+ virtual void ConvertFrame(scoped_refptr<VideoFrame> frame);
- // Convert the frame. The default implementation returns the passed frame
- // as-is.
- virtual scoped_refptr<VideoFrame> ConvertFrame(
- scoped_refptr<VideoFrame> frame);
+ // Abort all pending frames. |output_cb_| should not be called for the input
+ // frames passed before calling AbortPendingFrames(). This method must be
+ // called on |parent_task_runner_|.
+ virtual void AbortPendingFrames();
+
+ // Return true if there is any pending frame. This method must be called on
+ // |parent_task_runner_|.
+ virtual bool HasPendingFrames() const;
protected:
- // The working task runner. ConvertFrame() should be called on this.
+ // Deletion is only allowed via Destroy().
+ virtual ~VideoFrameConverter();
+
+ // The working task runner.
scoped_refptr<base::SequencedTaskRunner> parent_task_runner_;
+ // The callback used to return converted frames to the client. This callback will
+ // be called on |parent_task_runner_|.
+ OutputCB output_cb_;
+
private:
+ friend struct std::default_delete<VideoFrameConverter>;
+ // Called by std::default_delete.
+ virtual void Destroy();
+
DISALLOW_COPY_AND_ASSIGN(VideoFrameConverter);
};
} // namespace media
+
+namespace std {
+
+// Specialize std::default_delete to call Destroy().
+template <>
+struct MEDIA_GPU_EXPORT default_delete<media::VideoFrameConverter> {
+ void operator()(media::VideoFrameConverter* ptr) const;
+};
+
+} // namespace std
+
#endif // MEDIA_GPU_VIDEO_FRAME_CONVERTER_H_
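A rough, uncompiled sketch of how a hypothetical caller might drive the reworked VideoFrameConverter interface, using only the signatures added in this diff; |parent_task_runner| and |input_frame| are placeholder variables, not names from the patch:

  // Ownership goes through std::unique_ptr; the specialized std::default_delete
  // calls Destroy() instead of the now-protected destructor.
  std::unique_ptr<media::VideoFrameConverter> converter =
      std::make_unique<media::VideoFrameConverter>();
  converter->Initialize(
      parent_task_runner,  // a scoped_refptr<base::SequencedTaskRunner>
      base::BindRepeating([](scoped_refptr<media::VideoFrame> frame) {
        // The base implementation forwards the input frame unchanged;
        // subclasses may run real conversions and reply asynchronously.
      }));
  // Must run on |parent_task_runner_|; the result arrives through the
  // OutputCB above rather than as a return value.
  converter->ConvertFrame(std::move(input_frame));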
diff --git a/chromium/media/gpu/video_frame_mapper_factory.cc b/chromium/media/gpu/video_frame_mapper_factory.cc
index 6989b97eecb..f4fa04b1719 100644
--- a/chromium/media/gpu/video_frame_mapper_factory.cc
+++ b/chromium/media/gpu/video_frame_mapper_factory.cc
@@ -7,9 +7,9 @@
#include "build/build_config.h"
#include "media/gpu/buildflags.h"
-#if BUILDFLAG(USE_V4L2_CODEC) || BUILDFLAG(USE_VAAPI)
+#if BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
#include "media/gpu/linux/generic_dmabuf_video_frame_mapper.h"
-#endif // BUILDFLAG(USE_V4L2_CODEC) || BUILDFLAG(USE_VAAPI)
+#endif // BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
#if BUILDFLAG(USE_VAAPI)
#include "media/gpu/vaapi/vaapi_dmabuf_video_frame_mapper.h"
@@ -31,10 +31,10 @@ std::unique_ptr<VideoFrameMapper> VideoFrameMapperFactory::CreateMapper(
std::unique_ptr<VideoFrameMapper> VideoFrameMapperFactory::CreateMapper(
VideoPixelFormat format,
bool linear_buffer_mapper) {
-#if BUILDFLAG(USE_V4L2_CODEC) || BUILDFLAG(USE_VAAPI)
+#if BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
if (linear_buffer_mapper)
return GenericDmaBufVideoFrameMapper::Create(format);
-#endif // BUILDFLAG(USE_V4L2_CODEC) || BUILDFLAG(USE_VAAPI)
+#endif // BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
#if BUILDFLAG(USE_VAAPI)
return VaapiDmaBufVideoFrameMapper::Create(format);
diff --git a/chromium/media/gpu/vp9_decoder.cc b/chromium/media/gpu/vp9_decoder.cc
index 68188ea4df0..59668bde2c4 100644
--- a/chromium/media/gpu/vp9_decoder.cc
+++ b/chromium/media/gpu/vp9_decoder.cc
@@ -9,6 +9,7 @@
#include "base/bind.h"
#include "base/feature_list.h"
#include "base/logging.h"
+#include "build/build_config.h"
#include "media/base/limits.h"
#include "media/base/media_switches.h"
#include "media/gpu/vp9_decoder.h"
@@ -18,6 +19,7 @@ namespace media {
namespace {
std::vector<uint32_t> GetSpatialLayerFrameSize(
const DecoderBuffer& decoder_buffer) {
+#if defined(ARCH_CPU_X86_FAMILY) && defined(OS_CHROMEOS)
const uint32_t* cue_data =
reinterpret_cast<const uint32_t*>(decoder_buffer.side_data());
if (!cue_data)
@@ -33,6 +35,8 @@ std::vector<uint32_t> GetSpatialLayerFrameSize(
return {};
}
return std::vector<uint32_t>(cue_data, cue_data + num_of_layers);
+#endif // defined(ARCH_CPU_X86_FAMILY) && defined(OS_CHROMEOS)
+ return {};
}
} // namespace
diff --git a/chromium/media/gpu/windows/d3d11_cdm_proxy.cc b/chromium/media/gpu/windows/d3d11_cdm_proxy.cc
index c493da1c3b7..a2687c205c8 100644
--- a/chromium/media/gpu/windows/d3d11_cdm_proxy.cc
+++ b/chromium/media/gpu/windows/d3d11_cdm_proxy.cc
@@ -195,7 +195,7 @@ class D3D11CdmProxy::HardwareEventWatcher
class D3D11CdmContext : public CdmContext {
public:
explicit D3D11CdmContext(const GUID& key_info_guid)
- : cdm_proxy_context_(key_info_guid), weak_factory_(this) {}
+ : cdm_proxy_context_(key_info_guid) {}
~D3D11CdmContext() override = default;
// The pointers are owned by the caller.
@@ -242,7 +242,7 @@ class D3D11CdmContext : public CdmContext {
CallbackRegistry<EventCB::RunType> event_callbacks_;
- base::WeakPtrFactory<D3D11CdmContext> weak_factory_;
+ base::WeakPtrFactory<D3D11CdmContext> weak_factory_{this};
DISALLOW_COPY_AND_ASSIGN(D3D11CdmContext);
};
@@ -254,8 +254,7 @@ D3D11CdmProxy::D3D11CdmProxy(const GUID& crypto_type,
protocol_(protocol),
function_id_map_(function_id_map),
cdm_context_(std::make_unique<D3D11CdmContext>(crypto_type)),
- create_device_func_(base::BindRepeating(D3D11CreateDevice)),
- weak_factory_(this) {}
+ create_device_func_(base::BindRepeating(D3D11CreateDevice)) {}
D3D11CdmProxy::~D3D11CdmProxy() {}
diff --git a/chromium/media/gpu/windows/d3d11_cdm_proxy.h b/chromium/media/gpu/windows/d3d11_cdm_proxy.h
index d05410e6f5d..572ed2eab0b 100644
--- a/chromium/media/gpu/windows/d3d11_cdm_proxy.h
+++ b/chromium/media/gpu/windows/d3d11_cdm_proxy.h
@@ -116,7 +116,7 @@ class MEDIA_GPU_EXPORT D3D11CdmProxy : public CdmProxy {
UINT private_input_size_ = 0;
UINT private_output_size_ = 0;
- base::WeakPtrFactory<D3D11CdmProxy> weak_factory_;
+ base::WeakPtrFactory<D3D11CdmProxy> weak_factory_{this};
DISALLOW_COPY_AND_ASSIGN(D3D11CdmProxy);
};
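The weak_factory_{this} edits repeated across these Windows GPU files are the same mechanical migration: the factory captures |this| through an in-class brace initializer instead of a trailing entry in every constructor's init list. A minimal sketch with a hypothetical Example class (not part of this diff):

  class Example {
   public:
    Example() = default;  // no ": weak_factory_(this)" needed any more

   private:
    int some_state_ = 0;
    // Still declared last so pending weak pointers are invalidated before the
    // other members are torn down.
    base::WeakPtrFactory<Example> weak_factory_{this};
  };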
diff --git a/chromium/media/gpu/windows/d3d11_cdm_proxy_unittest.cc b/chromium/media/gpu/windows/d3d11_cdm_proxy_unittest.cc
index 374f9a9ace0..a711e8c6b18 100644
--- a/chromium/media/gpu/windows/d3d11_cdm_proxy_unittest.cc
+++ b/chromium/media/gpu/windows/d3d11_cdm_proxy_unittest.cc
@@ -13,7 +13,7 @@
#include "base/power_monitor/power_monitor_source.h"
#include "base/run_loop.h"
#include "base/test/mock_callback.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/base/callback_registry.h"
#include "media/base/win/d3d11_mocks.h"
#include "media/cdm/cdm_proxy_context.h"
@@ -297,7 +297,7 @@ class D3D11CdmProxyTest : public ::testing::Test {
const UINT kPrivateOutputSize = 40;
// ObjectWatcher uses SequencedTaskRunnerHandle.
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
};
// Verifies that if device creation fails, then the call fails.
diff --git a/chromium/media/gpu/windows/d3d11_decryptor.cc b/chromium/media/gpu/windows/d3d11_decryptor.cc
index 258560965dc..e03346ec36f 100644
--- a/chromium/media/gpu/windows/d3d11_decryptor.cc
+++ b/chromium/media/gpu/windows/d3d11_decryptor.cc
@@ -134,7 +134,7 @@ UINT To16Multiple(size_t num) {
} // namespace
D3D11Decryptor::D3D11Decryptor(CdmProxyContext* cdm_proxy_context)
- : cdm_proxy_context_(cdm_proxy_context), weak_factory_(this) {
+ : cdm_proxy_context_(cdm_proxy_context) {
DCHECK(cdm_proxy_context_);
}
diff --git a/chromium/media/gpu/windows/d3d11_decryptor.h b/chromium/media/gpu/windows/d3d11_decryptor.h
index b8a1a761eb2..8ae3069e8c3 100644
--- a/chromium/media/gpu/windows/d3d11_decryptor.h
+++ b/chromium/media/gpu/windows/d3d11_decryptor.h
@@ -87,7 +87,7 @@ class MEDIA_GPU_EXPORT D3D11Decryptor : public Decryptor {
// to.
ComPtr<ID3D11Buffer> cpu_accessible_buffer_;
- base::WeakPtrFactory<D3D11Decryptor> weak_factory_;
+ base::WeakPtrFactory<D3D11Decryptor> weak_factory_{this};
DISALLOW_COPY_AND_ASSIGN(D3D11Decryptor);
};
diff --git a/chromium/media/gpu/windows/d3d11_picture_buffer.h b/chromium/media/gpu/windows/d3d11_picture_buffer.h
index 5e5ee6ec0c3..8358a18509d 100644
--- a/chromium/media/gpu/windows/d3d11_picture_buffer.h
+++ b/chromium/media/gpu/windows/d3d11_picture_buffer.h
@@ -6,7 +6,6 @@
#define MEDIA_GPU_WINDOWS_D3D11_PICTURE_BUFFER_H_
#include <d3d11.h>
-#include <dxva.h>
#include <wrl/client.h>
#include <memory>
diff --git a/chromium/media/gpu/windows/d3d11_texture_selector.cc b/chromium/media/gpu/windows/d3d11_texture_selector.cc
index b248418e9ad..ac1c327e6ce 100644
--- a/chromium/media/gpu/windows/d3d11_texture_selector.cc
+++ b/chromium/media/gpu/windows/d3d11_texture_selector.cc
@@ -10,6 +10,7 @@
#include "media/base/media_switches.h"
#include "media/gpu/windows/d3d11_copying_texture_wrapper.h"
#include "ui/gfx/geometry/size.h"
+#include "ui/gl/direct_composition_surface_win.h"
namespace media {
@@ -46,8 +47,8 @@ std::unique_ptr<TextureSelector> TextureSelector::Create(
const gpu::GpuPreferences& gpu_preferences,
const gpu::GpuDriverBugWorkarounds& workarounds,
const VideoDecoderConfig& config) {
- bool supports_nv12_decode_swap_chain = base::FeatureList::IsEnabled(
- features::kDirectCompositionUseNV12DecodeSwapChain);
+ bool supports_nv12_decode_swap_chain =
+ gl::DirectCompositionSurfaceWin::IsDecodeSwapChainSupported();
bool needs_texture_copy = !SupportsZeroCopy(gpu_preferences, workarounds);
DXGI_FORMAT input_dxgi_format = DXGI_FORMAT_NV12;
diff --git a/chromium/media/gpu/windows/d3d11_video_context_wrapper.cc b/chromium/media/gpu/windows/d3d11_video_context_wrapper.cc
index c17bdc7a9e3..5d217975837 100644
--- a/chromium/media/gpu/windows/d3d11_video_context_wrapper.cc
+++ b/chromium/media/gpu/windows/d3d11_video_context_wrapper.cc
@@ -26,6 +26,7 @@ struct BufferSubmitter<ID3D11VideoContext1> {
constexpr UINT max_buffers = 4;
DCHECK_LE(num_buffers, max_buffers);
D3D11_VIDEO_DECODER_BUFFER_DESC1 buffers[max_buffers] = {};
+ memset(buffers, 0, sizeof(D3D11_VIDEO_DECODER_BUFFER_DESC1) * max_buffers);
for (size_t i = 0; i < num_buffers; i++) {
buffers[i].BufferType = src[i].BufferType;
buffers[i].DataOffset = src[i].DataOffset;
@@ -49,6 +50,7 @@ struct BufferSubmitter<ID3D11VideoContext> {
constexpr UINT max_buffers = 4;
DCHECK_LE(num_buffers, max_buffers);
D3D11_VIDEO_DECODER_BUFFER_DESC buffers[max_buffers] = {};
+ memset(buffers, 0, sizeof(D3D11_VIDEO_DECODER_BUFFER_DESC) * max_buffers);
for (size_t i = 0; i < num_buffers; i++) {
buffers[i].BufferType = src[i].BufferType;
buffers[i].DataOffset = src[i].DataOffset;
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder.cc b/chromium/media/gpu/windows/d3d11_video_decoder.cc
index c94bc88abc7..a9d873f234a 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder.cc
+++ b/chromium/media/gpu/windows/d3d11_video_decoder.cc
@@ -112,8 +112,7 @@ D3D11VideoDecoder::D3D11VideoDecoder(
gpu_workarounds_(gpu_workarounds),
get_d3d11_device_cb_(std::move(get_d3d11_device_cb)),
get_helper_cb_(std::move(get_helper_cb)),
- supported_configs_(std::move(supported_configs)),
- weak_factory_(this) {
+ supported_configs_(std::move(supported_configs)) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(media_log_);
@@ -224,14 +223,18 @@ void D3D11VideoDecoder::Initialize(const VideoDecoderConfig& config,
//
// TODO(liberato): On re-init, we can probably re-use the device.
device_ = get_d3d11_device_cb_.Run();
- usable_feature_level_ = device_->GetFeatureLevel();
-
if (!device_) {
// This happens if, for example, chrome is configured to use
// D3D9 for ANGLE.
NotifyError("ANGLE did not provide D3D11 device");
return;
}
+
+ if (!GetD3D11FeatureLevel(device_, &usable_feature_level_)) {
+ NotifyError("D3D11 feature level not supported");
+ return;
+ }
+
device_->GetImmediateContext(device_context_.ReleaseAndGetAddressOf());
HRESULT hr;
@@ -707,6 +710,22 @@ void D3D11VideoDecoder::NotifyError(const char* reason) {
}
// static
+bool D3D11VideoDecoder::GetD3D11FeatureLevel(ComD3D11Device dev,
+ D3D_FEATURE_LEVEL* feature_level) {
+ if (!dev || !feature_level)
+ return false;
+
+ *feature_level = dev->GetFeatureLevel();
+ if (*feature_level < D3D_FEATURE_LEVEL_11_0)
+ return false;
+
+ // TODO(tmathmeyer) should we log this to UMA?
+ if (base::FeatureList::IsEnabled(kD3D11LimitTo11_0))
+ *feature_level = D3D_FEATURE_LEVEL_11_0;
+ return true;
+}
+
+// static
std::vector<SupportedVideoDecoderConfig>
D3D11VideoDecoder::GetSupportedVideoDecoderConfigs(
const gpu::GpuPreferences& gpu_preferences,
@@ -746,7 +765,12 @@ D3D11VideoDecoder::GetSupportedVideoDecoderConfigs(
return {};
}
- D3D_FEATURE_LEVEL usable_feature_level = d3d11_device->GetFeatureLevel();
+ D3D_FEATURE_LEVEL usable_feature_level;
+ if (!GetD3D11FeatureLevel(d3d11_device, &usable_feature_level)) {
+ UMA_HISTOGRAM_ENUMERATION(
+ uma_name, NotSupportedReason::kInsufficientD3D11FeatureLevel);
+ return {};
+ }
const bool allow_encrypted =
(usable_feature_level > D3D_FEATURE_LEVEL_11_0) &&
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder.h b/chromium/media/gpu/windows/d3d11_video_decoder.h
index b76bf595f6a..270acb71ba4 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder.h
+++ b/chromium/media/gpu/windows/d3d11_video_decoder.h
@@ -83,6 +83,9 @@ class MEDIA_GPU_EXPORT D3D11VideoDecoder : public VideoDecoder,
void OutputResult(const CodecPicture* picture,
D3D11PictureBuffer* picture_buffer) override;
+ static bool GetD3D11FeatureLevel(ComD3D11Device dev,
+ D3D_FEATURE_LEVEL* feature_level);
+
// Return the set of video decoder configs that we support.
static std::vector<SupportedVideoDecoderConfig>
GetSupportedVideoDecoderConfigs(
@@ -259,7 +262,7 @@ class MEDIA_GPU_EXPORT D3D11VideoDecoder : public VideoDecoder,
SupportedConfigs supported_configs_;
- base::WeakPtrFactory<D3D11VideoDecoder> weak_factory_;
+ base::WeakPtrFactory<D3D11VideoDecoder> weak_factory_{this};
DISALLOW_COPY_AND_ASSIGN(D3D11VideoDecoder);
};
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder_impl.cc b/chromium/media/gpu/windows/d3d11_video_decoder_impl.cc
index ec542175d99..da38ad7979f 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder_impl.cc
+++ b/chromium/media/gpu/windows/d3d11_video_decoder_impl.cc
@@ -17,8 +17,7 @@ D3D11VideoDecoderImpl::D3D11VideoDecoderImpl(
std::unique_ptr<MediaLog> media_log,
base::RepeatingCallback<scoped_refptr<CommandBufferHelper>()> get_helper_cb)
: media_log_(std::move(media_log)),
- get_helper_cb_(std::move(get_helper_cb)),
- weak_factory_(this) {
+ get_helper_cb_(std::move(get_helper_cb)) {
// May be called from any thread.
}
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder_impl.h b/chromium/media/gpu/windows/d3d11_video_decoder_impl.h
index 55862eaf464..c6549268c2c 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder_impl.h
+++ b/chromium/media/gpu/windows/d3d11_video_decoder_impl.h
@@ -75,7 +75,7 @@ class MEDIA_GPU_EXPORT D3D11VideoDecoderImpl {
// Has thread affinity -- must be run on the gpu main thread.
THREAD_CHECKER(thread_checker_);
- base::WeakPtrFactory<D3D11VideoDecoderImpl> weak_factory_;
+ base::WeakPtrFactory<D3D11VideoDecoderImpl> weak_factory_{this};
DISALLOW_COPY_AND_ASSIGN(D3D11VideoDecoderImpl);
};
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc b/chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc
index 8803fda7382..24e388cbe95 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc
+++ b/chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc
@@ -14,7 +14,7 @@
#include "base/run_loop.h"
#include "base/single_thread_task_runner.h"
#include "base/test/scoped_feature_list.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/thread_task_runner_handle.h"
#include "base/win/scoped_com_initializer.h"
#include "base/win/windows_version.h"
@@ -229,7 +229,7 @@ class D3D11VideoDecoderTest : public ::testing::Test {
MOCK_METHOD1(MockInitCB, void(bool));
- base::test::ScopedTaskEnvironment env_;
+ base::test::TaskEnvironment env_;
scoped_refptr<base::SingleThreadTaskRunner> gpu_task_runner_;
diff --git a/chromium/media/gpu/windows/d3d11_vp9_accelerator.h b/chromium/media/gpu/windows/d3d11_vp9_accelerator.h
index 861752bc5bf..d428a0cafe9 100644
--- a/chromium/media/gpu/windows/d3d11_vp9_accelerator.h
+++ b/chromium/media/gpu/windows/d3d11_vp9_accelerator.h
@@ -7,6 +7,7 @@
#include <d3d11_1.h>
#include <d3d9.h>
+#include <dxva.h>
#include <windows.h>
#include <wrl/client.h>
diff --git a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc
index 2d470bae6c3..365b1d520c8 100644
--- a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc
+++ b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc
@@ -412,7 +412,7 @@ class VP9ConfigChangeDetector : public ConfigChangeDetector {
// Detects stream configuration changes.
// Returns false on failure.
bool DetectConfig(const uint8_t* stream, unsigned int size) override {
- parser_.SetStream(stream, size, {} /* spatial_layer_frame_size */, nullptr);
+ parser_.SetStream(stream, size, nullptr);
Vp9FrameHeader fhdr;
gfx::Size allocate_size;
std::unique_ptr<DecryptConfig> null_config;
@@ -519,8 +519,7 @@ DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator(
using_angle_device_(false),
enable_accelerated_vpx_decode_(
!workarounds.disable_accelerated_vpx_decode),
- processing_config_changed_(false),
- weak_this_factory_(this) {
+ processing_config_changed_(false) {
weak_ptr_ = weak_this_factory_.GetWeakPtr();
memset(&input_stream_info_, 0, sizeof(input_stream_info_));
memset(&output_stream_info_, 0, sizeof(output_stream_info_));
diff --git a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h
index 43b4150d6ac..b6b41eb34a4 100644
--- a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h
+++ b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h
@@ -582,7 +582,7 @@ class MEDIA_GPU_EXPORT DXVAVideoDecodeAccelerator
VideoColorSpace current_color_space_;
// WeakPtrFactory for posting tasks back to |this|.
- base::WeakPtrFactory<DXVAVideoDecodeAccelerator> weak_this_factory_;
+ base::WeakPtrFactory<DXVAVideoDecodeAccelerator> weak_this_factory_{this};
// Function pointer for the MFCreateDXGIDeviceManager API.
static CreateDXGIDeviceManager create_dxgi_device_manager_;
diff --git a/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.cc b/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.cc
index 593a8dc8c2b..c042d0351a3 100644
--- a/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.cc
+++ b/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.cc
@@ -109,8 +109,7 @@ MediaFoundationVideoEncodeAccelerator::MediaFoundationVideoEncodeAccelerator(
bool compatible_with_win7)
: compatible_with_win7_(compatible_with_win7),
main_client_task_runner_(base::ThreadTaskRunnerHandle::Get()),
- encoder_thread_("MFEncoderThread"),
- encoder_task_weak_factory_(this) {}
+ encoder_thread_("MFEncoderThread") {}
MediaFoundationVideoEncodeAccelerator::
~MediaFoundationVideoEncodeAccelerator() {
diff --git a/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.h b/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.h
index df7052d9fa0..c1a2e6d275d 100644
--- a/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.h
+++ b/chromium/media/gpu/windows/media_foundation_video_encode_accelerator_win.h
@@ -151,7 +151,7 @@ class MEDIA_GPU_EXPORT MediaFoundationVideoEncodeAccelerator
// Declared last to ensure that all weak pointers are invalidated before
// other destructors run.
base::WeakPtrFactory<MediaFoundationVideoEncodeAccelerator>
- encoder_task_weak_factory_;
+ encoder_task_weak_factory_{this};
DISALLOW_COPY_AND_ASSIGN(MediaFoundationVideoEncodeAccelerator);
};
diff --git a/chromium/media/learning/common/BUILD.gn b/chromium/media/learning/common/BUILD.gn
index 4e208d3a9b7..b86b09c0355 100644
--- a/chromium/media/learning/common/BUILD.gn
+++ b/chromium/media/learning/common/BUILD.gn
@@ -23,6 +23,8 @@ component("common") {
defines = [ "IS_LEARNING_COMMON_IMPL" ]
sources = [
+ "feature_dictionary.cc",
+ "feature_dictionary.h",
"feature_library.cc",
"feature_library.h",
"labelled_example.cc",
@@ -45,6 +47,7 @@ component("common") {
source_set("unit_tests") {
testonly = true
sources = [
+ "feature_dictionary_unittest.cc",
"labelled_example_unittest.cc",
"value_unittest.cc",
]
diff --git a/chromium/media/learning/common/feature_dictionary.cc b/chromium/media/learning/common/feature_dictionary.cc
new file mode 100644
index 00000000000..66cf6081f6d
--- /dev/null
+++ b/chromium/media/learning/common/feature_dictionary.cc
@@ -0,0 +1,38 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/learning/common/feature_dictionary.h"
+
+namespace media {
+namespace learning {
+
+FeatureDictionary::FeatureDictionary() = default;
+
+FeatureDictionary::~FeatureDictionary() = default;
+
+void FeatureDictionary::Lookup(const LearningTask& task,
+ FeatureVector* features) {
+ const size_t num_features = task.feature_descriptions.size();
+
+ if (features->size() < num_features)
+ features->resize(num_features);
+
+ for (size_t i = 0; i < num_features; i++) {
+ const auto& name = task.feature_descriptions[i].name;
+ auto entry = dictionary_.find(name);
+ if (entry == dictionary_.end())
+ continue;
+
+ // |name| appears in the dictionary, so add its value to |features|.
+ (*features)[i] = entry->second;
+ }
+}
+
+void FeatureDictionary::Add(const std::string& name,
+ const FeatureValue& value) {
+ dictionary_[name] = value;
+}
+
+} // namespace learning
+} // namespace media
diff --git a/chromium/media/learning/common/feature_dictionary.h b/chromium/media/learning/common/feature_dictionary.h
new file mode 100644
index 00000000000..149804501a4
--- /dev/null
+++ b/chromium/media/learning/common/feature_dictionary.h
@@ -0,0 +1,52 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_LEARNING_COMMON_FEATURE_DICTIONARY_H_
+#define MEDIA_LEARNING_COMMON_FEATURE_DICTIONARY_H_
+
+#include <map>
+#include <string>
+
+#include "base/component_export.h"
+#include "base/macros.h"
+#include "media/learning/common/labelled_example.h"
+#include "media/learning/common/learning_task.h"
+
+namespace media {
+namespace learning {
+
+// Dictionary of feature name => value pairs.
+//
+// This is useful if one simply wants to snapshot some features, and apply them
+// to more than one task without recomputing anything.
+//
+// While it's not required, FeatureLibrary is a convenient way to supply the
+// descriptions that a FeatureDictionary will fill in, so that the LearningTask
+// and the dictionary agree on names.
+class COMPONENT_EXPORT(LEARNING_COMMON) FeatureDictionary {
+ public:
+ // [feature name] => snapshotted value.
+ using Dictionary = std::map<std::string, FeatureValue>;
+
+ FeatureDictionary();
+ ~FeatureDictionary();
+
+ // Add features for |task| to |features| from our dictionary. Features that
+ // aren't present in the dictionary will be ignored. |features| will be
+ // expanded if needed to match |task|.
+ void Lookup(const LearningTask& task, FeatureVector* features);
+
+ // Add |name| to the dictionary with value |value|.
+ void Add(const std::string& name, const FeatureValue& value);
+
+ private:
+ Dictionary dictionary_;
+
+ DISALLOW_COPY_AND_ASSIGN(FeatureDictionary);
+};
+
+} // namespace learning
+} // namespace media
+
+#endif // MEDIA_LEARNING_COMMON_FEATURE_DICTIONARY_H_
diff --git a/chromium/media/learning/common/feature_dictionary_unittest.cc b/chromium/media/learning/common/feature_dictionary_unittest.cc
new file mode 100644
index 00000000000..9d4a00fab6e
--- /dev/null
+++ b/chromium/media/learning/common/feature_dictionary_unittest.cc
@@ -0,0 +1,45 @@
+// Copyright 2019 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/learning/common/feature_dictionary.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+namespace learning {
+
+class FeatureDictionaryTest : public testing::Test {};
+
+TEST_F(FeatureDictionaryTest, FillsInFeatures) {
+ FeatureDictionary dict;
+ const std::string feature_name_1("feature 1");
+ const FeatureValue feature_value_1("feature value 1");
+
+ const std::string feature_name_2("feature 2");
+ const FeatureValue feature_value_2("feature value 2");
+
+ const std::string feature_name_3("feature 3");
+ const FeatureValue feature_value_3("feature value 3");
+
+ dict.Add(feature_name_1, feature_value_1);
+ dict.Add(feature_name_2, feature_value_2);
+ dict.Add(feature_name_3, feature_value_3);
+
+ LearningTask task;
+ task.feature_descriptions.push_back({"some other feature"});
+ task.feature_descriptions.push_back({feature_name_3});
+ task.feature_descriptions.push_back({feature_name_1});
+
+ FeatureVector features;
+ features.push_back(FeatureValue(0)); // some other feature
+
+ dict.Lookup(task, &features);
+ EXPECT_EQ(features.size(), 3u);
+ EXPECT_EQ(features[0], FeatureValue(0));
+ EXPECT_EQ(features[1], feature_value_3);
+ EXPECT_EQ(features[2], feature_value_1);
+}
+
+} // namespace learning
+} // namespace media
diff --git a/chromium/media/learning/common/learning_session.h b/chromium/media/learning/common/learning_session.h
index f0fb9ba911e..6468871eab4 100644
--- a/chromium/media/learning/common/learning_session.h
+++ b/chromium/media/learning/common/learning_session.h
@@ -10,6 +10,7 @@
#include "base/component_export.h"
#include "base/macros.h"
+#include "base/supports_user_data.h"
#include "media/learning/common/labelled_example.h"
#include "media/learning/common/learning_task.h"
@@ -19,10 +20,11 @@ namespace learning {
class LearningTaskController;
// Interface to provide a Learner given the task name.
-class COMPONENT_EXPORT(LEARNING_COMMON) LearningSession {
+class COMPONENT_EXPORT(LEARNING_COMMON) LearningSession
+ : public base::SupportsUserData::Data {
public:
LearningSession();
- virtual ~LearningSession();
+ ~LearningSession() override;
// Return a LearningTaskController for the given task.
virtual std::unique_ptr<LearningTaskController> GetController(
diff --git a/chromium/media/learning/common/learning_task.cc b/chromium/media/learning/common/learning_task.cc
index fa5c088c8e2..75c0ae59eb8 100644
--- a/chromium/media/learning/common/learning_task.cc
+++ b/chromium/media/learning/common/learning_task.cc
@@ -5,6 +5,7 @@
#include "media/learning/common/learning_task.h"
#include "base/hash/hash.h"
+#include "base/no_destructor.h"
namespace media {
namespace learning {
@@ -29,5 +30,11 @@ LearningTask::Id LearningTask::GetId() const {
return base::PersistentHash(name);
}
+// static
+const LearningTask& LearningTask::Empty() {
+ static const base::NoDestructor<LearningTask> empty_task;
+ return *empty_task;
+}
+
} // namespace learning
} // namespace media
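LearningTask::Empty() above relies on the function-local base::NoDestructor pattern: the object is built on first use and intentionally never destroyed, so there is no exit-time destructor and the returned reference stays valid for the life of the process. The same pattern in isolation, with a hypothetical Config type:

  const Config& DefaultConfig() {
    // Constructed once, never destroyed; the reference outlives all callers.
    static const base::NoDestructor<Config> config;
    return *config;
  }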
diff --git a/chromium/media/learning/common/learning_task.h b/chromium/media/learning/common/learning_task.h
index 258dd39016f..9d70b93e1e1 100644
--- a/chromium/media/learning/common/learning_task.h
+++ b/chromium/media/learning/common/learning_task.h
@@ -91,6 +91,9 @@ struct COMPONENT_EXPORT(LEARNING_COMMON) LearningTask {
// unique |name| for the task. This is used to identify this task in UKM.
Id GetId() const;
+ // Returns a reference to an empty learning task.
+ static const LearningTask& Empty();
+
// Unique name for this task.
std::string name;
diff --git a/chromium/media/learning/common/learning_task_controller.h b/chromium/media/learning/common/learning_task_controller.h
index ef098330458..d4fd6ae7104 100644
--- a/chromium/media/learning/common/learning_task_controller.h
+++ b/chromium/media/learning/common/learning_task_controller.h
@@ -67,6 +67,9 @@ class COMPONENT_EXPORT(LEARNING_COMMON) LearningTaskController {
// Notify the LearningTaskController that no completion will be sent.
virtual void CancelObservation(base::UnguessableToken id) = 0;
+ // Returns the LearningTask associated with |this|.
+ virtual const LearningTask& GetLearningTask() = 0;
+
private:
DISALLOW_COPY_AND_ASSIGN(LearningTaskController);
};
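With GetLearningTask() on the interface, code that only holds a LearningTaskController handle can inspect the task it is feeding, e.g. to size a FeatureVector. A hypothetical helper, assuming only the accessor declared above and the LearningTask fields already used elsewhere in this patch:

  size_t ExpectedFeatureCount(
      media::learning::LearningTaskController* controller) {
    const media::learning::LearningTask& task = controller->GetLearningTask();
    return task.feature_descriptions.size();
  }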
diff --git a/chromium/media/learning/impl/distribution_reporter_unittest.cc b/chromium/media/learning/impl/distribution_reporter_unittest.cc
index 55f1f4a2f47..3e968c5061e 100644
--- a/chromium/media/learning/impl/distribution_reporter_unittest.cc
+++ b/chromium/media/learning/impl/distribution_reporter_unittest.cc
@@ -6,7 +6,7 @@
#include <vector>
#include "base/bind.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "components/ukm/test_ukm_recorder.h"
#include "media/learning/common/learning_task.h"
#include "media/learning/impl/distribution_reporter.h"
@@ -25,7 +25,7 @@ class DistributionReporterTest : public testing::Test {
task_.target_description.ordering = LearningTask::Ordering::kNumeric;
}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
std::unique_ptr<ukm::TestAutoSetUkmRecorder> ukm_recorder_;
diff --git a/chromium/media/learning/impl/extra_trees_trainer_unittest.cc b/chromium/media/learning/impl/extra_trees_trainer_unittest.cc
index d9e18970ced..ed8d1679e3c 100644
--- a/chromium/media/learning/impl/extra_trees_trainer_unittest.cc
+++ b/chromium/media/learning/impl/extra_trees_trainer_unittest.cc
@@ -6,7 +6,7 @@
#include "base/bind.h"
#include "base/memory/ref_counted.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/learning/impl/fisher_iris_dataset.h"
#include "media/learning/impl/test_random_number_generator.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -38,11 +38,11 @@ class ExtraTreesTest : public testing::TestWithParam<LearningTask::Ordering> {
[](std::unique_ptr<Model>* model_out,
std::unique_ptr<Model> model) { *model_out = std::move(model); },
&model));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
return model;
}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
TestRandomNumberGenerator rng_;
ExtraTreesTrainer trainer_;
diff --git a/chromium/media/learning/impl/learning_fuzzertest.cc b/chromium/media/learning/impl/learning_fuzzertest.cc
index cfff3d410e7..385cf8748bf 100644
--- a/chromium/media/learning/impl/learning_fuzzertest.cc
+++ b/chromium/media/learning/impl/learning_fuzzertest.cc
@@ -2,9 +2,10 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "base/test/scoped_task_environment.h"
+#include <fuzzer/FuzzedDataProvider.h>
+
+#include "base/test/task_environment.h"
#include "media/learning/impl/learning_task_controller_impl.h"
-#include "third_party/libFuzzer/src/utils/FuzzedDataProvider.h"
using media::learning::FeatureValue;
using media::learning::FeatureVector;
@@ -40,7 +41,7 @@ FeatureVector ConsumeFeatureVector(FuzzedDataProvider* provider) {
}
extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
- base::test::ScopedTaskEnvironment scoped_task_environment;
+ base::test::TaskEnvironment task_environment;
FuzzedDataProvider provider(data, size);
LearningTask task;
@@ -67,7 +68,7 @@ extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
controller.CompleteObservation(
id, ObservationCompletion(TargetValue(ConsumeDouble(&provider)),
ConsumeDouble(&provider)));
- scoped_task_environment.RunUntilIdle();
+ task_environment.RunUntilIdle();
}
return 0;
diff --git a/chromium/media/learning/impl/learning_session_impl.cc b/chromium/media/learning/impl/learning_session_impl.cc
index 7f4e94f86cf..cbe0f5dd30c 100644
--- a/chromium/media/learning/impl/learning_session_impl.cc
+++ b/chromium/media/learning/impl/learning_session_impl.cc
@@ -22,8 +22,11 @@ class WeakLearningTaskController : public LearningTaskController {
public:
WeakLearningTaskController(
base::WeakPtr<LearningSessionImpl> weak_session,
- base::SequenceBound<LearningTaskController>* controller)
- : weak_session_(std::move(weak_session)), controller_(controller) {}
+ base::SequenceBound<LearningTaskController>* controller,
+ const LearningTask& task)
+ : weak_session_(std::move(weak_session)),
+ controller_(controller),
+ task_(task) {}
~WeakLearningTaskController() override {
if (!weak_session_)
@@ -63,8 +66,11 @@ class WeakLearningTaskController : public LearningTaskController {
id);
}
+ const LearningTask& GetLearningTask() override { return task_; }
+
base::WeakPtr<LearningSessionImpl> weak_session_;
base::SequenceBound<LearningTaskController>* controller_;
+ LearningTask task_;
// Set of ids that have been started but not completed / cancelled yet.
std::set<base::UnguessableToken> outstanding_ids_;
@@ -92,23 +98,25 @@ void LearningSessionImpl::SetTaskControllerFactoryCBForTesting(
std::unique_ptr<LearningTaskController> LearningSessionImpl::GetController(
const std::string& task_name) {
- auto iter = task_map_.find(task_name);
- if (iter == task_map_.end())
+ auto iter = controller_map_.find(task_name);
+ if (iter == controller_map_.end())
return nullptr;
// If there were any way to replace / destroy a controller other than when we
// destroy |this|, then this wouldn't be such a good idea.
return std::make_unique<WeakLearningTaskController>(
- weak_factory_.GetWeakPtr(), &iter->second);
+ weak_factory_.GetWeakPtr(), &iter->second, task_map_[task_name]);
}
void LearningSessionImpl::RegisterTask(
const LearningTask& task,
SequenceBoundFeatureProvider feature_provider) {
- DCHECK(task_map_.count(task.name) == 0);
- task_map_.emplace(
+ DCHECK(controller_map_.count(task.name) == 0);
+ controller_map_.emplace(
task.name,
controller_factory_.Run(task_runner_, task, std::move(feature_provider)));
+
+ task_map_.emplace(task.name, task);
}
} // namespace learning
diff --git a/chromium/media/learning/impl/learning_session_impl.h b/chromium/media/learning/impl/learning_session_impl.h
index 06c3eedb513..dd43123d53a 100644
--- a/chromium/media/learning/impl/learning_session_impl.h
+++ b/chromium/media/learning/impl/learning_session_impl.h
@@ -52,9 +52,12 @@ class COMPONENT_EXPORT(LEARNING_IMPL) LearningSessionImpl
scoped_refptr<base::SequencedTaskRunner> task_runner_;
// [task_name] = task controller.
- using LearningTaskMap =
+ using LearningTaskControllerMap =
std::map<std::string, base::SequenceBound<LearningTaskController>>;
- LearningTaskMap task_map_;
+ LearningTaskControllerMap controller_map_;
+
+ // Used to look up registered LearningTasks by name.
+ std::map<std::string, LearningTask> task_map_;
CreateTaskControllerCB controller_factory_;
diff --git a/chromium/media/learning/impl/learning_session_impl_unittest.cc b/chromium/media/learning/impl/learning_session_impl_unittest.cc
index d69ec98280d..f8f23018fa7 100644
--- a/chromium/media/learning/impl/learning_session_impl_unittest.cc
+++ b/chromium/media/learning/impl/learning_session_impl_unittest.cc
@@ -7,7 +7,7 @@
#include <vector>
#include "base/bind.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/sequenced_task_runner_handle.h"
#include "media/learning/common/learning_task_controller.h"
#include "media/learning/impl/learning_session_impl.h"
@@ -58,6 +58,11 @@ class LearningSessionImplTest : public testing::Test {
cancelled_id_ = id;
}
+ const LearningTask& GetLearningTask() override {
+ NOTREACHED();
+ return LearningTask::Empty();
+ }
+
SequenceBoundFeatureProvider feature_provider_;
base::UnguessableToken id_;
FeatureVector features_;
@@ -104,10 +109,10 @@ class LearningSessionImplTest : public testing::Test {
// To prevent a memory leak, reset the session. This will post destruction
// of other objects, so RunUntilIdle().
session_.reset();
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
scoped_refptr<base::SequencedTaskRunner> task_runner_;
@@ -125,16 +130,29 @@ TEST_F(LearningSessionImplTest, RegisteringTasksCreatesControllers) {
EXPECT_EQ(task_runners_.size(), 0u);
session_->RegisterTask(task_0_);
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_EQ(task_controllers_.size(), 1u);
EXPECT_EQ(task_runners_.size(), 1u);
EXPECT_EQ(task_runners_[0], task_runner_.get());
session_->RegisterTask(task_1_);
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_EQ(task_controllers_.size(), 2u);
EXPECT_EQ(task_runners_.size(), 2u);
EXPECT_EQ(task_runners_[1], task_runner_.get());
+
+ // Make sure controllers are being returned for the right tasks.
+ // Note: this test passes because LearningSessionImpl::GetController()
+ // returns a wrapper around a FakeLTC, instead of the FakeLTC itself. The
+ // wrapper internally built by LearningSessionImpl has a proper implementation
+ // of GetLearningTask(), whereas the FakeLTC does not.
+ std::unique_ptr<LearningTaskController> ltc_0 =
+ session_->GetController(task_0_.name);
+ EXPECT_EQ(ltc_0->GetLearningTask().name, task_0_.name);
+
+ std::unique_ptr<LearningTaskController> ltc_1 =
+ session_->GetController(task_1_.name);
+ EXPECT_EQ(ltc_1->GetLearningTask().name, task_1_.name);
}
TEST_F(LearningSessionImplTest, ExamplesAreForwardedToCorrectTask) {
@@ -160,7 +178,7 @@ TEST_F(LearningSessionImplTest, ExamplesAreForwardedToCorrectTask) {
ltc_1->CompleteObservation(
id, ObservationCompletion(example_1.target_value, example_1.weight));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_EQ(task_controllers_[0]->example_, example_0);
EXPECT_EQ(task_controllers_[1]->example_, example_1);
}
@@ -174,7 +192,7 @@ TEST_F(LearningSessionImplTest, ControllerLifetimeScopedToSession) {
// Destroy the session. |controller| should still be usable, though it won't
// forward requests anymore.
session_.reset();
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
// Should not crash.
controller->BeginObservation(base::UnguessableToken::Create(),
@@ -186,7 +204,7 @@ TEST_F(LearningSessionImplTest, FeatureProviderIsForwarded) {
bool flag = false;
session_->RegisterTask(
task_0_, base::SequenceBound<FakeFeatureProvider>(task_runner_, &flag));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
// Registering the task should create a FakeLearningTaskController, which will
// call AddFeatures on the fake FeatureProvider.
EXPECT_TRUE(flag);
@@ -197,18 +215,18 @@ TEST_F(LearningSessionImplTest, DestroyingControllerCancelsObservations) {
std::unique_ptr<LearningTaskController> controller =
session_->GetController(task_0_.name);
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
// Start an observation and verify that it starts.
base::UnguessableToken id = base::UnguessableToken::Create();
controller->BeginObservation(id, FeatureVector());
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_EQ(task_controllers_[0]->id_, id);
EXPECT_NE(task_controllers_[0]->cancelled_id_, id);
// Should result in cancelling the observation.
controller.reset();
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_EQ(task_controllers_[0]->cancelled_id_, id);
}
diff --git a/chromium/media/learning/impl/learning_task_controller_helper_unittest.cc b/chromium/media/learning/impl/learning_task_controller_helper_unittest.cc
index 606c7a7ece2..003cc0c95f6 100644
--- a/chromium/media/learning/impl/learning_task_controller_helper_unittest.cc
+++ b/chromium/media/learning/impl/learning_task_controller_helper_unittest.cc
@@ -7,7 +7,7 @@
#include <vector>
#include "base/bind.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/sequenced_task_runner_handle.h"
#include "media/learning/impl/learning_task_controller_helper.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -51,7 +51,7 @@ class LearningTaskControllerHelperTest : public testing::Test {
// To prevent a memory leak, reset the helper. This will post destruction
// of other objects, so RunUntilIdle().
helper_.reset();
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
void CreateClient(bool include_fp) {
@@ -60,7 +60,7 @@ class LearningTaskControllerHelperTest : public testing::Test {
if (include_fp) {
sb_fp = base::SequenceBound<FakeFeatureProvider>(task_runner_,
&fp_features_, &fp_cb_);
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
// TODO(liberato): make sure this works without a fp.
@@ -82,7 +82,7 @@ class LearningTaskControllerHelperTest : public testing::Test {
return helper_->pending_example_count_for_testing();
}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
scoped_refptr<base::SequencedTaskRunner> task_runner_;
@@ -126,7 +126,7 @@ TEST_F(LearningTaskControllerHelperTest, DropTargetValueWithoutFPWorks) {
helper_->BeginObservation(id_, example_.features);
EXPECT_EQ(pending_example_count(), 1u);
helper_->CancelObservation(id_);
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_FALSE(most_recent_example_);
EXPECT_EQ(pending_example_count(), 0u);
}
@@ -136,7 +136,7 @@ TEST_F(LearningTaskControllerHelperTest, AddTargetValueBeforeFP) {
CreateClient(true);
helper_->BeginObservation(id_, example_.features);
EXPECT_EQ(pending_example_count(), 1u);
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
// The feature provider should know about the example.
EXPECT_EQ(fp_features_, example_.features);
@@ -149,7 +149,7 @@ TEST_F(LearningTaskControllerHelperTest, AddTargetValueBeforeFP) {
// Add the features, and verify that they arrive at the AddExampleCB.
example_.features[0] = FeatureValue(456);
std::move(fp_cb_).Run(example_.features);
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_EQ(pending_example_count(), 0u);
EXPECT_TRUE(most_recent_example_);
EXPECT_EQ(*most_recent_example_, example_);
@@ -161,7 +161,7 @@ TEST_F(LearningTaskControllerHelperTest, DropTargetValueBeforeFP) {
CreateClient(true);
helper_->BeginObservation(id_, example_.features);
EXPECT_EQ(pending_example_count(), 1u);
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
// The feature provider should know about the example.
EXPECT_EQ(fp_features_, example_.features);
@@ -174,7 +174,7 @@ TEST_F(LearningTaskControllerHelperTest, DropTargetValueBeforeFP) {
// example was sent to us.
example_.features[0] = FeatureValue(456);
std::move(fp_cb_).Run(example_.features);
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_EQ(pending_example_count(), 0u);
EXPECT_FALSE(most_recent_example_);
}
@@ -184,7 +184,7 @@ TEST_F(LearningTaskControllerHelperTest, AddTargetValueAfterFP) {
CreateClient(true);
helper_->BeginObservation(id_, example_.features);
EXPECT_EQ(pending_example_count(), 1u);
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
// The feature provider should know about the example.
EXPECT_EQ(fp_features_, example_.features);
EXPECT_EQ(pending_example_count(), 1u);
@@ -192,7 +192,7 @@ TEST_F(LearningTaskControllerHelperTest, AddTargetValueAfterFP) {
// Add the features, and verify that the example isn't sent yet.
example_.features[0] = FeatureValue(456);
std::move(fp_cb_).Run(example_.features);
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_FALSE(most_recent_example_);
EXPECT_EQ(pending_example_count(), 1u);
@@ -210,7 +210,7 @@ TEST_F(LearningTaskControllerHelperTest, DropTargetValueAfterFP) {
CreateClient(true);
helper_->BeginObservation(id_, example_.features);
EXPECT_EQ(pending_example_count(), 1u);
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
// The feature provider should know about the example.
EXPECT_EQ(fp_features_, example_.features);
EXPECT_EQ(pending_example_count(), 1u);
@@ -220,14 +220,14 @@ TEST_F(LearningTaskControllerHelperTest, DropTargetValueAfterFP) {
// callback yet; we might send a TargetValue.
example_.features[0] = FeatureValue(456);
std::move(fp_cb_).Run(example_.features);
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_FALSE(most_recent_example_);
EXPECT_EQ(pending_example_count(), 1u);
// Cancel the observation, and verify that the pending example has been
// removed, and no example was sent to us.
helper_->CancelObservation(id_);
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_FALSE(most_recent_example_);
EXPECT_EQ(pending_example_count(), 0u);
}
diff --git a/chromium/media/learning/impl/learning_task_controller_impl.cc b/chromium/media/learning/impl/learning_task_controller_impl.cc
index 50a89482cdb..c9bb0f35365 100644
--- a/chromium/media/learning/impl/learning_task_controller_impl.cc
+++ b/chromium/media/learning/impl/learning_task_controller_impl.cc
@@ -75,6 +75,10 @@ void LearningTaskControllerImpl::CancelObservation(base::UnguessableToken id) {
helper_->CancelObservation(id);
}
+const LearningTask& LearningTaskControllerImpl::GetLearningTask() {
+ return task_;
+}
+
void LearningTaskControllerImpl::AddFinishedExample(LabelledExample example,
ukm::SourceId source_id) {
// Verify that we have a trainer and that we got the right number of features.
diff --git a/chromium/media/learning/impl/learning_task_controller_impl.h b/chromium/media/learning/impl/learning_task_controller_impl.h
index 06df120b045..ae011fde55b 100644
--- a/chromium/media/learning/impl/learning_task_controller_impl.h
+++ b/chromium/media/learning/impl/learning_task_controller_impl.h
@@ -51,6 +51,7 @@ class COMPONENT_EXPORT(LEARNING_IMPL) LearningTaskControllerImpl
void CompleteObservation(base::UnguessableToken id,
const ObservationCompletion& completion) override;
void CancelObservation(base::UnguessableToken id) override;
+ const LearningTask& GetLearningTask() override;
private:
// Add |example| to the training data, and process it.
diff --git a/chromium/media/learning/impl/learning_task_controller_impl_unittest.cc b/chromium/media/learning/impl/learning_task_controller_impl_unittest.cc
index 9daec3aeaf1..0faa166be69 100644
--- a/chromium/media/learning/impl/learning_task_controller_impl_unittest.cc
+++ b/chromium/media/learning/impl/learning_task_controller_impl_unittest.cc
@@ -7,7 +7,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/sequenced_task_runner_handle.h"
#include "media/learning/impl/distribution_reporter.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -112,7 +112,7 @@ class LearningTaskControllerImplTest : public testing::Test {
// To prevent a memory leak, reset the controller. This may post
// destruction of other objects, so RunUntilIdle().
controller_.reset();
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
void CreateController(SequenceBoundFeatureProvider feature_provider =
@@ -137,7 +137,7 @@ class LearningTaskControllerImplTest : public testing::Test {
id, ObservationCompletion(example.target_value, example.weight));
}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
// Number of models that we trained.
int num_models_ = 0;
@@ -208,7 +208,7 @@ TEST_F(LearningTaskControllerImplTest, FeatureProviderIsUsed) {
example.features.push_back(FeatureValue(123));
example.weight = 321u;
AddExample(example);
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_EQ(trainer_raw_->training_data()[0].features[0], FeatureValue(124));
EXPECT_EQ(trainer_raw_->training_data()[0].weight, example.weight);
}
diff --git a/chromium/media/learning/impl/lookup_table_trainer_unittest.cc b/chromium/media/learning/impl/lookup_table_trainer_unittest.cc
index 47618746617..fa0fc88f65c 100644
--- a/chromium/media/learning/impl/lookup_table_trainer_unittest.cc
+++ b/chromium/media/learning/impl/lookup_table_trainer_unittest.cc
@@ -6,7 +6,7 @@
#include "base/bind.h"
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace media {
@@ -23,11 +23,11 @@ class LookupTableTrainerTest : public testing::Test {
[](std::unique_ptr<Model>* model_out,
std::unique_ptr<Model> model) { *model_out = std::move(model); },
&model));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
return model;
}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
LookupTableTrainer trainer_;
LearningTask task_;
diff --git a/chromium/media/learning/impl/random_tree_trainer_unittest.cc b/chromium/media/learning/impl/random_tree_trainer_unittest.cc
index f9face03115..e289f06e07c 100644
--- a/chromium/media/learning/impl/random_tree_trainer_unittest.cc
+++ b/chromium/media/learning/impl/random_tree_trainer_unittest.cc
@@ -6,7 +6,7 @@
#include "base/bind.h"
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/learning/impl/test_random_number_generator.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -38,11 +38,11 @@ class RandomTreeTest : public testing::TestWithParam<LearningTask::Ordering> {
[](std::unique_ptr<Model>* model_out,
std::unique_ptr<Model> model) { *model_out = std::move(model); },
&model));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
return model;
}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
TestRandomNumberGenerator rng_;
RandomTreeTrainer trainer_;
diff --git a/chromium/media/learning/mojo/mojo_learning_task_controller_service_unittest.cc b/chromium/media/learning/mojo/mojo_learning_task_controller_service_unittest.cc
index 99e8a391269..b2a38ad25d5 100644
--- a/chromium/media/learning/mojo/mojo_learning_task_controller_service_unittest.cc
+++ b/chromium/media/learning/mojo/mojo_learning_task_controller_service_unittest.cc
@@ -8,7 +8,7 @@
#include "base/bind.h"
#include "base/macros.h"
#include "base/memory/ptr_util.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/thread.h"
#include "media/learning/mojo/mojo_learning_task_controller_service.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -36,6 +36,10 @@ class MojoLearningTaskControllerServiceTest : public ::testing::Test {
cancel_args_.id_ = id;
}
+ const LearningTask& GetLearningTask() override {
+ return LearningTask::Empty();
+ }
+
struct {
base::UnguessableToken id_;
FeatureVector features_;
@@ -72,7 +76,7 @@ class MojoLearningTaskControllerServiceTest : public ::testing::Test {
LearningTask task_;
// Mojo stuff.
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
FakeLearningTaskController* controller_raw_ = nullptr;
diff --git a/chromium/media/learning/mojo/public/cpp/mojo_learning_task_controller.cc b/chromium/media/learning/mojo/public/cpp/mojo_learning_task_controller.cc
index f4648b38bcb..7cd0cb19589 100644
--- a/chromium/media/learning/mojo/public/cpp/mojo_learning_task_controller.cc
+++ b/chromium/media/learning/mojo/public/cpp/mojo_learning_task_controller.cc
@@ -35,5 +35,9 @@ void MojoLearningTaskController::CancelObservation(base::UnguessableToken id) {
controller_ptr_->CancelObservation(id);
}
+const LearningTask& MojoLearningTaskController::GetLearningTask() {
+ return LearningTask::Empty();
+}
+
} // namespace learning
} // namespace media
diff --git a/chromium/media/learning/mojo/public/cpp/mojo_learning_task_controller.h b/chromium/media/learning/mojo/public/cpp/mojo_learning_task_controller.h
index 893d1f0890e..d6c9bed38f1 100644
--- a/chromium/media/learning/mojo/public/cpp/mojo_learning_task_controller.h
+++ b/chromium/media/learning/mojo/public/cpp/mojo_learning_task_controller.h
@@ -29,6 +29,7 @@ class COMPONENT_EXPORT(MEDIA_LEARNING_MOJO) MojoLearningTaskController
void CompleteObservation(base::UnguessableToken id,
const ObservationCompletion& completion) override;
void CancelObservation(base::UnguessableToken id) override;
+ const LearningTask& GetLearningTask() override;
private:
mojom::LearningTaskControllerPtr controller_ptr_;
diff --git a/chromium/media/learning/mojo/public/cpp/mojo_learning_task_controller_unittest.cc b/chromium/media/learning/mojo/public/cpp/mojo_learning_task_controller_unittest.cc
index 546aae171b4..b7af774dfbc 100644
--- a/chromium/media/learning/mojo/public/cpp/mojo_learning_task_controller_unittest.cc
+++ b/chromium/media/learning/mojo/public/cpp/mojo_learning_task_controller_unittest.cc
@@ -8,7 +8,7 @@
#include "base/bind.h"
#include "base/macros.h"
#include "base/memory/ptr_util.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/thread.h"
#include "media/learning/mojo/public/cpp/mojo_learning_task_controller.h"
#include "mojo/public/cpp/bindings/binding.h"
@@ -70,7 +70,7 @@ class MojoLearningTaskControllerTest : public ::testing::Test {
}
// Mojo stuff.
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
FakeMojoLearningTaskController fake_learning_controller_;
mojo::Binding<mojom::LearningTaskController> learning_controller_binding_;
@@ -83,7 +83,7 @@ TEST_F(MojoLearningTaskControllerTest, Begin) {
base::UnguessableToken id = base::UnguessableToken::Create();
FeatureVector features = {FeatureValue(123), FeatureValue(456)};
learning_controller_->BeginObservation(id, features);
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_EQ(id, fake_learning_controller_.begin_args_.id_);
EXPECT_EQ(features, fake_learning_controller_.begin_args_.features_);
}
@@ -92,7 +92,7 @@ TEST_F(MojoLearningTaskControllerTest, Complete) {
base::UnguessableToken id = base::UnguessableToken::Create();
ObservationCompletion completion(TargetValue(1234));
learning_controller_->CompleteObservation(id, completion);
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_EQ(id, fake_learning_controller_.complete_args_.id_);
EXPECT_EQ(completion.target_value,
fake_learning_controller_.complete_args_.completion_.target_value);
@@ -101,7 +101,7 @@ TEST_F(MojoLearningTaskControllerTest, Complete) {
TEST_F(MojoLearningTaskControllerTest, Cancel) {
base::UnguessableToken id = base::UnguessableToken::Create();
learning_controller_->CancelObservation(id);
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_EQ(id, fake_learning_controller_.cancel_args_.id_);
}
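
The ScopedTaskEnvironment -> TaskEnvironment renames in these unit tests track the base/test header and class rename; behavior is unchanged. A minimal sketch of the new spelling, with fixture and test names that are illustrative rather than taken from this patch:

    #include "base/bind.h"
    #include "base/location.h"
    #include "base/test/task_environment.h"
    #include "testing/gtest/include/gtest/gtest.h"

    class ExampleMojoTest : public ::testing::Test {
     protected:
      // Formerly: base::test::ScopedTaskEnvironment scoped_task_environment_;
      base::test::TaskEnvironment task_environment_;
    };

    TEST_F(ExampleMojoTest, RunsPostedWork) {
      bool ran = false;
      task_environment_.GetMainThreadTaskRunner()->PostTask(
          FROM_HERE, base::BindOnce([](bool* flag) { *flag = true; }, &ran));
      // Same call sites as before the rename.
      task_environment_.RunUntilIdle();
      EXPECT_TRUE(ran);
    }
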
diff --git a/chromium/media/learning/mojo/public/mojom/learning_types.typemap b/chromium/media/learning/mojo/public/mojom/learning_types.typemap
index beaf1467335..cb7d19c0c5c 100644
--- a/chromium/media/learning/mojo/public/mojom/learning_types.typemap
+++ b/chromium/media/learning/mojo/public/mojom/learning_types.typemap
@@ -13,8 +13,8 @@ public_deps = [
"//media/learning/common",
]
type_mappings = [
- "media.learning.mojom.LabelledExample=media::learning::LabelledExample",
- "media.learning.mojom.FeatureValue=media::learning::FeatureValue",
- "media.learning.mojom.TargetValue=media::learning::TargetValue",
- "media.learning.mojom.ObservationCompletion=media::learning::ObservationCompletion",
+ "media.learning.mojom.LabelledExample=::media::learning::LabelledExample",
+ "media.learning.mojom.FeatureValue=::media::learning::FeatureValue",
+ "media.learning.mojom.TargetValue=::media::learning::TargetValue",
+ "media.learning.mojom.ObservationCompletion=::media::learning::ObservationCompletion",
]
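
The typemap entries above gain a leading "::" so the C++ side is always resolved from the global namespace. A hypothetical illustration of the lookup problem the fully qualified form avoids (the gen namespace below is a stand-in for generated bindings code, not a real target):

    namespace media {
    namespace learning {
    struct LabelledExample {};  // the type the typemap refers to
    }  // namespace learning
    }  // namespace media

    namespace gen {        // stand-in for generated bindings code
    namespace media {      // a nested namespace that happens to reuse the name
    namespace learning {}
    }  // namespace media

    // Unqualified, "media::learning::LabelledExample" would find gen::media
    // first and fail to compile; the leading "::" always names the top-level
    // type regardless of what the enclosing namespaces contain.
    using Works = ::media::learning::LabelledExample;
    }  // namespace gen
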
diff --git a/chromium/media/media_options.gni b/chromium/media/media_options.gni
index d6b769e20f5..044068ebc10 100644
--- a/chromium/media/media_options.gni
+++ b/chromium/media/media_options.gni
@@ -61,7 +61,7 @@ declare_args() {
enable_mpeg_h_audio_demuxing = proprietary_codecs && is_chromecast
enable_mse_mpeg2ts_stream_parser =
- proprietary_codecs && (is_chromecast || use_fuzzing_engine)
+ proprietary_codecs && (is_chromecast || is_fuchsia || use_fuzzing_engine)
# Enable parsing for the 'cbcs' encryption scheme added by MPEG Common
# Encryption 3rd Edition (ISO/IEC 23001-7), published 02/15/2016.
@@ -136,16 +136,21 @@ declare_args() {
# Enables the use of library CDMs that implement the interface defined at
# media/cdm/api/content_decryption_module.h. If true, the actual library CDM
# will be hosted in the mojo CDM service running in the CDM (utility) process.
-enable_library_cdms = (is_linux && !is_chromecast) || is_mac || is_win
+# On Fuchsia, this is only enabled to build libclearkeycdm.so; the mojo CDM
+# service is not used.
+enable_library_cdms =
+ (is_linux && !is_chromecast) || is_mac || is_win || is_fuchsia
declare_args() {
# Experiment to enable mojo media services (e.g. "renderer", "cdm", see
# |mojo_media_services|). When enabled, selected mojo paths will be enabled in
# the media pipeline and corresponding services will be hosted in the selected
# remote process (e.g. "utility" process, see |mojo_media_host|).
+ # This is explicitly disabled for Fuchsia.
enable_mojo_media =
- is_android || is_chromeos || is_mac || is_win || enable_library_cdms ||
- (is_desktop_linux && use_vaapi) || (is_chromecast && !is_fuchsia)
+ !is_fuchsia &&
+ (is_android || is_chromeos || is_mac || is_win || enable_library_cdms ||
+ (is_desktop_linux && use_vaapi) || is_chromecast)
# Enable the TestMojoMediaClient to be used in mojo MediaService. This is for
# testing only and will override the default platform MojoMediaClient, if any.
diff --git a/chromium/media/midi/midi_input_port_android.cc b/chromium/media/midi/midi_input_port_android.cc
index b28df74f323..ee0155aac41 100644
--- a/chromium/media/midi/midi_input_port_android.cc
+++ b/chromium/media/midi/midi_input_port_android.cc
@@ -33,7 +33,6 @@ void MidiInputPortAndroid::Close() {
}
void MidiInputPortAndroid::OnData(JNIEnv* env,
- const JavaParamRef<jobject>& caller,
const JavaParamRef<jbyteArray>& data,
jint offset,
jint size,
diff --git a/chromium/media/midi/midi_input_port_android.h b/chromium/media/midi/midi_input_port_android.h
index f641703ad58..35e04effbf9 100644
--- a/chromium/media/midi/midi_input_port_android.h
+++ b/chromium/media/midi/midi_input_port_android.h
@@ -33,7 +33,6 @@ class MidiInputPortAndroid final {
// Called by the Java world.
void OnData(JNIEnv* env,
- const base::android::JavaParamRef<jobject>& caller,
const base::android::JavaParamRef<jbyteArray>& data,
jint offset,
jint size,
diff --git a/chromium/media/midi/midi_manager_android.cc b/chromium/media/midi/midi_manager_android.cc
index 8b9d147620e..a2326b8f265 100644
--- a/chromium/media/midi/midi_manager_android.cc
+++ b/chromium/media/midi/midi_manager_android.cc
@@ -118,7 +118,6 @@ void MidiManagerAndroid::OnReceivedData(MidiInputPortAndroid* port,
void MidiManagerAndroid::OnInitialized(
JNIEnv* env,
- const JavaParamRef<jobject>& caller,
const JavaParamRef<jobjectArray>& devices) {
for (auto raw_device : devices.ReadElements<jobject>()) {
AddDevice(std::make_unique<MidiDeviceAndroid>(env, raw_device, this));
@@ -129,9 +128,7 @@ void MidiManagerAndroid::OnInitialized(
base::Unretained(this), Result::OK));
}
-void MidiManagerAndroid::OnInitializationFailed(
- JNIEnv* env,
- const JavaParamRef<jobject>& caller) {
+void MidiManagerAndroid::OnInitializationFailed(JNIEnv* env) {
service()->task_service()->PostBoundTask(
TaskService::kDefaultRunnerId,
base::BindOnce(&MidiManagerAndroid::CompleteInitialization,
@@ -139,13 +136,11 @@ void MidiManagerAndroid::OnInitializationFailed(
}
void MidiManagerAndroid::OnAttached(JNIEnv* env,
- const JavaParamRef<jobject>& caller,
const JavaParamRef<jobject>& raw_device) {
AddDevice(std::make_unique<MidiDeviceAndroid>(env, raw_device, this));
}
void MidiManagerAndroid::OnDetached(JNIEnv* env,
- const JavaParamRef<jobject>& caller,
const JavaParamRef<jobject>& raw_device) {
for (auto& device : devices_) {
if (device->HasRawDevice(env, raw_device)) {
diff --git a/chromium/media/midi/midi_manager_android.h b/chromium/media/midi/midi_manager_android.h
index 7fd29e2ffe8..3f2b42d986d 100644
--- a/chromium/media/midi/midi_manager_android.h
+++ b/chromium/media/midi/midi_manager_android.h
@@ -48,16 +48,11 @@ class MidiManagerAndroid final : public MidiManager,
// Called from the Java world.
void OnInitialized(JNIEnv* env,
- const base::android::JavaParamRef<jobject>& caller,
const base::android::JavaParamRef<jobjectArray>& devices);
- void OnInitializationFailed(
- JNIEnv* env,
- const base::android::JavaParamRef<jobject>& caller);
+ void OnInitializationFailed(JNIEnv* env);
void OnAttached(JNIEnv* env,
- const base::android::JavaParamRef<jobject>& caller,
const base::android::JavaParamRef<jobject>& device);
void OnDetached(JNIEnv* env,
- const base::android::JavaParamRef<jobject>& caller,
const base::android::JavaParamRef<jobject>& device);
private:
diff --git a/chromium/media/midi/midi_manager_unittest.cc b/chromium/media/midi/midi_manager_unittest.cc
index d4bd3145e3d..4a3e4abb6e2 100644
--- a/chromium/media/midi/midi_manager_unittest.cc
+++ b/chromium/media/midi/midi_manager_unittest.cc
@@ -16,7 +16,7 @@
#include "base/memory/weak_ptr.h"
#include "base/run_loop.h"
#include "base/system/system_monitor.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "build/build_config.h"
#include "media/midi/midi_service.h"
#include "media/midi/task_service.h"
@@ -221,7 +221,7 @@ class MidiManagerTest : public ::testing::Test {
base::WeakPtr<FakeMidiManagerFactory> factory() { return factory_; }
private:
- base::test::ScopedTaskEnvironment env_;
+ base::test::TaskEnvironment env_;
base::WeakPtr<FakeMidiManagerFactory> factory_;
std::unique_ptr<MidiService> service_;
@@ -354,7 +354,7 @@ class PlatformMidiManagerTest : public ::testing::Test {
// SystemMonitor is needed on Windows.
base::SystemMonitor system_monitor;
- base::test::ScopedTaskEnvironment env_;
+ base::test::TaskEnvironment env_;
std::unique_ptr<FakeMidiManagerClient> client_;
std::unique_ptr<MidiService> service_;
diff --git a/chromium/media/midi/midi_message_queue.h b/chromium/media/midi/midi_message_queue.h
index 804164f6677..c351b33fc4c 100644
--- a/chromium/media/midi/midi_message_queue.h
+++ b/chromium/media/midi/midi_message_queue.h
@@ -8,9 +8,9 @@
#include <stddef.h>
#include <stdint.h>
-#include <deque>
#include <vector>
+#include "base/containers/circular_deque.h"
#include "base/macros.h"
#include "media/midi/midi_export.h"
@@ -64,14 +64,7 @@ class MIDI_EXPORT MidiMessageQueue {
void Get(std::vector<uint8_t>* message);
private:
- // While we should be using base::circular_deque here, the Chrome OS version
- // hasn't been upreved to contain that implementation yet. Since for the
- // purposes of this class the semantics of std::deque and
- // base::circular_deque are the same, we can use the former.
- // Once Chrome OS has upreved to a version of libchrome which includes
- // base::circular_deque, we can switch back to it.
- // TODO(pmalani): http://crbug.com/787643 tracking this.
- std::deque<uint8_t> queue_;
+ base::circular_deque<uint8_t> queue_;
std::vector<uint8_t> next_message_;
const bool allow_running_status_;
DISALLOW_COPY_AND_ASSIGN(MidiMessageQueue);
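
With libchrome on Chrome OS now providing base::circular_deque, the queue switches back from std::deque; the two have the same deque semantics here. A small sketch of the container used as a byte FIFO (the chunking helper is illustrative, not MidiMessageQueue's actual parsing logic):

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    #include "base/containers/circular_deque.h"

    // Pops up to |chunk_size| bytes off the front of |queue|.
    std::vector<uint8_t> TakeChunk(base::circular_deque<uint8_t>* queue,
                                   size_t chunk_size) {
      std::vector<uint8_t> out;
      while (!queue->empty() && out.size() < chunk_size) {
        out.push_back(queue->front());
        queue->pop_front();
      }
      return out;
    }
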
diff --git a/chromium/media/midi/midi_service.mojom b/chromium/media/midi/midi_service.mojom
index ab49b735a0a..bc32b820332 100644
--- a/chromium/media/midi/midi_service.mojom
+++ b/chromium/media/midi/midi_service.mojom
@@ -60,7 +60,8 @@ interface MidiSessionClient {
// Interface used by the renderer to start a MIDI session in the browser.
interface MidiSessionProvider {
// Start session to access MIDI hardware.
- StartSession(MidiSession& request, MidiSessionClient client);
+ StartSession(pending_receiver<MidiSession> receiver,
+ pending_remote<MidiSessionClient> client);
};
// Represents an active MIDI session.
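
StartSession now takes the newer pending_receiver/pending_remote binding types instead of an interface request plus a plain interface. A hedged caller-side sketch, assuming the mojom module maps to the midi::mojom namespace and that the caller owns a MidiSessionClient implementation (the names below are placeholders, not code from this patch):

    #include "media/midi/midi_service.mojom.h"
    #include "mojo/public/cpp/bindings/receiver.h"
    #include "mojo/public/cpp/bindings/remote.h"

    // |provider| is already bound; |client_receiver| wraps the caller-owned
    // MidiSessionClient implementation.
    void StartMidiSession(
        midi::mojom::MidiSessionProvider* provider,
        mojo::Remote<midi::mojom::MidiSession>* session,
        mojo::Receiver<midi::mojom::MidiSessionClient>* client_receiver) {
      provider->StartSession(session->BindNewPipeAndPassReceiver(),
                             client_receiver->BindNewPipeAndPassRemote());
    }
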
diff --git a/chromium/media/midi/task_service.cc b/chromium/media/midi/task_service.cc
index 4ba79ddbece..8fc087b15e0 100644
--- a/chromium/media/midi/task_service.cc
+++ b/chromium/media/midi/task_service.cc
@@ -7,7 +7,7 @@
#include <limits>
#include "base/bind.h"
-#include "base/message_loop/message_loop.h"
+#include "base/message_loop/message_pump_type.h"
#include "base/strings/stringprintf.h"
#include "base/threading/thread_restrictions.h"
#include "base/threading/thread_task_runner_handle.h"
@@ -149,7 +149,7 @@ scoped_refptr<base::SingleThreadTaskRunner> TaskService::GetTaskRunner(
#if defined(OS_WIN)
threads_[thread]->init_com_with_mta(true);
#elif defined(OS_MACOSX)
- options.message_loop_type = base::MessageLoop::TYPE_UI;
+ options.message_pump_type = base::MessagePumpType::UI;
#endif
threads_[thread]->StartWithOptions(options);
}
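
The TYPE_UI constant moves with the MessageLoop split: thread options now name a message pump type directly. A minimal sketch of the new spelling when starting a UI-pumped thread (the helper and thread are arbitrary):

    #include "base/message_loop/message_pump_type.h"
    #include "base/threading/thread.h"

    void StartUiPumpedThread(base::Thread* thread) {
      base::Thread::Options options;
      // Formerly: options.message_loop_type = base::MessageLoop::TYPE_UI;
      options.message_pump_type = base::MessagePumpType::UI;
      thread->StartWithOptions(options);
    }
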
diff --git a/chromium/media/midi/usb_midi_device_android.cc b/chromium/media/midi/usb_midi_device_android.cc
index ab6c108634a..f089abf6252 100644
--- a/chromium/media/midi/usb_midi_device_android.cc
+++ b/chromium/media/midi/usb_midi_device_android.cc
@@ -63,7 +63,6 @@ void UsbMidiDeviceAndroid::Send(int endpoint_number,
}
void UsbMidiDeviceAndroid::OnData(JNIEnv* env,
- const JavaParamRef<jobject>& caller,
jint endpoint_number,
const JavaParamRef<jbyteArray>& data) {
std::vector<uint8_t> bytes;
diff --git a/chromium/media/midi/usb_midi_device_android.h b/chromium/media/midi/usb_midi_device_android.h
index 85c37941dac..ab5321319ae 100644
--- a/chromium/media/midi/usb_midi_device_android.h
+++ b/chromium/media/midi/usb_midi_device_android.h
@@ -36,7 +36,6 @@ class USB_MIDI_EXPORT UsbMidiDeviceAndroid : public UsbMidiDevice {
// Called by the Java world.
void OnData(JNIEnv* env,
- const base::android::JavaParamRef<jobject>& caller,
jint endpoint_number,
const base::android::JavaParamRef<jbyteArray>& data);
diff --git a/chromium/media/midi/usb_midi_device_factory_android.cc b/chromium/media/midi/usb_midi_device_factory_android.cc
index 5ba6d6fb9b8..99592df6fdc 100644
--- a/chromium/media/midi/usb_midi_device_factory_android.cc
+++ b/chromium/media/midi/usb_midi_device_factory_android.cc
@@ -53,7 +53,6 @@ void UsbMidiDeviceFactoryAndroid::EnumerateDevices(
// Called from the Java world.
void UsbMidiDeviceFactoryAndroid::OnUsbMidiDeviceRequestDone(
JNIEnv* env,
- const JavaParamRef<jobject>& caller,
const JavaParamRef<jobjectArray>& devices) {
UsbMidiDevice::Devices devices_to_pass;
for (auto raw_device : devices.ReadElements<jobject>()) {
@@ -67,7 +66,6 @@ void UsbMidiDeviceFactoryAndroid::OnUsbMidiDeviceRequestDone(
// Called from the Java world.
void UsbMidiDeviceFactoryAndroid::OnUsbMidiDeviceAttached(
JNIEnv* env,
- const JavaParamRef<jobject>& caller,
const JavaParamRef<jobject>& device) {
delegate_->OnDeviceAttached(
std::make_unique<UsbMidiDeviceAndroid>(device, delegate_));
@@ -76,7 +74,6 @@ void UsbMidiDeviceFactoryAndroid::OnUsbMidiDeviceAttached(
// Called from the Java world.
void UsbMidiDeviceFactoryAndroid::OnUsbMidiDeviceDetached(
JNIEnv* env,
- const JavaParamRef<jobject>& caller,
jint index) {
delegate_->OnDeviceDetached(index);
}
diff --git a/chromium/media/midi/usb_midi_device_factory_android.h b/chromium/media/midi/usb_midi_device_factory_android.h
index b92c1202a62..d0dae559ac7 100644
--- a/chromium/media/midi/usb_midi_device_factory_android.h
+++ b/chromium/media/midi/usb_midi_device_factory_android.h
@@ -30,15 +30,12 @@ class USB_MIDI_EXPORT UsbMidiDeviceFactoryAndroid
void OnUsbMidiDeviceRequestDone(
JNIEnv* env,
- const base::android::JavaParamRef<jobject>& caller,
const base::android::JavaParamRef<jobjectArray>& devices);
void OnUsbMidiDeviceAttached(
JNIEnv* env,
- const base::android::JavaParamRef<jobject>& caller,
const base::android::JavaParamRef<jobject>& device);
void OnUsbMidiDeviceDetached(
JNIEnv* env,
- const base::android::JavaParamRef<jobject>& caller,
jint index);
private:
diff --git a/chromium/media/mojo/BUILD.gn b/chromium/media/mojo/BUILD.gn
index 740d816f315..4401902c5b4 100644
--- a/chromium/media/mojo/BUILD.gn
+++ b/chromium/media/mojo/BUILD.gn
@@ -69,8 +69,8 @@ source_set("test_support") {
"//media/mojo/clients",
"//media/mojo/common",
"//media/mojo/common:mojo_shared_buffer_video_frame",
- "//media/mojo/interfaces",
- "//media/mojo/interfaces:test_interfaces",
+ "//media/mojo/mojom",
+ "//media/mojo/mojom:test_interfaces",
"//media/mojo/services",
]
}
@@ -82,7 +82,7 @@ source_set("unit_tests") {
"//media/learning/mojo:unit_tests",
"//media/mojo/clients:unit_tests",
"//media/mojo/common:unit_tests",
- "//media/mojo/interfaces:unit_tests",
+ "//media/mojo/mojom:unit_tests",
"//media/mojo/services:unit_tests",
"//media/mojo/test:unit_tests",
]
diff --git a/chromium/media/mojo/clients/BUILD.gn b/chromium/media/mojo/clients/BUILD.gn
index e5d488f15e4..cc317fbc850 100644
--- a/chromium/media/mojo/clients/BUILD.gn
+++ b/chromium/media/mojo/clients/BUILD.gn
@@ -67,7 +67,7 @@ jumbo_source_set("clients") {
"//base",
"//media",
"//media/mojo:buildflags",
- "//media/mojo/interfaces",
+ "//media/mojo/mojom",
"//mojo/public/cpp/bindings",
"//mojo/public/cpp/system",
"//services/service_manager/public/mojom",
@@ -112,7 +112,7 @@ source_set("unit_tests") {
sources += [ "mojo_android_overlay_unittest.cc" ]
deps += [
- "//media/mojo/interfaces:test_interfaces",
+ "//media/mojo/mojom:test_interfaces",
"//ui/gfx:test_support",
"//ui/gl",
]
diff --git a/chromium/media/mojo/clients/mojo_android_overlay.h b/chromium/media/mojo/clients/mojo_android_overlay.h
index 27065a52fe8..05aceb144cb 100644
--- a/chromium/media/mojo/clients/mojo_android_overlay.h
+++ b/chromium/media/mojo/clients/mojo_android_overlay.h
@@ -8,7 +8,7 @@
#include "base/macros.h"
#include "base/unguessable_token.h"
#include "media/base/android/android_overlay.h"
-#include "media/mojo/interfaces/android_overlay.mojom.h"
+#include "media/mojo/mojom/android_overlay.mojom.h"
#include "mojo/public/cpp/bindings/binding.h"
namespace media {
diff --git a/chromium/media/mojo/clients/mojo_android_overlay_unittest.cc b/chromium/media/mojo/clients/mojo_android_overlay_unittest.cc
index 744f401f721..c56ab5dfe16 100644
--- a/chromium/media/mojo/clients/mojo_android_overlay_unittest.cc
+++ b/chromium/media/mojo/clients/mojo_android_overlay_unittest.cc
@@ -9,7 +9,7 @@
#include "base/macros.h"
#include "base/memory/ptr_util.h"
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "gpu/ipc/common/gpu_surface_tracker.h"
#include "media/base/mock_filters.h"
#include "media/mojo/clients/mojo_android_overlay.h"
@@ -155,7 +155,7 @@ class MojoAndroidOverlayTest : public ::testing::Test {
}
// Mojo stuff.
- base::test::ScopedTaskEnvironment scoped_task_environment;
+ base::test::TaskEnvironment task_environment;
// The mock provider that |overlay_client_| will talk to.
// |interface_provider_| will bind it.
diff --git a/chromium/media/mojo/clients/mojo_audio_decoder.h b/chromium/media/mojo/clients/mojo_audio_decoder.h
index 5095e798629..5e88c808c7b 100644
--- a/chromium/media/mojo/clients/mojo_audio_decoder.h
+++ b/chromium/media/mojo/clients/mojo_audio_decoder.h
@@ -10,8 +10,8 @@
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "media/base/audio_decoder.h"
-#include "media/mojo/interfaces/audio_decoder.mojom.h"
-#include "media/mojo/interfaces/media_types.mojom.h"
+#include "media/mojo/mojom/audio_decoder.mojom.h"
+#include "media/mojo/mojom/media_types.mojom.h"
#include "mojo/public/cpp/bindings/associated_binding.h"
#include "mojo/public/cpp/bindings/binding.h"
diff --git a/chromium/media/mojo/clients/mojo_audio_decoder_unittest.cc b/chromium/media/mojo/clients/mojo_audio_decoder_unittest.cc
index f3da2870aa9..7be1a1d58d0 100644
--- a/chromium/media/mojo/clients/mojo_audio_decoder_unittest.cc
+++ b/chromium/media/mojo/clients/mojo_audio_decoder_unittest.cc
@@ -10,7 +10,7 @@
#include "base/run_loop.h"
#include "base/single_thread_task_runner.h"
#include "base/test/gmock_callback_support.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/thread.h"
#include "media/base/audio_decoder_config.h"
#include "media/base/audio_timestamp_helper.h"
@@ -20,7 +20,7 @@
#include "media/base/test_helpers.h"
#include "media/base/waiting.h"
#include "media/mojo/clients/mojo_audio_decoder.h"
-#include "media/mojo/interfaces/audio_decoder.mojom.h"
+#include "media/mojo/mojom/audio_decoder.mojom.h"
#include "media/mojo/services/mojo_audio_decoder_service.h"
#include "media/mojo/services/mojo_cdm_service_context.h"
#include "mojo/public/cpp/bindings/interface_request.h"
@@ -66,7 +66,7 @@ class MojoAudioDecoderTest : public ::testing::Test {
base::Unretained(this),
base::Passed(mojo::MakeRequest(&remote_audio_decoder))));
mojo_audio_decoder_.reset(
- new MojoAudioDecoder(scoped_task_environment_.GetMainThreadTaskRunner(),
+ new MojoAudioDecoder(task_environment_.GetMainThreadTaskRunner(),
std::move(remote_audio_decoder)));
}
@@ -219,7 +219,7 @@ class MojoAudioDecoderTest : public ::testing::Test {
RunLoop();
}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
std::unique_ptr<base::RunLoop> run_loop_;
// The MojoAudioDecoder that we are testing.
diff --git a/chromium/media/mojo/clients/mojo_cdm.cc b/chromium/media/mojo/clients/mojo_cdm.cc
index fd843c076cb..da2a4e470fc 100644
--- a/chromium/media/mojo/clients/mojo_cdm.cc
+++ b/chromium/media/mojo/clients/mojo_cdm.cc
@@ -19,8 +19,8 @@
#include "media/base/cdm_promise.h"
#include "media/mojo/clients/mojo_decryptor.h"
#include "media/mojo/common/media_type_converters.h"
-#include "media/mojo/interfaces/decryptor.mojom.h"
-#include "media/mojo/interfaces/interface_factory.mojom.h"
+#include "media/mojo/mojom/decryptor.mojom.h"
+#include "media/mojo/mojom/interface_factory.mojom.h"
#include "services/service_manager/public/cpp/connect.h"
#include "services/service_manager/public/mojom/interface_provider.mojom.h"
#include "url/origin.h"
diff --git a/chromium/media/mojo/clients/mojo_cdm.h b/chromium/media/mojo/clients/mojo_cdm.h
index 9c097cbbf50..a9f8fecac23 100644
--- a/chromium/media/mojo/clients/mojo_cdm.h
+++ b/chromium/media/mojo/clients/mojo_cdm.h
@@ -20,7 +20,7 @@
#include "media/base/cdm_promise_adapter.h"
#include "media/base/cdm_session_tracker.h"
#include "media/base/content_decryption_module.h"
-#include "media/mojo/interfaces/content_decryption_module.mojom.h"
+#include "media/mojo/mojom/content_decryption_module.mojom.h"
#include "mojo/public/cpp/bindings/associated_binding.h"
#include "mojo/public/cpp/bindings/binding.h"
diff --git a/chromium/media/mojo/clients/mojo_cdm_factory.cc b/chromium/media/mojo/clients/mojo_cdm_factory.cc
index d1103f5ac3d..93db446e523 100644
--- a/chromium/media/mojo/clients/mojo_cdm_factory.cc
+++ b/chromium/media/mojo/clients/mojo_cdm_factory.cc
@@ -13,7 +13,7 @@
#include "media/cdm/aes_decryptor.h"
#include "media/mojo/buildflags.h"
#include "media/mojo/clients/mojo_cdm.h"
-#include "media/mojo/interfaces/interface_factory.mojom.h"
+#include "media/mojo/mojom/interface_factory.mojom.h"
#include "mojo/public/cpp/bindings/interface_request.h"
#include "url/origin.h"
diff --git a/chromium/media/mojo/clients/mojo_cdm_unittest.cc b/chromium/media/mojo/clients/mojo_cdm_unittest.cc
index 72348ac5438..5ad59778d1e 100644
--- a/chromium/media/mojo/clients/mojo_cdm_unittest.cc
+++ b/chromium/media/mojo/clients/mojo_cdm_unittest.cc
@@ -17,7 +17,7 @@
#include "media/base/mock_filters.h"
#include "media/cdm/default_cdm_factory.h"
#include "media/mojo/clients/mojo_cdm.h"
-#include "media/mojo/interfaces/content_decryption_module.mojom.h"
+#include "media/mojo/mojom/content_decryption_module.mojom.h"
#include "media/mojo/services/mojo_cdm_service.h"
#include "media/mojo/services/mojo_cdm_service_context.h"
#include "mojo/public/cpp/bindings/interface_request.h"
diff --git a/chromium/media/mojo/clients/mojo_decoder_factory.cc b/chromium/media/mojo/clients/mojo_decoder_factory.cc
index a435f3f81fe..74869f03384 100644
--- a/chromium/media/mojo/clients/mojo_decoder_factory.cc
+++ b/chromium/media/mojo/clients/mojo_decoder_factory.cc
@@ -13,8 +13,8 @@
#include "media/mojo/buildflags.h"
#include "media/mojo/clients/mojo_audio_decoder.h"
#include "media/mojo/clients/mojo_video_decoder.h"
-#include "media/mojo/interfaces/audio_decoder.mojom.h"
-#include "media/mojo/interfaces/interface_factory.mojom.h"
+#include "media/mojo/mojom/audio_decoder.mojom.h"
+#include "media/mojo/mojom/interface_factory.mojom.h"
#include "mojo/public/cpp/bindings/interface_request.h"
namespace media {
diff --git a/chromium/media/mojo/clients/mojo_decryptor.cc b/chromium/media/mojo/clients/mojo_decryptor.cc
index 24eedcb5d87..eb06e818f74 100644
--- a/chromium/media/mojo/clients/mojo_decryptor.cc
+++ b/chromium/media/mojo/clients/mojo_decryptor.cc
@@ -17,7 +17,7 @@
#include "media/mojo/common/media_type_converters.h"
#include "media/mojo/common/mojo_decoder_buffer_converter.h"
#include "media/mojo/common/mojo_shared_buffer_video_frame.h"
-#include "media/mojo/interfaces/decryptor.mojom.h"
+#include "media/mojo/mojom/decryptor.mojom.h"
#include "mojo/public/cpp/bindings/callback_helpers.h"
#include "services/service_manager/public/cpp/connect.h"
diff --git a/chromium/media/mojo/clients/mojo_decryptor.h b/chromium/media/mojo/clients/mojo_decryptor.h
index 20da1d9edb9..a8cf62bd6e1 100644
--- a/chromium/media/mojo/clients/mojo_decryptor.h
+++ b/chromium/media/mojo/clients/mojo_decryptor.h
@@ -12,7 +12,7 @@
#include "base/memory/weak_ptr.h"
#include "base/threading/thread_checker.h"
#include "media/base/decryptor.h"
-#include "media/mojo/interfaces/decryptor.mojom.h"
+#include "media/mojo/mojom/decryptor.mojom.h"
#include "mojo/public/cpp/bindings/binding.h"
namespace media {
diff --git a/chromium/media/mojo/clients/mojo_decryptor_unittest.cc b/chromium/media/mojo/clients/mojo_decryptor_unittest.cc
index 9e0c010471d..0980b1199b1 100644
--- a/chromium/media/mojo/clients/mojo_decryptor_unittest.cc
+++ b/chromium/media/mojo/clients/mojo_decryptor_unittest.cc
@@ -18,7 +18,7 @@
#include "media/base/video_frame.h"
#include "media/mojo/clients/mojo_decryptor.h"
#include "media/mojo/common/mojo_shared_buffer_video_frame.h"
-#include "media/mojo/interfaces/decryptor.mojom.h"
+#include "media/mojo/mojom/decryptor.mojom.h"
#include "media/mojo/services/mojo_decryptor_service.h"
#include "mojo/public/cpp/bindings/binding.h"
#include "testing/gmock/include/gmock/gmock.h"
diff --git a/chromium/media/mojo/clients/mojo_demuxer_stream_impl.h b/chromium/media/mojo/clients/mojo_demuxer_stream_impl.h
index c30325b56db..e25d9d83d3c 100644
--- a/chromium/media/mojo/clients/mojo_demuxer_stream_impl.h
+++ b/chromium/media/mojo/clients/mojo_demuxer_stream_impl.h
@@ -11,7 +11,7 @@
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
#include "media/base/demuxer_stream.h"
-#include "media/mojo/interfaces/demuxer_stream.mojom.h"
+#include "media/mojo/mojom/demuxer_stream.mojom.h"
#include "mojo/public/cpp/bindings/binding.h"
namespace media {
diff --git a/chromium/media/mojo/clients/mojo_media_log_service.h b/chromium/media/mojo/clients/mojo_media_log_service.h
index 80d2a3ab165..1f107b73f8a 100644
--- a/chromium/media/mojo/clients/mojo_media_log_service.h
+++ b/chromium/media/mojo/clients/mojo_media_log_service.h
@@ -9,7 +9,7 @@
#include "base/macros.h"
#include "media/base/media_log.h"
-#include "media/mojo/interfaces/media_log.mojom.h"
+#include "media/mojo/mojom/media_log.mojom.h"
namespace media {
diff --git a/chromium/media/mojo/clients/mojo_renderer.cc b/chromium/media/mojo/clients/mojo_renderer.cc
index 66e74533804..c3414f55dff 100644
--- a/chromium/media/mojo/clients/mojo_renderer.cc
+++ b/chromium/media/mojo/clients/mojo_renderer.cc
@@ -115,14 +115,15 @@ void MojoRenderer::InitializeRendererFromUrl(media::RendererClient* client) {
mojom::RendererClientAssociatedPtrInfo client_ptr_info;
client_binding_.Bind(mojo::MakeRequest(&client_ptr_info));
- MediaUrlParams url_params = media_resource_->GetMediaUrlParams();
+ const MediaUrlParams& url_params = media_resource_->GetMediaUrlParams();
// Using base::Unretained(this) is safe because |this| owns
// |remote_renderer_|, and the callback won't be dispatched if
// |remote_renderer_| is destroyed.
mojom::MediaUrlParamsPtr media_url_params = mojom::MediaUrlParams::New(
url_params.media_url, url_params.site_for_cookies,
- url_params.allow_credentials, url_params.is_hls);
+ url_params.top_frame_origin, url_params.allow_credentials,
+ url_params.is_hls);
remote_renderer_->Initialize(
std::move(client_ptr_info), base::nullopt, std::move(media_url_params),
base::Bind(&MojoRenderer::OnInitialized, base::Unretained(this), client));
diff --git a/chromium/media/mojo/clients/mojo_renderer.h b/chromium/media/mojo/clients/mojo_renderer.h
index 99af3159acf..44e82fd5035 100644
--- a/chromium/media/mojo/clients/mojo_renderer.h
+++ b/chromium/media/mojo/clients/mojo_renderer.h
@@ -17,7 +17,7 @@
#include "media/base/demuxer_stream.h"
#include "media/base/renderer.h"
#include "media/base/time_delta_interpolator.h"
-#include "media/mojo/interfaces/renderer.mojom.h"
+#include "media/mojo/mojom/renderer.mojom.h"
#include "mojo/public/cpp/bindings/associated_binding.h"
namespace base {
diff --git a/chromium/media/mojo/clients/mojo_renderer_factory.cc b/chromium/media/mojo/clients/mojo_renderer_factory.cc
index f0e95159bf0..9121d611b6c 100644
--- a/chromium/media/mojo/clients/mojo_renderer_factory.cc
+++ b/chromium/media/mojo/clients/mojo_renderer_factory.cc
@@ -9,7 +9,7 @@
#include "base/single_thread_task_runner.h"
#include "build/build_config.h"
#include "media/mojo/clients/mojo_renderer.h"
-#include "media/mojo/interfaces/renderer_extensions.mojom.h"
+#include "media/mojo/mojom/renderer_extensions.mojom.h"
#include "media/renderers/decrypting_renderer.h"
#include "media/renderers/video_overlay_factory.h"
#include "mojo/public/cpp/bindings/interface_request.h"
@@ -46,6 +46,24 @@ std::unique_ptr<Renderer> MojoRendererFactory::CreateRenderer(
std::move(renderer_ptr));
}
+#if BUILDFLAG(ENABLE_CAST_RENDERER)
+std::unique_ptr<MojoRenderer> MojoRendererFactory::CreateCastRenderer(
+ const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner,
+ VideoRendererSink* video_renderer_sink) {
+ DCHECK(interface_factory_);
+
+ auto overlay_factory = std::make_unique<VideoOverlayFactory>();
+
+ mojom::RendererPtr renderer_ptr;
+ interface_factory_->CreateCastRenderer(overlay_factory->overlay_plane_id(),
+ mojo::MakeRequest(&renderer_ptr));
+
+ return std::make_unique<MojoRenderer>(
+ media_task_runner, std::move(overlay_factory), video_renderer_sink,
+ std::move(renderer_ptr));
+}
+#endif // BUILDFLAG(ENABLE_CAST_RENDERER)
+
#if defined(OS_ANDROID)
std::unique_ptr<MojoRenderer> MojoRendererFactory::CreateFlingingRenderer(
const std::string& presentation_id,
diff --git a/chromium/media/mojo/clients/mojo_renderer_factory.h b/chromium/media/mojo/clients/mojo_renderer_factory.h
index f4f08551af7..75e44283e5d 100644
--- a/chromium/media/mojo/clients/mojo_renderer_factory.h
+++ b/chromium/media/mojo/clients/mojo_renderer_factory.h
@@ -11,8 +11,9 @@
#include "base/macros.h"
#include "build/build_config.h"
#include "media/base/renderer_factory.h"
-#include "media/mojo/interfaces/interface_factory.mojom.h"
-#include "media/mojo/interfaces/renderer.mojom.h"
+#include "media/mojo/buildflags.h"
+#include "media/mojo/mojom/interface_factory.mojom.h"
+#include "media/mojo/mojom/renderer.mojom.h"
namespace service_manager {
class InterfaceProvider;
@@ -47,6 +48,12 @@ class MojoRendererFactory : public RendererFactory {
const RequestOverlayInfoCB& request_overlay_info_cb,
const gfx::ColorSpace& target_color_space) final;
+#if BUILDFLAG(ENABLE_CAST_RENDERER)
+ std::unique_ptr<MojoRenderer> CreateCastRenderer(
+ const scoped_refptr<base::SingleThreadTaskRunner>& media_task_runner,
+ VideoRendererSink* video_renderer_sink);
+#endif // BUILDFLAG(ENABLE_CAST_RENDERER)
+
#if defined(OS_ANDROID)
std::unique_ptr<MojoRenderer> CreateFlingingRenderer(
const std::string& presentation_id,
diff --git a/chromium/media/mojo/clients/mojo_renderer_unittest.cc b/chromium/media/mojo/clients/mojo_renderer_unittest.cc
index d4f1b515dba..b947a13dc43 100644
--- a/chromium/media/mojo/clients/mojo_renderer_unittest.cc
+++ b/chromium/media/mojo/clients/mojo_renderer_unittest.cc
@@ -20,8 +20,8 @@
#include "media/cdm/default_cdm_factory.h"
#include "media/mojo/clients/mojo_renderer.h"
#include "media/mojo/common/media_type_converters.h"
-#include "media/mojo/interfaces/content_decryption_module.mojom.h"
-#include "media/mojo/interfaces/renderer.mojom.h"
+#include "media/mojo/mojom/content_decryption_module.mojom.h"
+#include "media/mojo/mojom/renderer.mojom.h"
#include "media/mojo/services/mojo_cdm_service.h"
#include "media/mojo/services/mojo_cdm_service_context.h"
#include "media/mojo/services/mojo_renderer_service.h"
diff --git a/chromium/media/mojo/clients/mojo_video_decoder.cc b/chromium/media/mojo/clients/mojo_video_decoder.cc
index b92455641b5..15c76d59028 100644
--- a/chromium/media/mojo/clients/mojo_video_decoder.cc
+++ b/chromium/media/mojo/clients/mojo_video_decoder.cc
@@ -24,7 +24,7 @@
#include "media/base/video_frame.h"
#include "media/mojo/common/media_type_converters.h"
#include "media/mojo/common/mojo_decoder_buffer_converter.h"
-#include "media/mojo/interfaces/media_types.mojom.h"
+#include "media/mojo/mojom/media_types.mojom.h"
#include "media/video/gpu_video_accelerator_factories.h"
#include "media/video/video_decode_accelerator.h"
#include "mojo/public/cpp/bindings/interface_request.h"
@@ -118,6 +118,7 @@ MojoVideoDecoder::MojoVideoDecoder(
: task_runner_(task_runner),
remote_decoder_info_(remote_decoder.PassInterface()),
gpu_factories_(gpu_factories),
+ timestamps_(128),
writer_capacity_(
GetDefaultDecoderBufferConverterCapacity(DemuxerStream::VIDEO)),
client_binding_(this),
@@ -223,11 +224,9 @@ void MojoVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
return;
}
- int64_t timestamp = 0ll;
if (!buffer->end_of_stream()) {
- timestamp = buffer->timestamp().InMilliseconds();
- TRACE_EVENT_NESTABLE_ASYNC_BEGIN1("media", "MojoVideoDecoder::Decode",
- timestamp, "timestamp", timestamp);
+ timestamps_.Put(buffer->timestamp().InMilliseconds(),
+ base::TimeTicks::Now());
}
mojom::DecoderBufferPtr mojo_buffer =
@@ -241,10 +240,9 @@ void MojoVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
uint64_t decode_id = decode_counter_++;
pending_decodes_[decode_id] = std::move(bound_decode_cb);
- remote_decoder_->Decode(
- std::move(mojo_buffer),
- base::Bind(&MojoVideoDecoder::OnDecodeDone, base::Unretained(this),
- decode_id, timestamp));
+ remote_decoder_->Decode(std::move(mojo_buffer),
+ base::Bind(&MojoVideoDecoder::OnDecodeDone,
+ base::Unretained(this), decode_id));
}
void MojoVideoDecoder::OnVideoFrameDecoded(
@@ -265,15 +263,24 @@ void MojoVideoDecoder::OnVideoFrameDecoded(
release_token.value()));
}
const int64_t timestamp = frame->timestamp().InMilliseconds();
- TRACE_EVENT_NESTABLE_ASYNC_END1("media", "MojoVideoDecoder::Decode",
- timestamp, "timestamp", timestamp);
+ const auto timestamp_it = timestamps_.Peek(timestamp);
+ if (timestamp_it != timestamps_.end()) {
+ const auto decode_start_time = timestamp_it->second;
+ const auto decode_end_time = base::TimeTicks::Now();
+
+ TRACE_EVENT_NESTABLE_ASYNC_BEGIN_WITH_TIMESTAMP0(
+ "media", "MojoVideoDecoder::Decode", timestamp, decode_start_time);
+ TRACE_EVENT_NESTABLE_ASYNC_END_WITH_TIMESTAMP1(
+ "media", "MojoVideoDecoder::Decode", timestamp, decode_end_time,
+ "timestamp", timestamp);
+ UMA_HISTOGRAM_TIMES("Media.MojoVideoDecoder.Decode",
+ decode_end_time - decode_start_time);
+ }
output_cb_.Run(frame);
}
-void MojoVideoDecoder::OnDecodeDone(uint64_t decode_id,
- int64_t timestamp,
- DecodeStatus status) {
+void MojoVideoDecoder::OnDecodeDone(uint64_t decode_id, DecodeStatus status) {
DVLOG(3) << __func__;
DCHECK(task_runner_->BelongsToCurrentThread());
@@ -283,10 +290,6 @@ void MojoVideoDecoder::OnDecodeDone(uint64_t decode_id,
Stop();
return;
}
- if (status != DecodeStatus::OK) {
- TRACE_EVENT_NESTABLE_ASYNC_END1("media", "MojoVideoDecoder::Decode",
- timestamp, "timestamp", timestamp);
- }
DecodeCB decode_cb = std::move(it->second);
pending_decodes_.erase(it);
diff --git a/chromium/media/mojo/clients/mojo_video_decoder.h b/chromium/media/mojo/clients/mojo_video_decoder.h
index d6030b0b32c..6d60605f678 100644
--- a/chromium/media/mojo/clients/mojo_video_decoder.h
+++ b/chromium/media/mojo/clients/mojo_video_decoder.h
@@ -5,13 +5,14 @@
#ifndef MEDIA_MOJO_CLIENTS_MOJO_VIDEO_DECODER_H_
#define MEDIA_MOJO_CLIENTS_MOJO_VIDEO_DECODER_H_
+#include "base/containers/mru_cache.h"
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
#include "media/base/video_decoder.h"
#include "media/base/video_frame.h"
#include "media/mojo/clients/mojo_media_log_service.h"
-#include "media/mojo/interfaces/video_decoder.mojom.h"
+#include "media/mojo/mojom/video_decoder.mojom.h"
#include "media/video/video_decode_accelerator.h"
#include "mojo/public/cpp/bindings/associated_binding.h"
#include "ui/gfx/color_space.h"
@@ -74,7 +75,7 @@ class MojoVideoDecoder final : public VideoDecoder,
void OnInitializeDone(bool status,
bool needs_bitstream_conversion,
int32_t max_decode_requests);
- void OnDecodeDone(uint64_t decode_id, int64_t timestamp, DecodeStatus status);
+ void OnDecodeDone(uint64_t decode_id, DecodeStatus status);
void OnResetDone();
void BindRemoteDecoder();
@@ -104,6 +105,10 @@ class MojoVideoDecoder final : public VideoDecoder,
std::map<uint64_t, DecodeCB> pending_decodes_;
base::OnceClosure reset_cb_;
+ // DecoderBuffer/VideoFrame timestamps for histogram/tracing purposes. Must be
+ // large enough to account for any amount of frame reordering.
+ base::MRUCache<int64_t, base::TimeTicks> timestamps_;
+
mojom::VideoDecoderPtr remote_decoder_;
std::unique_ptr<MojoDecoderBufferWriter> mojo_decoder_buffer_writer_;
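
The new timestamps_ member replaces the per-decode timestamp plumbing: decode start times are keyed by buffer timestamp in a bounded MRU map and looked up when the decoded frame comes back. A small sketch of the same pattern (the 128-entry bound mirrors the constructor above; the function names are illustrative):

    #include <cstdint>

    #include "base/containers/mru_cache.h"
    #include "base/time/time.h"

    // Bounded so entries for dropped or reordered frames eventually fall out.
    base::MRUCache<int64_t, base::TimeTicks> decode_start_times(128);

    void OnDecodeStart(int64_t timestamp_ms) {
      decode_start_times.Put(timestamp_ms, base::TimeTicks::Now());
    }

    base::TimeDelta ElapsedSinceDecodeStart(int64_t timestamp_ms) {
      // Peek() looks up without touching recency ordering.
      auto it = decode_start_times.Peek(timestamp_ms);
      if (it == decode_start_times.end())
        return base::TimeDelta();
      return base::TimeTicks::Now() - it->second;
    }
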
diff --git a/chromium/media/mojo/clients/mojo_video_encode_accelerator.cc b/chromium/media/mojo/clients/mojo_video_encode_accelerator.cc
index f1a069857c0..eb54e736389 100644
--- a/chromium/media/mojo/clients/mojo_video_encode_accelerator.cc
+++ b/chromium/media/mojo/clients/mojo_video_encode_accelerator.cc
@@ -137,21 +137,34 @@ void MojoVideoEncodeAccelerator::Encode(scoped_refptr<VideoFrame> frame,
DCHECK_EQ(PIXEL_FORMAT_I420, frame->format());
DCHECK_EQ(VideoFrame::STORAGE_SHMEM, frame->storage_type());
- DCHECK(frame->shared_memory_handle().IsValid());
+ DCHECK(frame->shm_region()->IsValid());
// Oftentimes |frame|'s underlying planes will be aligned and not tightly
// packed, so don't use VideoFrame::AllocationSize().
- const size_t allocation_size = frame->shared_memory_handle().GetSize();
-
- // WrapSharedMemoryHandle() takes ownership of the handle passed to it, but we
- // don't have ownership of frame->shared_memory_handle(), so Duplicate() it.
- //
- // TODO(https://crbug.com/793446): This should be changed to wrap the frame
- // buffer handle as read-only, but VideoFrame does not seem to guarantee that
- // its shared_memory_handle() is in fact read-only.
- mojo::ScopedSharedBufferHandle handle = mojo::WrapSharedMemoryHandle(
- frame->shared_memory_handle().Duplicate(), allocation_size,
- mojo::UnwrappedSharedMemoryHandleProtection::kReadWrite);
+ const size_t allocation_size = frame->shm_region()->GetSize();
+
+ // A MojoSharedBufferVideoFrame is created with an owned writable handle. As
+ // the handle in |frame| is not owned, a new region must be created and
+ // |frame| copied into it.
+ mojo::ScopedSharedBufferHandle dst_handle =
+ mojo::SharedBufferHandle::Create(allocation_size);
+ if (!dst_handle->is_valid()) {
+ DLOG(ERROR) << "Can't create new frame backing memory";
+ return;
+ }
+ mojo::ScopedSharedBufferMapping dst_mapping =
+ dst_handle->Map(allocation_size);
+ if (!dst_mapping) {
+ DLOG(ERROR) << "Can't map new frame backing memory";
+ return;
+ }
+ DCHECK(frame->shm_region());
+ base::WritableSharedMemoryMapping src_mapping = frame->shm_region()->Map();
+ if (!src_mapping.IsValid()) {
+ DLOG(ERROR) << "Can't map src frame backing memory";
+ return;
+ }
+ memcpy(dst_mapping.get(), src_mapping.memory(), allocation_size);
const size_t y_offset = frame->shared_memory_offset();
const size_t u_offset = y_offset + frame->data(VideoFrame::kUPlane) -
@@ -162,8 +175,8 @@ void MojoVideoEncodeAccelerator::Encode(scoped_refptr<VideoFrame> frame,
scoped_refptr<MojoSharedBufferVideoFrame> mojo_frame =
MojoSharedBufferVideoFrame::Create(
frame->format(), frame->coded_size(), frame->visible_rect(),
- frame->natural_size(), std::move(handle), allocation_size, y_offset,
- u_offset, v_offset, frame->stride(VideoFrame::kYPlane),
+ frame->natural_size(), std::move(dst_handle), allocation_size,
+ y_offset, u_offset, v_offset, frame->stride(VideoFrame::kYPlane),
frame->stride(VideoFrame::kUPlane),
frame->stride(VideoFrame::kVPlane), frame->timestamp());
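
Because MojoSharedBufferVideoFrame needs an owned, writable handle and the incoming frame's region is not owned by Encode(), the data is now copied into a freshly created Mojo buffer. A hedged standalone sketch of that create-map-copy step (the helper name and error handling are placeholders):

    #include <cstring>

    #include "base/memory/unsafe_shared_memory_region.h"
    #include "mojo/public/cpp/system/buffer.h"

    // Returns an owned, writable Mojo buffer holding a copy of |region|'s
    // bytes, or an invalid handle on failure.
    mojo::ScopedSharedBufferHandle CopyRegionToMojoBuffer(
        const base::UnsafeSharedMemoryRegion& region) {
      const size_t size = region.GetSize();
      mojo::ScopedSharedBufferHandle handle =
          mojo::SharedBufferHandle::Create(size);
      if (!handle->is_valid())
        return mojo::ScopedSharedBufferHandle();
      mojo::ScopedSharedBufferMapping dst = handle->Map(size);
      base::WritableSharedMemoryMapping src = region.Map();
      if (!dst || !src.IsValid())
        return mojo::ScopedSharedBufferHandle();
      memcpy(dst.get(), src.memory(), size);
      return handle;
    }
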
diff --git a/chromium/media/mojo/clients/mojo_video_encode_accelerator.h b/chromium/media/mojo/clients/mojo_video_encode_accelerator.h
index 6b620938ec1..5730c531a4a 100644
--- a/chromium/media/mojo/clients/mojo_video_encode_accelerator.h
+++ b/chromium/media/mojo/clients/mojo_video_encode_accelerator.h
@@ -11,7 +11,7 @@
#include "base/sequence_checker.h"
#include "gpu/config/gpu_info.h"
-#include "media/mojo/interfaces/video_encode_accelerator.mojom.h"
+#include "media/mojo/mojom/video_encode_accelerator.mojom.h"
#include "media/video/video_encode_accelerator.h"
namespace media {
diff --git a/chromium/media/mojo/clients/mojo_video_encode_accelerator_unittest.cc b/chromium/media/mojo/clients/mojo_video_encode_accelerator_unittest.cc
index aaf4d8e55d8..aed41d70424 100644
--- a/chromium/media/mojo/clients/mojo_video_encode_accelerator_unittest.cc
+++ b/chromium/media/mojo/clients/mojo_video_encode_accelerator_unittest.cc
@@ -5,10 +5,10 @@
#include <stddef.h>
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "gpu/config/gpu_info.h"
#include "media/mojo/clients/mojo_video_encode_accelerator.h"
-#include "media/mojo/interfaces/video_encode_accelerator.mojom.h"
+#include "media/mojo/mojom/video_encode_accelerator.mojom.h"
#include "media/video/video_encode_accelerator.h"
#include "mojo/public/cpp/bindings/strong_binding.h"
#include "testing/gmock/include/gmock/gmock.h"
@@ -172,7 +172,7 @@ class MojoVideoEncodeAcceleratorTest : public ::testing::Test {
}
private:
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
// This member holds on to the mock implementation of the "service" side.
mojo::StrongBindingPtr<mojom::VideoEncodeAccelerator> mojo_vea_binding_;
@@ -214,13 +214,18 @@ TEST_F(MojoVideoEncodeAcceleratorTest, EncodeOneFrame) {
}
{
- const scoped_refptr<VideoFrame> video_frame = VideoFrame::CreateFrame(
+ base::UnsafeSharedMemoryRegion shmem =
+ base::UnsafeSharedMemoryRegion::Create(
+ VideoFrame::AllocationSize(PIXEL_FORMAT_I420, kInputVisibleSize) *
+ 2);
+ ASSERT_TRUE(shmem.IsValid());
+ base::WritableSharedMemoryMapping mapping = shmem.Map();
+ ASSERT_TRUE(mapping.IsValid());
+ const scoped_refptr<VideoFrame> video_frame = VideoFrame::WrapExternalData(
PIXEL_FORMAT_I420, kInputVisibleSize, gfx::Rect(kInputVisibleSize),
- kInputVisibleSize, base::TimeDelta());
- base::SharedMemory shmem;
- shmem.CreateAnonymous(
- VideoFrame::AllocationSize(PIXEL_FORMAT_I420, kInputVisibleSize) * 2);
- video_frame->AddSharedMemoryHandle(shmem.handle());
+ kInputVisibleSize, mapping.GetMemoryAsSpan<uint8_t>().data(),
+ mapping.size(), base::TimeDelta());
+ video_frame->BackWithSharedMemory(&shmem);
const bool is_keyframe = true;
// The remote end of the mojo Pipe doesn't receive |video_frame| itself.
diff --git a/chromium/media/mojo/common/BUILD.gn b/chromium/media/mojo/common/BUILD.gn
index 969a9742cd2..e3ea90c87c1 100644
--- a/chromium/media/mojo/common/BUILD.gn
+++ b/chromium/media/mojo/common/BUILD.gn
@@ -19,17 +19,17 @@ jumbo_source_set("common") {
deps = [
":mojo_shared_buffer_video_frame",
"//base",
- "//gpu/ipc/common:struct_traits",
+ "//gpu/ipc/common:mojom_traits",
"//media",
- "//media/mojo/interfaces",
+ "//media/mojo/mojom",
"//mojo/public/cpp/bindings",
"//mojo/public/cpp/system",
"//ui/gfx/geometry",
- "//ui/gfx/geometry/mojo",
+ "//ui/gfx/geometry/mojom",
]
}
-# This is a separate target so that //media/mojo/interfaces can depend on it
+# This is a separate target so that //media/mojo/mojom can depend on it
# without creating a cycle.
source_set("mojo_shared_buffer_video_frame") {
sources = [
diff --git a/chromium/media/mojo/common/OWNERS b/chromium/media/mojo/common/OWNERS
index 2de9f87f038..0c3dcdd9eae 100644
--- a/chromium/media/mojo/common/OWNERS
+++ b/chromium/media/mojo/common/OWNERS
@@ -1,4 +1,4 @@
-per-file *_struct_traits*.*=set noparent
-per-file *_struct_traits*.*=file://ipc/SECURITY_OWNERS
+per-file *_mojom_traits*.*=set noparent
+per-file *_mojom_traits*.*=file://ipc/SECURITY_OWNERS
per-file *_type_converter*.*=set noparent
per-file *_type_converter*.*=file://ipc/SECURITY_OWNERS
diff --git a/chromium/media/mojo/common/media_type_converters.h b/chromium/media/mojo/common/media_type_converters.h
index 7d512755987..db71a2622c3 100644
--- a/chromium/media/mojo/common/media_type_converters.h
+++ b/chromium/media/mojo/common/media_type_converters.h
@@ -8,8 +8,8 @@
#include <memory>
#include "base/memory/ref_counted.h"
-#include "media/mojo/interfaces/content_decryption_module.mojom.h"
-#include "media/mojo/interfaces/media_types.mojom.h"
+#include "media/mojo/mojom/content_decryption_module.mojom.h"
+#include "media/mojo/mojom/media_types.mojom.h"
#include "mojo/public/cpp/bindings/type_converter.h"
namespace media {
diff --git a/chromium/media/mojo/common/mojo_data_pipe_read_write_unittest.cc b/chromium/media/mojo/common/mojo_data_pipe_read_write_unittest.cc
index d0abf642db0..0f5ba819adc 100644
--- a/chromium/media/mojo/common/mojo_data_pipe_read_write_unittest.cc
+++ b/chromium/media/mojo/common/mojo_data_pipe_read_write_unittest.cc
@@ -11,7 +11,7 @@
#include "base/macros.h"
#include "base/run_loop.h"
#include "base/test/mock_callback.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/base/decoder_buffer.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -67,7 +67,7 @@ class MojoDataPipeReadWrite {
} // namespace
TEST(MojoDataPipeReadWriteTest, Normal) {
- base::test::ScopedTaskEnvironment scoped_task_environment;
+ base::test::TaskEnvironment task_environment;
std::string kData = "hello, world";
MojoDataPipeReadWrite pipe_read_write_;
pipe_read_write_.WriteAndRead(reinterpret_cast<const uint8_t*>(kData.data()),
@@ -75,7 +75,7 @@ TEST(MojoDataPipeReadWriteTest, Normal) {
}
TEST(MojoDataPipeReadWriteTest, SequentialReading) {
- base::test::ScopedTaskEnvironment scoped_task_environment;
+ base::test::TaskEnvironment task_environment;
std::string kData1 = "hello, world";
std::string kData2 = "Bye!";
MojoDataPipeReadWrite pipe_read_write_;
@@ -86,7 +86,7 @@ TEST(MojoDataPipeReadWriteTest, SequentialReading) {
}
TEST(MojoDataPipeReadWriteTest, LongerThanCapacity) {
- base::test::ScopedTaskEnvironment scoped_task_environment;
+ base::test::TaskEnvironment task_environment;
std::string kData = "hello, world, hello, world, hello, world";
MojoDataPipeReadWrite pipe_read_write_(10);
pipe_read_write_.WriteAndRead(reinterpret_cast<const uint8_t*>(kData.data()),
@@ -94,7 +94,7 @@ TEST(MojoDataPipeReadWriteTest, LongerThanCapacity) {
}
TEST(MojoDataPipeReadWriteTest, DiscardDataInPipe) {
- base::test::ScopedTaskEnvironment scoped_task_environment;
+ base::test::TaskEnvironment task_environment;
std::string kData1 = "to be discarded";
std::string kData2 = "hello, world, hello, world, hello, world";
MojoDataPipeReadWrite pipe_read_write_(10);
diff --git a/chromium/media/mojo/common/mojo_decoder_buffer_converter.h b/chromium/media/mojo/common/mojo_decoder_buffer_converter.h
index 2fb6bd361df..9f780d84a92 100644
--- a/chromium/media/mojo/common/mojo_decoder_buffer_converter.h
+++ b/chromium/media/mojo/common/mojo_decoder_buffer_converter.h
@@ -11,7 +11,7 @@
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "media/base/demuxer_stream.h"
-#include "media/mojo/interfaces/media_types.mojom.h"
+#include "media/mojo/mojom/media_types.mojom.h"
#include "mojo/public/cpp/system/data_pipe.h"
#include "mojo/public/cpp/system/simple_watcher.h"
diff --git a/chromium/media/mojo/common/mojo_decoder_buffer_converter_unittest.cc b/chromium/media/mojo/common/mojo_decoder_buffer_converter_unittest.cc
index db659da2c8d..b69da37e5a3 100644
--- a/chromium/media/mojo/common/mojo_decoder_buffer_converter_unittest.cc
+++ b/chromium/media/mojo/common/mojo_decoder_buffer_converter_unittest.cc
@@ -11,7 +11,7 @@
#include "base/run_loop.h"
#include "base/stl_util.h"
#include "base/test/mock_callback.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/base/decoder_buffer.h"
#include "media/base/decrypt_config.h"
#include "testing/gmock/include/gmock/gmock.h"
@@ -59,7 +59,7 @@ class MojoDecoderBufferConverter {
} // namespace
TEST(MojoDecoderBufferConverterTest, ConvertDecoderBuffer_Normal) {
- base::test::ScopedTaskEnvironment scoped_task_environment;
+ base::test::TaskEnvironment task_environment;
const uint8_t kData[] = "hello, world";
const uint8_t kSideData[] = "sideshow bob";
const size_t kDataSize = base::size(kData);
@@ -79,7 +79,7 @@ TEST(MojoDecoderBufferConverterTest, ConvertDecoderBuffer_Normal) {
}
TEST(MojoDecoderBufferConverterTest, ConvertDecoderBuffer_EOS) {
- base::test::ScopedTaskEnvironment scoped_task_environment;
+ base::test::TaskEnvironment task_environment;
scoped_refptr<DecoderBuffer> buffer(DecoderBuffer::CreateEOSBuffer());
MojoDecoderBufferConverter converter;
@@ -89,7 +89,7 @@ TEST(MojoDecoderBufferConverterTest, ConvertDecoderBuffer_EOS) {
// TODO(xhwang): Investigate whether we can get rid of zero-byte-buffer.
// See http://crbug.com/663438
TEST(MojoDecoderBufferConverterTest, ConvertDecoderBuffer_ZeroByteBuffer) {
- base::test::ScopedTaskEnvironment scoped_task_environment;
+ base::test::TaskEnvironment task_environment;
scoped_refptr<DecoderBuffer> buffer(new DecoderBuffer(0));
MojoDecoderBufferConverter converter;
@@ -97,7 +97,7 @@ TEST(MojoDecoderBufferConverterTest, ConvertDecoderBuffer_ZeroByteBuffer) {
}
TEST(MojoDecoderBufferConverterTest, ConvertDecoderBuffer_KeyFrame) {
- base::test::ScopedTaskEnvironment scoped_task_environment;
+ base::test::TaskEnvironment task_environment;
const uint8_t kData[] = "hello, world";
const size_t kDataSize = base::size(kData);
@@ -111,7 +111,7 @@ TEST(MojoDecoderBufferConverterTest, ConvertDecoderBuffer_KeyFrame) {
}
TEST(MojoDecoderBufferConverterTest, ConvertDecoderBuffer_EncryptedBuffer) {
- base::test::ScopedTaskEnvironment scoped_task_environment;
+ base::test::TaskEnvironment task_environment;
const uint8_t kData[] = "hello, world";
const size_t kDataSize = base::size(kData);
const char kKeyId[] = "00112233445566778899aabbccddeeff";
@@ -150,7 +150,7 @@ TEST(MojoDecoderBufferConverterTest, ConvertDecoderBuffer_EncryptedBuffer) {
// This test verifies that a DecoderBuffer larger than data-pipe capacity
// can be transmitted properly.
TEST(MojoDecoderBufferConverterTest, Chunked) {
- base::test::ScopedTaskEnvironment scoped_task_environment;
+ base::test::TaskEnvironment task_environment;
const uint8_t kData[] = "Lorem ipsum dolor sit amet, consectetur cras amet";
const size_t kDataSize = base::size(kData);
scoped_refptr<DecoderBuffer> buffer =
@@ -163,7 +163,7 @@ TEST(MojoDecoderBufferConverterTest, Chunked) {
// This test verifies that MojoDecoderBufferReader::ReadCB is called with a
// NULL DecoderBuffer if data pipe is closed during transmission.
TEST(MojoDecoderBufferConverterTest, WriterSidePipeError) {
- base::test::ScopedTaskEnvironment scoped_task_environment;
+ base::test::TaskEnvironment task_environment;
const uint8_t kData[] = "Lorem ipsum dolor sit amet, consectetur cras amet";
const size_t kDataSize = base::size(kData);
scoped_refptr<DecoderBuffer> media_buffer =
@@ -191,7 +191,7 @@ TEST(MojoDecoderBufferConverterTest, WriterSidePipeError) {
// This test verifies that MojoDecoderBuffer supports concurrent writes and
// reads.
TEST(MojoDecoderBufferConverterTest, ConcurrentDecoderBuffers) {
- base::test::ScopedTaskEnvironment scoped_task_environment;
+ base::test::TaskEnvironment task_environment;
base::RunLoop run_loop;
// Prevent all of the buffers from fitting at once to exercise the chunking
@@ -237,7 +237,7 @@ TEST(MojoDecoderBufferConverterTest, ConcurrentDecoderBuffers) {
}
TEST(MojoDecoderBufferConverterTest, FlushWithoutRead) {
- base::test::ScopedTaskEnvironment scoped_task_environment;
+ base::test::TaskEnvironment task_environment;
base::RunLoop run_loop;
base::MockCallback<base::OnceClosure> mock_flush_cb;
@@ -250,7 +250,7 @@ TEST(MojoDecoderBufferConverterTest, FlushWithoutRead) {
}
TEST(MojoDecoderBufferConverterTest, FlushAfterRead) {
- base::test::ScopedTaskEnvironment scoped_task_environment;
+ base::test::TaskEnvironment task_environment;
base::RunLoop run_loop;
const uint8_t kData[] = "Lorem ipsum dolor sit amet, consectetur cras amet";
@@ -271,7 +271,7 @@ TEST(MojoDecoderBufferConverterTest, FlushAfterRead) {
}
TEST(MojoDecoderBufferConverterTest, FlushBeforeRead) {
- base::test::ScopedTaskEnvironment scoped_task_environment;
+ base::test::TaskEnvironment task_environment;
base::RunLoop run_loop;
const uint8_t kData[] = "Lorem ipsum dolor sit amet, consectetur cras amet";
@@ -299,7 +299,7 @@ TEST(MojoDecoderBufferConverterTest, FlushBeforeRead) {
}
TEST(MojoDecoderBufferConverterTest, FlushBeforeChunkedRead) {
- base::test::ScopedTaskEnvironment scoped_task_environment;
+ base::test::TaskEnvironment task_environment;
base::RunLoop run_loop;
const uint8_t kData[] = "Lorem ipsum dolor sit amet, consectetur cras amet";
@@ -328,7 +328,7 @@ TEST(MojoDecoderBufferConverterTest, FlushBeforeChunkedRead) {
}
TEST(MojoDecoderBufferConverterTest, FlushDuringChunkedRead) {
- base::test::ScopedTaskEnvironment scoped_task_environment;
+ base::test::TaskEnvironment task_environment;
base::RunLoop run_loop;
const uint8_t kData[] = "Lorem ipsum dolor sit amet, consectetur cras amet";
@@ -357,7 +357,7 @@ TEST(MojoDecoderBufferConverterTest, FlushDuringChunkedRead) {
}
TEST(MojoDecoderBufferConverterTest, FlushDuringConcurrentReads) {
- base::test::ScopedTaskEnvironment scoped_task_environment;
+ base::test::TaskEnvironment task_environment;
base::RunLoop run_loop;
// Prevent all of the buffers from fitting at once to exercise the chunking
diff --git a/chromium/media/mojo/common/mojo_shared_buffer_video_frame.h b/chromium/media/mojo/common/mojo_shared_buffer_video_frame.h
index fe852530aea..46aa3f94b6a 100644
--- a/chromium/media/mojo/common/mojo_shared_buffer_video_frame.h
+++ b/chromium/media/mojo/common/mojo_shared_buffer_video_frame.h
@@ -48,7 +48,7 @@ class MojoSharedBufferVideoFrame : public VideoFrame {
// Creates a MojoSharedBufferVideoFrame that uses the memory in |handle|.
// This will take ownership of |handle|, so the caller can no longer use it.
// |mojo_shared_buffer_done_cb|, if not null, is called on destruction,
- // and is passed ownership of |handle|.
+ // and is passed ownership of |handle|. |handle| must be writable.
static scoped_refptr<MojoSharedBufferVideoFrame> Create(
VideoPixelFormat format,
const gfx::Size& coded_size,
diff --git a/chromium/media/mojo/interfaces/cdm_key_information.typemap b/chromium/media/mojo/interfaces/cdm_key_information.typemap
deleted file mode 100644
index 89c293033a6..00000000000
--- a/chromium/media/mojo/interfaces/cdm_key_information.typemap
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright 2018 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-mojom = "//media/mojo/interfaces/content_decryption_module.mojom"
-
-public_headers = [ "//media/base/cdm_key_information.h" ]
-
-traits_headers =
- [ "//media/mojo/interfaces/cdm_key_information_mojom_traits.h" ]
-
-sources = [
- "//media/mojo/interfaces/cdm_key_information_mojom_traits.cc",
-]
-
-public_deps = [
- "//media",
-]
-
-type_mappings = [
- "media.mojom.CdmKeyInformation=std::unique_ptr<media::CdmKeyInformation>[move_only]",
- "media.mojom.CdmKeyStatus=media::CdmKeyInformation::KeyStatus",
-]
diff --git a/chromium/media/mojo/interfaces/hdr_metadata.typemap b/chromium/media/mojo/interfaces/hdr_metadata.typemap
deleted file mode 100644
index 6557c4b7a39..00000000000
--- a/chromium/media/mojo/interfaces/hdr_metadata.typemap
+++ /dev/null
@@ -1,11 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-mojom = "//media/mojo/interfaces/media_types.mojom"
-public_headers = [ "//media/base/hdr_metadata.h" ]
-traits_headers = [ "//media/mojo/interfaces/hdr_metadata_struct_traits.h" ]
-type_mappings = [
- "media.mojom.MasteringMetadata=media::MasteringMetadata",
- "media.mojom.HDRMetadata=media::HDRMetadata",
-]
diff --git a/chromium/media/mojo/interfaces/media_types.typemap b/chromium/media/mojo/interfaces/media_types.typemap
deleted file mode 100644
index 48ff4b0c984..00000000000
--- a/chromium/media/mojo/interfaces/media_types.typemap
+++ /dev/null
@@ -1,69 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-mojom = "//media/mojo/interfaces/media_types.mojom"
-
-public_headers = [
- "//media/base/audio_codecs.h",
- "//media/base/buffering_state.h",
- "//media/base/channel_layout.h",
- "//media/base/container_names.h",
- "//media/base/decode_status.h",
- "//media/base/decrypt_config.h",
- "//media/base/encryption_pattern.h",
- "//media/base/encryption_scheme.h",
- "//media/base/hdr_metadata.h",
- "//media/base/media_log_event.h",
- "//media/base/media_status.h",
- "//media/base/output_device_info.h",
- "//media/base/pipeline_status.h",
- "//media/base/sample_format.h",
- "//media/base/subsample_entry.h",
- "//media/base/video_codecs.h",
- "//media/base/video_transformation.h",
- "//media/base/video_types.h",
- "//media/base/waiting.h",
- "//media/base/watch_time_keys.h",
-]
-
-traits_headers = [
- "//media/base/ipc/media_param_traits_macros.h",
- "//media/mojo/interfaces/video_transformation_mojom_traits.h",
-]
-
-public_deps = [
- "//media",
- "//media/base/ipc",
-]
-
-sources = [
- "//media/mojo/interfaces/video_transformation_mojom_traits.cc",
- "//media/mojo/interfaces/video_transformation_mojom_traits.h",
-]
-
-type_mappings = [
- "media.mojom.AudioCodec=media::AudioCodec",
- "media.mojom.BufferingState=media::BufferingState",
- "media.mojom.BufferingStateChangeReason=media::BufferingStateChangeReason",
- "media.mojom.ChannelLayout=media::ChannelLayout",
- "media.mojom.ColorSpace=media::ColorSpace",
- "media.mojom.DecodeStatus=media::DecodeStatus",
- "media.mojom.EncryptionMode=media::EncryptionMode",
- "media.mojom.EncryptionScheme.CipherMode=media::EncryptionScheme::CipherMode",
- "media.mojom.MediaContainerName=media::container_names::MediaContainerName",
- "media.mojom.MediaLogEvent=media::MediaLogEvent",
- "media.mojom.OutputDeviceStatus=media::OutputDeviceStatus",
- "media.mojom.PipelineStatus=media::PipelineStatus",
- "media.mojom.SampleFormat=media::SampleFormat",
- "media.mojom.SubsampleEntry=media::SubsampleEntry",
- "media.mojom.VideoCodec=media::VideoCodec",
- "media.mojom.VideoCodecProfile=media::VideoCodecProfile",
- "media.mojom.VideoPixelFormat=media::VideoPixelFormat",
- "media.mojom.VideoRotation=media::VideoRotation",
- "media.mojom.VideoTransformation=media::VideoTransformation",
- "media.mojom.WaitingReason=media::WaitingReason",
- "media.mojom.WatchTimeKey=media::WatchTimeKey",
- "media.mojom.EncryptionPattern=media::EncryptionPattern",
- "media.mojom.MediaStatusState=media::MediaStatus::State",
-]
diff --git a/chromium/media/mojo/interfaces/pipeline_status.typemap b/chromium/media/mojo/interfaces/pipeline_status.typemap
deleted file mode 100644
index cb730c3be0f..00000000000
--- a/chromium/media/mojo/interfaces/pipeline_status.typemap
+++ /dev/null
@@ -1,11 +0,0 @@
-# Copyright 2019 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-mojom = "//media/mojo/interfaces/media_types.mojom"
-public_headers = [ "//media/base/pipeline_status.h" ]
-traits_headers = [ "//media/mojo/interfaces/pipeline_status_struct_traits.h" ]
-type_mappings = [
- "media.mojom.PipelineStatistics=media::PipelineStatistics",
- "media.mojom.PipelineDecoderInfo=media::PipelineDecoderInfo",
-]
diff --git a/chromium/media/mojo/interfaces/typemaps.gni b/chromium/media/mojo/interfaces/typemaps.gni
deleted file mode 100644
index d0c6945bafe..00000000000
--- a/chromium/media/mojo/interfaces/typemaps.gni
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright 2016 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-typemaps = [
- "//media/mojo/interfaces/audio_decoder_config.typemap",
- "//media/mojo/interfaces/audio_parameters.typemap",
- "//media/mojo/interfaces/cdm_key_information.typemap",
- "//media/mojo/interfaces/cdm_proxy.typemap",
- "//media/mojo/interfaces/content_decryption_module.typemap",
- "//media/mojo/interfaces/decryptor.typemap",
- "//media/mojo/interfaces/demuxer_stream.typemap",
- "//media/mojo/interfaces/encryption_scheme.typemap",
- "//media/mojo/interfaces/hdr_metadata.typemap",
- "//media/mojo/interfaces/media_drm_storage.typemap",
- "//media/mojo/interfaces/media_types.typemap",
- "//media/mojo/interfaces/pipeline_status.typemap",
- "//media/mojo/interfaces/video_color_space.typemap",
- "//media/mojo/interfaces/video_decoder.typemap",
- "//media/mojo/interfaces/video_decoder_config.typemap",
- "//media/mojo/interfaces/video_encode_accelerator.typemap",
- "//media/mojo/interfaces/video_frame.typemap",
-]
diff --git a/chromium/media/mojo/interfaces/video_color_space.typemap b/chromium/media/mojo/interfaces/video_color_space.typemap
deleted file mode 100644
index 70be1d9a279..00000000000
--- a/chromium/media/mojo/interfaces/video_color_space.typemap
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-mojom = "//media/mojo/interfaces/media_types.mojom"
-public_headers = [
- "//media/base/video_color_space.h",
- "//ui/gfx/color_space.h",
-]
-traits_headers = [ "//media/mojo/interfaces/video_color_space_struct_traits.h" ]
-type_mappings = [
- "media.mojom.VideoColorSpace.PrimaryID=media::VideoColorSpace::PrimaryID",
- "media.mojom.VideoColorSpace.TransferID=media::VideoColorSpace::TransferID",
- "media.mojom.VideoColorSpace.MatrixID=media::VideoColorSpace::MatrixID",
- "media.mojom.VideoColorSpace.RangeID=gfx::ColorSpace::RangeID",
- "media.mojom.VideoColorSpace=media::VideoColorSpace",
-]
diff --git a/chromium/media/mojo/interfaces/video_decoder.typemap b/chromium/media/mojo/interfaces/video_decoder.typemap
deleted file mode 100644
index c409de7e767..00000000000
--- a/chromium/media/mojo/interfaces/video_decoder.typemap
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-mojom = "//media/mojo/interfaces/video_decoder.mojom"
-
-public_headers = [
- "//media/base/overlay_info.h",
- "//media/video/supported_video_decoder_config.h",
-]
-
-traits_headers = [
- "//media/base/ipc/media_param_traits_macros.h",
- "//media/mojo/interfaces/supported_video_decoder_config_struct_traits.h",
-]
-
-sources = [
- "supported_video_decoder_config_struct_traits.cc",
- "supported_video_decoder_config_struct_traits.h",
-]
-
-deps = [
- "//media/gpu/ipc/common",
-]
-
-type_mappings = [
- "media.mojom.OverlayInfo=media::OverlayInfo",
- "media.mojom.SupportedVideoDecoderConfig=media::SupportedVideoDecoderConfig",
- "media.mojom.VideoDecoderImplementation=media::VideoDecoderImplementation",
-]
diff --git a/chromium/media/mojo/interfaces/video_encode_accelerator.typemap b/chromium/media/mojo/interfaces/video_encode_accelerator.typemap
deleted file mode 100644
index 24a7712fcf1..00000000000
--- a/chromium/media/mojo/interfaces/video_encode_accelerator.typemap
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2017 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-mojom = "//media/mojo/interfaces/video_encode_accelerator.mojom"
-
-public_headers = [ "//media/video/video_encode_accelerator.h" ]
-
-traits_headers =
- [ "//media/mojo/interfaces/video_encode_accelerator_mojom_traits.h" ]
-
-sources = [
- "//media/mojo/interfaces/video_encode_accelerator_mojom_traits.cc",
- "//media/mojo/interfaces/video_encode_accelerator_mojom_traits.h",
-]
-
-public_deps = [
- "//base",
- "//media",
-]
-
-deps = [
- "//base",
- "//media",
- "//media/base/ipc",
- "//ui/gfx/geometry/mojo:struct_traits",
-]
-
-type_mappings = [
- "media.mojom.BitstreamBufferMetadata=media::BitstreamBufferMetadata",
- "media.mojom.VideoBitrateAllocation=media::VideoBitrateAllocation",
- "media.mojom.VideoEncodeAccelerator.Error=media::VideoEncodeAccelerator::Error",
- "media.mojom.VideoEncodeAcceleratorConfig=media::VideoEncodeAccelerator::Config",
- "media.mojom.Vp8Metadata=media::Vp8Metadata",
-]
diff --git a/chromium/media/mojo/interfaces/BUILD.gn b/chromium/media/mojo/mojom/BUILD.gn
index 7e63d13eea8..b8dfc96dbb9 100644
--- a/chromium/media/mojo/interfaces/BUILD.gn
+++ b/chromium/media/mojo/mojom/BUILD.gn
@@ -5,7 +5,7 @@
import("//media/media_options.gni")
import("//mojo/public/tools/bindings/mojom.gni")
-mojom("interfaces") {
+mojom("mojom") {
# TODO(crbug.com/676224): Conditionally add source files in this list when we
# support EnabledIf attribute in mojom files.
sources = [
@@ -53,10 +53,11 @@ mojom("interfaces") {
public_deps = [
"//gpu/ipc/common:interfaces",
+ "//media/learning/mojo/public/mojom",
"//mojo/public/mojom/base",
"//services/service_manager/public/mojom",
- "//ui/gfx/geometry/mojo",
- "//ui/gfx/mojo",
+ "//ui/gfx/geometry/mojom",
+ "//ui/gfx/mojom",
"//url/mojom:url_mojom_gurl",
"//url/mojom:url_mojom_origin",
]
@@ -114,7 +115,7 @@ mojom("test_interfaces") {
"traits_test_service.mojom",
]
public_deps = [
- ":interfaces",
+ ":mojom",
]
}
@@ -122,11 +123,11 @@ source_set("unit_tests") {
testonly = true
sources = [
- "audio_decoder_config_struct_traits_unittest.cc",
+ "audio_decoder_config_mojom_traits_unittest.cc",
"cdm_key_information_mojom_traits_unittest.cc",
- "encryption_scheme_struct_traits_unittest.cc",
- "video_decoder_config_struct_traits_unittest.cc",
- "video_frame_struct_traits_unittest.cc",
+ "encryption_scheme_mojom_traits_unittest.cc",
+ "video_decoder_config_mojom_traits_unittest.cc",
+ "video_frame_mojom_traits_unittest.cc",
]
deps = [
diff --git a/chromium/media/mojo/mojom/OWNERS b/chromium/media/mojo/mojom/OWNERS
new file mode 100644
index 00000000000..ae29a36aac8
--- /dev/null
+++ b/chromium/media/mojo/mojom/OWNERS
@@ -0,0 +1,6 @@
+per-file *.mojom=set noparent
+per-file *.mojom=file://ipc/SECURITY_OWNERS
+per-file *_mojom_traits*.*=set noparent
+per-file *_mojom_traits*.*=file://ipc/SECURITY_OWNERS
+per-file *.typemap=set noparent
+per-file *.typemap=file://ipc/SECURITY_OWNERS
diff --git a/chromium/media/mojo/interfaces/android_overlay.mojom b/chromium/media/mojo/mojom/android_overlay.mojom
index eab18c900a3..7e93634fac5 100644
--- a/chromium/media/mojo/interfaces/android_overlay.mojom
+++ b/chromium/media/mojo/mojom/android_overlay.mojom
@@ -4,9 +4,9 @@
module media.mojom;
-import "media/mojo/interfaces/media_types.mojom";
+import "media/mojo/mojom/media_types.mojom";
import "mojo/public/mojom/base/unguessable_token.mojom";
-import "ui/gfx/geometry/mojo/geometry.mojom";
+import "ui/gfx/geometry/mojom/geometry.mojom";
// Provides mojo clients with AndroidOverlay instances. This will live in the
// browser, with clients in the GPU process or renderer. Note that if you're
diff --git a/chromium/media/mojo/interfaces/audio_data_pipe.mojom b/chromium/media/mojo/mojom/audio_data_pipe.mojom
index 188b6885c8a..188b6885c8a 100644
--- a/chromium/media/mojo/interfaces/audio_data_pipe.mojom
+++ b/chromium/media/mojo/mojom/audio_data_pipe.mojom
diff --git a/chromium/media/mojo/interfaces/audio_decoder.mojom b/chromium/media/mojo/mojom/audio_decoder.mojom
index 683624023c6..204bf4e1dfb 100644
--- a/chromium/media/mojo/interfaces/audio_decoder.mojom
+++ b/chromium/media/mojo/mojom/audio_decoder.mojom
@@ -4,7 +4,7 @@
module media.mojom;
-import "media/mojo/interfaces/media_types.mojom";
+import "media/mojo/mojom/media_types.mojom";
interface AudioDecoder {
// Initialize the decoder. This must be called before any other method.
diff --git a/chromium/media/mojo/interfaces/audio_decoder_config.typemap b/chromium/media/mojo/mojom/audio_decoder_config.typemap
index d268ddc88bc..cb34d42083f 100644
--- a/chromium/media/mojo/interfaces/audio_decoder_config.typemap
+++ b/chromium/media/mojo/mojom/audio_decoder_config.typemap
@@ -2,15 +2,14 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-mojom = "//media/mojo/interfaces/media_types.mojom"
+mojom = "//media/mojo/mojom/media_types.mojom"
public_headers = [ "//media/base/audio_decoder_config.h" ]
-traits_headers =
- [ "//media/mojo/interfaces/audio_decoder_config_struct_traits.h" ]
+traits_headers = [ "//media/mojo/mojom/audio_decoder_config_mojom_traits.h" ]
sources = [
- "//media/mojo/interfaces/audio_decoder_config_struct_traits.cc",
+ "//media/mojo/mojom/audio_decoder_config_mojom_traits.cc",
]
public_deps = [
diff --git a/chromium/media/mojo/interfaces/audio_decoder_config_struct_traits.cc b/chromium/media/mojo/mojom/audio_decoder_config_mojom_traits.cc
index 844597dfae9..0aaae3d065d 100644
--- a/chromium/media/mojo/interfaces/audio_decoder_config_struct_traits.cc
+++ b/chromium/media/mojo/mojom/audio_decoder_config_mojom_traits.cc
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/mojo/interfaces/audio_decoder_config_struct_traits.h"
+#include "media/mojo/mojom/audio_decoder_config_mojom_traits.h"
#include "mojo/public/cpp/base/time_mojom_traits.h"
diff --git a/chromium/media/mojo/interfaces/audio_decoder_config_struct_traits.h b/chromium/media/mojo/mojom/audio_decoder_config_mojom_traits.h
index e265ba0fd24..a786d2466a8 100644
--- a/chromium/media/mojo/interfaces/audio_decoder_config_struct_traits.h
+++ b/chromium/media/mojo/mojom/audio_decoder_config_mojom_traits.h
@@ -2,13 +2,13 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef MEDIA_MOJO_INTERFACES_AUDIO_DECODER_CONFIG_STRUCT_TRAITS_H_
-#define MEDIA_MOJO_INTERFACES_AUDIO_DECODER_CONFIG_STRUCT_TRAITS_H_
+#ifndef MEDIA_MOJO_MOJOM_AUDIO_DECODER_CONFIG_MOJOM_TRAITS_H_
+#define MEDIA_MOJO_MOJOM_AUDIO_DECODER_CONFIG_MOJOM_TRAITS_H_
#include "media/base/audio_decoder_config.h"
#include "media/base/ipc/media_param_traits.h"
-#include "media/mojo/interfaces/encryption_scheme_struct_traits.h"
-#include "media/mojo/interfaces/media_types.mojom.h"
+#include "media/mojo/mojom/encryption_scheme_mojom_traits.h"
+#include "media/mojo/mojom/media_types.mojom.h"
namespace mojo {
@@ -57,4 +57,4 @@ struct StructTraits<media::mojom::AudioDecoderConfigDataView,
} // namespace mojo
-#endif // MEDIA_MOJO_INTERFACES_AUDIO_DECODER_CONFIG_STRUCT_TRAITS_H_
+#endif // MEDIA_MOJO_MOJOM_AUDIO_DECODER_CONFIG_MOJOM_TRAITS_H_
diff --git a/chromium/media/mojo/interfaces/audio_decoder_config_struct_traits_unittest.cc b/chromium/media/mojo/mojom/audio_decoder_config_mojom_traits_unittest.cc
index 6bf0f8e1d4f..f04e0cb3c35 100644
--- a/chromium/media/mojo/interfaces/audio_decoder_config_struct_traits_unittest.cc
+++ b/chromium/media/mojo/mojom/audio_decoder_config_mojom_traits_unittest.cc
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/mojo/interfaces/audio_decoder_config_struct_traits.h"
+#include "media/mojo/mojom/audio_decoder_config_mojom_traits.h"
#include <utility>
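
These *_mojom_traits_unittest.cc files exercise the StructTraits declared in the headers renamed above. A minimal sketch of the round-trip pattern such tests follow, assuming the pointer-based mojo::test::SerializeAndDeserialize helper and a default-constructed config (not the file's actual test cases):

#include "media/base/audio_decoder_config.h"
#include "media/mojo/mojom/audio_decoder_config_mojom_traits.h"
#include "mojo/public/cpp/test_support/test_utils.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace media {

TEST(AudioDecoderConfigMojomTraitsTest, RoundTripDefaultConfig) {
  AudioDecoderConfig input;
  AudioDecoderConfig output;
  // Serialize through the mojom wire format and back, then compare.
  ASSERT_TRUE(mojo::test::SerializeAndDeserialize<mojom::AudioDecoderConfig>(
      &input, &output));
  EXPECT_TRUE(input.Matches(output));
}

}  // namespace media
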
diff --git a/chromium/media/mojo/interfaces/audio_input_stream.mojom b/chromium/media/mojo/mojom/audio_input_stream.mojom
index fd5df22c263..f77ac9dde6f 100644
--- a/chromium/media/mojo/interfaces/audio_input_stream.mojom
+++ b/chromium/media/mojo/mojom/audio_input_stream.mojom
@@ -4,8 +4,8 @@
module media.mojom;
-import "media/mojo/interfaces/audio_parameters.mojom";
-import "media/mojo/interfaces/media_types.mojom";
+import "media/mojo/mojom/audio_parameters.mojom";
+import "media/mojo/mojom/media_types.mojom";
// An interface for controlling an audio input stream.
// On error, the message pipe is closed.
diff --git a/chromium/media/mojo/interfaces/audio_logging.mojom b/chromium/media/mojo/mojom/audio_logging.mojom
index 777fd31852d..cb1983a38ea 100644
--- a/chromium/media/mojo/interfaces/audio_logging.mojom
+++ b/chromium/media/mojo/mojom/audio_logging.mojom
@@ -4,7 +4,7 @@
module media.mojom;
-import "media/mojo/interfaces/audio_parameters.mojom";
+import "media/mojo/mojom/audio_parameters.mojom";
// This interface is implemented by the browser process to log state information
// about an active audio component.
diff --git a/chromium/media/mojo/interfaces/audio_output_stream.mojom b/chromium/media/mojo/mojom/audio_output_stream.mojom
index 1253a928d8e..24f3934c9fc 100644
--- a/chromium/media/mojo/interfaces/audio_output_stream.mojom
+++ b/chromium/media/mojo/mojom/audio_output_stream.mojom
@@ -5,9 +5,9 @@
module media.mojom;
import "mojo/public/mojom/base/unguessable_token.mojom";
-import "media/mojo/interfaces/audio_data_pipe.mojom";
-import "media/mojo/interfaces/audio_parameters.mojom";
-import "media/mojo/interfaces/media_types.mojom";
+import "media/mojo/mojom/audio_data_pipe.mojom";
+import "media/mojo/mojom/audio_parameters.mojom";
+import "media/mojo/mojom/media_types.mojom";
// An interface for controlling an audio output stream.
// To close the stream, just close the message pipe.
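
Per the comment above, the stream is torn down by closing the message pipe; a minimal sketch, assuming the client holds the generated AudioOutputStreamPtr endpoint:

#include "media/mojo/mojom/audio_output_stream.mojom.h"

// Dropping the last client endpoint closes the pipe, which is the documented
// way to close the stream.
void CloseStream(media::mojom::AudioOutputStreamPtr stream) {
  stream.reset();  // Equivalent to letting |stream| go out of scope.
}
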
diff --git a/chromium/media/mojo/interfaces/audio_parameters.mojom b/chromium/media/mojo/mojom/audio_parameters.mojom
index 077bf3e9dbf..077bf3e9dbf 100644
--- a/chromium/media/mojo/interfaces/audio_parameters.mojom
+++ b/chromium/media/mojo/mojom/audio_parameters.mojom
diff --git a/chromium/media/mojo/interfaces/audio_parameters.typemap b/chromium/media/mojo/mojom/audio_parameters.typemap
index c87c524e99c..1d9ef9a007d 100644
--- a/chromium/media/mojo/interfaces/audio_parameters.typemap
+++ b/chromium/media/mojo/mojom/audio_parameters.typemap
@@ -2,11 +2,11 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-mojom = "//media/mojo/interfaces/audio_parameters.mojom"
+mojom = "//media/mojo/mojom/audio_parameters.mojom"
public_headers = [ "//media/base/audio_parameters.h" ]
traits_headers = [ "//media/base/ipc/media_param_traits.h" ]
deps = [
"//media",
- "//media/base/ipc"
+ "//media/base/ipc",
]
-type_mappings = [ "media.mojom.AudioParameters=media::AudioParameters" ]
+type_mappings = [ "media.mojom.AudioParameters=::media::AudioParameters" ]
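
The type_mappings throughout this change switch from media::Foo to the fully qualified ::media::Foo. A small, purely illustrative C++ sketch of the name-lookup ambiguity the leading :: sidesteps; both namespaces below are hypothetical, and the rationale is inferred rather than stated in the commit:

// The nested "media" namespace here stands in for whatever namespace the
// generated serialization code happens to be emitted into.
namespace outer {
namespace media {
struct AudioParameters {};  // shadows the top-level ::media::AudioParameters
}  // namespace media

// Inside outer, unqualified media::AudioParameters finds
// outer::media::AudioParameters, whereas ::media::AudioParameters always
// names the top-level type the typemap intends.
}  // namespace outer
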
diff --git a/chromium/media/mojo/interfaces/cast_application_media_info_manager.mojom b/chromium/media/mojo/mojom/cast_application_media_info_manager.mojom
index 77295ddc069..77295ddc069 100644
--- a/chromium/media/mojo/interfaces/cast_application_media_info_manager.mojom
+++ b/chromium/media/mojo/mojom/cast_application_media_info_manager.mojom
diff --git a/chromium/media/mojo/mojom/cdm_key_information.typemap b/chromium/media/mojo/mojom/cdm_key_information.typemap
new file mode 100644
index 00000000000..21d62224cf3
--- /dev/null
+++ b/chromium/media/mojo/mojom/cdm_key_information.typemap
@@ -0,0 +1,22 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+mojom = "//media/mojo/mojom/content_decryption_module.mojom"
+
+public_headers = [ "//media/base/cdm_key_information.h" ]
+
+traits_headers = [ "//media/mojo/mojom/cdm_key_information_mojom_traits.h" ]
+
+sources = [
+ "//media/mojo/mojom/cdm_key_information_mojom_traits.cc",
+]
+
+public_deps = [
+ "//media",
+]
+
+type_mappings = [
+ "media.mojom.CdmKeyInformation=::std::unique_ptr<::media::CdmKeyInformation>[move_only]",
+ "media.mojom.CdmKeyStatus=::media::CdmKeyInformation::KeyStatus",
+]
diff --git a/chromium/media/mojo/interfaces/cdm_key_information_mojom_traits.cc b/chromium/media/mojo/mojom/cdm_key_information_mojom_traits.cc
index 7a3ce049495..94a09fef5b2 100644
--- a/chromium/media/mojo/interfaces/cdm_key_information_mojom_traits.cc
+++ b/chromium/media/mojo/mojom/cdm_key_information_mojom_traits.cc
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/mojo/interfaces/cdm_key_information_mojom_traits.h"
+#include "media/mojo/mojom/cdm_key_information_mojom_traits.h"
#include "base/logging.h"
diff --git a/chromium/media/mojo/interfaces/cdm_key_information_mojom_traits.h b/chromium/media/mojo/mojom/cdm_key_information_mojom_traits.h
index eb047224e6f..f4a48fa7c18 100644
--- a/chromium/media/mojo/interfaces/cdm_key_information_mojom_traits.h
+++ b/chromium/media/mojo/mojom/cdm_key_information_mojom_traits.h
@@ -2,11 +2,11 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef MEDIA_MOJO_INTERFACES_CDM_KEY_INFORMATION_MOJOM_TRAITS_H_
-#define MEDIA_MOJO_INTERFACES_CDM_KEY_INFORMATION_MOJOM_TRAITS_H_
+#ifndef MEDIA_MOJO_MOJOM_CDM_KEY_INFORMATION_MOJOM_TRAITS_H_
+#define MEDIA_MOJO_MOJOM_CDM_KEY_INFORMATION_MOJOM_TRAITS_H_
#include "media/base/cdm_key_information.h"
-#include "media/mojo/interfaces/content_decryption_module.mojom.h"
+#include "media/mojo/mojom/content_decryption_module.mojom.h"
namespace mojo {
@@ -44,4 +44,4 @@ struct StructTraits<media::mojom::CdmKeyInformationDataView,
} // namespace mojo
-#endif // MEDIA_MOJO_INTERFACES_CDM_KEY_INFORMATION_MOJOM_TRAITS_H_
+#endif // MEDIA_MOJO_MOJOM_CDM_KEY_INFORMATION_MOJOM_TRAITS_H_
diff --git a/chromium/media/mojo/interfaces/cdm_key_information_mojom_traits_unittest.cc b/chromium/media/mojo/mojom/cdm_key_information_mojom_traits_unittest.cc
index 916b4c713e9..51036815cd2 100644
--- a/chromium/media/mojo/interfaces/cdm_key_information_mojom_traits_unittest.cc
+++ b/chromium/media/mojo/mojom/cdm_key_information_mojom_traits_unittest.cc
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/mojo/interfaces/cdm_key_information_mojom_traits.h"
+#include "media/mojo/mojom/cdm_key_information_mojom_traits.h"
#include "media/base/cdm_key_information.h"
#include "testing/gtest/include/gtest/gtest.h"
diff --git a/chromium/media/mojo/interfaces/cdm_proxy.mojom b/chromium/media/mojo/mojom/cdm_proxy.mojom
index b9170bb4e15..b9170bb4e15 100644
--- a/chromium/media/mojo/interfaces/cdm_proxy.mojom
+++ b/chromium/media/mojo/mojom/cdm_proxy.mojom
diff --git a/chromium/media/mojo/interfaces/cdm_proxy.typemap b/chromium/media/mojo/mojom/cdm_proxy.typemap
index 9b031100a0f..d2f05392218 100644
--- a/chromium/media/mojo/interfaces/cdm_proxy.typemap
+++ b/chromium/media/mojo/mojom/cdm_proxy.typemap
@@ -2,7 +2,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-mojom = "//media/mojo/interfaces/cdm_proxy.mojom"
+mojom = "//media/mojo/mojom/cdm_proxy.mojom"
public_headers = [ "//media/cdm/cdm_proxy.h" ]
@@ -14,8 +14,8 @@ deps = [
]
type_mappings = [
- "media.mojom.CdmProxy.Function=media::CdmProxy::Function",
- "media.mojom.CdmProxy.KeyType=media::CdmProxy::KeyType",
- "media.mojom.CdmProxy.Protocol=media::CdmProxy::Protocol",
- "media.mojom.CdmProxy.Status=media::CdmProxy::Status",
+ "media.mojom.CdmProxy.Function=::media::CdmProxy::Function",
+ "media.mojom.CdmProxy.KeyType=::media::CdmProxy::KeyType",
+ "media.mojom.CdmProxy.Protocol=::media::CdmProxy::Protocol",
+ "media.mojom.CdmProxy.Status=::media::CdmProxy::Status",
]
diff --git a/chromium/media/mojo/interfaces/cdm_service.mojom b/chromium/media/mojo/mojom/cdm_service.mojom
index 8067a565969..5c631ab857c 100644
--- a/chromium/media/mojo/interfaces/cdm_service.mojom
+++ b/chromium/media/mojo/mojom/cdm_service.mojom
@@ -4,7 +4,7 @@
module media.mojom;
-import "media/mojo/interfaces/content_decryption_module.mojom";
+import "media/mojo/mojom/content_decryption_module.mojom";
import "mojo/public/mojom/base/file_path.mojom";
import "services/service_manager/public/mojom/interface_provider.mojom";
diff --git a/chromium/media/mojo/interfaces/cdm_storage.mojom b/chromium/media/mojo/mojom/cdm_storage.mojom
index 6277c560cea..0ee20cc7c2e 100644
--- a/chromium/media/mojo/interfaces/cdm_storage.mojom
+++ b/chromium/media/mojo/mojom/cdm_storage.mojom
@@ -30,7 +30,8 @@ interface CdmStorage {
// Provides a way to access the contents of the file opened. When the connection
// to this object is broken, it is assumed that the file has been closed and
-// that no more operations will be performed on it.
+// that no more operations will be performed on it. Only 1 Read() or Write()
+// operation should be in flight at any time.
interface CdmFile {
enum Status {
kSuccess, // Operation succeeded.
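
A minimal sketch of honoring the one-operation-in-flight rule described above: the Read() is only issued from the Write() completion callback. The Read()/Write() signatures are assumptions based on the rest of cdm_storage.mojom, which this hunk does not show:

#include <cstdint>
#include <utility>
#include <vector>

#include "base/bind.h"
#include "media/mojo/mojom/cdm_storage.mojom.h"

// Issues the Read() only after the Write() has completed, so a single
// operation is in flight at any time.
void WriteThenRead(media::mojom::CdmFile* file, std::vector<uint8_t> data) {
  file->Write(
      std::move(data),
      base::BindOnce(
          [](media::mojom::CdmFile* file,
             media::mojom::CdmFile::Status status) {
            if (status != media::mojom::CdmFile::Status::kSuccess)
              return;
            file->Read(base::BindOnce(
                [](media::mojom::CdmFile::Status status,
                   const std::vector<uint8_t>& contents) {
                  // Consume |contents| here.
                }));
          },
          file));
}
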
diff --git a/chromium/media/mojo/interfaces/constants.mojom b/chromium/media/mojo/mojom/constants.mojom
index d7241013821..d7241013821 100644
--- a/chromium/media/mojo/interfaces/constants.mojom
+++ b/chromium/media/mojo/mojom/constants.mojom
diff --git a/chromium/media/mojo/interfaces/content_decryption_module.mojom b/chromium/media/mojo/mojom/content_decryption_module.mojom
index 61994ad3125..925255e5575 100644
--- a/chromium/media/mojo/interfaces/content_decryption_module.mojom
+++ b/chromium/media/mojo/mojom/content_decryption_module.mojom
@@ -4,7 +4,7 @@
module media.mojom;
-import "media/mojo/interfaces/decryptor.mojom";
+import "media/mojo/mojom/decryptor.mojom";
import "url/mojom/origin.mojom";
import "url/mojom/url.mojom";
diff --git a/chromium/media/mojo/interfaces/content_decryption_module.typemap b/chromium/media/mojo/mojom/content_decryption_module.typemap
index 45160097114..fee6331cefb 100644
--- a/chromium/media/mojo/interfaces/content_decryption_module.typemap
+++ b/chromium/media/mojo/mojom/content_decryption_module.typemap
@@ -2,7 +2,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-mojom = "//media/mojo/interfaces/content_decryption_module.mojom"
+mojom = "//media/mojo/mojom/content_decryption_module.mojom"
public_headers = [
"//media/base/cdm_config.h",
@@ -20,10 +20,10 @@ deps = [
]
type_mappings = [
- "media.mojom.CdmConfig=media::CdmConfig",
- "media.mojom.CdmPromiseResult.Exception=media::CdmPromise::Exception",
- "media.mojom.CdmSessionType=media::CdmSessionType",
- "media.mojom.CdmMessageType=media::CdmMessageType",
- "media.mojom.EmeInitDataType=media::EmeInitDataType",
- "media.mojom.HdcpVersion=media::HdcpVersion",
+ "media.mojom.CdmConfig=::media::CdmConfig",
+ "media.mojom.CdmPromiseResult.Exception=::media::CdmPromise::Exception",
+ "media.mojom.CdmSessionType=::media::CdmSessionType",
+ "media.mojom.CdmMessageType=::media::CdmMessageType",
+ "media.mojom.EmeInitDataType=::media::EmeInitDataType",
+ "media.mojom.HdcpVersion=::media::HdcpVersion",
]
diff --git a/chromium/media/mojo/interfaces/decryptor.mojom b/chromium/media/mojo/mojom/decryptor.mojom
index 041ca55d342..eec63fa4460 100644
--- a/chromium/media/mojo/interfaces/decryptor.mojom
+++ b/chromium/media/mojo/mojom/decryptor.mojom
@@ -4,7 +4,7 @@
module media.mojom;
-import "media/mojo/interfaces/media_types.mojom";
+import "media/mojo/mojom/media_types.mojom";
// Interface for decrypting (and decoding) encrypted streams.
// See media/base/decryptor.h for details.
diff --git a/chromium/media/mojo/interfaces/decryptor.typemap b/chromium/media/mojo/mojom/decryptor.typemap
index 0d2977f0071..fa1cc53fc2a 100644
--- a/chromium/media/mojo/interfaces/decryptor.typemap
+++ b/chromium/media/mojo/mojom/decryptor.typemap
@@ -2,7 +2,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-mojom = "//media/mojo/interfaces/decryptor.mojom"
+mojom = "//media/mojo/mojom/decryptor.mojom"
public_headers = [ "//media/base/decryptor.h" ]
@@ -14,6 +14,6 @@ deps = [
]
type_mappings = [
- "media.mojom.Decryptor.Status=media::Decryptor::Status",
- "media.mojom.Decryptor.StreamType=media::Decryptor::StreamType",
+ "media.mojom.Decryptor.Status=::media::Decryptor::Status",
+ "media.mojom.Decryptor.StreamType=::media::Decryptor::StreamType",
]
diff --git a/chromium/media/mojo/interfaces/demuxer_stream.mojom b/chromium/media/mojo/mojom/demuxer_stream.mojom
index 3433ab33350..59db5ea85b5 100644
--- a/chromium/media/mojo/interfaces/demuxer_stream.mojom
+++ b/chromium/media/mojo/mojom/demuxer_stream.mojom
@@ -4,7 +4,7 @@
module media.mojom;
-import "media/mojo/interfaces/media_types.mojom";
+import "media/mojo/mojom/media_types.mojom";
// DemuxerStream is modeled after media::DemuxerStream using mojo in order to
// enable proxying between a media::Pipeline and media::Renderer living in two
diff --git a/chromium/media/mojo/interfaces/demuxer_stream.typemap b/chromium/media/mojo/mojom/demuxer_stream.typemap
index 10cc17d6d1f..8bb732d0d7b 100644
--- a/chromium/media/mojo/interfaces/demuxer_stream.typemap
+++ b/chromium/media/mojo/mojom/demuxer_stream.typemap
@@ -2,7 +2,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-mojom = "//media/mojo/interfaces/demuxer_stream.mojom"
+mojom = "//media/mojo/mojom/demuxer_stream.mojom"
public_headers = [ "//media/base/demuxer_stream.h" ]
@@ -14,6 +14,6 @@ deps = [
]
type_mappings = [
- "media.mojom.DemuxerStream.Status=media::DemuxerStream::Status",
- "media.mojom.DemuxerStream.Type=media::DemuxerStream::Type",
+ "media.mojom.DemuxerStream.Status=::media::DemuxerStream::Status",
+ "media.mojom.DemuxerStream.Type=::media::DemuxerStream::Type",
]
diff --git a/chromium/media/mojo/interfaces/display_media_information.mojom b/chromium/media/mojo/mojom/display_media_information.mojom
index ad441305339..ad441305339 100644
--- a/chromium/media/mojo/interfaces/display_media_information.mojom
+++ b/chromium/media/mojo/mojom/display_media_information.mojom
diff --git a/chromium/media/mojo/interfaces/encryption_scheme.typemap b/chromium/media/mojo/mojom/encryption_scheme.typemap
index 5a77e92fbe4..f942f837dcd 100644
--- a/chromium/media/mojo/interfaces/encryption_scheme.typemap
+++ b/chromium/media/mojo/mojom/encryption_scheme.typemap
@@ -2,17 +2,17 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-mojom = "//media/mojo/interfaces/media_types.mojom"
+mojom = "//media/mojo/mojom/media_types.mojom"
public_headers = [
"//media/base/encryption_scheme.h",
"//media/base/encryption_pattern.h",
]
-traits_headers = [ "//media/mojo/interfaces/encryption_scheme_struct_traits.h" ]
+traits_headers = [ "//media/mojo/mojom/encryption_scheme_mojom_traits.h" ]
sources = [
- "//media/mojo/interfaces/encryption_scheme_struct_traits.cc",
+ "//media/mojo/mojom/encryption_scheme_mojom_traits.cc",
]
public_deps = [
@@ -25,6 +25,6 @@ deps = [
# See media_types.typemap for enum mappings.
type_mappings = [
- "media.mojom.EncryptionPattern=media::EncryptionPattern",
- "media.mojom.EncryptionScheme=media::EncryptionScheme",
+ "media.mojom.EncryptionPattern=::media::EncryptionPattern",
+ "media.mojom.EncryptionScheme=::media::EncryptionScheme",
]
diff --git a/chromium/media/mojo/interfaces/encryption_scheme_struct_traits.cc b/chromium/media/mojo/mojom/encryption_scheme_mojom_traits.cc
index fb27d49136d..a3792694135 100644
--- a/chromium/media/mojo/interfaces/encryption_scheme_struct_traits.cc
+++ b/chromium/media/mojo/mojom/encryption_scheme_mojom_traits.cc
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/mojo/interfaces/encryption_scheme_struct_traits.h"
+#include "media/mojo/mojom/encryption_scheme_mojom_traits.h"
namespace mojo {
diff --git a/chromium/media/mojo/interfaces/encryption_scheme_struct_traits.h b/chromium/media/mojo/mojom/encryption_scheme_mojom_traits.h
index 58de9352da9..c5166e566f8 100644
--- a/chromium/media/mojo/interfaces/encryption_scheme_struct_traits.h
+++ b/chromium/media/mojo/mojom/encryption_scheme_mojom_traits.h
@@ -2,13 +2,13 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef MEDIA_MOJO_INTERFACES_ENCRYPTION_SCHEME_STRUCT_TRAITS_H_
-#define MEDIA_MOJO_INTERFACES_ENCRYPTION_SCHEME_STRUCT_TRAITS_H_
+#ifndef MEDIA_MOJO_MOJOM_ENCRYPTION_SCHEME_MOJOM_TRAITS_H_
+#define MEDIA_MOJO_MOJOM_ENCRYPTION_SCHEME_MOJOM_TRAITS_H_
#include "media/base/encryption_pattern.h"
#include "media/base/encryption_scheme.h"
#include "media/base/ipc/media_param_traits.h"
-#include "media/mojo/interfaces/media_types.mojom.h"
+#include "media/mojo/mojom/media_types.mojom.h"
namespace mojo {
@@ -46,4 +46,4 @@ struct StructTraits<media::mojom::EncryptionSchemeDataView,
} // namespace mojo
-#endif // MEDIA_MOJO_INTERFACES_ENCRYPTION_SCHEME_STRUCT_TRAITS_H_
+#endif // MEDIA_MOJO_MOJOM_ENCRYPTION_SCHEME_MOJOM_TRAITS_H_
diff --git a/chromium/media/mojo/interfaces/encryption_scheme_struct_traits_unittest.cc b/chromium/media/mojo/mojom/encryption_scheme_mojom_traits_unittest.cc
index 7d3e649ade2..99cce0780be 100644
--- a/chromium/media/mojo/interfaces/encryption_scheme_struct_traits_unittest.cc
+++ b/chromium/media/mojo/mojom/encryption_scheme_mojom_traits_unittest.cc
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/mojo/interfaces/encryption_scheme_struct_traits.h"
+#include "media/mojo/mojom/encryption_scheme_mojom_traits.h"
#include <utility>
diff --git a/chromium/media/mojo/mojom/hdr_metadata.typemap b/chromium/media/mojo/mojom/hdr_metadata.typemap
new file mode 100644
index 00000000000..15a921174cb
--- /dev/null
+++ b/chromium/media/mojo/mojom/hdr_metadata.typemap
@@ -0,0 +1,11 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+mojom = "//media/mojo/mojom/media_types.mojom"
+public_headers = [ "//media/base/hdr_metadata.h" ]
+traits_headers = [ "//media/mojo/mojom/hdr_metadata_mojom_traits.h" ]
+type_mappings = [
+ "media.mojom.MasteringMetadata=::media::MasteringMetadata",
+ "media.mojom.HDRMetadata=::media::HDRMetadata",
+]
diff --git a/chromium/media/mojo/interfaces/hdr_metadata_struct_traits.h b/chromium/media/mojo/mojom/hdr_metadata_mojom_traits.h
index 6ca3810ea49..8021460342c 100644
--- a/chromium/media/mojo/interfaces/hdr_metadata_struct_traits.h
+++ b/chromium/media/mojo/mojom/hdr_metadata_mojom_traits.h
@@ -2,11 +2,11 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef MEDIA_MOJO_INTERFACES_HDR_METADATA_STRUCT_TRAITS_H_
-#define MEDIA_MOJO_INTERFACES_HDR_METADATA_STRUCT_TRAITS_H_
+#ifndef MEDIA_MOJO_MOJOM_HDR_METADATA_MOJOM_TRAITS_H_
+#define MEDIA_MOJO_MOJOM_HDR_METADATA_MOJOM_TRAITS_H_
#include "media/base/hdr_metadata.h"
-#include "media/mojo/interfaces/media_types.mojom.h"
+#include "media/mojo/mojom/media_types.mojom.h"
namespace mojo {
@@ -75,4 +75,4 @@ struct StructTraits<media::mojom::HDRMetadataDataView, media::HDRMetadata> {
} // namespace mojo
-#endif // MEDIA_MOJO_INTERFACES_HDR_METADATA_STRUCT_TRAITS_H_
+#endif // MEDIA_MOJO_MOJOM_HDR_METADATA_MOJOM_TRAITS_H_
diff --git a/chromium/media/mojo/interfaces/interface_factory.mojom b/chromium/media/mojo/mojom/interface_factory.mojom
index 38b42f10614..c9bdabe4044 100644
--- a/chromium/media/mojo/interfaces/interface_factory.mojom
+++ b/chromium/media/mojo/mojom/interface_factory.mojom
@@ -4,13 +4,13 @@
module media.mojom;
-import "media/mojo/interfaces/audio_decoder.mojom";
-import "media/mojo/interfaces/cdm_proxy.mojom";
-import "media/mojo/interfaces/decryptor.mojom";
-import "media/mojo/interfaces/content_decryption_module.mojom";
-import "media/mojo/interfaces/renderer.mojom";
-import "media/mojo/interfaces/renderer_extensions.mojom";
-import "media/mojo/interfaces/video_decoder.mojom";
+import "media/mojo/mojom/audio_decoder.mojom";
+import "media/mojo/mojom/cdm_proxy.mojom";
+import "media/mojo/mojom/decryptor.mojom";
+import "media/mojo/mojom/content_decryption_module.mojom";
+import "media/mojo/mojom/renderer.mojom";
+import "media/mojo/mojom/renderer_extensions.mojom";
+import "media/mojo/mojom/video_decoder.mojom";
import "mojo/public/mojom/base/token.mojom";
import "mojo/public/mojom/base/unguessable_token.mojom";
diff --git a/chromium/media/mojo/interfaces/key_system_support.mojom b/chromium/media/mojo/mojom/key_system_support.mojom
index bf40634e7cd..99a807d57dd 100644
--- a/chromium/media/mojo/interfaces/key_system_support.mojom
+++ b/chromium/media/mojo/mojom/key_system_support.mojom
@@ -4,8 +4,8 @@
module media.mojom;
-import "media/mojo/interfaces/content_decryption_module.mojom";
-import "media/mojo/interfaces/media_types.mojom";
+import "media/mojo/mojom/content_decryption_module.mojom";
+import "media/mojo/mojom/media_types.mojom";
// TODO(xhwang): Use "set" instead of "array" if supported by mojom.
// TODO(crbug.com/796725) Find a way to include profiles and levels for
diff --git a/chromium/media/mojo/interfaces/media_drm_storage.mojom b/chromium/media/mojo/mojom/media_drm_storage.mojom
index ed9c7ca2b2a..ed9c7ca2b2a 100644
--- a/chromium/media/mojo/interfaces/media_drm_storage.mojom
+++ b/chromium/media/mojo/mojom/media_drm_storage.mojom
diff --git a/chromium/media/mojo/interfaces/media_drm_storage.typemap b/chromium/media/mojo/mojom/media_drm_storage.typemap
index e47e2e71ad9..26daeb1dfa3 100644
--- a/chromium/media/mojo/interfaces/media_drm_storage.typemap
+++ b/chromium/media/mojo/mojom/media_drm_storage.typemap
@@ -2,7 +2,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-mojom = "//media/mojo/interfaces/media_drm_storage.mojom"
+mojom = "//media/mojo/mojom/media_drm_storage.mojom"
os_whitelist = [ "android" ]
@@ -14,4 +14,4 @@ deps = [
"//media",
]
-type_mappings = [ "media.mojom.SessionData.KeyType=media::MediaDrmKeyType" ]
+type_mappings = [ "media.mojom.SessionData.KeyType=::media::MediaDrmKeyType" ]
diff --git a/chromium/media/mojo/interfaces/media_log.mojom b/chromium/media/mojo/mojom/media_log.mojom
index 05f3b64c980..20060936939 100644
--- a/chromium/media/mojo/interfaces/media_log.mojom
+++ b/chromium/media/mojo/mojom/media_log.mojom
@@ -4,7 +4,7 @@
module media.mojom;
-import "media/mojo/interfaces/media_types.mojom";
+import "media/mojo/mojom/media_types.mojom";
interface MediaLog {
AddEvent(MediaLogEvent event);
diff --git a/chromium/media/mojo/interfaces/media_metrics_provider.mojom b/chromium/media/mojo/mojom/media_metrics_provider.mojom
index 9fccba4e4f7..9817ada84d4 100644
--- a/chromium/media/mojo/interfaces/media_metrics_provider.mojom
+++ b/chromium/media/mojo/mojom/media_metrics_provider.mojom
@@ -4,11 +4,12 @@
module media.mojom;
-import "media/mojo/interfaces/media_types.mojom";
-import "media/mojo/interfaces/video_decode_stats_recorder.mojom";
-import "media/mojo/interfaces/watch_time_recorder.mojom";
+import "media/learning/mojo/public/mojom/learning_task_controller.mojom";
+import "media/mojo/mojom/media_types.mojom";
+import "media/mojo/mojom/video_decode_stats_recorder.mojom";
+import "media/mojo/mojom/watch_time_recorder.mojom";
import "mojo/public/mojom/base/time.mojom";
-import "ui/gfx/geometry/mojo/geometry.mojom";
+import "ui/gfx/geometry/mojom/geometry.mojom";
// Used for reporting to UMA and UKM. Represents the URL scheme of the src URL
// given to an <audio> or <video> tag. Always add new entries to the end.
@@ -61,6 +62,10 @@ interface MediaMetricsProvider {
// Creates a VideoDecodeStatsRecorder instance.
AcquireVideoDecodeStatsRecorder(VideoDecodeStatsRecorder& recorder);
+ // Returns a LearningTaskController for the given |taskName|.
+ AcquireLearningTaskController(
+ string taskName, media.learning.mojom.LearningTaskController& controller);
+
// Reports that bytes have been received by the media player.
AddBytesReceived(uint64 bytes_received);
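
A minimal caller-side sketch of the new AcquireLearningTaskController() method added above, assuming the generated LearningTaskControllerPtr alias and header paths:

#include <string>

#include "media/learning/mojo/public/mojom/learning_task_controller.mojom.h"
#include "media/mojo/mojom/media_metrics_provider.mojom.h"
#include "mojo/public/cpp/bindings/interface_request.h"

// The caller keeps the pointer end and hands the request end to the
// provider, which binds it to a controller for |task_name|.
media::learning::mojom::LearningTaskControllerPtr AcquireController(
    media::mojom::MediaMetricsProvider* provider,
    const std::string& task_name) {
  media::learning::mojom::LearningTaskControllerPtr controller;
  provider->AcquireLearningTaskController(task_name,
                                          mojo::MakeRequest(&controller));
  return controller;
}
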
diff --git a/chromium/media/mojo/interfaces/media_service.mojom b/chromium/media/mojo/mojom/media_service.mojom
index 7e4c2b6b558..89de3b21f8a 100644
--- a/chromium/media/mojo/interfaces/media_service.mojom
+++ b/chromium/media/mojo/mojom/media_service.mojom
@@ -4,7 +4,7 @@
module media.mojom;
-import "media/mojo/interfaces/interface_factory.mojom";
+import "media/mojo/mojom/interface_factory.mojom";
import "services/service_manager/public/mojom/interface_provider.mojom";
// A service to provide media InterfaceFactory, typically to the media pipeline
diff --git a/chromium/media/mojo/interfaces/media_types.mojom b/chromium/media/mojo/mojom/media_types.mojom
index fd4870cb4d4..fee3497a98b 100644
--- a/chromium/media/mojo/interfaces/media_types.mojom
+++ b/chromium/media/mojo/mojom/media_types.mojom
@@ -8,8 +8,8 @@ import "gpu/ipc/common/mailbox_holder.mojom";
import "gpu/ipc/common/vulkan_ycbcr_info.mojom";
import "mojo/public/mojom/base/time.mojom";
import "mojo/public/mojom/base/values.mojom";
-import "ui/gfx/geometry/mojo/geometry.mojom";
-import "ui/gfx/mojo/color_space.mojom";
+import "ui/gfx/geometry/mojom/geometry.mojom";
+import "ui/gfx/mojom/color_space.mojom";
// See media/base/audio_codecs.h for descriptions.
[Native]
diff --git a/chromium/media/mojo/mojom/media_types.typemap b/chromium/media/mojo/mojom/media_types.typemap
new file mode 100644
index 00000000000..02aaa323e58
--- /dev/null
+++ b/chromium/media/mojo/mojom/media_types.typemap
@@ -0,0 +1,69 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+mojom = "//media/mojo/mojom/media_types.mojom"
+
+public_headers = [
+ "//media/base/audio_codecs.h",
+ "//media/base/buffering_state.h",
+ "//media/base/channel_layout.h",
+ "//media/base/container_names.h",
+ "//media/base/decode_status.h",
+ "//media/base/decrypt_config.h",
+ "//media/base/encryption_pattern.h",
+ "//media/base/encryption_scheme.h",
+ "//media/base/hdr_metadata.h",
+ "//media/base/media_log_event.h",
+ "//media/base/media_status.h",
+ "//media/base/output_device_info.h",
+ "//media/base/pipeline_status.h",
+ "//media/base/sample_format.h",
+ "//media/base/subsample_entry.h",
+ "//media/base/video_codecs.h",
+ "//media/base/video_transformation.h",
+ "//media/base/video_types.h",
+ "//media/base/waiting.h",
+ "//media/base/watch_time_keys.h",
+]
+
+traits_headers = [
+ "//media/base/ipc/media_param_traits_macros.h",
+ "//media/mojo/mojom/video_transformation_mojom_traits.h",
+]
+
+public_deps = [
+ "//media",
+ "//media/base/ipc",
+]
+
+sources = [
+ "//media/mojo/mojom/video_transformation_mojom_traits.cc",
+ "//media/mojo/mojom/video_transformation_mojom_traits.h",
+]
+
+type_mappings = [
+ "media.mojom.AudioCodec=::media::AudioCodec",
+ "media.mojom.BufferingState=::media::BufferingState",
+ "media.mojom.BufferingStateChangeReason=::media::BufferingStateChangeReason",
+ "media.mojom.ChannelLayout=::media::ChannelLayout",
+ "media.mojom.ColorSpace=::media::ColorSpace",
+ "media.mojom.DecodeStatus=::media::DecodeStatus",
+ "media.mojom.EncryptionMode=::media::EncryptionMode",
+ "media.mojom.EncryptionScheme.CipherMode=::media::EncryptionScheme::CipherMode",
+ "media.mojom.MediaContainerName=::media::container_names::MediaContainerName",
+ "media.mojom.MediaLogEvent=::media::MediaLogEvent",
+ "media.mojom.OutputDeviceStatus=::media::OutputDeviceStatus",
+ "media.mojom.PipelineStatus=::media::PipelineStatus",
+ "media.mojom.SampleFormat=::media::SampleFormat",
+ "media.mojom.SubsampleEntry=::media::SubsampleEntry",
+ "media.mojom.VideoCodec=::media::VideoCodec",
+ "media.mojom.VideoCodecProfile=::media::VideoCodecProfile",
+ "media.mojom.VideoPixelFormat=::media::VideoPixelFormat",
+ "media.mojom.VideoRotation=::media::VideoRotation",
+ "media.mojom.VideoTransformation=::media::VideoTransformation",
+ "media.mojom.WaitingReason=::media::WaitingReason",
+ "media.mojom.WatchTimeKey=::media::WatchTimeKey",
+ "media.mojom.EncryptionPattern=::media::EncryptionPattern",
+ "media.mojom.MediaStatusState=::media::MediaStatus::State",
+]
diff --git a/chromium/media/mojo/interfaces/mirror_service_remoting.mojom b/chromium/media/mojo/mojom/mirror_service_remoting.mojom
index c50fe0a2590..a5f41efcdd4 100644
--- a/chromium/media/mojo/interfaces/mirror_service_remoting.mojom
+++ b/chromium/media/mojo/mojom/mirror_service_remoting.mojom
@@ -4,7 +4,7 @@
module media.mojom;
-import "media/mojo/interfaces/remoting_common.mojom";
+import "media/mojo/mojom/remoting_common.mojom";
// Interface used by the source to start/stop remoting and send data to the
// sink.
diff --git a/chromium/media/mojo/interfaces/output_protection.mojom b/chromium/media/mojo/mojom/output_protection.mojom
index 87dc10d37bf..87dc10d37bf 100644
--- a/chromium/media/mojo/interfaces/output_protection.mojom
+++ b/chromium/media/mojo/mojom/output_protection.mojom
diff --git a/chromium/media/mojo/mojom/pipeline_status.typemap b/chromium/media/mojo/mojom/pipeline_status.typemap
new file mode 100644
index 00000000000..be73386b42f
--- /dev/null
+++ b/chromium/media/mojo/mojom/pipeline_status.typemap
@@ -0,0 +1,11 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+mojom = "//media/mojo/mojom/media_types.mojom"
+public_headers = [ "//media/base/pipeline_status.h" ]
+traits_headers = [ "//media/mojo/mojom/pipeline_status_mojom_traits.h" ]
+type_mappings = [
+ "media.mojom.PipelineStatistics=::media::PipelineStatistics",
+ "media.mojom.PipelineDecoderInfo=::media::PipelineDecoderInfo",
+]
diff --git a/chromium/media/mojo/interfaces/pipeline_status_struct_traits.h b/chromium/media/mojo/mojom/pipeline_status_mojom_traits.h
index f5a0d69cc76..12be78a41fe 100644
--- a/chromium/media/mojo/interfaces/pipeline_status_struct_traits.h
+++ b/chromium/media/mojo/mojom/pipeline_status_mojom_traits.h
@@ -2,13 +2,13 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef MEDIA_MOJO_INTERFACES_PIPELINE_STATUS_STRUCT_TRAITS_H_
-#define MEDIA_MOJO_INTERFACES_PIPELINE_STATUS_STRUCT_TRAITS_H_
+#ifndef MEDIA_MOJO_MOJOM_PIPELINE_STATUS_MOJOM_TRAITS_H_
+#define MEDIA_MOJO_MOJOM_PIPELINE_STATUS_MOJOM_TRAITS_H_
#include <string>
#include "media/base/pipeline_status.h"
-#include "media/mojo/interfaces/media_types.mojom.h"
+#include "media/mojo/mojom/media_types.mojom.h"
namespace mojo {
@@ -72,4 +72,4 @@ struct StructTraits<media::mojom::PipelineDecoderInfoDataView,
} // namespace mojo
-#endif // MEDIA_MOJO_INTERFACES_PIPELINE_STATUS_STRUCT_TRAITS_H_
+#endif // MEDIA_MOJO_MOJOM_PIPELINE_STATUS_MOJOM_TRAITS_H_
diff --git a/chromium/media/mojo/interfaces/platform_verification.mojom b/chromium/media/mojo/mojom/platform_verification.mojom
index 550ade050dd..550ade050dd 100644
--- a/chromium/media/mojo/interfaces/platform_verification.mojom
+++ b/chromium/media/mojo/mojom/platform_verification.mojom
diff --git a/chromium/media/mojo/interfaces/provision_fetcher.mojom b/chromium/media/mojo/mojom/provision_fetcher.mojom
index 74d6b85be92..74d6b85be92 100644
--- a/chromium/media/mojo/interfaces/provision_fetcher.mojom
+++ b/chromium/media/mojo/mojom/provision_fetcher.mojom
diff --git a/chromium/media/mojo/interfaces/remoting.mojom b/chromium/media/mojo/mojom/remoting.mojom
index fd36afc7c14..8c4227bf63d 100644
--- a/chromium/media/mojo/interfaces/remoting.mojom
+++ b/chromium/media/mojo/mojom/remoting.mojom
@@ -4,7 +4,7 @@
module media.mojom;
-import "media/mojo/interfaces/remoting_common.mojom";
+import "media/mojo/mojom/remoting_common.mojom";
interface RemoterFactory {
// Create a new Remoter associated with the given RemotingSource and bind it
diff --git a/chromium/media/mojo/interfaces/remoting_common.mojom b/chromium/media/mojo/mojom/remoting_common.mojom
index dd4f5b31619..dd4f5b31619 100644
--- a/chromium/media/mojo/interfaces/remoting_common.mojom
+++ b/chromium/media/mojo/mojom/remoting_common.mojom
diff --git a/chromium/media/mojo/interfaces/renderer.mojom b/chromium/media/mojo/mojom/renderer.mojom
index 2c180128158..c63bc1ad919 100644
--- a/chromium/media/mojo/interfaces/renderer.mojom
+++ b/chromium/media/mojo/mojom/renderer.mojom
@@ -4,17 +4,19 @@
module media.mojom;
-import "media/mojo/interfaces/demuxer_stream.mojom";
-import "media/mojo/interfaces/media_types.mojom";
+import "media/mojo/mojom/demuxer_stream.mojom";
+import "media/mojo/mojom/media_types.mojom";
import "mojo/public/mojom/base/time.mojom";
import "mojo/public/mojom/base/unguessable_token.mojom";
-import "ui/gfx/geometry/mojo/geometry.mojom";
+import "ui/gfx/geometry/mojom/geometry.mojom";
import "url/mojom/url.mojom";
+import "url/mojom/origin.mojom";
// See media/base/media_url_params.h for descriptions.
struct MediaUrlParams {
url.mojom.Url media_url;
url.mojom.Url site_for_cookies;
+ url.mojom.Origin top_frame_origin;
bool allow_credentials;
bool is_hls;
};
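
A minimal sketch of populating MediaUrlParams with the new top_frame_origin field; the generated New() argument order (declaration order) and the GURL / url::Origin typemaps for the URL fields are assumptions:

#include "media/mojo/mojom/renderer.mojom.h"
#include "url/gurl.h"
#include "url/origin.h"

// New() takes the fields in declaration order, so top_frame_origin sits
// between site_for_cookies and allow_credentials.
media::mojom::MediaUrlParamsPtr MakeHlsParams(const GURL& url,
                                              const url::Origin& top_frame) {
  return media::mojom::MediaUrlParams::New(
      url, /*site_for_cookies=*/url, /*top_frame_origin=*/top_frame,
      /*allow_credentials=*/true, /*is_hls=*/true);
}
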
diff --git a/chromium/media/mojo/interfaces/renderer_extensions.mojom b/chromium/media/mojo/mojom/renderer_extensions.mojom
index 595f0b1950e..2bd84f47308 100644
--- a/chromium/media/mojo/interfaces/renderer_extensions.mojom
+++ b/chromium/media/mojo/mojom/renderer_extensions.mojom
@@ -4,10 +4,10 @@
module media.mojom;
-import "media/mojo/interfaces/media_types.mojom";
+import "media/mojo/mojom/media_types.mojom";
import "mojo/public/mojom/base/time.mojom";
import "mojo/public/mojom/base/unguessable_token.mojom";
-import "ui/gfx/geometry/mojo/geometry.mojom";
+import "ui/gfx/geometry/mojom/geometry.mojom";
// Extension of the mojo::RendererClient communication layer for HLS and Android
// software rendering fallback paths.
diff --git a/chromium/media/mojo/interfaces/supported_video_decoder_config_struct_traits.cc b/chromium/media/mojo/mojom/supported_video_decoder_config_mojom_traits.cc
index 812d86a63cc..95651813ee4 100644
--- a/chromium/media/mojo/interfaces/supported_video_decoder_config_struct_traits.cc
+++ b/chromium/media/mojo/mojom/supported_video_decoder_config_mojom_traits.cc
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/mojo/interfaces/supported_video_decoder_config_struct_traits.h"
+#include "media/mojo/mojom/supported_video_decoder_config_mojom_traits.h"
namespace mojo {
diff --git a/chromium/media/mojo/interfaces/supported_video_decoder_config_struct_traits.h b/chromium/media/mojo/mojom/supported_video_decoder_config_mojom_traits.h
index 875615759f4..5ef4800dc37 100644
--- a/chromium/media/mojo/interfaces/supported_video_decoder_config_struct_traits.h
+++ b/chromium/media/mojo/mojom/supported_video_decoder_config_mojom_traits.h
@@ -2,14 +2,14 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef MEDIA_MOJO_INTERFACES_SUPPORTED_VIDEO_DECODER_CONFIG_STRUCT_TRAITS_H_
-#define MEDIA_MOJO_INTERFACES_SUPPORTED_VIDEO_DECODER_CONFIG_STRUCT_TRAITS_H_
+#ifndef MEDIA_MOJO_MOJOM_SUPPORTED_VIDEO_DECODER_CONFIG_MOJOM_TRAITS_H_
+#define MEDIA_MOJO_MOJOM_SUPPORTED_VIDEO_DECODER_CONFIG_MOJOM_TRAITS_H_
#include "media/base/ipc/media_param_traits.h"
-#include "media/mojo/interfaces/media_types.mojom.h"
-#include "media/mojo/interfaces/video_decoder.mojom.h"
+#include "media/mojo/mojom/media_types.mojom.h"
+#include "media/mojo/mojom/video_decoder.mojom.h"
#include "media/video/supported_video_decoder_config.h"
-#include "ui/gfx/geometry/mojo/geometry_struct_traits.h"
+#include "ui/gfx/geometry/mojom/geometry_mojom_traits.h"
namespace mojo {
@@ -51,4 +51,4 @@ struct StructTraits<media::mojom::SupportedVideoDecoderConfigDataView,
} // namespace mojo
-#endif // MEDIA_MOJO_INTERFACES_SUPPORTED_VIDEO_DECODER_CONFIG_STRUCT_TRAITS_H_
+#endif // MEDIA_MOJO_MOJOM_SUPPORTED_VIDEO_DECODER_CONFIG_MOJOM_TRAITS_H_
diff --git a/chromium/media/mojo/interfaces/traits_test_service.mojom b/chromium/media/mojo/mojom/traits_test_service.mojom
index 2d58fa2e8bb..734581a6220 100644
--- a/chromium/media/mojo/interfaces/traits_test_service.mojom
+++ b/chromium/media/mojo/mojom/traits_test_service.mojom
@@ -4,7 +4,7 @@
module media.mojom;
-import "media/mojo/interfaces/media_types.mojom";
+import "media/mojo/mojom/media_types.mojom";
interface TraitsTestService {
[Sync]
diff --git a/chromium/media/mojo/mojom/typemaps.gni b/chromium/media/mojo/mojom/typemaps.gni
new file mode 100644
index 00000000000..dd991288068
--- /dev/null
+++ b/chromium/media/mojo/mojom/typemaps.gni
@@ -0,0 +1,23 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+typemaps = [
+ "//media/mojo/mojom/audio_decoder_config.typemap",
+ "//media/mojo/mojom/audio_parameters.typemap",
+ "//media/mojo/mojom/cdm_key_information.typemap",
+ "//media/mojo/mojom/cdm_proxy.typemap",
+ "//media/mojo/mojom/content_decryption_module.typemap",
+ "//media/mojo/mojom/decryptor.typemap",
+ "//media/mojo/mojom/demuxer_stream.typemap",
+ "//media/mojo/mojom/encryption_scheme.typemap",
+ "//media/mojo/mojom/hdr_metadata.typemap",
+ "//media/mojo/mojom/media_drm_storage.typemap",
+ "//media/mojo/mojom/media_types.typemap",
+ "//media/mojo/mojom/pipeline_status.typemap",
+ "//media/mojo/mojom/video_color_space.typemap",
+ "//media/mojo/mojom/video_decoder.typemap",
+ "//media/mojo/mojom/video_decoder_config.typemap",
+ "//media/mojo/mojom/video_encode_accelerator.typemap",
+ "//media/mojo/mojom/video_frame.typemap",
+]
diff --git a/chromium/media/mojo/mojom/video_color_space.typemap b/chromium/media/mojo/mojom/video_color_space.typemap
new file mode 100644
index 00000000000..12be2cb1cbf
--- /dev/null
+++ b/chromium/media/mojo/mojom/video_color_space.typemap
@@ -0,0 +1,17 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+mojom = "//media/mojo/mojom/media_types.mojom"
+public_headers = [
+ "//media/base/video_color_space.h",
+ "//ui/gfx/color_space.h",
+]
+traits_headers = [ "//media/mojo/mojom/video_color_space_mojom_traits.h" ]
+type_mappings = [
+ "media.mojom.VideoColorSpace.PrimaryID=::media::VideoColorSpace::PrimaryID",
+ "media.mojom.VideoColorSpace.TransferID=::media::VideoColorSpace::TransferID",
+ "media.mojom.VideoColorSpace.MatrixID=::media::VideoColorSpace::MatrixID",
+ "media.mojom.VideoColorSpace.RangeID=::gfx::ColorSpace::RangeID",
+ "media.mojom.VideoColorSpace=::media::VideoColorSpace",
+]
diff --git a/chromium/media/mojo/interfaces/video_color_space_struct_traits.h b/chromium/media/mojo/mojom/video_color_space_mojom_traits.h
index 6e6f7321407..dde2e551ea6 100644
--- a/chromium/media/mojo/interfaces/video_color_space_struct_traits.h
+++ b/chromium/media/mojo/mojom/video_color_space_mojom_traits.h
@@ -2,11 +2,11 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef MEDIA_MOJO_INTERFACES_VIDEO_COLOR_SPACE_STRUCT_TRAITS_H_
-#define MEDIA_MOJO_INTERFACES_VIDEO_COLOR_SPACE_STRUCT_TRAITS_H_
+#ifndef MEDIA_MOJO_MOJOM_VIDEO_COLOR_SPACE_MOJOM_TRAITS_H_
+#define MEDIA_MOJO_MOJOM_VIDEO_COLOR_SPACE_MOJOM_TRAITS_H_
#include "media/base/video_color_space.h"
-#include "media/mojo/interfaces/media_types.mojom.h"
+#include "media/mojo/mojom/media_types.mojom.h"
namespace mojo {
@@ -44,4 +44,4 @@ struct StructTraits<media::mojom::VideoColorSpaceDataView,
} // namespace mojo
-#endif // MEDIA_MOJO_INTERFACES_VIDEO_COLOR_SPACE_STRUCT_TRAITS_H_
+#endif // MEDIA_MOJO_MOJOM_VIDEO_COLOR_SPACE_MOJOM_TRAITS_H_
diff --git a/chromium/media/mojo/interfaces/video_decode_perf_history.mojom b/chromium/media/mojo/mojom/video_decode_perf_history.mojom
index 7dffaaf4aa3..cd66b26bdb5 100644
--- a/chromium/media/mojo/interfaces/video_decode_perf_history.mojom
+++ b/chromium/media/mojo/mojom/video_decode_perf_history.mojom
@@ -4,8 +4,8 @@
module media.mojom;
-import "media/mojo/interfaces/media_types.mojom";
-import "ui/gfx/geometry/mojo/geometry.mojom";
+import "media/mojo/mojom/media_types.mojom";
+import "ui/gfx/geometry/mojom/geometry.mojom";
// This service will query the history of playback stats to evaluate how
// a video stream with the given configuration will perform.
diff --git a/chromium/media/mojo/interfaces/video_decode_stats_recorder.mojom b/chromium/media/mojo/mojom/video_decode_stats_recorder.mojom
index cfdb876e64b..d725cfb1e89 100644
--- a/chromium/media/mojo/interfaces/video_decode_stats_recorder.mojom
+++ b/chromium/media/mojo/mojom/video_decode_stats_recorder.mojom
@@ -4,8 +4,8 @@
module media.mojom;
-import "media/mojo/interfaces/media_types.mojom";
-import "ui/gfx/geometry/mojo/geometry.mojom";
+import "media/mojo/mojom/media_types.mojom";
+import "ui/gfx/geometry/mojom/geometry.mojom";
import "url/mojom/origin.mojom";
// Interface for media players in the renderer to send decode stats to the
diff --git a/chromium/media/mojo/interfaces/video_decoder.mojom b/chromium/media/mojo/mojom/video_decoder.mojom
index caf61179ef7..12b0e596079 100644
--- a/chromium/media/mojo/interfaces/video_decoder.mojom
+++ b/chromium/media/mojo/mojom/video_decoder.mojom
@@ -5,11 +5,11 @@
module media.mojom;
import "gpu/ipc/common/sync_token.mojom";
-import "media/mojo/interfaces/media_log.mojom";
-import "media/mojo/interfaces/media_types.mojom";
+import "media/mojo/mojom/media_log.mojom";
+import "media/mojo/mojom/media_types.mojom";
import "mojo/public/mojom/base/unguessable_token.mojom";
-import "ui/gfx/geometry/mojo/geometry.mojom";
-import "ui/gfx/mojo/color_space.mojom";
+import "ui/gfx/geometry/mojom/geometry.mojom";
+import "ui/gfx/mojom/color_space.mojom";
// Serializable rule for matching VideoDecoderConfigs.
struct SupportedVideoDecoderConfig {
diff --git a/chromium/media/mojo/mojom/video_decoder.typemap b/chromium/media/mojo/mojom/video_decoder.typemap
new file mode 100644
index 00000000000..2ffd19d5b68
--- /dev/null
+++ b/chromium/media/mojo/mojom/video_decoder.typemap
@@ -0,0 +1,30 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+mojom = "//media/mojo/mojom/video_decoder.mojom"
+
+public_headers = [
+ "//media/base/overlay_info.h",
+ "//media/video/supported_video_decoder_config.h",
+]
+
+traits_headers = [
+ "//media/base/ipc/media_param_traits_macros.h",
+ "//media/mojo/mojom/supported_video_decoder_config_mojom_traits.h",
+]
+
+sources = [
+ "supported_video_decoder_config_mojom_traits.cc",
+ "supported_video_decoder_config_mojom_traits.h",
+]
+
+deps = [
+ "//media/gpu/ipc/common",
+]
+
+type_mappings = [
+ "media.mojom.OverlayInfo=::media::OverlayInfo",
+ "media.mojom.SupportedVideoDecoderConfig=::media::SupportedVideoDecoderConfig",
+ "media.mojom.VideoDecoderImplementation=::media::VideoDecoderImplementation",
+]
diff --git a/chromium/media/mojo/interfaces/video_decoder_config.typemap b/chromium/media/mojo/mojom/video_decoder_config.typemap
index 41fbc5b2e25..e56c4b43a06 100644
--- a/chromium/media/mojo/interfaces/video_decoder_config.typemap
+++ b/chromium/media/mojo/mojom/video_decoder_config.typemap
@@ -2,15 +2,14 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-mojom = "//media/mojo/interfaces/media_types.mojom"
+mojom = "//media/mojo/mojom/media_types.mojom"
public_headers = [ "//media/base/video_decoder_config.h" ]
-traits_headers =
- [ "//media/mojo/interfaces/video_decoder_config_struct_traits.h" ]
+traits_headers = [ "//media/mojo/mojom/video_decoder_config_mojom_traits.h" ]
sources = [
- "video_decoder_config_struct_traits.cc",
+ "video_decoder_config_mojom_traits.cc",
]
public_deps = [
@@ -20,8 +19,8 @@ public_deps = [
deps = [
"//media/base/ipc",
- "//ui/gfx/geometry/mojo:struct_traits",
+ "//ui/gfx/geometry/mojom:mojom_traits",
]
# See media_types.typemap for enum mappings.
-type_mappings = [ "media.mojom.VideoDecoderConfig=media::VideoDecoderConfig" ]
+type_mappings = [ "media.mojom.VideoDecoderConfig=::media::VideoDecoderConfig" ]
diff --git a/chromium/media/mojo/interfaces/video_decoder_config_struct_traits.cc b/chromium/media/mojo/mojom/video_decoder_config_mojom_traits.cc
index 0b9f4c624fa..beda61c6b94 100644
--- a/chromium/media/mojo/interfaces/video_decoder_config_struct_traits.cc
+++ b/chromium/media/mojo/mojom/video_decoder_config_mojom_traits.cc
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/mojo/interfaces/video_decoder_config_struct_traits.h"
+#include "media/mojo/mojom/video_decoder_config_mojom_traits.h"
namespace mojo {
diff --git a/chromium/media/mojo/interfaces/video_decoder_config_struct_traits.h b/chromium/media/mojo/mojom/video_decoder_config_mojom_traits.h
index e198f1f2a19..05fffca8499 100644
--- a/chromium/media/mojo/interfaces/video_decoder_config_struct_traits.h
+++ b/chromium/media/mojo/mojom/video_decoder_config_mojom_traits.h
@@ -2,17 +2,17 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef MEDIA_MOJO_INTERFACES_VIDEO_DECODER_CONFIG_STRUCT_TRAITS_H_
-#define MEDIA_MOJO_INTERFACES_VIDEO_DECODER_CONFIG_STRUCT_TRAITS_H_
+#ifndef MEDIA_MOJO_MOJOM_VIDEO_DECODER_CONFIG_MOJOM_TRAITS_H_
+#define MEDIA_MOJO_MOJOM_VIDEO_DECODER_CONFIG_MOJOM_TRAITS_H_
#include "media/base/ipc/media_param_traits.h"
#include "media/base/video_decoder_config.h"
-#include "media/mojo/interfaces/encryption_scheme_struct_traits.h"
-#include "media/mojo/interfaces/hdr_metadata_struct_traits.h"
-#include "media/mojo/interfaces/media_types.mojom.h"
-#include "media/mojo/interfaces/video_color_space_struct_traits.h"
-#include "media/mojo/interfaces/video_transformation_mojom_traits.h"
-#include "ui/gfx/geometry/mojo/geometry_struct_traits.h"
+#include "media/mojo/mojom/encryption_scheme_mojom_traits.h"
+#include "media/mojo/mojom/hdr_metadata_mojom_traits.h"
+#include "media/mojo/mojom/media_types.mojom.h"
+#include "media/mojo/mojom/video_color_space_mojom_traits.h"
+#include "media/mojo/mojom/video_transformation_mojom_traits.h"
+#include "ui/gfx/geometry/mojom/geometry_mojom_traits.h"
namespace mojo {
@@ -76,4 +76,4 @@ struct StructTraits<media::mojom::VideoDecoderConfigDataView,
} // namespace mojo
-#endif // MEDIA_MOJO_INTERFACES_VIDEO_DECODER_CONFIG_STRUCT_TRAITS_H_
+#endif // MEDIA_MOJO_MOJOM_VIDEO_DECODER_CONFIG_MOJOM_TRAITS_H_
diff --git a/chromium/media/mojo/interfaces/video_decoder_config_struct_traits_unittest.cc b/chromium/media/mojo/mojom/video_decoder_config_mojom_traits_unittest.cc
index 1ee3d5dc02d..002306a8ef7 100644
--- a/chromium/media/mojo/interfaces/video_decoder_config_struct_traits_unittest.cc
+++ b/chromium/media/mojo/mojom/video_decoder_config_mojom_traits_unittest.cc
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/mojo/interfaces/video_decoder_config_struct_traits.h"
+#include "media/mojo/mojom/video_decoder_config_mojom_traits.h"
#include <utility>
diff --git a/chromium/media/mojo/interfaces/video_encode_accelerator.mojom b/chromium/media/mojo/mojom/video_encode_accelerator.mojom
index 44602be9fcc..6aad693e81f 100644
--- a/chromium/media/mojo/interfaces/video_encode_accelerator.mojom
+++ b/chromium/media/mojo/mojom/video_encode_accelerator.mojom
@@ -4,9 +4,9 @@
module media.mojom;
-import "media/mojo/interfaces/media_types.mojom";
+import "media/mojo/mojom/media_types.mojom";
import "mojo/public/mojom/base/time.mojom";
-import "ui/gfx/geometry/mojo/geometry.mojom";
+import "ui/gfx/geometry/mojom/geometry.mojom";
// This file is the Mojo version of the media::VideoEncodeAccelerator interface
// and describes the communication between a Client and a remote "service"
diff --git a/chromium/media/mojo/mojom/video_encode_accelerator.typemap b/chromium/media/mojo/mojom/video_encode_accelerator.typemap
new file mode 100644
index 00000000000..671239290fd
--- /dev/null
+++ b/chromium/media/mojo/mojom/video_encode_accelerator.typemap
@@ -0,0 +1,35 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+mojom = "//media/mojo/mojom/video_encode_accelerator.mojom"
+
+public_headers = [ "//media/video/video_encode_accelerator.h" ]
+
+traits_headers =
+ [ "//media/mojo/mojom/video_encode_accelerator_mojom_traits.h" ]
+
+sources = [
+ "//media/mojo/mojom/video_encode_accelerator_mojom_traits.cc",
+ "//media/mojo/mojom/video_encode_accelerator_mojom_traits.h",
+]
+
+public_deps = [
+ "//base",
+ "//media",
+]
+
+deps = [
+ "//base",
+ "//media",
+ "//media/base/ipc",
+ "//ui/gfx/geometry/mojom:mojom_traits",
+]
+
+type_mappings = [
+ "media.mojom.BitstreamBufferMetadata=::media::BitstreamBufferMetadata",
+ "media.mojom.VideoBitrateAllocation=::media::VideoBitrateAllocation",
+ "media.mojom.VideoEncodeAccelerator.Error=::media::VideoEncodeAccelerator::Error",
+ "media.mojom.VideoEncodeAcceleratorConfig=::media::VideoEncodeAccelerator::Config",
+ "media.mojom.Vp8Metadata=::media::Vp8Metadata",
+]
diff --git a/chromium/media/mojo/interfaces/video_encode_accelerator_mojom_traits.cc b/chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits.cc
index efe9ef6be8d..6e63765ae19 100644
--- a/chromium/media/mojo/interfaces/video_encode_accelerator_mojom_traits.cc
+++ b/chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits.cc
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/mojo/interfaces/video_encode_accelerator_mojom_traits.h"
+#include "media/mojo/mojom/video_encode_accelerator_mojom_traits.h"
#include "base/logging.h"
#include "base/optional.h"
diff --git a/chromium/media/mojo/interfaces/video_encode_accelerator_mojom_traits.h b/chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits.h
index 5787ad81b43..538730bc7ab 100644
--- a/chromium/media/mojo/interfaces/video_encode_accelerator_mojom_traits.h
+++ b/chromium/media/mojo/mojom/video_encode_accelerator_mojom_traits.h
@@ -2,14 +2,14 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef MEDIA_MOJO_INTERFACES_VIDEO_ENCODE_ACCELERATOR_MOJOM_TRAITS_H_
-#define MEDIA_MOJO_INTERFACES_VIDEO_ENCODE_ACCELERATOR_MOJOM_TRAITS_H_
+#ifndef MEDIA_MOJO_MOJOM_VIDEO_ENCODE_ACCELERATOR_MOJOM_TRAITS_H_
+#define MEDIA_MOJO_MOJOM_VIDEO_ENCODE_ACCELERATOR_MOJOM_TRAITS_H_
#include "media/base/ipc/media_param_traits.h"
-#include "media/mojo/interfaces/media_types.mojom.h"
-#include "media/mojo/interfaces/video_encode_accelerator.mojom.h"
+#include "media/mojo/mojom/media_types.mojom.h"
+#include "media/mojo/mojom/video_encode_accelerator.mojom.h"
#include "media/video/video_encode_accelerator.h"
-#include "ui/gfx/geometry/mojo/geometry_struct_traits.h"
+#include "ui/gfx/geometry/mojom/geometry_mojom_traits.h"
namespace mojo {
@@ -172,4 +172,4 @@ struct StructTraits<media::mojom::VideoEncodeAcceleratorConfigDataView,
} // namespace mojo
-#endif // MEDIA_MOJO_INTERFACES_VIDEO_ENCODE_ACCELERATOR_MOJOM_TRAITS_H_
+#endif // MEDIA_MOJO_MOJOM_VIDEO_ENCODE_ACCELERATOR_MOJOM_TRAITS_H_
diff --git a/chromium/media/mojo/interfaces/video_frame.typemap b/chromium/media/mojo/mojom/video_frame.typemap
index 7b9c9850a13..36b853ae23c 100644
--- a/chromium/media/mojo/interfaces/video_frame.typemap
+++ b/chromium/media/mojo/mojom/video_frame.typemap
@@ -2,7 +2,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-mojom = "//media/mojo/interfaces/media_types.mojom"
+mojom = "//media/mojo/mojom/media_types.mojom"
# Additional headers required by any code which would depend on the mojom
# definition of media.mojom.VideoFrame now that the typemap is applied. Any
@@ -14,10 +14,10 @@ public_headers = [
# Headers which contain the relevant StructTraits specialization(s) for any
# type mappings described by this file.
-traits_headers = [ "//media/mojo/interfaces/video_frame_struct_traits.h" ]
+traits_headers = [ "//media/mojo/mojom/video_frame_mojom_traits.h" ]
sources = [
- "video_frame_struct_traits.cc",
+ "video_frame_mojom_traits.cc",
]
# Target dependencies exposed by the public_headers and traits_headers.
@@ -27,10 +27,10 @@ public_deps = [
]
deps = [
- "//gpu/ipc/common:struct_traits",
+ "//gpu/ipc/common:mojom_traits",
"//media/base/ipc",
"//media/mojo/common:mojo_shared_buffer_video_frame",
- "//ui/gfx/geometry/mojo:struct_traits",
+ "//ui/gfx/geometry/mojom:mojom_traits",
]
-type_mappings = [ "media.mojom.VideoFrame=scoped_refptr<media::VideoFrame>[nullable_is_same_type]" ]
+type_mappings = [ "media.mojom.VideoFrame=::scoped_refptr<::media::VideoFrame>[nullable_is_same_type]" ]
diff --git a/chromium/media/mojo/interfaces/video_frame_struct_traits.cc b/chromium/media/mojo/mojom/video_frame_mojom_traits.cc
index b5917d1c9f7..46995e0d419 100644
--- a/chromium/media/mojo/interfaces/video_frame_struct_traits.cc
+++ b/chromium/media/mojo/mojom/video_frame_mojom_traits.cc
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/mojo/interfaces/video_frame_struct_traits.h"
+#include "media/mojo/mojom/video_frame_mojom_traits.h"
#include <utility>
#include <vector>
@@ -14,7 +14,7 @@
#include "mojo/public/cpp/base/values_mojom_traits.h"
#include "mojo/public/cpp/system/handle.h"
#include "mojo/public/cpp/system/platform_handle.h"
-#include "ui/gfx/mojo/color_space_mojom_traits.h"
+#include "ui/gfx/mojom/color_space_mojom_traits.h"
namespace mojo {
@@ -31,9 +31,12 @@ media::mojom::VideoFrameDataPtr MakeVideoFrameData(
const media::MojoSharedBufferVideoFrame* mojo_frame =
static_cast<const media::MojoSharedBufferVideoFrame*>(input);
- // TODO(https://crbug.com/803136): This should duplicate as READ_ONLY, but
- // can't because there is no guarantee that the input handle is sharable as
- // read-only.
+ // Mojo shared buffer handles are always writable. For example,
+ // cdm_video_decoder in ToCdmVideoFrame maps a frame writable; these frames
+ // are returned via callback and reused in ToCdmVideoFrame. Since returning
+ // via callback involves a Clone(), and since cloning a region read-only
+ // makes both the source handle and the cloned handle read-only, it must be
+ // cloned writable.
mojo::ScopedSharedBufferHandle dup = mojo_frame->Handle().Clone(
mojo::SharedBufferHandle::AccessMode::READ_WRITE);
DCHECK(dup.is_valid());
diff --git a/chromium/media/mojo/interfaces/video_frame_struct_traits.h b/chromium/media/mojo/mojom/video_frame_mojom_traits.h
index 6491b0f7470..196243b4817 100644
--- a/chromium/media/mojo/interfaces/video_frame_struct_traits.h
+++ b/chromium/media/mojo/mojom/video_frame_mojom_traits.h
@@ -2,19 +2,19 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef MEDIA_MOJO_INTERFACES_VIDEO_FRAME_STRUCT_TRAITS_H_
-#define MEDIA_MOJO_INTERFACES_VIDEO_FRAME_STRUCT_TRAITS_H_
+#ifndef MEDIA_MOJO_MOJOM_VIDEO_FRAME_MOJOM_TRAITS_H_
+#define MEDIA_MOJO_MOJOM_VIDEO_FRAME_MOJOM_TRAITS_H_
#include "base/memory/ref_counted.h"
#include "base/optional.h"
#include "base/values.h"
-#include "gpu/ipc/common/mailbox_holder_struct_traits.h"
+#include "gpu/ipc/common/mailbox_holder_mojom_traits.h"
#include "gpu/ipc/common/vulkan_ycbcr_info_mojom_traits.h"
#include "media/base/ipc/media_param_traits_macros.h"
#include "media/base/video_frame.h"
-#include "media/mojo/interfaces/media_types.mojom.h"
+#include "media/mojo/mojom/media_types.mojom.h"
#include "mojo/public/cpp/bindings/struct_traits.h"
-#include "ui/gfx/geometry/mojo/geometry_struct_traits.h"
+#include "ui/gfx/geometry/mojom/geometry_mojom_traits.h"
#include "ui/gfx/ipc/color/gfx_param_traits.h"
namespace mojo {
@@ -81,4 +81,4 @@ struct StructTraits<media::mojom::VideoFrameDataView,
} // namespace mojo
-#endif // MEDIA_MOJO_INTERFACES_VIDEO_FRAME_STRUCT_TRAITS_H_
+#endif // MEDIA_MOJO_MOJOM_VIDEO_FRAME_MOJOM_TRAITS_H_
diff --git a/chromium/media/mojo/interfaces/video_frame_struct_traits_unittest.cc b/chromium/media/mojo/mojom/video_frame_mojom_traits_unittest.cc
index 03d560cff82..f66ff810dfb 100644
--- a/chromium/media/mojo/interfaces/video_frame_struct_traits_unittest.cc
+++ b/chromium/media/mojo/mojom/video_frame_mojom_traits_unittest.cc
@@ -2,17 +2,17 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/mojo/interfaces/video_frame_struct_traits.h"
+#include "media/mojo/mojom/video_frame_mojom_traits.h"
#include "base/macros.h"
#include "base/memory/ref_counted.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "gpu/command_buffer/common/mailbox.h"
#include "gpu/command_buffer/common/mailbox_holder.h"
#include "gpu/command_buffer/common/sync_token.h"
#include "media/base/video_frame.h"
#include "media/mojo/common/mojo_shared_buffer_video_frame.h"
-#include "media/mojo/interfaces/traits_test_service.mojom.h"
+#include "media/mojo/mojom/traits_test_service.mojom.h"
#include "mojo/public/cpp/bindings/binding_set.h"
#include "mojo/public/cpp/bindings/interface_request.h"
#include "mojo/public/cpp/system/buffer.h"
@@ -53,7 +53,7 @@ class VideoFrameStructTraitsTest : public testing::Test,
std::move(callback).Run(f);
}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
mojo::BindingSet<TraitsTestService> traits_test_bindings_;
DISALLOW_COPY_AND_ASSIGN(VideoFrameStructTraitsTest);
diff --git a/chromium/media/mojo/interfaces/video_transformation_mojom_traits.cc b/chromium/media/mojo/mojom/video_transformation_mojom_traits.cc
index b8b51db6d34..9aebac3bf3f 100644
--- a/chromium/media/mojo/interfaces/video_transformation_mojom_traits.cc
+++ b/chromium/media/mojo/mojom/video_transformation_mojom_traits.cc
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/mojo/interfaces/video_transformation_mojom_traits.h"
+#include "media/mojo/mojom/video_transformation_mojom_traits.h"
namespace mojo {
diff --git a/chromium/media/mojo/interfaces/video_transformation_mojom_traits.h b/chromium/media/mojo/mojom/video_transformation_mojom_traits.h
index fe9c6306563..f788f6fc6a0 100644
--- a/chromium/media/mojo/interfaces/video_transformation_mojom_traits.h
+++ b/chromium/media/mojo/mojom/video_transformation_mojom_traits.h
@@ -2,12 +2,12 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef MEDIA_MOJO_INTERFACES_VIDEO_TRANSFORMATION_MOJOM_TRAITS_H_
-#define MEDIA_MOJO_INTERFACES_VIDEO_TRANSFORMATION_MOJOM_TRAITS_H_
+#ifndef MEDIA_MOJO_MOJOM_VIDEO_TRANSFORMATION_MOJOM_TRAITS_H_
+#define MEDIA_MOJO_MOJOM_VIDEO_TRANSFORMATION_MOJOM_TRAITS_H_
#include "media/base/ipc/media_param_traits.h"
#include "media/base/video_transformation.h"
-#include "media/mojo/interfaces/media_types.mojom.h"
+#include "media/mojo/mojom/media_types.mojom.h"
namespace mojo {
@@ -29,4 +29,4 @@ struct StructTraits<media::mojom::VideoTransformationDataView,
} // namespace mojo
-#endif // MEDIA_MOJO_INTERFACES_VIDEO_TRANSFORMATION_MOJOM_TRAITS_H_
+#endif // MEDIA_MOJO_MOJOM_VIDEO_TRANSFORMATION_MOJOM_TRAITS_H_
diff --git a/chromium/media/mojo/interfaces/watch_time_recorder.mojom b/chromium/media/mojo/mojom/watch_time_recorder.mojom
index bd3d1108dd4..82fd6caa5f6 100644
--- a/chromium/media/mojo/interfaces/watch_time_recorder.mojom
+++ b/chromium/media/mojo/mojom/watch_time_recorder.mojom
@@ -4,9 +4,9 @@
module media.mojom;
-import "media/mojo/interfaces/media_types.mojom";
+import "media/mojo/mojom/media_types.mojom";
import "mojo/public/mojom/base/time.mojom";
-import "ui/gfx/geometry/mojo/geometry.mojom";
+import "ui/gfx/geometry/mojom/geometry.mojom";
import "url/mojom/origin.mojom";
// Structure describing immutable properties for the current watch time report.
diff --git a/chromium/media/mojo/services/BUILD.gn b/chromium/media/mojo/services/BUILD.gn
index f6f8fc6bcb0..7d6f46a2e0c 100644
--- a/chromium/media/mojo/services/BUILD.gn
+++ b/chromium/media/mojo/services/BUILD.gn
@@ -80,7 +80,7 @@ jumbo_component("services") {
"//media/gpu",
"//media/gpu/ipc/common",
"//media/mojo:buildflags",
- "//media/mojo/interfaces",
+ "//media/mojo/mojom",
"//mojo/public/cpp/bindings",
"//mojo/public/cpp/system",
"//services/service_manager/public/cpp",
@@ -95,6 +95,7 @@ jumbo_component("services") {
"//media/gpu:buildflags",
"//media/gpu/ipc/service",
"//media/learning/impl",
+ "//media/learning/mojo:impl",
"//media/mojo/common",
"//media/mojo/common:mojo_shared_buffer_video_frame",
"//services/metrics/public/cpp:metrics_cpp",
@@ -149,8 +150,8 @@ source_set("cdm_manifest") {
]
deps = [
"//base",
- "//media/mojo/interfaces",
- "//media/mojo/interfaces:constants",
+ "//media/mojo/mojom",
+ "//media/mojo/mojom:constants",
"//services/service_manager/public/cpp",
]
}
@@ -163,8 +164,8 @@ source_set("media_manifest") {
deps = [
"//base",
"//media/mojo:buildflags",
- "//media/mojo/interfaces",
- "//media/mojo/interfaces:constants",
+ "//media/mojo/mojom",
+ "//media/mojo/mojom:constants",
"//services/service_manager/public/cpp",
]
if (is_chromecast) {
@@ -269,8 +270,8 @@ test("media_service_unittests") {
"//media/cdm:cdm_paths",
"//media/mojo/clients",
"//media/mojo/common",
- "//media/mojo/interfaces",
- "//media/mojo/interfaces:constants",
+ "//media/mojo/mojom",
+ "//media/mojo/mojom:constants",
"//mojo/core/test:run_all_unittests",
"//services/service_manager/public/cpp",
"//services/service_manager/public/cpp/test:test_support",
diff --git a/chromium/media/mojo/services/android_mojo_media_client.cc b/chromium/media/mojo/services/android_mojo_media_client.cc
index 80f273733b7..f0de6c3f517 100644
--- a/chromium/media/mojo/services/android_mojo_media_client.cc
+++ b/chromium/media/mojo/services/android_mojo_media_client.cc
@@ -13,8 +13,8 @@
#include "media/base/audio_decoder.h"
#include "media/base/cdm_factory.h"
#include "media/filters/android/media_codec_audio_decoder.h"
-#include "media/mojo/interfaces/media_drm_storage.mojom.h"
-#include "media/mojo/interfaces/provision_fetcher.mojom.h"
+#include "media/mojo/mojom/media_drm_storage.mojom.h"
+#include "media/mojo/mojom/provision_fetcher.mojom.h"
#include "media/mojo/services/android_mojo_util.h"
#include "services/service_manager/public/cpp/connect.h"
diff --git a/chromium/media/mojo/services/cdm_manifest.cc b/chromium/media/mojo/services/cdm_manifest.cc
index 2ed9f3381e6..05924c739b2 100644
--- a/chromium/media/mojo/services/cdm_manifest.cc
+++ b/chromium/media/mojo/services/cdm_manifest.cc
@@ -5,8 +5,8 @@
#include "media/mojo/services/cdm_manifest.h"
#include "base/no_destructor.h"
-#include "media/mojo/interfaces/cdm_service.mojom.h"
-#include "media/mojo/interfaces/constants.mojom.h"
+#include "media/mojo/mojom/cdm_service.mojom.h"
+#include "media/mojo/mojom/constants.mojom.h"
#include "services/service_manager/public/cpp/manifest_builder.h"
namespace media {
diff --git a/chromium/media/mojo/services/cdm_service.h b/chromium/media/mojo/services/cdm_service.h
index d6d16d4d246..dcd5dd751f7 100644
--- a/chromium/media/mojo/services/cdm_service.h
+++ b/chromium/media/mojo/services/cdm_service.h
@@ -10,8 +10,8 @@
#include "base/macros.h"
#include "build/build_config.h"
#include "media/media_buildflags.h"
-#include "media/mojo/interfaces/cdm_service.mojom.h"
-#include "media/mojo/interfaces/content_decryption_module.mojom.h"
+#include "media/mojo/mojom/cdm_service.mojom.h"
+#include "media/mojo/mojom/content_decryption_module.mojom.h"
#include "media/mojo/services/deferred_destroy_strong_binding_set.h"
#include "media/mojo/services/media_mojo_export.h"
#include "mojo/public/cpp/bindings/binding_set.h"
diff --git a/chromium/media/mojo/services/cdm_service_unittest.cc b/chromium/media/mojo/services/cdm_service_unittest.cc
index 510e3caa5ef..a1fb3e78d1d 100644
--- a/chromium/media/mojo/services/cdm_service_unittest.cc
+++ b/chromium/media/mojo/services/cdm_service_unittest.cc
@@ -7,11 +7,11 @@
#include "base/bind.h"
#include "base/files/file_path.h"
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "build/build_config.h"
#include "media/cdm/default_cdm_factory.h"
#include "media/media_buildflags.h"
-#include "media/mojo/interfaces/constants.mojom.h"
+#include "media/mojo/mojom/constants.mojom.h"
#include "media/mojo/services/cdm_service.h"
#include "media/mojo/services/media_interface_provider.h"
#include "services/service_manager/public/cpp/test/test_connector_factory.h"
@@ -127,7 +127,7 @@ class CdmServiceTest : public testing::Test {
CdmService* cdm_service() { return service_.get(); }
- base::test::ScopedTaskEnvironment task_environment_;
+ base::test::TaskEnvironment task_environment_;
mojom::CdmServicePtr cdm_service_ptr_;
mojom::CdmFactoryPtr cdm_factory_ptr_;
mojom::ContentDecryptionModulePtr cdm_ptr_;
@@ -207,8 +207,8 @@ TEST_F(CdmServiceTest, DestroyCdmFactory) {
}
// Same as DestroyCdmFactory test, but do not disable delayed service release.
-// TODO(xhwang): Use ScopedTaskEnvironment::TimeSource::MOCK_TIME and
-// ScopedTaskEnvironment::FastForwardBy() so we don't have to really wait for
+// TODO(xhwang): Use TaskEnvironment::TimeSource::MOCK_TIME and
+// TaskEnvironment::FastForwardBy() so we don't have to really wait for
// the delay in the test. But currently FastForwardBy() doesn't support delayed
// task yet.
TEST_F(CdmServiceTest, DestroyCdmFactory_DelayedServiceRelease) {
diff --git a/chromium/media/mojo/services/deferred_destroy_strong_binding_set_unittest.cc b/chromium/media/mojo/services/deferred_destroy_strong_binding_set_unittest.cc
index 7794340157c..585254cdd30 100644
--- a/chromium/media/mojo/services/deferred_destroy_strong_binding_set_unittest.cc
+++ b/chromium/media/mojo/services/deferred_destroy_strong_binding_set_unittest.cc
@@ -6,7 +6,7 @@
#include <utility>
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/mojo/services/deferred_destroy_strong_binding_set.h"
#include "mojo/public/interfaces/bindings/tests/ping_service.mojom.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -59,7 +59,7 @@ class DeferredDestroyStrongBindingSetTest : public testing::Test {
~DeferredDestroyStrongBindingSetTest() override = default;
protected:
- base::test::ScopedTaskEnvironment task_environment_;
+ base::test::TaskEnvironment task_environment_;
};
TEST_F(DeferredDestroyStrongBindingSetTest, Destructor) {
diff --git a/chromium/media/mojo/services/gpu_mojo_media_client.cc b/chromium/media/mojo/services/gpu_mojo_media_client.cc
index 7b4910f10ca..1d61efddee5 100644
--- a/chromium/media/mojo/services/gpu_mojo_media_client.cc
+++ b/chromium/media/mojo/services/gpu_mojo_media_client.cc
@@ -21,7 +21,7 @@
#include "media/gpu/gpu_video_decode_accelerator_helpers.h"
#include "media/gpu/ipc/service/media_gpu_channel_manager.h"
#include "media/gpu/ipc/service/vda_video_decoder.h"
-#include "media/mojo/interfaces/video_decoder.mojom.h"
+#include "media/mojo/mojom/video_decoder.mojom.h"
#include "media/video/video_decode_accelerator.h"
#if defined(OS_ANDROID)
@@ -34,8 +34,8 @@
#include "media/gpu/android/maybe_render_early_manager.h"
#include "media/gpu/android/media_codec_video_decoder.h"
#include "media/gpu/android/video_frame_factory_impl.h"
-#include "media/mojo/interfaces/media_drm_storage.mojom.h"
-#include "media/mojo/interfaces/provision_fetcher.mojom.h"
+#include "media/mojo/mojom/media_drm_storage.mojom.h"
+#include "media/mojo/mojom/provision_fetcher.mojom.h"
#include "media/mojo/services/mojo_media_drm_storage.h"
#include "media/mojo/services/mojo_provision_fetcher.h"
#include "services/service_manager/public/cpp/connect.h"
@@ -221,11 +221,10 @@ std::unique_ptr<VideoDecoder> GpuMojoMediaClient::CreateVideoDecoder(
MaybeRenderEarlyManager::Create(gpu_task_runner_)));
#elif defined(OS_CHROMEOS)
- std::unique_ptr<VideoDecoder> cros_video_decoder;
if (base::FeatureList::IsEnabled(kChromeosVideoDecoder)) {
#if BUILDFLAG(USE_V4L2_CODEC) || BUILDFLAG(USE_VAAPI)
auto frame_pool = std::make_unique<PlatformVideoFramePool>();
- auto frame_converter = std::make_unique<MailboxVideoFrameConverter>(
+ auto frame_converter = MailboxVideoFrameConverter::Create(
base::BindRepeating(&DmabufVideoFramePool::UnwrapFrame,
base::Unretained(frame_pool.get())),
gpu_task_runner_,
@@ -233,13 +232,9 @@ std::unique_ptr<VideoDecoder> GpuMojoMediaClient::CreateVideoDecoder(
media_gpu_channel_manager_,
command_buffer_id->channel_token,
command_buffer_id->route_id));
- cros_video_decoder = ChromeosVideoDecoderFactory::Create(
+ video_decoder = ChromeosVideoDecoderFactory::Create(
task_runner, std::move(frame_pool), std::move(frame_converter));
#endif // BUILDFLAG(USE_V4L2_CODEC) || BUILDFLAG(USE_VAAPI)
- }
-
- if (cros_video_decoder) {
- video_decoder = std::move(cros_video_decoder);
} else {
video_decoder = VdaVideoDecoder::Create(
task_runner, gpu_task_runner_, media_log->Clone(),
diff --git a/chromium/media/mojo/services/interface_factory_impl.cc b/chromium/media/mojo/services/interface_factory_impl.cc
index 77a4840fa28..90a2bc3c049 100644
--- a/chromium/media/mojo/services/interface_factory_impl.cc
+++ b/chromium/media/mojo/services/interface_factory_impl.cc
@@ -11,7 +11,7 @@
#include "base/logging.h"
#include "base/single_thread_task_runner.h"
#include "base/threading/thread_task_runner_handle.h"
-#include "media/mojo/interfaces/renderer_extensions.mojom.h"
+#include "media/mojo/mojom/renderer_extensions.mojom.h"
#include "media/mojo/services/mojo_decryptor_service.h"
#include "media/mojo/services/mojo_media_client.h"
#include "mojo/public/cpp/bindings/strong_binding.h"
diff --git a/chromium/media/mojo/services/interface_factory_impl.h b/chromium/media/mojo/services/interface_factory_impl.h
index bffc86c1a58..b1f93b65c34 100644
--- a/chromium/media/mojo/services/interface_factory_impl.h
+++ b/chromium/media/mojo/services/interface_factory_impl.h
@@ -12,13 +12,13 @@
#include "build/build_config.h"
#include "media/base/media_util.h"
#include "media/mojo/buildflags.h"
-#include "media/mojo/interfaces/audio_decoder.mojom.h"
-#include "media/mojo/interfaces/cdm_proxy.mojom.h"
-#include "media/mojo/interfaces/content_decryption_module.mojom.h"
-#include "media/mojo/interfaces/decryptor.mojom.h"
-#include "media/mojo/interfaces/interface_factory.mojom.h"
-#include "media/mojo/interfaces/renderer.mojom.h"
-#include "media/mojo/interfaces/video_decoder.mojom.h"
+#include "media/mojo/mojom/audio_decoder.mojom.h"
+#include "media/mojo/mojom/cdm_proxy.mojom.h"
+#include "media/mojo/mojom/content_decryption_module.mojom.h"
+#include "media/mojo/mojom/decryptor.mojom.h"
+#include "media/mojo/mojom/interface_factory.mojom.h"
+#include "media/mojo/mojom/renderer.mojom.h"
+#include "media/mojo/mojom/video_decoder.mojom.h"
#include "media/mojo/services/deferred_destroy_strong_binding_set.h"
#include "media/mojo/services/mojo_cdm_service_context.h"
#include "mojo/public/cpp/bindings/strong_binding_set.h"
diff --git a/chromium/media/mojo/services/media_manifest.cc b/chromium/media/mojo/services/media_manifest.cc
index e5b88567282..9bf1eb04a3d 100644
--- a/chromium/media/mojo/services/media_manifest.cc
+++ b/chromium/media/mojo/services/media_manifest.cc
@@ -6,8 +6,8 @@
#include "base/no_destructor.h"
#include "media/mojo/buildflags.h"
-#include "media/mojo/interfaces/constants.mojom.h"
-#include "media/mojo/interfaces/media_service.mojom.h"
+#include "media/mojo/mojom/constants.mojom.h"
+#include "media/mojo/mojom/media_service.mojom.h"
#include "services/service_manager/public/cpp/manifest_builder.h"
#if defined(IS_CHROMECAST)
diff --git a/chromium/media/mojo/services/media_metrics_provider.cc b/chromium/media/mojo/services/media_metrics_provider.cc
index e671c6aed4f..5fc47500af9 100644
--- a/chromium/media/mojo/services/media_metrics_provider.cc
+++ b/chromium/media/mojo/services/media_metrics_provider.cc
@@ -11,6 +11,7 @@
#include "base/metrics/histogram_functions.h"
#include "base/metrics/histogram_macros.h"
#include "build/build_config.h"
+#include "media/learning/mojo/mojo_learning_task_controller_service.h"
#include "media/mojo/services/video_decode_stats_recorder.h"
#include "media/mojo/services/watch_time_recorder.h"
#include "mojo/public/cpp/bindings/strong_binding.h"
@@ -37,12 +38,14 @@ MediaMetricsProvider::MediaMetricsProvider(
FrameStatus is_top_frame,
ukm::SourceId source_id,
learning::FeatureValue origin,
- VideoDecodePerfHistory::SaveCallback save_cb)
+ VideoDecodePerfHistory::SaveCallback save_cb,
+ GetLearningSessionCallback learning_session_cb)
: player_id_(g_player_id++),
is_top_frame_(is_top_frame == FrameStatus::kTopFrame),
source_id_(source_id),
origin_(origin),
save_cb_(std::move(save_cb)),
+ learning_session_cb_(learning_session_cb),
uma_info_(is_incognito == BrowsingMode::kIncognito) {}
MediaMetricsProvider::~MediaMetricsProvider() {
@@ -175,16 +178,18 @@ void MediaMetricsProvider::ReportPipelineUMA() {
}
// static
-void MediaMetricsProvider::Create(BrowsingMode is_incognito,
- FrameStatus is_top_frame,
- GetSourceIdCallback get_source_id_cb,
- GetOriginCallback get_origin_cb,
- VideoDecodePerfHistory::SaveCallback save_cb,
- mojom::MediaMetricsProviderRequest request) {
+void MediaMetricsProvider::Create(
+ BrowsingMode is_incognito,
+ FrameStatus is_top_frame,
+ GetSourceIdCallback get_source_id_cb,
+ GetOriginCallback get_origin_cb,
+ VideoDecodePerfHistory::SaveCallback save_cb,
+ GetLearningSessionCallback learning_session_cb,
+ mojom::MediaMetricsProviderRequest request) {
mojo::MakeStrongBinding(
std::make_unique<MediaMetricsProvider>(
is_incognito, is_top_frame, get_source_id_cb.Run(),
- get_origin_cb.Run(), std::move(save_cb)),
+ get_origin_cb.Run(), std::move(save_cb), learning_session_cb),
std::move(request));
}
@@ -302,6 +307,29 @@ void MediaMetricsProvider::AcquireVideoDecodeStatsRecorder(
std::move(request));
}
+void MediaMetricsProvider::AcquireLearningTaskController(
+ const std::string& taskName,
+ media::learning::mojom::LearningTaskControllerRequest request) {
+ learning::LearningSession* session = learning_session_cb_.Run();
+ if (!session) {
+ DVLOG(3) << __func__ << " Ignoring request, unable to get LearningSession.";
+ return;
+ }
+
+ auto controller = session->GetController(taskName);
+
+ if (!controller) {
+ DVLOG(3) << __func__ << " Ignoring request, no controller found for task: '"
+ << taskName << "'.";
+ return;
+ }
+
+ mojo::MakeStrongBinding(
+ std::make_unique<learning::MojoLearningTaskControllerService>(
+ controller->GetLearningTask(), std::move(controller)),
+ std::move(request));
+}
+
void MediaMetricsProvider::AddBytesReceived(uint64_t bytes_received) {
total_bytes_received_ += bytes_received;
}
diff --git a/chromium/media/mojo/services/media_metrics_provider.h b/chromium/media/mojo/services/media_metrics_provider.h
index 5199d516eff..f02361859e2 100644
--- a/chromium/media/mojo/services/media_metrics_provider.h
+++ b/chromium/media/mojo/services/media_metrics_provider.h
@@ -11,8 +11,9 @@
#include "media/base/container_names.h"
#include "media/base/pipeline_status.h"
#include "media/base/timestamp_constants.h"
+#include "media/learning/common/learning_session.h"
#include "media/learning/common/value.h"
-#include "media/mojo/interfaces/media_metrics_provider.mojom.h"
+#include "media/mojo/mojom/media_metrics_provider.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
#include "media/mojo/services/video_decode_perf_history.h"
#include "services/metrics/public/cpp/ukm_source_id.h"
@@ -29,11 +30,15 @@ class MEDIA_MOJO_EXPORT MediaMetricsProvider
enum class FrameStatus : bool { kTopFrame, kNotTopFrame };
+ using GetLearningSessionCallback =
+ base::RepeatingCallback<learning::LearningSession*()>;
+
MediaMetricsProvider(BrowsingMode is_incognito,
FrameStatus is_top_frame,
ukm::SourceId source_id,
learning::FeatureValue origin,
- VideoDecodePerfHistory::SaveCallback save_cb);
+ VideoDecodePerfHistory::SaveCallback save_cb,
+ GetLearningSessionCallback learning_session_cb);
~MediaMetricsProvider() override;
// Callback for retrieving a ukm::SourceId.
@@ -57,6 +62,7 @@ class MEDIA_MOJO_EXPORT MediaMetricsProvider
GetSourceIdCallback get_source_id_cb,
GetOriginCallback get_origin_cb,
VideoDecodePerfHistory::SaveCallback save_cb,
+ GetLearningSessionCallback learning_session_cb,
mojom::MediaMetricsProviderRequest request);
private:
@@ -99,6 +105,9 @@ class MEDIA_MOJO_EXPORT MediaMetricsProvider
mojom::WatchTimeRecorderRequest request) override;
void AcquireVideoDecodeStatsRecorder(
mojom::VideoDecodeStatsRecorderRequest request) override;
+ void AcquireLearningTaskController(
+ const std::string& taskName,
+ media::learning::mojom::LearningTaskControllerRequest request) override;
void AddBytesReceived(uint64_t bytes_received) override;
void ReportPipelineUMA();
@@ -115,6 +124,7 @@ class MEDIA_MOJO_EXPORT MediaMetricsProvider
const learning::FeatureValue origin_;
const VideoDecodePerfHistory::SaveCallback save_cb_;
+ const GetLearningSessionCallback learning_session_cb_;
// UMA pipeline packaged data
PipelineInfo uma_info_;
diff --git a/chromium/media/mojo/services/media_metrics_provider_unittest.cc b/chromium/media/mojo/services/media_metrics_provider_unittest.cc
index c06910547a3..0b497bbdb66 100644
--- a/chromium/media/mojo/services/media_metrics_provider_unittest.cc
+++ b/chromium/media/mojo/services/media_metrics_provider_unittest.cc
@@ -46,7 +46,9 @@ class MediaMetricsProviderTest : public testing::Test {
base::BindRepeating(&MediaMetricsProviderTest::GetSourceId,
base::Unretained(this)),
base::BindRepeating([]() { return learning::FeatureValue(0); }),
- VideoDecodePerfHistory::SaveCallback(), mojo::MakeRequest(&provider_));
+ VideoDecodePerfHistory::SaveCallback(),
+ MediaMetricsProvider::GetLearningSessionCallback(),
+ mojo::MakeRequest(&provider_));
provider_->Initialize(is_mse, scheme);
}
diff --git a/chromium/media/mojo/services/media_service.h b/chromium/media/mojo/services/media_service.h
index 98972d56841..b569239ea47 100644
--- a/chromium/media/mojo/services/media_service.h
+++ b/chromium/media/mojo/services/media_service.h
@@ -9,8 +9,8 @@
#include "base/macros.h"
#include "build/build_config.h"
-#include "media/mojo/interfaces/interface_factory.mojom.h"
-#include "media/mojo/interfaces/media_service.mojom.h"
+#include "media/mojo/mojom/interface_factory.mojom.h"
+#include "media/mojo/mojom/media_service.mojom.h"
#include "media/mojo/services/deferred_destroy_strong_binding_set.h"
#include "media/mojo/services/media_mojo_export.h"
#include "mojo/public/cpp/bindings/binding_set.h"
diff --git a/chromium/media/mojo/services/media_service_unittest.cc b/chromium/media/mojo/services/media_service_unittest.cc
index c6000a6cd60..288d1c26052 100644
--- a/chromium/media/mojo/services/media_service_unittest.cc
+++ b/chromium/media/mojo/services/media_service_unittest.cc
@@ -12,7 +12,7 @@
#include "base/macros.h"
#include "base/run_loop.h"
#include "base/task/post_task.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "build/build_config.h"
#include "media/base/cdm_config.h"
#include "media/base/mock_filters.h"
@@ -21,13 +21,13 @@
#include "media/mojo/clients/mojo_decryptor.h"
#include "media/mojo/clients/mojo_demuxer_stream_impl.h"
#include "media/mojo/common/media_type_converters.h"
-#include "media/mojo/interfaces/cdm_proxy.mojom.h"
-#include "media/mojo/interfaces/constants.mojom.h"
-#include "media/mojo/interfaces/content_decryption_module.mojom.h"
-#include "media/mojo/interfaces/decryptor.mojom.h"
-#include "media/mojo/interfaces/interface_factory.mojom.h"
-#include "media/mojo/interfaces/media_service.mojom.h"
-#include "media/mojo/interfaces/renderer.mojom.h"
+#include "media/mojo/mojom/cdm_proxy.mojom.h"
+#include "media/mojo/mojom/constants.mojom.h"
+#include "media/mojo/mojom/content_decryption_module.mojom.h"
+#include "media/mojo/mojom/decryptor.mojom.h"
+#include "media/mojo/mojom/interface_factory.mojom.h"
+#include "media/mojo/mojom/media_service.mojom.h"
+#include "media/mojo/mojom/renderer.mojom.h"
#include "media/mojo/services/media_interface_provider.h"
#include "media/mojo/services/media_manifest.h"
#include "mojo/public/cpp/bindings/associated_binding.h"
@@ -42,7 +42,7 @@
#if BUILDFLAG(ENABLE_LIBRARY_CDMS)
#include "media/cdm/cdm_paths.h" // nogncheck
-#include "media/mojo/interfaces/cdm_proxy.mojom.h"
+#include "media/mojo/mojom/cdm_proxy.mojom.h"
#endif
namespace media {
@@ -276,7 +276,7 @@ class MediaServiceTest : public testing::Test {
MOCK_METHOD0(MediaServiceConnectionClosed, void());
protected:
- base::test::ScopedTaskEnvironment task_environment_;
+ base::test::TaskEnvironment task_environment_;
service_manager::TestServiceManager test_service_manager_;
service_manager::TestService test_service_;
diff --git a/chromium/media/mojo/services/mojo_audio_decoder_service.h b/chromium/media/mojo/services/mojo_audio_decoder_service.h
index dacbd8dca7e..43f977cdecd 100644
--- a/chromium/media/mojo/services/mojo_audio_decoder_service.h
+++ b/chromium/media/mojo/services/mojo_audio_decoder_service.h
@@ -12,7 +12,7 @@
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
#include "media/base/audio_decoder.h"
-#include "media/mojo/interfaces/audio_decoder.mojom.h"
+#include "media/mojo/mojom/audio_decoder.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
namespace media {
diff --git a/chromium/media/mojo/services/mojo_audio_input_stream.h b/chromium/media/mojo/services/mojo_audio_input_stream.h
index 48490c6d68c..ec2c3a86e72 100644
--- a/chromium/media/mojo/services/mojo_audio_input_stream.h
+++ b/chromium/media/mojo/services/mojo_audio_input_stream.h
@@ -10,8 +10,8 @@
#include "base/sequence_checker.h"
#include "media/audio/audio_input_delegate.h"
-#include "media/mojo/interfaces/audio_data_pipe.mojom.h"
-#include "media/mojo/interfaces/audio_input_stream.mojom.h"
+#include "media/mojo/mojom/audio_data_pipe.mojom.h"
+#include "media/mojo/mojom/audio_input_stream.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
#include "mojo/public/cpp/bindings/binding.h"
diff --git a/chromium/media/mojo/services/mojo_audio_input_stream_unittest.cc b/chromium/media/mojo/services/mojo_audio_input_stream_unittest.cc
index 4241233d3cc..1e0b5859500 100644
--- a/chromium/media/mojo/services/mojo_audio_input_stream_unittest.cc
+++ b/chromium/media/mojo/services/mojo_audio_input_stream_unittest.cc
@@ -10,7 +10,7 @@
#include "base/memory/read_only_shared_memory_region.h"
#include "base/run_loop.h"
#include "base/sync_socket.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/audio/audio_input_controller.h"
#include "mojo/public/cpp/system/platform_handle.h"
#include "testing/gmock/include/gmock/gmock.h"
@@ -172,7 +172,7 @@ class MojoAudioInputStreamTest : public Test {
.WillOnce(SaveArg<0>(&delegate_event_handler_));
}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::SingleThreadTaskEnvironment task_environment_;
base::CancelableSyncSocket local_;
std::unique_ptr<TestCancelableSyncSocket> foreign_socket_;
base::ReadOnlySharedMemoryRegion mem_;
diff --git a/chromium/media/mojo/services/mojo_audio_output_stream.cc b/chromium/media/mojo/services/mojo_audio_output_stream.cc
index 27cc7dc3da4..f3e4cde642d 100644
--- a/chromium/media/mojo/services/mojo_audio_output_stream.cc
+++ b/chromium/media/mojo/services/mojo_audio_output_stream.cc
@@ -11,7 +11,7 @@
#include "base/callback_helpers.h"
#include "base/memory/unsafe_shared_memory_region.h"
#include "base/sync_socket.h"
-#include "media/mojo/interfaces/audio_data_pipe.mojom.h"
+#include "media/mojo/mojom/audio_data_pipe.mojom.h"
#include "mojo/public/cpp/system/platform_handle.h"
namespace media {
diff --git a/chromium/media/mojo/services/mojo_audio_output_stream.h b/chromium/media/mojo/services/mojo_audio_output_stream.h
index 0bed32d7148..c58814b0c4e 100644
--- a/chromium/media/mojo/services/mojo_audio_output_stream.h
+++ b/chromium/media/mojo/services/mojo_audio_output_stream.h
@@ -10,7 +10,7 @@
#include "base/sequence_checker.h"
#include "media/audio/audio_output_delegate.h"
-#include "media/mojo/interfaces/audio_output_stream.mojom.h"
+#include "media/mojo/mojom/audio_output_stream.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
#include "mojo/public/cpp/bindings/binding.h"
diff --git a/chromium/media/mojo/services/mojo_audio_output_stream_provider.cc b/chromium/media/mojo/services/mojo_audio_output_stream_provider.cc
index acfd83dbcb6..16795d5a38c 100644
--- a/chromium/media/mojo/services/mojo_audio_output_stream_provider.cc
+++ b/chromium/media/mojo/services/mojo_audio_output_stream_provider.cc
@@ -8,7 +8,7 @@
#include "base/bind.h"
#include "build/build_config.h"
-#include "media/mojo/interfaces/audio_data_pipe.mojom.h"
+#include "media/mojo/mojom/audio_data_pipe.mojom.h"
#include "mojo/public/cpp/bindings/message.h"
namespace media {
diff --git a/chromium/media/mojo/services/mojo_audio_output_stream_provider.h b/chromium/media/mojo/services/mojo_audio_output_stream_provider.h
index f3053bbc62e..63ff797d16f 100644
--- a/chromium/media/mojo/services/mojo_audio_output_stream_provider.h
+++ b/chromium/media/mojo/services/mojo_audio_output_stream_provider.h
@@ -10,7 +10,7 @@
#include "base/sequence_checker.h"
#include "media/audio/audio_output_delegate.h"
-#include "media/mojo/interfaces/audio_output_stream.mojom.h"
+#include "media/mojo/mojom/audio_output_stream.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
#include "media/mojo/services/mojo_audio_output_stream.h"
#include "mojo/public/cpp/bindings/binding.h"
diff --git a/chromium/media/mojo/services/mojo_audio_output_stream_provider_unittest.cc b/chromium/media/mojo/services/mojo_audio_output_stream_provider_unittest.cc
index 3c70c0743e1..2b434970a9d 100644
--- a/chromium/media/mojo/services/mojo_audio_output_stream_provider_unittest.cc
+++ b/chromium/media/mojo/services/mojo_audio_output_stream_provider_unittest.cc
@@ -9,7 +9,7 @@
#include "base/bind.h"
#include "base/run_loop.h"
#include "base/test/mock_callback.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "build/build_config.h"
#include "media/audio/audio_output_delegate.h"
#include "media/base/audio_parameters.h"
@@ -68,7 +68,7 @@ std::unique_ptr<AudioOutputDelegate> CreateFakeDelegate(
} // namespace
TEST(MojoAudioOutputStreamProviderTest, AcquireTwice_BadMessage) {
- base::test::ScopedTaskEnvironment scoped_task_environment;
+ base::test::SingleThreadTaskEnvironment task_environment;
bool got_bad_message = false;
mojo::core::SetDefaultProcessErrorCallback(
base::BindRepeating([](bool* got_bad_message,
@@ -104,7 +104,7 @@ TEST(MojoAudioOutputStreamProviderTest, AcquireTwice_BadMessage) {
TEST(MojoAudioOutputStreamProviderTest,
Bitstream_BadMessageOnNonAndoirdPlatforms) {
- base::test::ScopedTaskEnvironment scoped_task_environment;
+ base::test::SingleThreadTaskEnvironment task_environment;
bool got_bad_message = false;
mojo::core::SetDefaultProcessErrorCallback(
base::BindRepeating([](bool* got_bad_message,
diff --git a/chromium/media/mojo/services/mojo_audio_output_stream_unittest.cc b/chromium/media/mojo/services/mojo_audio_output_stream_unittest.cc
index bc447a35a28..62a409abb9f 100644
--- a/chromium/media/mojo/services/mojo_audio_output_stream_unittest.cc
+++ b/chromium/media/mojo/services/mojo_audio_output_stream_unittest.cc
@@ -10,9 +10,9 @@
#include "base/memory/unsafe_shared_memory_region.h"
#include "base/run_loop.h"
#include "base/sync_socket.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/audio/audio_output_controller.h"
-#include "media/mojo/interfaces/audio_data_pipe.mojom.h"
+#include "media/mojo/mojom/audio_data_pipe.mojom.h"
#include "mojo/public/cpp/system/message_pipe.h"
#include "mojo/public/cpp/system/platform_handle.h"
#include "testing/gmock/include/gmock/gmock.h"
@@ -171,7 +171,7 @@ class MojoAudioOutputStreamTest : public Test {
.WillOnce(SaveArg<0>(&delegate_event_handler_));
}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::SingleThreadTaskEnvironment task_environment_;
base::CancelableSyncSocket local_;
std::unique_ptr<TestCancelableSyncSocket> foreign_socket_;
base::UnsafeSharedMemoryRegion mem_;
diff --git a/chromium/media/mojo/services/mojo_cdm_file_io.h b/chromium/media/mojo/services/mojo_cdm_file_io.h
index a6ad0b366ff..f7fd9fc2ee3 100644
--- a/chromium/media/mojo/services/mojo_cdm_file_io.h
+++ b/chromium/media/mojo/services/mojo_cdm_file_io.h
@@ -14,7 +14,7 @@
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
#include "media/cdm/api/content_decryption_module.h"
-#include "media/mojo/interfaces/cdm_storage.mojom.h"
+#include "media/mojo/mojom/cdm_storage.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
namespace media {
diff --git a/chromium/media/mojo/services/mojo_cdm_file_io_unittest.cc b/chromium/media/mojo/services/mojo_cdm_file_io_unittest.cc
index b1145a3bc75..28315dff653 100644
--- a/chromium/media/mojo/services/mojo_cdm_file_io_unittest.cc
+++ b/chromium/media/mojo/services/mojo_cdm_file_io_unittest.cc
@@ -5,7 +5,7 @@
#include "media/mojo/services/mojo_cdm_file_io.h"
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/cdm/api/content_decryption_module.h"
#include "mojo/public/cpp/bindings/associated_binding.h"
#include "mojo/public/cpp/bindings/binding.h"
@@ -91,7 +91,7 @@ class MojoCdmFileIOTest : public testing::Test, public MojoCdmFileIO::Delegate {
void ReportFileReadSize(int file_size_bytes) override {}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
std::unique_ptr<MojoCdmFileIO> file_io_;
std::unique_ptr<MockFileIOClient> client_;
mojom::CdmStoragePtr cdm_storage_ptr_;
diff --git a/chromium/media/mojo/services/mojo_cdm_helper.h b/chromium/media/mojo/services/mojo_cdm_helper.h
index ff092d6ba75..f8957322dd9 100644
--- a/chromium/media/mojo/services/mojo_cdm_helper.h
+++ b/chromium/media/mojo/services/mojo_cdm_helper.h
@@ -12,10 +12,10 @@
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
#include "media/cdm/cdm_auxiliary_helper.h"
-#include "media/mojo/interfaces/cdm_proxy.mojom.h"
-#include "media/mojo/interfaces/cdm_storage.mojom.h"
-#include "media/mojo/interfaces/output_protection.mojom.h"
-#include "media/mojo/interfaces/platform_verification.mojom.h"
+#include "media/mojo/mojom/cdm_proxy.mojom.h"
+#include "media/mojo/mojom/cdm_storage.mojom.h"
+#include "media/mojo/mojom/output_protection.mojom.h"
+#include "media/mojo/mojom/platform_verification.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
#include "media/mojo/services/mojo_cdm_file_io.h"
#include "media/mojo/services/mojo_cdm_proxy.h"
diff --git a/chromium/media/mojo/services/mojo_cdm_helper_unittest.cc b/chromium/media/mojo/services/mojo_cdm_helper_unittest.cc
index ed99b129a97..a9680b1ee1f 100644
--- a/chromium/media/mojo/services/mojo_cdm_helper_unittest.cc
+++ b/chromium/media/mojo/services/mojo_cdm_helper_unittest.cc
@@ -7,9 +7,9 @@
#include "base/bind.h"
#include "base/macros.h"
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/cdm/api/content_decryption_module.h"
-#include "media/mojo/interfaces/cdm_storage.mojom.h"
+#include "media/mojo/mojom/cdm_storage.mojom.h"
#include "mojo/public/cpp/bindings/strong_binding.h"
#include "services/service_manager/public/cpp/binder_registry.h"
#include "services/service_manager/public/mojom/interface_provider.mojom.h"
@@ -94,7 +94,7 @@ class MojoCdmHelperTest : public testing::Test {
MojoCdmHelperTest() : helper_(&test_interface_provider_) {}
~MojoCdmHelperTest() override = default;
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
TestInterfaceProvider test_interface_provider_;
MockFileIOClient file_io_client_;
MojoCdmHelper helper_;
diff --git a/chromium/media/mojo/services/mojo_cdm_promise.h b/chromium/media/mojo/services/mojo_cdm_promise.h
index 6ec9124dc62..38b79780759 100644
--- a/chromium/media/mojo/services/mojo_cdm_promise.h
+++ b/chromium/media/mojo/services/mojo_cdm_promise.h
@@ -10,7 +10,7 @@
#include "base/callback.h"
#include "base/macros.h"
#include "media/base/cdm_promise.h"
-#include "media/mojo/interfaces/content_decryption_module.mojom.h"
+#include "media/mojo/mojom/content_decryption_module.mojom.h"
namespace media {
diff --git a/chromium/media/mojo/services/mojo_cdm_proxy.h b/chromium/media/mojo/services/mojo_cdm_proxy.h
index c5fa0d0818d..045a5b89e0f 100644
--- a/chromium/media/mojo/services/mojo_cdm_proxy.h
+++ b/chromium/media/mojo/services/mojo_cdm_proxy.h
@@ -13,7 +13,7 @@
#include "base/memory/weak_ptr.h"
#include "media/base/cdm_context.h"
#include "media/cdm/api/content_decryption_module.h"
-#include "media/mojo/interfaces/cdm_proxy.mojom.h"
+#include "media/mojo/mojom/cdm_proxy.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
#include "mojo/public/cpp/bindings/associated_binding.h"
diff --git a/chromium/media/mojo/services/mojo_cdm_proxy_service.cc b/chromium/media/mojo/services/mojo_cdm_proxy_service.cc
index a8f2aa81604..ac7866e4ce7 100644
--- a/chromium/media/mojo/services/mojo_cdm_proxy_service.cc
+++ b/chromium/media/mojo/services/mojo_cdm_proxy_service.cc
@@ -30,6 +30,10 @@ void MojoCdmProxyService::Initialize(
mojom::CdmProxyClientAssociatedPtrInfo client,
InitializeCallback callback) {
DVLOG(2) << __func__;
+
+ CHECK(!has_initialize_been_called_) << "Initialize should only happen once";
+ has_initialize_been_called_ = true;
+
client_.Bind(std::move(client));
cdm_proxy_->Initialize(
@@ -85,6 +89,9 @@ void MojoCdmProxyService::OnInitialized(InitializeCallback callback,
::media::CdmProxy::Status status,
::media::CdmProxy::Protocol protocol,
uint32_t crypto_session_id) {
+ CHECK_EQ(cdm_id_, CdmContext::kInvalidCdmId)
+ << "CDM proxy should only be created once.";
+
if (status == ::media::CdmProxy::Status::kOk)
cdm_id_ = context_->RegisterCdmProxy(this);
diff --git a/chromium/media/mojo/services/mojo_cdm_proxy_service.h b/chromium/media/mojo/services/mojo_cdm_proxy_service.h
index 0c6ca6de943..1ee107cba8f 100644
--- a/chromium/media/mojo/services/mojo_cdm_proxy_service.h
+++ b/chromium/media/mojo/services/mojo_cdm_proxy_service.h
@@ -14,7 +14,7 @@
#include "base/memory/weak_ptr.h"
#include "media/base/cdm_context.h"
#include "media/cdm/cdm_proxy.h"
-#include "media/mojo/interfaces/cdm_proxy.mojom.h"
+#include "media/mojo/mojom/cdm_proxy.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
namespace media {
@@ -64,6 +64,8 @@ class MEDIA_MOJO_EXPORT MojoCdmProxyService : public mojom::CdmProxy,
::media::CdmProxy::Protocol protocol,
uint32_t crypto_session_id);
+ bool has_initialize_been_called_ = false;
+
std::unique_ptr<::media::CdmProxy> cdm_proxy_;
MojoCdmServiceContext* const context_ = nullptr;
diff --git a/chromium/media/mojo/services/mojo_cdm_proxy_unittest.cc b/chromium/media/mojo/services/mojo_cdm_proxy_unittest.cc
index 8089c564624..3e564a4e61c 100644
--- a/chromium/media/mojo/services/mojo_cdm_proxy_unittest.cc
+++ b/chromium/media/mojo/services/mojo_cdm_proxy_unittest.cc
@@ -11,10 +11,11 @@
#include "base/memory/weak_ptr.h"
#include "base/run_loop.h"
#include "base/test/gmock_callback_support.h"
+#include "base/test/gtest_util.h"
#include "base/test/test_message_loop.h"
#include "media/base/mock_filters.h"
#include "media/cdm/cdm_proxy_context.h"
-#include "media/mojo/interfaces/cdm_proxy.mojom.h"
+#include "media/mojo/mojom/cdm_proxy.mojom.h"
#include "media/mojo/services/mojo_cdm_proxy.h"
#include "media/mojo/services/mojo_cdm_proxy_service.h"
#include "media/mojo/services/mojo_cdm_service_context.h"
@@ -295,6 +296,11 @@ TEST_F(MojoCdmProxyTest, Initialize_Failure) {
Initialize(Status::kFail);
}
+TEST_F(MojoCdmProxyTest, Initialize_Twice) {
+ Initialize();
+ EXPECT_CHECK_DEATH(Initialize());
+}
+
TEST_F(MojoCdmProxyTest, Process) {
Initialize();
Process();
diff --git a/chromium/media/mojo/services/mojo_cdm_service.h b/chromium/media/mojo/services/mojo_cdm_service.h
index 71d5b593b83..bfe7baf10bb 100644
--- a/chromium/media/mojo/services/mojo_cdm_service.h
+++ b/chromium/media/mojo/services/mojo_cdm_service.h
@@ -16,7 +16,7 @@
#include "base/memory/weak_ptr.h"
#include "media/base/content_decryption_module.h"
#include "media/base/eme_constants.h"
-#include "media/mojo/interfaces/content_decryption_module.mojom.h"
+#include "media/mojo/mojom/content_decryption_module.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
#include "media/mojo/services/mojo_cdm_promise.h"
#include "media/mojo/services/mojo_cdm_service_context.h"
diff --git a/chromium/media/mojo/services/mojo_decryptor_service.cc b/chromium/media/mojo/services/mojo_decryptor_service.cc
index f1436059cb7..605d733676a 100644
--- a/chromium/media/mojo/services/mojo_decryptor_service.cc
+++ b/chromium/media/mojo/services/mojo_decryptor_service.cc
@@ -17,7 +17,7 @@
#include "media/mojo/common/media_type_converters.h"
#include "media/mojo/common/mojo_decoder_buffer_converter.h"
#include "media/mojo/common/mojo_shared_buffer_video_frame.h"
-#include "media/mojo/interfaces/demuxer_stream.mojom.h"
+#include "media/mojo/mojom/demuxer_stream.mojom.h"
#include "media/mojo/services/mojo_cdm_service_context.h"
#include "mojo/public/cpp/bindings/strong_binding.h"
diff --git a/chromium/media/mojo/services/mojo_decryptor_service.h b/chromium/media/mojo/services/mojo_decryptor_service.h
index 2fba4375d57..b4fb2e7a699 100644
--- a/chromium/media/mojo/services/mojo_decryptor_service.h
+++ b/chromium/media/mojo/services/mojo_decryptor_service.h
@@ -15,7 +15,7 @@
#include "base/memory/weak_ptr.h"
#include "media/base/cdm_context.h"
#include "media/base/decryptor.h"
-#include "media/mojo/interfaces/decryptor.mojom.h"
+#include "media/mojo/mojom/decryptor.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
namespace media {
diff --git a/chromium/media/mojo/services/mojo_demuxer_stream_adapter.h b/chromium/media/mojo/services/mojo_demuxer_stream_adapter.h
index 759031fba43..0b46158ee28 100644
--- a/chromium/media/mojo/services/mojo_demuxer_stream_adapter.h
+++ b/chromium/media/mojo/services/mojo_demuxer_stream_adapter.h
@@ -13,7 +13,7 @@
#include "media/base/audio_decoder_config.h"
#include "media/base/demuxer_stream.h"
#include "media/base/video_decoder_config.h"
-#include "media/mojo/interfaces/demuxer_stream.mojom.h"
+#include "media/mojo/mojom/demuxer_stream.mojom.h"
namespace media {
diff --git a/chromium/media/mojo/services/mojo_media_client.h b/chromium/media/mojo/services/mojo_media_client.h
index 3f2bfeb1036..f64674d1374 100644
--- a/chromium/media/mojo/services/mojo_media_client.h
+++ b/chromium/media/mojo/services/mojo_media_client.h
@@ -15,7 +15,7 @@
#include "media/base/overlay_info.h"
#include "media/media_buildflags.h"
#include "media/mojo/buildflags.h"
-#include "media/mojo/interfaces/video_decoder.mojom.h"
+#include "media/mojo/mojom/video_decoder.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
#include "media/video/supported_video_decoder_config.h"
diff --git a/chromium/media/mojo/services/mojo_media_drm_storage.cc b/chromium/media/mojo/services/mojo_media_drm_storage.cc
index f034f183229..ccb04bf1e2f 100644
--- a/chromium/media/mojo/services/mojo_media_drm_storage.cc
+++ b/chromium/media/mojo/services/mojo_media_drm_storage.cc
@@ -20,8 +20,7 @@ namespace media {
MojoMediaDrmStorage::MojoMediaDrmStorage(
mojom::MediaDrmStoragePtr media_drm_storage_ptr)
- : media_drm_storage_ptr_(std::move(media_drm_storage_ptr)),
- weak_factory_(this) {
+ : media_drm_storage_ptr_(std::move(media_drm_storage_ptr)) {
DVLOG(1) << __func__;
}
diff --git a/chromium/media/mojo/services/mojo_media_drm_storage.h b/chromium/media/mojo/services/mojo_media_drm_storage.h
index 4446b9b5038..b781c44a9e7 100644
--- a/chromium/media/mojo/services/mojo_media_drm_storage.h
+++ b/chromium/media/mojo/services/mojo_media_drm_storage.h
@@ -11,7 +11,7 @@
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
#include "media/base/android/media_drm_storage.h"
-#include "media/mojo/interfaces/media_drm_storage.mojom.h"
+#include "media/mojo/mojom/media_drm_storage.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
namespace media {
@@ -40,7 +40,7 @@ class MEDIA_MOJO_EXPORT MojoMediaDrmStorage : public MediaDrmStorage {
mojom::SessionDataPtr session_data);
mojom::MediaDrmStoragePtr media_drm_storage_ptr_;
- base::WeakPtrFactory<MojoMediaDrmStorage> weak_factory_;
+ base::WeakPtrFactory<MojoMediaDrmStorage> weak_factory_{this};
DISALLOW_COPY_AND_ASSIGN(MojoMediaDrmStorage);
};
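
This file is one of several in the patch that move WeakPtrFactory construction out of the constructor initializer list and into an in-class {this} initializer. A minimal sketch of the resulting pattern (the class name is illustrative; base::WeakPtrFactory and GetWeakPtr() are the real Chromium APIs). The factory stays the last data member so outstanding weak pointers are invalidated before any other member is destroyed:

#include "base/memory/weak_ptr.h"

class Example {
 public:
  Example() = default;  // No explicit weak_factory_(this) in the init list.

  base::WeakPtr<Example> AsWeakPtr() { return weak_factory_.GetWeakPtr(); }

 private:
  int state_ = 0;

  // Declared last and initialized in-class, matching the migrated style.
  base::WeakPtrFactory<Example> weak_factory_{this};
};
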
diff --git a/chromium/media/mojo/services/mojo_media_log.h b/chromium/media/mojo/services/mojo_media_log.h
index e4e8b2ae1ae..15835c3c2c8 100644
--- a/chromium/media/mojo/services/mojo_media_log.h
+++ b/chromium/media/mojo/services/mojo_media_log.h
@@ -12,7 +12,7 @@
#include "base/memory/weak_ptr.h"
#include "base/sequenced_task_runner.h"
#include "media/base/media_log.h"
-#include "media/mojo/interfaces/media_log.mojom.h"
+#include "media/mojo/mojom/media_log.mojom.h"
namespace media {
diff --git a/chromium/media/mojo/services/mojo_provision_fetcher.h b/chromium/media/mojo/services/mojo_provision_fetcher.h
index 6b069b31869..a70d299447e 100644
--- a/chromium/media/mojo/services/mojo_provision_fetcher.h
+++ b/chromium/media/mojo/services/mojo_provision_fetcher.h
@@ -9,7 +9,7 @@
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
#include "media/base/provision_fetcher.h"
-#include "media/mojo/interfaces/provision_fetcher.mojom.h"
+#include "media/mojo/mojom/provision_fetcher.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
namespace media {
diff --git a/chromium/media/mojo/services/mojo_renderer_service.cc b/chromium/media/mojo/services/mojo_renderer_service.cc
index 00ca2bbc2e6..a7e085d9d00 100644
--- a/chromium/media/mojo/services/mojo_renderer_service.cc
+++ b/chromium/media/mojo/services/mojo_renderer_service.cc
@@ -83,10 +83,10 @@ void MojoRendererService::Initialize(
}
DCHECK(!media_url_params->media_url.is_empty());
- DCHECK(!media_url_params->site_for_cookies.is_empty());
media_resource_.reset(new MediaUrlDemuxer(
nullptr, media_url_params->media_url, media_url_params->site_for_cookies,
- media_url_params->allow_credentials, media_url_params->is_hls));
+ media_url_params->top_frame_origin, media_url_params->allow_credentials,
+ media_url_params->is_hls));
renderer_->Initialize(
media_resource_.get(), this,
base::Bind(&MojoRendererService::OnRendererInitializeDone, weak_this_,
diff --git a/chromium/media/mojo/services/mojo_renderer_service.h b/chromium/media/mojo/services/mojo_renderer_service.h
index c0b16bec964..9fe3da97175 100644
--- a/chromium/media/mojo/services/mojo_renderer_service.h
+++ b/chromium/media/mojo/services/mojo_renderer_service.h
@@ -19,7 +19,7 @@
#include "media/base/media_resource.h"
#include "media/base/pipeline_status.h"
#include "media/base/renderer_client.h"
-#include "media/mojo/interfaces/renderer.mojom.h"
+#include "media/mojo/mojom/renderer.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
#include "mojo/public/cpp/bindings/strong_binding.h"
diff --git a/chromium/media/mojo/services/mojo_video_decoder_service.h b/chromium/media/mojo/services/mojo_video_decoder_service.h
index 5a97794af04..1c1f26c4e5f 100644
--- a/chromium/media/mojo/services/mojo_video_decoder_service.h
+++ b/chromium/media/mojo/services/mojo_video_decoder_service.h
@@ -15,7 +15,7 @@
#include "media/base/decode_status.h"
#include "media/base/overlay_info.h"
#include "media/base/video_decoder.h"
-#include "media/mojo/interfaces/video_decoder.mojom.h"
+#include "media/mojo/mojom/video_decoder.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
#include "media/mojo/services/mojo_media_client.h"
#include "mojo/public/cpp/bindings/strong_binding.h"
diff --git a/chromium/media/mojo/services/mojo_video_encode_accelerator_provider.h b/chromium/media/mojo/services/mojo_video_encode_accelerator_provider.h
index ee9c65b8a60..86533c48b64 100644
--- a/chromium/media/mojo/services/mojo_video_encode_accelerator_provider.h
+++ b/chromium/media/mojo/services/mojo_video_encode_accelerator_provider.h
@@ -7,7 +7,7 @@
#include "base/compiler_specific.h"
#include "base/macros.h"
-#include "media/mojo/interfaces/video_encode_accelerator.mojom.h"
+#include "media/mojo/mojom/video_encode_accelerator.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
#include "media/mojo/services/mojo_video_encode_accelerator_service.h"
diff --git a/chromium/media/mojo/services/mojo_video_encode_accelerator_service.h b/chromium/media/mojo/services/mojo_video_encode_accelerator_service.h
index 2ee86b1f329..7ac4c22e241 100644
--- a/chromium/media/mojo/services/mojo_video_encode_accelerator_service.h
+++ b/chromium/media/mojo/services/mojo_video_encode_accelerator_service.h
@@ -15,7 +15,7 @@
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
#include "base/sequence_checker.h"
-#include "media/mojo/interfaces/video_encode_accelerator.mojom.h"
+#include "media/mojo/mojom/video_encode_accelerator.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
#include "media/video/video_encode_accelerator.h"
diff --git a/chromium/media/mojo/services/mojo_video_encode_accelerator_service_unittest.cc b/chromium/media/mojo/services/mojo_video_encode_accelerator_service_unittest.cc
index 1b4f101bf7a..132759dcb46 100644
--- a/chromium/media/mojo/services/mojo_video_encode_accelerator_service_unittest.cc
+++ b/chromium/media/mojo/services/mojo_video_encode_accelerator_service_unittest.cc
@@ -7,9 +7,9 @@
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "gpu/config/gpu_preferences.h"
-#include "media/mojo/interfaces/video_encode_accelerator.mojom.h"
+#include "media/mojo/mojom/video_encode_accelerator.mojom.h"
#include "media/mojo/services/mojo_video_encode_accelerator_service.h"
#include "media/video/fake_video_encode_accelerator.h"
#include "media/video/video_encode_accelerator.h"
@@ -120,7 +120,7 @@ class MojoVideoEncodeAcceleratorServiceTest : public ::testing::Test {
}
private:
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
mojo::StrongBindingPtr<mojom::VideoEncodeAcceleratorClient> mojo_vea_binding_;
diff --git a/chromium/media/mojo/services/test_helpers.h b/chromium/media/mojo/services/test_helpers.h
index eb5fb0c013d..17c37818513 100644
--- a/chromium/media/mojo/services/test_helpers.h
+++ b/chromium/media/mojo/services/test_helpers.h
@@ -7,7 +7,7 @@
#include <string>
-#include "media/mojo/interfaces/media_types.mojom.h"
+#include "media/mojo/mojom/media_types.mojom.h"
namespace media {
diff --git a/chromium/media/mojo/services/test_mojo_media_client.cc b/chromium/media/mojo/services/test_mojo_media_client.cc
index c27c95e4eac..3b01e49cef3 100644
--- a/chromium/media/mojo/services/test_mojo_media_client.cc
+++ b/chromium/media/mojo/services/test_mojo_media_client.cc
@@ -88,8 +88,17 @@ std::unique_ptr<Renderer> TestMojoMediaClient::CreateRenderer(
return renderer_factory_->CreateRenderer(
task_runner, task_runner, audio_sink.get(), video_sink_ptr,
RequestOverlayInfoCB(), gfx::ColorSpace());
+}
-} // namespace media
+#if BUILDFLAG(ENABLE_CAST_RENDERER)
+std::unique_ptr<Renderer> TestMojoMediaClient::CreateCastRenderer(
+ service_manager::mojom::InterfaceProvider* host_interfaces,
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ MediaLog* media_log,
+ const base::UnguessableToken& /* overlay_plane_id */) {
+ return CreateRenderer(host_interfaces, task_runner, media_log, std::string());
+}
+#endif // BUILDFLAG(ENABLE_CAST_RENDERER)
std::unique_ptr<CdmFactory> TestMojoMediaClient::CreateCdmFactory(
service_manager::mojom::InterfaceProvider* /* host_interfaces */) {
diff --git a/chromium/media/mojo/services/test_mojo_media_client.h b/chromium/media/mojo/services/test_mojo_media_client.h
index d91254cc243..a0b09838be2 100644
--- a/chromium/media/mojo/services/test_mojo_media_client.h
+++ b/chromium/media/mojo/services/test_mojo_media_client.h
@@ -34,6 +34,13 @@ class TestMojoMediaClient : public MojoMediaClient {
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
MediaLog* media_log,
const std::string& audio_device_id) final;
+#if BUILDFLAG(ENABLE_CAST_RENDERER)
+ std::unique_ptr<Renderer> CreateCastRenderer(
+ service_manager::mojom::InterfaceProvider* host_interfaces,
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ MediaLog* media_log,
+ const base::UnguessableToken& overlay_plane_id) final;
+#endif // BUILDFLAG(ENABLE_CAST_RENDERER)
std::unique_ptr<CdmFactory> CreateCdmFactory(
service_manager::mojom::InterfaceProvider* /* host_interfaces */) final;
#if BUILDFLAG(ENABLE_LIBRARY_CDMS)
diff --git a/chromium/media/mojo/services/video_decode_perf_history.cc b/chromium/media/mojo/services/video_decode_perf_history.cc
index 3deeff99607..744faf16428 100644
--- a/chromium/media/mojo/services/video_decode_perf_history.cc
+++ b/chromium/media/mojo/services/video_decode_perf_history.cc
@@ -16,7 +16,7 @@
#include "media/base/media_switches.h"
#include "media/base/video_codecs.h"
#include "media/capabilities/learning_helper.h"
-#include "media/mojo/interfaces/media_types.mojom.h"
+#include "media/mojo/mojom/media_types.mojom.h"
#include "mojo/public/cpp/bindings/strong_binding.h"
#include "services/metrics/public/cpp/ukm_builders.h"
#include "services/metrics/public/cpp/ukm_recorder.h"
@@ -32,11 +32,27 @@ const double kMaxSmoothDroppedFramesPercentParamDefault = .05;
const char VideoDecodePerfHistory::kMaxSmoothDroppedFramesPercentParamName[] =
"smooth_threshold";
+const char
+ VideoDecodePerfHistory::kEmeMaxSmoothDroppedFramesPercentParamName[] =
+ "eme_smooth_threshold";
+
// static
-double VideoDecodePerfHistory::GetMaxSmoothDroppedFramesPercent() {
- return base::GetFieldTrialParamByFeatureAsDouble(
+double VideoDecodePerfHistory::GetMaxSmoothDroppedFramesPercent(bool is_eme) {
+ double threshold = base::GetFieldTrialParamByFeatureAsDouble(
kMediaCapabilitiesWithParameters, kMaxSmoothDroppedFramesPercentParamName,
kMaxSmoothDroppedFramesPercentParamDefault);
+
+ // For EME, the precedence of overrides is:
+ //   1. EME-specific override, |kEmeMaxSmoothDroppedFramesPercentParamName|
+ // 2. Non-EME override, |kMaxSmoothDroppedFramesPercentParamName|
+ // 3. |kMaxSmoothDroppedFramesPercentParamDefault|
+ if (is_eme) {
+ threshold = base::GetFieldTrialParamByFeatureAsDouble(
+ kMediaCapabilitiesWithParameters,
+ kEmeMaxSmoothDroppedFramesPercentParamName, threshold);
+ }
+
+ return threshold;
}
VideoDecodePerfHistory::VideoDecodePerfHistory(
@@ -134,6 +150,7 @@ void VideoDecodePerfHistory::GetPerfInfo(mojom::PredictionFeaturesPtr features,
}
void VideoDecodePerfHistory::AssessStats(
+ const VideoDecodeStatsDB::VideoDescKey& key,
const VideoDecodeStatsDB::DecodeStatsEntry* stats,
bool* is_smooth,
bool* is_power_efficient) {
@@ -158,7 +175,9 @@ void VideoDecodePerfHistory::AssessStats(
*is_power_efficient =
percent_power_efficient >= kMinPowerEfficientDecodedFramePercent;
- *is_smooth = percent_dropped <= GetMaxSmoothDroppedFramesPercent();
+
+ *is_smooth = percent_dropped <=
+ GetMaxSmoothDroppedFramesPercent(!key.key_system.empty());
}
void VideoDecodePerfHistory::OnGotStatsForRequest(
@@ -175,7 +194,7 @@ void VideoDecodePerfHistory::OnGotStatsForRequest(
double percent_dropped = 0;
double percent_power_efficient = 0;
- AssessStats(stats.get(), &is_smooth, &is_power_efficient);
+ AssessStats(video_key, stats.get(), &is_smooth, &is_power_efficient);
if (stats && stats->frames_decoded) {
DCHECK(database_success);
@@ -333,7 +352,7 @@ void VideoDecodePerfHistory::ReportUkmMetrics(
bool past_is_smooth = false;
bool past_is_efficient = false;
- AssessStats(past_stats, &past_is_smooth, &past_is_efficient);
+ AssessStats(video_key, past_stats, &past_is_smooth, &past_is_efficient);
builder.SetPerf_ApiWouldClaimIsSmooth(past_is_smooth);
builder.SetPerf_ApiWouldClaimIsPowerEfficient(past_is_efficient);
if (past_stats) {
@@ -349,7 +368,7 @@ void VideoDecodePerfHistory::ReportUkmMetrics(
bool new_is_smooth = false;
bool new_is_efficient = false;
- AssessStats(&new_stats, &new_is_smooth, &new_is_efficient);
+ AssessStats(video_key, &new_stats, &new_is_smooth, &new_is_efficient);
builder.SetPerf_RecordIsSmooth(new_is_smooth);
builder.SetPerf_RecordIsPowerEfficient(new_is_efficient);
builder.SetPerf_VideoFramesDecoded(new_stats.frames_decoded);
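
For reference, the precedence implemented in GetMaxSmoothDroppedFramesPercent(bool is_eme) above can be modeled with a small stand-alone sketch. ParamOr below is a stand-in for base::GetFieldTrialParamByFeatureAsDouble; passing the already-resolved non-EME threshold as the fallback for the EME lookup is what makes an absent "eme_smooth_threshold" param defer to the non-EME override:

#include <map>
#include <string>

double ParamOr(const std::map<std::string, double>& params,
               const std::string& name, double fallback) {
  auto it = params.find(name);
  return it != params.end() ? it->second : fallback;
}

double MaxSmoothDroppedFramesPercent(
    const std::map<std::string, double>& trial_params, bool is_eme) {
  constexpr double kDefault = 0.05;  // kMaxSmoothDroppedFramesPercentParamDefault
  double threshold = ParamOr(trial_params, "smooth_threshold", kDefault);
  if (is_eme) {
    // An EME-specific override wins; otherwise keep the non-EME result.
    threshold = ParamOr(trial_params, "eme_smooth_threshold", threshold);
  }
  return threshold;
}
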
diff --git a/chromium/media/mojo/services/video_decode_perf_history.h b/chromium/media/mojo/services/video_decode_perf_history.h
index 1f43cadb066..7ebf833f8fa 100644
--- a/chromium/media/mojo/services/video_decode_perf_history.h
+++ b/chromium/media/mojo/services/video_decode_perf_history.h
@@ -17,7 +17,7 @@
#include "media/capabilities/video_decode_stats_db.h"
#include "media/capabilities/video_decode_stats_db_provider.h"
#include "media/learning/impl/feature_provider.h"
-#include "media/mojo/interfaces/video_decode_perf_history.mojom.h"
+#include "media/mojo/mojom/video_decode_perf_history.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
#include "mojo/public/cpp/bindings/binding_set.h"
#include "services/metrics/public/cpp/ukm_source_id.h"
@@ -53,6 +53,7 @@ class MEDIA_MOJO_EXPORT VideoDecodePerfHistory
public base::SupportsUserData::Data {
public:
static const char kMaxSmoothDroppedFramesPercentParamName[];
+ static const char kEmeMaxSmoothDroppedFramesPercentParamName[];
explicit VideoDecodePerfHistory(
std::unique_ptr<VideoDecodeStatsDB> db,
@@ -95,7 +96,7 @@ class MEDIA_MOJO_EXPORT VideoDecodePerfHistory
// Decode capabilities will be described as "smooth" whenever the percentage
// of dropped frames is less-than-or-equal-to this value.
- static double GetMaxSmoothDroppedFramesPercent();
+ static double GetMaxSmoothDroppedFramesPercent(bool is_eme);
// Track the status of database lazy initialization.
enum InitStatus {
@@ -162,7 +163,8 @@ class MEDIA_MOJO_EXPORT VideoDecodePerfHistory
const VideoDecodeStatsDB::DecodeStatsEntry& new_stats,
VideoDecodeStatsDB::DecodeStatsEntry* past_stats);
- void AssessStats(const VideoDecodeStatsDB::DecodeStatsEntry* stats,
+ void AssessStats(const VideoDecodeStatsDB::VideoDescKey& key,
+ const VideoDecodeStatsDB::DecodeStatsEntry* stats,
bool* is_smooth,
bool* is_power_efficient);
diff --git a/chromium/media/mojo/services/video_decode_perf_history_unittest.cc b/chromium/media/mojo/services/video_decode_perf_history_unittest.cc
index a3f8d2966bd..04a6f9c5116 100644
--- a/chromium/media/mojo/services/video_decode_perf_history_unittest.cc
+++ b/chromium/media/mojo/services/video_decode_perf_history_unittest.cc
@@ -13,12 +13,12 @@
#include "base/task/post_task.h"
#include "base/test/bind_test_util.h"
#include "base/test/scoped_feature_list.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "components/ukm/test_ukm_recorder.h"
#include "media/base/key_systems.h"
#include "media/base/media_switches.h"
#include "media/capabilities/video_decode_stats_db.h"
-#include "media/mojo/interfaces/media_types.mojom.h"
+#include "media/mojo/mojom/media_types.mojom.h"
#include "media/mojo/services/test_helpers.h"
#include "media/mojo/services/video_decode_perf_history.h"
#include "services/metrics/public/cpp/ukm_builders.h"
@@ -168,8 +168,8 @@ class VideoDecodePerfHistoryTest : public testing::Test {
GetFakeDB()->CompleteInitialize(initialize_success);
}
- double GetMaxSmoothDroppedFramesPercent() {
- return VideoDecodePerfHistory::GetMaxSmoothDroppedFramesPercent();
+ double GetMaxSmoothDroppedFramesPercent(bool is_eme = false) {
+ return VideoDecodePerfHistory::GetMaxSmoothDroppedFramesPercent(is_eme);
}
// Tests may set this as the callback for VideoDecodePerfHistory::GetPerfInfo
@@ -291,7 +291,7 @@ class VideoDecodePerfHistoryTest : public testing::Test {
// Verify past stats.
bool past_is_smooth = false;
bool past_is_efficient = false;
- perf_history_->AssessStats(old_stats.get(), &past_is_smooth,
+ perf_history_->AssessStats(key, old_stats.get(), &past_is_smooth,
&past_is_efficient);
EXPECT_UKM(UkmEntry::kPerf_ApiWouldClaimIsSmoothName, past_is_smooth);
EXPECT_UKM(UkmEntry::kPerf_ApiWouldClaimIsPowerEfficientName,
@@ -312,7 +312,8 @@ class VideoDecodePerfHistoryTest : public testing::Test {
new_targets.frames_power_efficient);
bool new_is_smooth = false;
bool new_is_efficient = false;
- perf_history_->AssessStats(&new_stats, &new_is_smooth, &new_is_efficient);
+ perf_history_->AssessStats(key, &new_stats, &new_is_smooth,
+ &new_is_efficient);
EXPECT_UKM(UkmEntry::kPerf_RecordIsSmoothName, new_is_smooth);
EXPECT_UKM(UkmEntry::kPerf_RecordIsPowerEfficientName, new_is_efficient);
EXPECT_UKM(UkmEntry::kPerf_VideoFramesDecodedName,
@@ -340,7 +341,7 @@ class VideoDecodePerfHistoryTest : public testing::Test {
static constexpr double kMinPowerEfficientDecodedFramePercent =
VideoDecodePerfHistory::kMinPowerEfficientDecodedFramePercent;
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
std::unique_ptr<ukm::TestAutoSetUkmRecorder> test_recorder_;
@@ -431,7 +432,7 @@ TEST_P(VideoDecodePerfHistoryParamTest, GetPerfInfo_Smooth) {
GetFakeDB()->CompleteInitialize(true);
// Allow initialize-deferred API calls to complete.
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
}
@@ -530,7 +531,7 @@ TEST_P(VideoDecodePerfHistoryParamTest, GetPerfInfo_PowerEfficient) {
GetFakeDB()->CompleteInitialize(true);
// Allow initialize-deferred API calls to complete.
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
}
@@ -558,7 +559,7 @@ TEST_P(VideoDecodePerfHistoryParamTest, GetPerfInfo_FailedInitialize) {
GetFakeDB()->CompleteInitialize(false);
// Allow initialize-deferred API calls to complete.
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
}
@@ -617,7 +618,7 @@ TEST_P(VideoDecodePerfHistoryParamTest, AppendAndDestroyStats) {
GetFakeDB()->CompleteInitialize(true);
// Allow initialize-deferred API calls to complete.
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
}
@@ -644,14 +645,14 @@ TEST_P(VideoDecodePerfHistoryParamTest, GetVideoDecodeStatsDB) {
base::BindOnce(&VideoDecodePerfHistoryTest::MockGetVideoDecodeStatsDBCB,
base::Unretained(this)));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
// Complete successful deferred DB initialization (see comment at top of test)
if (params.defer_initialize) {
GetFakeDB()->CompleteInitialize(true);
// Allow initialize-deferred API calls to complete.
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
}
@@ -674,14 +675,14 @@ TEST_P(VideoDecodePerfHistoryParamTest,
base::BindOnce(&VideoDecodePerfHistoryTest::MockGetVideoDecodeStatsDBCB,
base::Unretained(this)));
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
// Complete failed deferred DB initialization (see comment at top of test)
if (params.defer_initialize) {
GetFakeDB()->CompleteInitialize(false);
// Allow initialize-deferred API calls to complete.
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
}
@@ -733,7 +734,7 @@ TEST_P(VideoDecodePerfHistoryParamTest, FailedDatabaseGetForAppend) {
GetFakeDB()->CompleteInitialize(true);
// Allow initialize-deferred API calls to complete.
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
}
@@ -785,7 +786,7 @@ TEST_P(VideoDecodePerfHistoryParamTest, FailedDatabaseAppend) {
GetFakeDB()->CompleteInitialize(true);
// Allow initialize-deferred API calls to complete.
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
}
}
@@ -794,11 +795,17 @@ TEST_P(VideoDecodePerfHistoryParamTest, FailedDatabaseAppend) {
// To avoid race conditions when setting the parameter, the test sets it when
// starting and makes sure the values recorded to the DB wouldn't be smooth per
// the default value.
-TEST_P(VideoDecodePerfHistoryParamTest, SmoothThresholdFinchOverride) {
+TEST_P(VideoDecodePerfHistoryParamTest,
+ SmoothThresholdFinchOverride_NoEmeOverride) {
base::test::ScopedFeatureList scoped_feature_list;
+ // EME and non-EME thresholds should initially be the same (neither is
+ // overridden).
double previous_smooth_dropped_frames_threshold =
- GetMaxSmoothDroppedFramesPercent();
+ GetMaxSmoothDroppedFramesPercent(false /* is_eme */);
+ EXPECT_EQ(previous_smooth_dropped_frames_threshold,
+ GetMaxSmoothDroppedFramesPercent(true /* is_eme */));
+
double new_smooth_dropped_frames_threshold =
previous_smooth_dropped_frames_threshold / 2;
@@ -818,8 +825,14 @@ TEST_P(VideoDecodePerfHistoryParamTest, SmoothThresholdFinchOverride) {
media::kMediaCapabilitiesWithParameters, &actual_trial_params));
EXPECT_EQ(trial_params, actual_trial_params);
+ // Non EME threshold is overridden.
+ EXPECT_EQ(new_smooth_dropped_frames_threshold,
+ GetMaxSmoothDroppedFramesPercent(false /* is_eme */));
+
+ // EME threshold is also implicitly overridden (we didn't set an EME specific
+ // value, so it should defer to the non-EME override).
EXPECT_EQ(new_smooth_dropped_frames_threshold,
- GetMaxSmoothDroppedFramesPercent());
+ GetMaxSmoothDroppedFramesPercent(true /* is_eme */));
// NOTE: When the DB initialization is deferred, all EXPECT_CALLs are then
// delayed until we call db_->CompleteInitialize(). testing::InSequence enforces
@@ -888,7 +901,127 @@ TEST_P(VideoDecodePerfHistoryParamTest, SmoothThresholdFinchOverride) {
GetFakeDB()->CompleteInitialize(true);
// Allow initialize-deferred API calls to complete.
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
+ }
+}
+
+TEST_P(VideoDecodePerfHistoryParamTest,
+ SmoothThresholdFinchOverride_WithEmeOverride) {
+ base::test::ScopedFeatureList scoped_feature_list;
+
+ // EME and non-EME thresholds should initially be the same (neither is
+ // overridden).
+ double previous_smooth_dropped_frames_threshold =
+ GetMaxSmoothDroppedFramesPercent(false /* is_eme */);
+ EXPECT_EQ(previous_smooth_dropped_frames_threshold,
+ GetMaxSmoothDroppedFramesPercent(true /* is_eme */));
+
+ double new_CLEAR_smooth_dropped_frames_threshold =
+ previous_smooth_dropped_frames_threshold / 2;
+ double new_EME_smooth_dropped_frames_threshold =
+ previous_smooth_dropped_frames_threshold / 3;
+
+ ASSERT_LT(new_CLEAR_smooth_dropped_frames_threshold,
+ previous_smooth_dropped_frames_threshold);
+ ASSERT_LT(new_EME_smooth_dropped_frames_threshold,
+ new_CLEAR_smooth_dropped_frames_threshold);
+
+ // Override field trial.
+ base::FieldTrialParams trial_params;
+ trial_params
+ [VideoDecodePerfHistory::kMaxSmoothDroppedFramesPercentParamName] =
+ base::NumberToString(new_CLEAR_smooth_dropped_frames_threshold);
+ trial_params
+ [VideoDecodePerfHistory::kEmeMaxSmoothDroppedFramesPercentParamName] =
+ base::NumberToString(new_EME_smooth_dropped_frames_threshold);
+
+ scoped_feature_list.InitAndEnableFeatureWithParameters(
+ media::kMediaCapabilitiesWithParameters, trial_params);
+
+ base::FieldTrialParams actual_trial_params;
+ EXPECT_TRUE(base::GetFieldTrialParamsByFeature(
+ media::kMediaCapabilitiesWithParameters, &actual_trial_params));
+ EXPECT_EQ(trial_params, actual_trial_params);
+
+ // Both thresholds should be overridden.
+ EXPECT_EQ(new_CLEAR_smooth_dropped_frames_threshold,
+ GetMaxSmoothDroppedFramesPercent(false /* is_eme */));
+ EXPECT_EQ(new_EME_smooth_dropped_frames_threshold,
+ GetMaxSmoothDroppedFramesPercent(true /* is_eme */));
+
+ // NOTE: When the DB initialization is deferred, all EXPECT_CALLs are then
+ // delayed until we call db_->CompleteInitialize(). testing::InSequence enforces
+ // that EXPECT_CALLs arrive in top-to-bottom order.
+ PerfHistoryTestParams params = GetParam();
+ testing::InSequence dummy;
+
+ // Complete initialization in advance of API calls when not asked to defer.
+ if (!params.defer_initialize)
+ PreInitializeDB(/* success */ true);
+
+ // First add 2 records to the history. The second record has a higher frame
+ // rate and a higher number of dropped frames such that it is "not smooth".
+ const VideoCodecProfile kKnownProfile = VP9PROFILE_PROFILE0;
+ const gfx::Size kKownSize(100, 200);
+ const int kSmoothFrameRatePrevious = 30;
+ const int kSmoothFrameRateNew = 90;
+ const int kFramesDecoded = 1000;
+ const int kNotPowerEfficientFramesDecoded = 0;
+
+ // Sets the ratio of dropped frames to qualify as NOT smooth. For CLEAR, use
+ // the previous smooth threshold. For EME, use the new CLEAR threshold to
+ // verify that the EME threshold is lower than CLEAR.
+ const int kSmoothFramesDroppedPrevious =
+ params.key_system.empty()
+ ? kFramesDecoded * previous_smooth_dropped_frames_threshold
+ : kFramesDecoded * new_CLEAR_smooth_dropped_frames_threshold;
+ // Sets the ratio of dropped frames to qualify as smooth per the new threshold
+ // depending on whether the key indicates this record is EME.
+ const int kSmoothFramesDroppedNew =
+ params.key_system.empty()
+ ? kFramesDecoded * new_CLEAR_smooth_dropped_frames_threshold
+ : kFramesDecoded * new_EME_smooth_dropped_frames_threshold;
+
+ // Add the entry.
+ SavePerfRecord(
+ UkmVerifcation::kSaveTriggersUkm, kOrigin, kIsTopFrame,
+ MakeFeatures(kKnownProfile, kKownSize, kSmoothFrameRatePrevious,
+ params.key_system, params.use_hw_secure_codecs),
+ MakeTargets(kFramesDecoded, kSmoothFramesDroppedPrevious,
+ kNotPowerEfficientFramesDecoded),
+ kPlayerId);
+
+ SavePerfRecord(UkmVerifcation::kSaveTriggersUkm, kOrigin, kIsTopFrame,
+ MakeFeatures(kKnownProfile, kKownSize, kSmoothFrameRateNew,
+ params.key_system, params.use_hw_secure_codecs),
+ MakeTargets(kFramesDecoded, kSmoothFramesDroppedNew,
+ kNotPowerEfficientFramesDecoded),
+ kPlayerId);
+
+ // Verify perf history returns is_smooth = false for entry that would be
+ // smooth per previous smooth threshold.
+ EXPECT_CALL(*this, MockGetPerfInfoCB(kIsNotSmooth, kIsNotPowerEfficient));
+ perf_history_->GetPerfInfo(
+ MakeFeaturesPtr(kKnownProfile, kKownSize, kSmoothFrameRatePrevious,
+ params.key_system, params.use_hw_secure_codecs),
+ base::BindOnce(&VideoDecodePerfHistoryParamTest::MockGetPerfInfoCB,
+ base::Unretained(this)));
+
+ // Verify perf history returns is_smooth = true for entry that would be
+ // smooth per new smooth threshold.
+ EXPECT_CALL(*this, MockGetPerfInfoCB(kIsSmooth, kIsNotPowerEfficient));
+ perf_history_->GetPerfInfo(
+ MakeFeaturesPtr(kKnownProfile, kKownSize, kSmoothFrameRateNew,
+ params.key_system, params.use_hw_secure_codecs),
+ base::BindOnce(&VideoDecodePerfHistoryParamTest::MockGetPerfInfoCB,
+ base::Unretained(this)));
+
+ // Complete successful deferred DB initialization (see comment at top of test)
+ if (params.defer_initialize) {
+ GetFakeDB()->CompleteInitialize(true);
+
+ // Allow initialize-deferred API calls to complete.
+ task_environment_.RunUntilIdle();
}
}
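
To make the threshold arithmetic in SmoothThresholdFinchOverride_WithEmeOverride concrete (assuming the stock default of .05 is in effect before the override): the CLEAR override becomes .05 / 2 = .025 and the EME override .05 / 3 ≈ .0167. With kFramesDecoded = 1000 and a non-empty key system, the first record is saved with 1000 * .025 = 25 dropped frames (2.5%, above the 1.67% EME limit, hence kIsNotSmooth) and the second with roughly 1000 * .0167 ≈ 16 dropped frames (1.6%, at or below the limit, hence kIsSmooth).
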
diff --git a/chromium/media/mojo/services/video_decode_stats_recorder.h b/chromium/media/mojo/services/video_decode_stats_recorder.h
index ebef85acf47..558e957bbec 100644
--- a/chromium/media/mojo/services/video_decode_stats_recorder.h
+++ b/chromium/media/mojo/services/video_decode_stats_recorder.h
@@ -11,8 +11,8 @@
#include "base/time/time.h"
#include "media/base/video_codecs.h"
#include "media/learning/common/value.h"
-#include "media/mojo/interfaces/media_types.mojom.h"
-#include "media/mojo/interfaces/video_decode_stats_recorder.mojom.h"
+#include "media/mojo/mojom/media_types.mojom.h"
+#include "media/mojo/mojom/video_decode_stats_recorder.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
#include "media/mojo/services/video_decode_perf_history.h"
#include "services/service_manager/public/cpp/bind_source_info.h"
diff --git a/chromium/media/mojo/services/video_decode_stats_recorder_unittest.cc b/chromium/media/mojo/services/video_decode_stats_recorder_unittest.cc
index ec70cd049d9..7c63a2d65b9 100644
--- a/chromium/media/mojo/services/video_decode_stats_recorder_unittest.cc
+++ b/chromium/media/mojo/services/video_decode_stats_recorder_unittest.cc
@@ -8,7 +8,7 @@
#include "base/memory/ptr_util.h"
#include "media/base/video_codecs.h"
#include "media/learning/common/value.h"
-#include "media/mojo/interfaces/media_types.mojom.h"
+#include "media/mojo/mojom/media_types.mojom.h"
#include "media/mojo/services/test_helpers.h"
#include "media/mojo/services/video_decode_stats_recorder.h"
#include "testing/gmock/include/gmock/gmock.h"
diff --git a/chromium/media/mojo/services/watch_time_recorder.cc b/chromium/media/mojo/services/watch_time_recorder.cc
index 2eaba1efe67..d3e60fce596 100644
--- a/chromium/media/mojo/services/watch_time_recorder.cc
+++ b/chromium/media/mojo/services/watch_time_recorder.cc
@@ -43,6 +43,7 @@ enum class VideoDecoderName : int {
kMojo = 5, // MojoVideoDecoder
kDecrypting = 6, // DecryptingVideoDecoder
kDav1d = 7, // Dav1dVideoDecoder
+ kFuchsia = 8, // FuchsiaVideoDecoder
};
static AudioDecoderName ConvertAudioDecoderNameToEnum(const std::string& name) {
@@ -81,6 +82,8 @@ static VideoDecoderName ConvertVideoDecoderNameToEnum(const std::string& name) {
return VideoDecoderName::kDecrypting;
case 0xcd46efa0:
return VideoDecoderName::kDav1d;
+ case 0x27b31c6a:
+ return VideoDecoderName::kFuchsia;
default:
DLOG_IF(WARNING, !name.empty())
<< "Unknown decoder name encountered; metrics need updating: "
diff --git a/chromium/media/mojo/services/watch_time_recorder.h b/chromium/media/mojo/services/watch_time_recorder.h
index 7d72beaeaee..9c843f23f78 100644
--- a/chromium/media/mojo/services/watch_time_recorder.h
+++ b/chromium/media/mojo/services/watch_time_recorder.h
@@ -14,7 +14,7 @@
#include "media/base/audio_codecs.h"
#include "media/base/pipeline_status.h"
#include "media/base/video_codecs.h"
-#include "media/mojo/interfaces/watch_time_recorder.mojom.h"
+#include "media/mojo/mojom/watch_time_recorder.mojom.h"
#include "media/mojo/services/media_mojo_export.h"
#include "services/metrics/public/cpp/ukm_source_id.h"
diff --git a/chromium/media/mojo/services/watch_time_recorder_unittest.cc b/chromium/media/mojo/services/watch_time_recorder_unittest.cc
index 225daedcef2..91bb09a0f0c 100644
--- a/chromium/media/mojo/services/watch_time_recorder_unittest.cc
+++ b/chromium/media/mojo/services/watch_time_recorder_unittest.cc
@@ -15,7 +15,7 @@
#include "base/run_loop.h"
#include "base/strings/string_number_conversions.h"
#include "base/test/metrics/histogram_tester.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/test/test_message_loop.h"
#include "base/threading/thread_task_runner_handle.h"
#include "components/ukm/test_ukm_recorder.h"
@@ -63,7 +63,9 @@ class WatchTimeRecorderTest : public testing::Test {
base::Unretained(this)),
base::BindRepeating(
[]() { return learning::FeatureValue(0); }) /* origin callback */,
- VideoDecodePerfHistory::SaveCallback(), mojo::MakeRequest(&provider_));
+ VideoDecodePerfHistory::SaveCallback(),
+ MediaMetricsProvider::GetLearningSessionCallback(),
+ mojo::MakeRequest(&provider_));
}
~WatchTimeRecorderTest() override { base::RunLoop().RunUntilIdle(); }
@@ -160,7 +162,7 @@ class WatchTimeRecorderTest : public testing::Test {
MOCK_METHOD0(GetCurrentMediaTime, base::TimeDelta());
protected:
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
mojom::MediaMetricsProviderPtr provider_;
std::unique_ptr<base::HistogramTester> histogram_tester_;
std::unique_ptr<ukm::TestAutoSetUkmRecorder> test_recorder_;
@@ -1245,7 +1247,7 @@ TEST_F(WatchTimeRecorderTest, DISABLED_PrintExpectedDecoderNameHashes) {
"FFmpegAudioDecoder", "FFmpegVideoDecoder", "GpuVideoDecoder",
"MojoVideoDecoder", "MojoAudioDecoder", "VpxVideoDecoder",
"AomVideoDecoder", "DecryptingAudioDecoder", "DecryptingVideoDecoder",
- "Dav1dVideoDecoder"};
+ "Dav1dVideoDecoder", "FuchsiaVideoDecoder"};
printf("%18s = 0\n", "None");
for (const auto& name : kDecoderNames)
printf("%18s = 0x%x\n", name.c_str(), base::PersistentHash(name));
diff --git a/chromium/media/remoting/BUILD.gn b/chromium/media/remoting/BUILD.gn
index 6c626c4a9c4..07d372429a0 100644
--- a/chromium/media/remoting/BUILD.gn
+++ b/chromium/media/remoting/BUILD.gn
@@ -49,7 +49,7 @@ source_set("remoting") {
"//base",
"//media",
"//media/mojo/common",
- "//media/mojo/interfaces:remoting",
+ "//media/mojo/mojom:remoting",
"//mojo/public/cpp/bindings",
"//ui/gfx",
"//url",
@@ -82,7 +82,7 @@ source_set("media_remoting_tests") {
"//base/test:test_support",
"//media:test_support",
"//media/mojo/common",
- "//media/mojo/interfaces:remoting",
+ "//media/mojo/mojom:remoting",
"//testing/gmock",
"//testing/gtest",
"//url",
diff --git a/chromium/media/remoting/courier_renderer.h b/chromium/media/remoting/courier_renderer.h
index dd1e17ecfac..2b9cd94d21e 100644
--- a/chromium/media/remoting/courier_renderer.h
+++ b/chromium/media/remoting/courier_renderer.h
@@ -19,7 +19,7 @@
#include "base/timer/timer.h"
#include "media/base/pipeline_status.h"
#include "media/base/renderer.h"
-#include "media/mojo/interfaces/remoting.mojom.h"
+#include "media/mojo/mojom/remoting.mojom.h"
#include "media/remoting/metrics.h"
#include "media/remoting/rpc_broker.h"
#include "mojo/public/cpp/system/data_pipe.h"
diff --git a/chromium/media/remoting/courier_renderer_unittest.cc b/chromium/media/remoting/courier_renderer_unittest.cc
index 9887c904973..f47caf952da 100644
--- a/chromium/media/remoting/courier_renderer_unittest.cc
+++ b/chromium/media/remoting/courier_renderer_unittest.cc
@@ -8,8 +8,8 @@
#include "base/bind.h"
#include "base/run_loop.h"
-#include "base/test/scoped_task_environment.h"
#include "base/test/simple_test_tick_clock.h"
+#include "base/test/task_environment.h"
#include "base/threading/thread_task_runner_handle.h"
#include "media/base/media_util.h"
#include "media/base/pipeline_status.h"
@@ -389,7 +389,7 @@ class CourierRendererTest : public testing::Test {
RunPendingTasks();
}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
std::unique_ptr<RendererController> controller_;
std::unique_ptr<RendererClientImpl> render_client_;
std::unique_ptr<FakeMediaResource> media_resource_;
diff --git a/chromium/media/remoting/demuxer_stream_adapter.h b/chromium/media/remoting/demuxer_stream_adapter.h
index c7bf0e9ec13..9ff686eff58 100644
--- a/chromium/media/remoting/demuxer_stream_adapter.h
+++ b/chromium/media/remoting/demuxer_stream_adapter.h
@@ -18,7 +18,7 @@
#include "media/base/demuxer_stream.h"
#include "media/base/video_decoder_config.h"
#include "media/mojo/common/mojo_data_pipe_read_write.h"
-#include "media/mojo/interfaces/remoting.mojom.h"
+#include "media/mojo/mojom/remoting.mojom.h"
#include "media/remoting/rpc_broker.h"
#include "media/remoting/triggers.h"
#include "mojo/public/cpp/system/data_pipe.h"
diff --git a/chromium/media/remoting/end2end_test_renderer.cc b/chromium/media/remoting/end2end_test_renderer.cc
index 8eed302e87c..db4f1c14d6f 100644
--- a/chromium/media/remoting/end2end_test_renderer.cc
+++ b/chromium/media/remoting/end2end_test_renderer.cc
@@ -11,7 +11,7 @@
#include "base/callback.h"
#include "base/threading/thread_task_runner_handle.h"
#include "media/mojo/common/mojo_data_pipe_read_write.h"
-#include "media/mojo/interfaces/remoting.mojom.h"
+#include "media/mojo/mojom/remoting.mojom.h"
#include "media/remoting/courier_renderer.h"
#include "media/remoting/proto_utils.h"
#include "media/remoting/receiver.h"
diff --git a/chromium/media/remoting/fake_remoter.h b/chromium/media/remoting/fake_remoter.h
index 14ab5273cc0..31452fe1e16 100644
--- a/chromium/media/remoting/fake_remoter.h
+++ b/chromium/media/remoting/fake_remoter.h
@@ -7,7 +7,7 @@
#include "media/base/decoder_buffer.h"
#include "media/mojo/common/mojo_data_pipe_read_write.h"
-#include "media/mojo/interfaces/remoting.mojom.h"
+#include "media/mojo/mojom/remoting.mojom.h"
#include "mojo/public/cpp/bindings/binding.h"
namespace media {
diff --git a/chromium/media/remoting/media_remoting_rpc.proto b/chromium/media/remoting/media_remoting_rpc.proto
index 2abb9b3aa41..1a3b1837d25 100644
--- a/chromium/media/remoting/media_remoting_rpc.proto
+++ b/chromium/media/remoting/media_remoting_rpc.proto
@@ -211,7 +211,6 @@ message VideoDecoderConfig {
PIXEL_FORMAT_RGB24 = 12;
PIXEL_FORMAT_RGB32 = 13;
PIXEL_FORMAT_MJPEG = 14;
- PIXEL_FORMAT_MT21 = 15;
PIXEL_FORMAT_YUV420P9 = 16;
PIXEL_FORMAT_YUV420P10 = 17;
PIXEL_FORMAT_YUV422P9 = 18;
diff --git a/chromium/media/remoting/proto_enum_utils.cc b/chromium/media/remoting/proto_enum_utils.cc
index 6620bee2d27..f9851231bc7 100644
--- a/chromium/media/remoting/proto_enum_utils.cc
+++ b/chromium/media/remoting/proto_enum_utils.cc
@@ -352,7 +352,6 @@ base::Optional<VideoPixelFormat> ToMediaVideoPixelFormat(
CASE_RETURN_OTHER(PIXEL_FORMAT_XRGB);
CASE_RETURN_OTHER(PIXEL_FORMAT_RGB24);
CASE_RETURN_OTHER(PIXEL_FORMAT_MJPEG);
- CASE_RETURN_OTHER(PIXEL_FORMAT_MT21);
CASE_RETURN_OTHER(PIXEL_FORMAT_YUV420P9);
CASE_RETURN_OTHER(PIXEL_FORMAT_YUV420P10);
CASE_RETURN_OTHER(PIXEL_FORMAT_YUV422P9);
diff --git a/chromium/media/remoting/renderer_controller.h b/chromium/media/remoting/renderer_controller.h
index cb39abe4d43..c8ada1bc09c 100644
--- a/chromium/media/remoting/renderer_controller.h
+++ b/chromium/media/remoting/renderer_controller.h
@@ -16,8 +16,8 @@
#include "build/buildflag.h"
#include "media/base/media_observer.h"
#include "media/media_buildflags.h"
-#include "media/mojo/interfaces/remoting.mojom.h"
-#include "media/mojo/interfaces/remoting_common.mojom.h"
+#include "media/mojo/mojom/remoting.mojom.h"
+#include "media/mojo/mojom/remoting_common.mojom.h"
#include "media/remoting/metrics.h"
#include "mojo/public/cpp/bindings/binding.h"
diff --git a/chromium/media/renderers/audio_renderer_impl.cc b/chromium/media/renderers/audio_renderer_impl.cc
index 51f596e9fd5..a469a2aef7d 100644
--- a/chromium/media/renderers/audio_renderer_impl.cc
+++ b/chromium/media/renderers/audio_renderer_impl.cc
@@ -558,6 +558,13 @@ void AudioRendererImpl::OnDeviceInfoReceived(
audio_parameters_.set_latency_tag(AudioLatency::LATENCY_PLAYBACK);
+ if (!client_->IsVideoStreamAvailable()) {
+ // When video is not available, audio prefetch can be enabled. See
+ // crbug/988535.
+ audio_parameters_.set_effects(audio_parameters_.effects() |
+ ::media::AudioParameters::AUDIO_PREFETCH);
+ }
+
last_decoded_channel_layout_ =
stream->audio_decoder_config().channel_layout();
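
The AudioRendererImpl change above turns on prefetch by OR-ing a bit into the sink's effects mask when no video stream is present. A minimal sketch of that bitmask update (the flag values are illustrative stand-ins for media::AudioParameters effect bits, not the real constants):

#include <cstdint>

enum AudioEffects : uint32_t {
  NO_EFFECTS = 0u,
  AUDIO_PREFETCH = 1u << 5,  // Illustrative bit position.
};

uint32_t MaybeEnablePrefetch(uint32_t effects, bool video_stream_available) {
  // Prefetch is only safe when audio need not stay in lock-step with video.
  if (!video_stream_available)
    effects |= AUDIO_PREFETCH;
  return effects;
}
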
diff --git a/chromium/media/renderers/audio_renderer_impl_unittest.cc b/chromium/media/renderers/audio_renderer_impl_unittest.cc
index 2849580ff58..227601daa20 100644
--- a/chromium/media/renderers/audio_renderer_impl_unittest.cc
+++ b/chromium/media/renderers/audio_renderer_impl_unittest.cc
@@ -16,8 +16,8 @@
#include "base/single_thread_task_runner.h"
#include "base/strings/stringprintf.h"
#include "base/test/gmock_callback_support.h"
-#include "base/test/scoped_task_environment.h"
#include "base/test/simple_test_tick_clock.h"
+#include "base/test/task_environment.h"
#include "base/threading/thread_task_runner_handle.h"
#include "media/base/audio_buffer_converter.h"
#include "media/base/fake_audio_renderer_sink.h"
@@ -507,7 +507,7 @@ class AudioRendererImplTest : public ::testing::Test, public RendererClient {
// Fixture members.
AudioParameters hardware_params_;
- base::test::ScopedTaskEnvironment task_environment_;
+ base::test::TaskEnvironment task_environment_;
const scoped_refptr<base::SingleThreadTaskRunner> main_thread_task_runner_;
NullMediaLog media_log_;
std::unique_ptr<AudioRendererImpl> renderer_;
diff --git a/chromium/media/renderers/decrypting_renderer_unittest.cc b/chromium/media/renderers/decrypting_renderer_unittest.cc
index 728371c24a7..620eefa17fe 100644
--- a/chromium/media/renderers/decrypting_renderer_unittest.cc
+++ b/chromium/media/renderers/decrypting_renderer_unittest.cc
@@ -9,7 +9,7 @@
#include "base/single_thread_task_runner.h"
#include "base/test/gmock_callback_support.h"
#include "base/test/mock_callback.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "media/base/demuxer_stream.h"
#include "media/base/media_util.h"
#include "media/base/mock_filters.h"
@@ -38,7 +38,7 @@ class DecryptingRendererTest : public testing::Test {
renderer_ = renderer.get();
decrypting_renderer_ = std::make_unique<DecryptingRenderer>(
std::move(renderer), &null_media_log_,
- scoped_task_environment_.GetMainThreadTaskRunner());
+ task_environment_.GetMainThreadTaskRunner());
EXPECT_CALL(cdm_context_, GetDecryptor())
.WillRepeatedly(Return(&decryptor_));
@@ -85,7 +85,7 @@ class DecryptingRendererTest : public testing::Test {
}
bool use_aes_decryptor_ = false;
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
base::MockCallback<CdmAttachedCB> set_cdm_cb_;
base::MockCallback<PipelineStatusCB> renderer_init_cb_;
NullMediaLog null_media_log_;
@@ -108,7 +108,7 @@ TEST_F(DecryptingRendererTest, ClearStreams_NoCdm) {
decrypting_renderer_->Initialize(&media_resource_, &renderer_client_,
renderer_init_cb_.Get());
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_FALSE(decrypting_renderer_->HasDecryptingMediaResourceForTesting());
}
@@ -126,7 +126,7 @@ TEST_F(DecryptingRendererTest, ClearStreams_AesDecryptor) {
decrypting_renderer_->SetCdm(&cdm_context_, set_cdm_cb_.Get());
decrypting_renderer_->Initialize(&media_resource_, &renderer_client_,
renderer_init_cb_.Get());
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_TRUE(decrypting_renderer_->HasDecryptingMediaResourceForTesting());
}
@@ -144,7 +144,7 @@ TEST_F(DecryptingRendererTest, ClearStreams_OtherCdm) {
decrypting_renderer_->Initialize(&media_resource_, &renderer_client_,
renderer_init_cb_.Get());
decrypting_renderer_->SetCdm(&cdm_context_, set_cdm_cb_.Get());
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_FALSE(decrypting_renderer_->HasDecryptingMediaResourceForTesting());
}
@@ -155,7 +155,7 @@ TEST_F(DecryptingRendererTest, EncryptedStreams_NoCdm) {
decrypting_renderer_->Initialize(&media_resource_, &renderer_client_,
renderer_init_cb_.Get());
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_FALSE(decrypting_renderer_->HasDecryptingMediaResourceForTesting());
}
@@ -173,7 +173,7 @@ TEST_F(DecryptingRendererTest, EncryptedStreams_AesDecryptor) {
decrypting_renderer_->Initialize(&media_resource_, &renderer_client_,
renderer_init_cb_.Get());
decrypting_renderer_->SetCdm(&cdm_context_, set_cdm_cb_.Get());
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_TRUE(decrypting_renderer_->HasDecryptingMediaResourceForTesting());
}
@@ -191,7 +191,7 @@ TEST_F(DecryptingRendererTest, EncryptedStreams_OtherCdm) {
decrypting_renderer_->Initialize(&media_resource_, &renderer_client_,
renderer_init_cb_.Get());
decrypting_renderer_->SetCdm(&cdm_context_, set_cdm_cb_.Get());
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_FALSE(decrypting_renderer_->HasDecryptingMediaResourceForTesting());
}
@@ -209,7 +209,7 @@ TEST_F(DecryptingRendererTest, EncryptedStreams_AesDecryptor_CdmSetBeforeInit) {
decrypting_renderer_->SetCdm(&cdm_context_, set_cdm_cb_.Get());
decrypting_renderer_->Initialize(&media_resource_, &renderer_client_,
renderer_init_cb_.Get());
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_TRUE(decrypting_renderer_->HasDecryptingMediaResourceForTesting());
}
@@ -227,7 +227,7 @@ TEST_F(DecryptingRendererTest, EncryptedStreams_OtherCdm_CdmSetBeforeInit) {
decrypting_renderer_->SetCdm(&cdm_context_, set_cdm_cb_.Get());
decrypting_renderer_->Initialize(&media_resource_, &renderer_client_,
renderer_init_cb_.Get());
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_FALSE(decrypting_renderer_->HasDecryptingMediaResourceForTesting());
}
@@ -245,7 +245,7 @@ TEST_F(DecryptingRendererTest, EncryptedAndClearStream_OtherCdm) {
decrypting_renderer_->Initialize(&media_resource_, &renderer_client_,
renderer_init_cb_.Get());
decrypting_renderer_->SetCdm(&cdm_context_, set_cdm_cb_.Get());
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
EXPECT_FALSE(decrypting_renderer_->HasDecryptingMediaResourceForTesting());
}
@@ -259,7 +259,7 @@ TEST_F(DecryptingRendererTest, DecryptingMediaResourceInitFails) {
decrypting_renderer_->Initialize(&media_resource_, &renderer_client_,
renderer_init_cb_.Get());
- scoped_task_environment_.RunUntilIdle();
+ task_environment_.RunUntilIdle();
// Cause a PIPELINE_ERROR_INITIALIZATION_FAILED error to be passed as a
// parameter to the initialization callback.
diff --git a/chromium/media/renderers/default_decoder_factory.cc b/chromium/media/renderers/default_decoder_factory.cc
index 755b06c9c0c..2a605c1204b 100644
--- a/chromium/media/renderers/default_decoder_factory.cc
+++ b/chromium/media/renderers/default_decoder_factory.cc
@@ -10,6 +10,7 @@
#include "base/single_thread_task_runner.h"
#include "build/build_config.h"
#include "build/buildflag.h"
+#include "components/viz/common/gpu/context_provider.h"
#include "media/base/decoder_factory.h"
#include "media/base/media_switches.h"
#include "media/media_buildflags.h"
@@ -104,7 +105,11 @@ void DefaultDecoderFactory::CreateVideoDecoders(
}
#if defined(OS_FUCHSIA)
- video_decoders->push_back(CreateFuchsiaVideoDecoder());
+ if (gpu_factories) {
+ video_decoders->push_back(CreateFuchsiaVideoDecoder(
+ gpu_factories->SharedImageInterface(),
+ gpu_factories->GetMediaContextProvider()->ContextSupport()));
+ }
#endif
#if BUILDFLAG(ENABLE_LIBVPX)
diff --git a/chromium/media/renderers/paint_canvas_video_renderer.cc b/chromium/media/renderers/paint_canvas_video_renderer.cc
index ca675304902..3655fa18557 100644
--- a/chromium/media/renderers/paint_canvas_video_renderer.cc
+++ b/chromium/media/renderers/paint_canvas_video_renderer.cc
@@ -14,7 +14,6 @@
#include "cc/paint/paint_flags.h"
#include "cc/paint/paint_image_builder.h"
#include "components/viz/common/gpu/context_provider.h"
-#include "components/viz/common/resources/resource_format.h"
#include "gpu/GLES2/gl2extchromium.h"
#include "gpu/command_buffer/client/context_support.h"
#include "gpu/command_buffer/client/gles2_interface.h"
@@ -91,7 +90,7 @@ sk_sp<SkImage> YUVGrBackendTexturesToSkImage(
gfx::ColorSpace video_color_space,
VideoPixelFormat video_format,
GrBackendTexture* yuv_textures,
- const GrBackendTexture& result_texture) {
+ GrBackendTexture* result_texture = nullptr) {
// TODO(hubbe): This should really default to rec709.
// https://crbug.com/828599
SkYUVColorSpace color_space = kRec601_SkYUVColorSpace;
@@ -99,62 +98,34 @@ sk_sp<SkImage> YUVGrBackendTexturesToSkImage(
switch (video_format) {
case PIXEL_FORMAT_NV12:
- return SkImage::MakeFromNV12TexturesCopyWithExternalBackend(
- gr_context, color_space, yuv_textures, kTopLeft_GrSurfaceOrigin,
- result_texture);
+ if (result_texture) {
+ return SkImage::MakeFromNV12TexturesCopyWithExternalBackend(
+ gr_context, color_space, yuv_textures, kTopLeft_GrSurfaceOrigin,
+ result_texture[0]);
+ } else {
+ return SkImage::MakeFromNV12TexturesCopy(
+ gr_context, color_space, yuv_textures, kTopLeft_GrSurfaceOrigin);
+ }
case PIXEL_FORMAT_I420:
- return SkImage::MakeFromYUVTexturesCopyWithExternalBackend(
- gr_context, color_space, yuv_textures, kTopLeft_GrSurfaceOrigin,
- result_texture);
+ if (result_texture) {
+ return SkImage::MakeFromYUVTexturesCopyWithExternalBackend(
+ gr_context, color_space, yuv_textures, kTopLeft_GrSurfaceOrigin,
+ result_texture[0]);
+ } else {
+ return SkImage::MakeFromYUVTexturesCopy(
+ gr_context, color_space, yuv_textures, kTopLeft_GrSurfaceOrigin);
+ }
default:
NOTREACHED();
return nullptr;
}
}
-// Helper class that begins/ends access to a mailbox within a scope. The mailbox
-// must have been imported into |texture|.
-class ScopedSharedImageAccess {
- public:
- ScopedSharedImageAccess(
- gpu::gles2::GLES2Interface* gl,
- GLuint texture,
- const gpu::Mailbox& mailbox,
- GLenum access = GL_SHARED_IMAGE_ACCESS_MODE_READ_CHROMIUM)
- : gl(gl), texture(texture), is_shared_image(mailbox.IsSharedImage()) {
- if (is_shared_image)
- gl->BeginSharedImageAccessDirectCHROMIUM(texture, access);
- }
-
- ~ScopedSharedImageAccess() {
- if (is_shared_image)
- gl->EndSharedImageAccessDirectCHROMIUM(texture);
- }
-
- private:
- gpu::gles2::GLES2Interface* gl;
- GLuint texture;
- bool is_shared_image;
-};
-
-// Waits for a sync token and import the mailbox as texture.
-GLuint SynchronizeAndImportMailbox(gpu::gles2::GLES2Interface* gl,
- const gpu::SyncToken& sync_token,
- const gpu::Mailbox& mailbox) {
- gl->WaitSyncTokenCHROMIUM(sync_token.GetConstData());
- if (mailbox.IsSharedImage()) {
- return gl->CreateAndTexStorage2DSharedImageCHROMIUM(mailbox.name);
- } else {
- return gl->CreateAndConsumeTextureCHROMIUM(mailbox.name);
- }
-}
-
static constexpr size_t kNumYUVPlanes = 3;
struct YUVPlaneTextureInfo {
GrGLTextureInfo texture = {0, 0};
GLint minFilter = 0;
GLint magFilter = 0;
- bool is_shared_image = false;
};
using YUVTexturesInfo = std::array<YUVPlaneTextureInfo, kNumYUVPlanes>;
@@ -176,15 +147,9 @@ YUVTexturesInfo GetYUVTexturesInfo(const VideoFrame* video_frame,
mailbox_holder.texture_target == GL_TEXTURE_RECTANGLE_ARB)
<< "Unsupported texture target " << std::hex << std::showbase
<< mailbox_holder.texture_target;
- yuv_textures_info[i].texture.fID = SynchronizeAndImportMailbox(
- gl, mailbox_holder.sync_token, mailbox_holder.mailbox);
- if (mailbox_holder.mailbox.IsSharedImage()) {
- yuv_textures_info[i].is_shared_image = true;
- gl->BeginSharedImageAccessDirectCHROMIUM(
- yuv_textures_info[i].texture.fID,
- GL_SHARED_IMAGE_ACCESS_MODE_READ_CHROMIUM);
- }
-
+ gl->WaitSyncTokenCHROMIUM(mailbox_holder.sync_token.GetConstData());
+ yuv_textures_info[i].texture.fID =
+ gl->CreateAndConsumeTextureCHROMIUM(mailbox_holder.mailbox.name);
yuv_textures_info[i].texture.fTarget = mailbox_holder.texture_target;
yuv_textures_info[i].texture.fFormat = skia_texture_format;
@@ -212,12 +177,47 @@ void DeleteYUVTextures(const VideoFrame* video_frame,
GL_TEXTURE_MIN_FILTER, yuv_textures_info[i].minFilter);
gl->TexParameteri(yuv_textures_info[i].texture.fTarget,
GL_TEXTURE_MAG_FILTER, yuv_textures_info[i].magFilter);
- if (yuv_textures_info[i].is_shared_image)
- gl->EndSharedImageAccessDirectCHROMIUM(yuv_textures_info[i].texture.fID);
gl->DeleteTextures(1, &yuv_textures_info[i].texture.fID);
}
}
+sk_sp<SkImage> NewSkImageFromVideoFrameYUVTextures(
+ const VideoFrame* video_frame,
+ viz::ContextProvider* context_provider) {
+ DCHECK(video_frame->HasTextures());
+ GrContext* gr_context = context_provider->GrContext();
+ DCHECK(gr_context);
+ // TODO: We should compare the DCHECK vs when UpdateLastImage calls this
+ // function. (crbug.com/674185)
+ DCHECK(video_frame->format() == PIXEL_FORMAT_I420 ||
+ video_frame->format() == PIXEL_FORMAT_NV12);
+
+ gfx::Size ya_tex_size = video_frame->coded_size();
+ gfx::Size uv_tex_size((ya_tex_size.width() + 1) / 2,
+ (ya_tex_size.height() + 1) / 2);
+
+ YUVTexturesInfo yuv_textures_info =
+ GetYUVTexturesInfo(video_frame, context_provider);
+
+ GrBackendTexture yuv_textures[3] = {
+ GrBackendTexture(ya_tex_size.width(), ya_tex_size.height(),
+ GrMipMapped::kNo, yuv_textures_info[0].texture),
+ GrBackendTexture(uv_tex_size.width(), uv_tex_size.height(),
+ GrMipMapped::kNo, yuv_textures_info[1].texture),
+ GrBackendTexture(uv_tex_size.width(), uv_tex_size.height(),
+ GrMipMapped::kNo, yuv_textures_info[2].texture),
+ };
+
+ sk_sp<SkImage> img =
+ YUVGrBackendTexturesToSkImage(gr_context, video_frame->ColorSpace(),
+ video_frame->format(), yuv_textures);
+ gr_context->flush();
+
+ DeleteYUVTextures(video_frame, context_provider, yuv_textures_info);
+
+ return img;
+}
+
sk_sp<SkImage> NewSkImageFromVideoFrameYUVTexturesWithExternalBackend(
const VideoFrame* video_frame,
viz::ContextProvider* context_provider,
@@ -251,9 +251,11 @@ sk_sp<SkImage> NewSkImageFromVideoFrameYUVTexturesWithExternalBackend(
backend_texture.fID = texture_id;
backend_texture.fTarget = texture_target;
backend_texture.fFormat = GL_RGBA8;
- GrBackendTexture result_texture(video_frame->coded_size().width(),
- video_frame->coded_size().height(),
- GrMipMapped::kNo, backend_texture);
+ GrBackendTexture result_texture[1] = {
+ GrBackendTexture(video_frame->coded_size().width(),
+ video_frame->coded_size().height(), GrMipMapped::kNo,
+ backend_texture),
+ };
sk_sp<SkImage> img = YUVGrBackendTexturesToSkImage(
gr_context, video_frame->ColorSpace(), video_frame->format(),
@@ -265,15 +267,12 @@ sk_sp<SkImage> NewSkImageFromVideoFrameYUVTexturesWithExternalBackend(
return img;
}
-// Imports a VideoFrame that contains a single mailbox into a newly created GL
-// texture, after synchronization with the sync token. Returns the GL texture.
-// |mailbox| is set to the imported mailbox.
-GLuint ImportVideoFrameSingleMailbox(gpu::gles2::GLES2Interface* gl,
- VideoFrame* video_frame,
- gpu::Mailbox* mailbox) {
- DCHECK(video_frame->HasTextures());
- DCHECK_EQ(video_frame->NumTextures(), 1u);
-
+// Creates a SkImage from a |video_frame| backed by native resources.
+// The SkImage will take ownership of the underlying resource.
+sk_sp<SkImage> NewSkImageFromVideoFrameNative(
+ VideoFrame* video_frame,
+ viz::ContextProvider* context_provider,
+ bool wrap_texture) {
DCHECK(PIXEL_FORMAT_ARGB == video_frame->format() ||
PIXEL_FORMAT_XRGB == video_frame->format() ||
PIXEL_FORMAT_RGB24 == video_frame->format() ||
@@ -286,31 +285,43 @@ GLuint ImportVideoFrameSingleMailbox(gpu::gles2::GLES2Interface* gl,
DCHECK(mailbox_holder.texture_target == GL_TEXTURE_2D ||
mailbox_holder.texture_target == GL_TEXTURE_RECTANGLE_ARB ||
mailbox_holder.texture_target == GL_TEXTURE_EXTERNAL_OES)
+ << "Unsupported texture target " << std::hex << std::showbase
<< mailbox_holder.texture_target;
- *mailbox = mailbox_holder.mailbox;
- return SynchronizeAndImportMailbox(gl, mailbox_holder.sync_token, *mailbox);
-}
-
-// Wraps a GL RGBA texture into a SkImage.
-sk_sp<SkImage> WrapGLTexture(GLenum target,
- GLuint texture_id,
- const gfx::Size& size,
- const gfx::ColorSpace& color_space,
- viz::ContextProvider* context_provider) {
- GrGLTextureInfo texture_info;
- texture_info.fID = texture_id;
- texture_info.fTarget = target;
- // TODO(bsalomon): GrGLTextureInfo::fFormat and SkColorType passed to
- // SkImage factory should reflect video_frame->format(). Update once
- // Skia supports GL_RGB. skbug.com/7533
- texture_info.fFormat = GL_RGBA8_OES;
- GrBackendTexture backend_texture(size.width(), size.height(),
- GrMipMapped::kNo, texture_info);
- return SkImage::MakeFromTexture(
- context_provider->GrContext(), backend_texture, kTopLeft_GrSurfaceOrigin,
- kRGBA_8888_SkColorType, kPremul_SkAlphaType, color_space.ToSkColorSpace(),
- nullptr, nullptr);
+ gpu::gles2::GLES2Interface* gl = context_provider->ContextGL();
+ gl->WaitSyncTokenCHROMIUM(mailbox_holder.sync_token.GetConstData());
+ GLuint frame_texture =
+ gl->CreateAndConsumeTextureCHROMIUM(mailbox_holder.mailbox.name);
+ unsigned source_texture = 0;
+ gfx::ColorSpace color_space_for_skia;
+ if (wrap_texture) {
+ // Fast path where we can avoid a copy by having the returned SkImage
+ // directly wrap the VideoFrame texture.
+ source_texture = frame_texture;
+ color_space_for_skia = video_frame->ColorSpace();
+ } else {
+ gl->GenTextures(1, &source_texture);
+ DCHECK(source_texture);
+ gl->BindTexture(GL_TEXTURE_2D, source_texture);
+ gl->CopyTextureCHROMIUM(frame_texture, 0, GL_TEXTURE_2D, source_texture, 0,
+ GL_RGBA, GL_UNSIGNED_BYTE, false, false, false);
+ gl->DeleteTextures(1, &frame_texture);
+ }
+ GrGLTextureInfo source_texture_info;
+ source_texture_info.fID = source_texture;
+ source_texture_info.fTarget = GL_TEXTURE_2D;
+ // TODO(bsalomon): GrGLTextureInfo::fFormat and SkColorType passed to SkImage
+ // factory should reflect video_frame->format(). Update once Skia supports
+ // GL_RGB.
+ // skbug.com/7533
+ source_texture_info.fFormat = GL_RGBA8_OES;
+ GrBackendTexture source_backend_texture(
+ video_frame->coded_size().width(), video_frame->coded_size().height(),
+ GrMipMapped::kNo, source_texture_info);
+ return SkImage::MakeFromAdoptedTexture(
+ context_provider->GrContext(), source_backend_texture,
+ kTopLeft_GrSurfaceOrigin, kRGBA_8888_SkColorType, kPremul_SkAlphaType,
+ color_space_for_skia.ToSkColorSpace());
}
void VideoFrameCopyTextureOrSubTexture(gpu::gles2::GLES2Interface* gl,
@@ -558,7 +569,8 @@ void PaintCanvasVideoRenderer::Paint(scoped_refptr<VideoFrame> video_frame,
}
SkRect dest;
- dest.set(dest_rect.x(), dest_rect.y(), dest_rect.right(), dest_rect.bottom());
+ dest.setLTRB(dest_rect.x(), dest_rect.y(), dest_rect.right(),
+ dest_rect.bottom());
// Paint black rectangle if there isn't a frame available or the
// frame has an unexpected format.
@@ -582,14 +594,6 @@ void PaintCanvasVideoRenderer::Paint(scoped_refptr<VideoFrame> video_frame,
cc::PaintImage image = cache_->paint_image;
DCHECK(image);
- base::Optional<ScopedSharedImageAccess> source_access;
- if (video_frame->HasTextures()) {
- DCHECK(!cache_->source_mailbox.IsZero());
- DCHECK(cache_->source_texture);
- source_access.emplace(context_provider->ContextGL(), cache_->source_texture,
- cache_->source_mailbox);
- }
-
cc::PaintFlags video_flags;
video_flags.setAlpha(flags.getAlpha());
video_flags.setBlendMode(flags.getBlendMode());
@@ -656,7 +660,6 @@ void PaintCanvasVideoRenderer::Paint(scoped_refptr<VideoFrame> video_frame,
canvas->flush();
if (video_frame->HasTextures()) {
- source_access.reset();
// Synchronize |video_frame| with the read operations in UpdateLastImage(),
// which are triggered by canvas->flush().
SynchronizeVideoFrameRead(std::move(video_frame),
@@ -1029,7 +1032,6 @@ void PaintCanvasVideoRenderer::ConvertVideoFrameToRGBPixels(
case PIXEL_FORMAT_XRGB:
case PIXEL_FORMAT_RGB24:
case PIXEL_FORMAT_MJPEG:
- case PIXEL_FORMAT_MT21:
case PIXEL_FORMAT_ABGR:
case PIXEL_FORMAT_XBGR:
case PIXEL_FORMAT_P016LE:
@@ -1054,16 +1056,19 @@ void PaintCanvasVideoRenderer::CopyVideoFrameSingleTextureToGLTexture(
DCHECK(video_frame);
DCHECK(video_frame->HasTextures());
- gpu::Mailbox mailbox;
+ const gpu::MailboxHolder& mailbox_holder = video_frame->mailbox_holder(0);
+ DCHECK(mailbox_holder.texture_target == GL_TEXTURE_2D ||
+ mailbox_holder.texture_target == GL_TEXTURE_RECTANGLE_ARB ||
+ mailbox_holder.texture_target == GL_TEXTURE_EXTERNAL_OES)
+ << mailbox_holder.texture_target;
+
+ gl->WaitSyncTokenCHROMIUM(mailbox_holder.sync_token.GetConstData());
uint32_t source_texture =
- ImportVideoFrameSingleMailbox(gl, video_frame, &mailbox);
- {
- ScopedSharedImageAccess access(gl, source_texture, mailbox);
- VideoFrameCopyTextureOrSubTexture(
- gl, video_frame->coded_size(), video_frame->visible_rect(),
- source_texture, target, texture, internal_format, format, type, level,
- premultiply_alpha, flip_y);
- }
+ gl->CreateAndConsumeTextureCHROMIUM(mailbox_holder.mailbox.name);
+ VideoFrameCopyTextureOrSubTexture(gl, video_frame->coded_size(),
+ video_frame->visible_rect(), source_texture,
+ target, texture, internal_format, format,
+ type, level, premultiply_alpha, flip_y);
gl->DeleteTextures(1, &source_texture);
gl->ShallowFlushCHROMIUM();
// The caller must call SynchronizeVideoFrameRead() after this operation, but
@@ -1099,24 +1104,36 @@ bool PaintCanvasVideoRenderer::CopyVideoFrameTexturesToGLTexture(
}
DCHECK(cache_);
- DCHECK(!cache_->source_mailbox.IsZero());
+ DCHECK(cache_->source_image);
+ GrBackendTexture backend_texture =
+ cache_->source_image->getBackendTexture(true);
+ if (!backend_texture.isValid())
+ return false;
+ GrGLTextureInfo texture_info;
+ if (!backend_texture.getGLTextureInfo(&texture_info))
+ return false;
+
gpu::gles2::GLES2Interface* canvas_gl = context_provider->ContextGL();
+ gpu::MailboxHolder mailbox_holder;
+ mailbox_holder.texture_target = texture_info.fTarget;
+ canvas_gl->ProduceTextureDirectCHROMIUM(texture_info.fID,
+ mailbox_holder.mailbox.name);
- gpu::SyncToken sync_token;
// Wait for mailbox creation on canvas context before consuming it and
// copying from it on the consumer context.
- canvas_gl->GenUnverifiedSyncTokenCHROMIUM(sync_token.GetData());
-
- uint32_t intermediate_texture = SynchronizeAndImportMailbox(
- destination_gl, sync_token, cache_->source_mailbox);
- {
- ScopedSharedImageAccess access(destination_gl, intermediate_texture,
- cache_->source_mailbox);
- VideoFrameCopyTextureOrSubTexture(
- destination_gl, cache_->coded_size, cache_->visible_rect,
- intermediate_texture, target, texture, internal_format, format, type,
- level, premultiply_alpha, flip_y);
- }
+ canvas_gl->GenUnverifiedSyncTokenCHROMIUM(
+ mailbox_holder.sync_token.GetData());
+
+ destination_gl->WaitSyncTokenCHROMIUM(
+ mailbox_holder.sync_token.GetConstData());
+ uint32_t intermediate_texture =
+ destination_gl->CreateAndConsumeTextureCHROMIUM(
+ mailbox_holder.mailbox.name);
+
+ VideoFrameCopyTextureOrSubTexture(
+ destination_gl, cache_->coded_size, cache_->visible_rect,
+ intermediate_texture, target, texture, internal_format, format, type,
+ level, premultiply_alpha, flip_y);
destination_gl->DeleteTextures(1, &intermediate_texture);
@@ -1283,7 +1300,7 @@ bool PaintCanvasVideoRenderer::CopyVideoFrameYUVDataToGLTexture(
// Upload the CPU-side SkImage into a GPU-side SkImage.
// (Note the original video_frame data is no longer used after this point.)
- yuv_images[plane] = plane_image_cpu->makeTextureImage(gr_context, nullptr);
+ yuv_images[plane] = plane_image_cpu->makeTextureImage(gr_context);
DCHECK(yuv_images[plane]);
// Extract the backend texture from the GPU-side image.
@@ -1305,8 +1322,10 @@ bool PaintCanvasVideoRenderer::CopyVideoFrameYUVDataToGLTexture(
yuv_cache_.mailbox = sii->CreateSharedImage(
viz::ResourceFormat::RGBA_8888, video_frame.coded_size(),
gfx::ColorSpace(), gpu::SHARED_IMAGE_USAGE_GLES2);
- yuv_cache_.texture = SynchronizeAndImportMailbox(
- source_gl, sii->GenUnverifiedSyncToken(), yuv_cache_.mailbox);
+ auto creation_sync_token = sii->GenUnverifiedSyncToken();
+ source_gl->WaitSyncTokenCHROMIUM(creation_sync_token.GetConstData());
+ yuv_cache_.texture = source_gl->CreateAndTexStorage2DSharedImageCHROMIUM(
+ yuv_cache_.mailbox.name);
}
// On the source GL context, do the YUV->RGB conversion using Skia.
@@ -1325,7 +1344,7 @@ bool PaintCanvasVideoRenderer::CopyVideoFrameYUVDataToGLTexture(
sk_sp<SkImage> yuv_image = YUVGrBackendTexturesToSkImage(
gr_context, video_frame.ColorSpace(), video_frame.format(),
- yuv_textures, result_texture);
+ yuv_textures, &result_texture);
gr_context->flush();
source_gl->EndSharedImageAccessDirectCHROMIUM(yuv_cache_.texture);
@@ -1343,19 +1362,25 @@ bool PaintCanvasVideoRenderer::CopyVideoFrameYUVDataToGLTexture(
// On the destination GL context, do a copy (with cropping) into the
// destination texture.
- GLuint intermediate_texture = SynchronizeAndImportMailbox(
- destination_gl, post_conversion_sync_token, yuv_cache_.mailbox);
{
- ScopedSharedImageAccess access(destination_gl, intermediate_texture,
- yuv_cache_.mailbox);
+ destination_gl->WaitSyncTokenCHROMIUM(
+ post_conversion_sync_token.GetConstData());
+ GLuint intermediate_texture =
+ destination_gl->CreateAndTexStorage2DSharedImageCHROMIUM(
+ yuv_cache_.mailbox.name);
+ destination_gl->BeginSharedImageAccessDirectCHROMIUM(
+ intermediate_texture, GL_SHARED_IMAGE_ACCESS_MODE_READ_CHROMIUM);
+
VideoFrameCopyTextureOrSubTexture(
destination_gl, video_frame.coded_size(), video_frame.visible_rect(),
intermediate_texture, target, texture, internal_format, format, type,
level, premultiply_alpha, flip_y);
+
+ destination_gl->EndSharedImageAccessDirectCHROMIUM(intermediate_texture);
+ destination_gl->DeleteTextures(1, &intermediate_texture);
+ destination_gl->GenUnverifiedSyncTokenCHROMIUM(
+ yuv_cache_.sync_token.GetData());
}
- destination_gl->DeleteTextures(1, &intermediate_texture);
- destination_gl->GenUnverifiedSyncTokenCHROMIUM(
- yuv_cache_.sync_token.GetData());
// video_frame->UpdateReleaseSyncToken is not necessary since the video frame
// data we used was CPU-side (IsMappable) to begin with. If there were any
@@ -1448,21 +1473,7 @@ void PaintCanvasVideoRenderer::ResetCache() {
PaintCanvasVideoRenderer::Cache::Cache(int frame_id) : frame_id(frame_id) {}
-PaintCanvasVideoRenderer::Cache::~Cache() {
- if (!context_provider)
- return;
-
- DCHECK(!source_mailbox.IsZero());
- DCHECK(source_texture);
- auto* gl = context_provider->ContextGL();
- gl->DeleteTextures(1, &source_texture);
- if (!wraps_video_frame_texture) {
- gpu::SyncToken sync_token;
- gl->GenUnverifiedSyncTokenCHROMIUM(sync_token.GetData());
- auto* sii = context_provider->SharedImageInterface();
- sii->DestroySharedImage(sync_token, source_mailbox);
- }
-}
+PaintCanvasVideoRenderer::Cache::~Cache() = default;
bool PaintCanvasVideoRenderer::UpdateLastImage(
scoped_refptr<VideoFrame> video_frame,
@@ -1470,13 +1481,14 @@ bool PaintCanvasVideoRenderer::UpdateLastImage(
bool allow_wrap_texture) {
DCHECK(!cache_ || !cache_->wraps_video_frame_texture);
if (!cache_ || video_frame->unique_id() != cache_->frame_id ||
- cache_->source_mailbox.IsZero()) {
+ !cache_->source_image) {
+ cache_.emplace(video_frame->unique_id());
+
auto paint_image_builder =
cc::PaintImageBuilder::WithDefault()
.set_id(renderer_stable_id_)
.set_animation_type(cc::PaintImage::AnimationType::VIDEO)
.set_completion_state(cc::PaintImage::CompletionState::DONE);
-
// Generate a new image.
// Note: Skia will hold onto |video_frame| via |video_generator| only when
// |video_frame| is software.
@@ -1485,73 +1497,27 @@ bool PaintCanvasVideoRenderer::UpdateLastImage(
if (video_frame->HasTextures()) {
DCHECK(context_provider);
DCHECK(context_provider->GrContext());
- auto* gl = context_provider->ContextGL();
- DCHECK(gl);
-
- sk_sp<SkImage> source_image;
-
- if (allow_wrap_texture && video_frame->NumTextures() == 1) {
- cache_.emplace(video_frame->unique_id());
- cache_->source_texture = ImportVideoFrameSingleMailbox(
- gl, video_frame.get(), &cache_->source_mailbox);
- cache_->wraps_video_frame_texture = true;
- source_image =
- WrapGLTexture(video_frame->mailbox_holder(0).texture_target,
- cache_->source_texture, video_frame->coded_size(),
- video_frame->ColorSpace(), context_provider);
+ DCHECK(context_provider->ContextGL());
+ if (video_frame->NumTextures() > 1) {
+ cache_->source_image = NewSkImageFromVideoFrameYUVTextures(
+ video_frame.get(), context_provider);
} else {
- if (cache_ && cache_->context_provider == context_provider &&
- cache_->coded_size == video_frame->coded_size()) {
- // We can reuse the shared image from the previous cache.
- cache_->frame_id = video_frame->unique_id();
- } else {
- cache_.emplace(video_frame->unique_id());
- auto* sii = context_provider->SharedImageInterface();
- cache_->source_mailbox = sii->CreateSharedImage(
- viz::ResourceFormat::RGBA_8888, video_frame->coded_size(),
- gfx::ColorSpace(), gpu::SHARED_IMAGE_USAGE_GLES2);
- cache_->source_texture = SynchronizeAndImportMailbox(
- gl, sii->GenUnverifiedSyncToken(), cache_->source_mailbox);
- }
- ScopedSharedImageAccess dest_access(
- gl, cache_->source_texture, cache_->source_mailbox,
- GL_SHARED_IMAGE_ACCESS_MODE_READWRITE_CHROMIUM);
- if (video_frame->NumTextures() == 1) {
- gpu::Mailbox mailbox;
- GLuint frame_texture =
- ImportVideoFrameSingleMailbox(gl, video_frame.get(), &mailbox);
- {
- ScopedSharedImageAccess access(gl, frame_texture, mailbox);
- gl->CopySubTextureCHROMIUM(frame_texture, 0, GL_TEXTURE_2D,
- cache_->source_texture, 0, 0, 0, 0, 0,
- video_frame->coded_size().width(),
- video_frame->coded_size().height(),
- GL_FALSE, GL_FALSE, GL_FALSE);
- }
- gl->DeleteTextures(1, &frame_texture);
- source_image = WrapGLTexture(GL_TEXTURE_2D, cache_->source_texture,
- video_frame->coded_size(),
- gfx::ColorSpace(), context_provider);
- } else {
- source_image = NewSkImageFromVideoFrameYUVTexturesWithExternalBackend(
- video_frame.get(), context_provider, GL_TEXTURE_2D,
- cache_->source_texture);
- }
- context_provider->GrContext()->flush();
+ cache_->source_image = NewSkImageFromVideoFrameNative(
+ video_frame.get(), context_provider, allow_wrap_texture);
+ cache_->wraps_video_frame_texture = allow_wrap_texture;
}
- if (!source_image) {
+ if (!cache_->source_image) {
// Couldn't create the SkImage.
cache_.reset();
return false;
}
- cache_->context_provider = context_provider;
cache_->coded_size = video_frame->coded_size();
cache_->visible_rect = video_frame->visible_rect();
paint_image_builder.set_image(
- source_image->makeSubset(gfx::RectToSkIRect(cache_->visible_rect)),
+ cache_->source_image->makeSubset(
+ gfx::RectToSkIRect(cache_->visible_rect)),
cc::PaintImage::GetNextContentId());
} else {
- cache_.emplace(video_frame->unique_id());
paint_image_builder.set_paint_image_generator(
sk_make_sp<VideoImageGenerator>(video_frame));
}
@@ -1589,11 +1555,11 @@ bool PaintCanvasVideoRenderer::PrepareVideoFrame(
DCHECK(context_provider);
DCHECK(context_provider->GrContext());
DCHECK(context_provider->ContextGL());
- sk_sp<SkImage> source_image;
if (video_frame->NumTextures() > 1) {
- source_image = NewSkImageFromVideoFrameYUVTexturesWithExternalBackend(
- video_frame.get(), context_provider, textureTarget, texture);
- if (!source_image) {
+ cache_->source_image =
+ NewSkImageFromVideoFrameYUVTexturesWithExternalBackend(
+ video_frame.get(), context_provider, textureTarget, texture);
+ if (!cache_->source_image) {
// Couldn't create the SkImage.
cache_.reset();
return false;
@@ -1605,9 +1571,9 @@ bool PaintCanvasVideoRenderer::PrepareVideoFrame(
}
cache_->coded_size = video_frame->coded_size();
cache_->visible_rect = video_frame->visible_rect();
- paint_image_builder.set_image(
- source_image->makeSubset(gfx::RectToSkIRect(cache_->visible_rect)),
- cc::PaintImage::GetNextContentId());
+ paint_image_builder.set_image(cache_->source_image->makeSubset(
+ gfx::RectToSkIRect(cache_->visible_rect)),
+ cc::PaintImage::GetNextContentId());
} else {
paint_image_builder.set_paint_image_generator(
sk_make_sp<VideoImageGenerator>(video_frame));
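
For orientation, the new single-texture path above (NewSkImageFromVideoFrameNative) ultimately hands the imported GL texture to Skia through SkImage::MakeFromAdoptedTexture, so Skia owns and later deletes that texture. A minimal stand-alone sketch of just that step, assuming an already-imported RGBA GL_TEXTURE_2D and a live GrContext (the helper name and parameters here are illustrative, not part of the patch):

  #include "third_party/skia/include/core/SkImage.h"
  #include "third_party/skia/include/gpu/GrBackendSurface.h"
  #include "third_party/skia/include/gpu/GrContext.h"

  // Illustrative helper: adopt an RGBA GL texture into an SkImage. Skia takes
  // ownership and deletes |texture_id| when the image is destroyed.
  sk_sp<SkImage> AdoptRgbaGlTexture(GrContext* gr_context,
                                    unsigned texture_id,
                                    int width,
                                    int height,
                                    sk_sp<SkColorSpace> color_space) {
    GrGLTextureInfo info;
    info.fID = texture_id;
    info.fTarget = 0x0DE1;  // GL_TEXTURE_2D
    info.fFormat = 0x8058;  // GL_RGBA8_OES, matching the fFormat used above
    GrBackendTexture backend(width, height, GrMipMapped::kNo, info);
    return SkImage::MakeFromAdoptedTexture(
        gr_context, backend, kTopLeft_GrSurfaceOrigin, kRGBA_8888_SkColorType,
        kPremul_SkAlphaType, std::move(color_space));
  }
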
diff --git a/chromium/media/renderers/paint_canvas_video_renderer.h b/chromium/media/renderers/paint_canvas_video_renderer.h
index 2628f28725d..4664b93f567 100644
--- a/chromium/media/renderers/paint_canvas_video_renderer.h
+++ b/chromium/media/renderers/paint_canvas_video_renderer.h
@@ -17,7 +17,6 @@
#include "cc/paint/paint_canvas.h"
#include "cc/paint/paint_flags.h"
#include "cc/paint/paint_image.h"
-#include "gpu/command_buffer/common/mailbox.h"
#include "media/base/media_export.h"
#include "media/base/timestamp_constants.h"
#include "media/base/video_frame.h"
@@ -199,22 +198,14 @@ class MEDIA_EXPORT PaintCanvasVideoRenderer {
// to the visible size of the VideoFrame. Its contents are generated lazily.
cc::PaintImage paint_image;
- // The context provider used to generate |source_mailbox| and
- // |source_texture|. This is only set if the VideoFrame was texture-backed.
- scoped_refptr<viz::ContextProvider> context_provider;
-
- // The mailbox for the source texture. This can be either the source
- // VideoFrame's texture (if |wraps_video_frame_texture| is true) or a newly
- // allocated shared image (if |wraps_video_frame_texture| is false) if a
- // copy or conversion was necessary.
- // This is only set if the VideoFrame was texture-backed.
- gpu::Mailbox source_mailbox;
-
- // The texture ID created when importing |source_mailbox|.
+ // An SkImage that contains the source texture for |paint_image|. This can
+ // be either the source VideoFrame's texture (if |wraps_video_frame_texture|
+ // is true) or a newly allocated texture (if |wraps_video_frame_texture| is
+ // false) when a copy or conversion was necessary.
// This is only set if the VideoFrame was texture-backed.
- uint32_t source_texture = 0;
+ sk_sp<SkImage> source_image;
- // The allocated size of |source_mailbox|.
+ // The allocated size of |source_image|.
// This is only set if the VideoFrame was texture-backed.
gfx::Size coded_size;
@@ -223,8 +214,8 @@ class MEDIA_EXPORT PaintCanvasVideoRenderer {
// This is only set if the VideoFrame was texture-backed.
gfx::Rect visible_rect;
- // Whether |source_mailbox| directly points to a texture of the VideoFrame
- // (if true), or to an allocated shared image (if false).
+ // Whether |source_image| directly points to a texture of the VideoFrame
+ // (if true), or to an allocated texture (if false).
bool wraps_video_frame_texture = false;
};
diff --git a/chromium/media/renderers/paint_canvas_video_renderer_unittest.cc b/chromium/media/renderers/paint_canvas_video_renderer_unittest.cc
index e94736cbcef..ac88538262b 100644
--- a/chromium/media/renderers/paint_canvas_video_renderer_unittest.cc
+++ b/chromium/media/renderers/paint_canvas_video_renderer_unittest.cc
@@ -8,7 +8,7 @@
#include "base/bind.h"
#include "base/macros.h"
#include "base/memory/aligned_memory.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "cc/paint/paint_flags.h"
#include "cc/paint/skia_paint_canvas.h"
#include "components/viz/common/gpu/context_provider.h"
@@ -384,7 +384,7 @@ class PaintCanvasVideoRendererTest : public testing::Test {
SkBitmap bitmap_;
cc::SkiaPaintCanvas target_canvas_;
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
DISALLOW_COPY_AND_ASSIGN(PaintCanvasVideoRendererTest);
};
diff --git a/chromium/media/renderers/renderer_impl.cc b/chromium/media/renderers/renderer_impl.cc
index 7f36ded2a2a..8a12f7751db 100644
--- a/chromium/media/renderers/renderer_impl.cc
+++ b/chromium/media/renderers/renderer_impl.cc
@@ -35,8 +35,10 @@ static const int kDefaultVideoUnderflowThresholdMs = 3000;
class RendererImpl::RendererClientInternal final : public RendererClient {
public:
- RendererClientInternal(DemuxerStream::Type type, RendererImpl* renderer)
- : type_(type), renderer_(renderer) {
+ RendererClientInternal(DemuxerStream::Type type,
+ RendererImpl* renderer,
+ MediaResource* media_resource)
+ : type_(type), renderer_(renderer), media_resource_(media_resource) {
DCHECK((type_ == DemuxerStream::AUDIO) || (type_ == DemuxerStream::VIDEO));
}
@@ -67,9 +69,14 @@ class RendererImpl::RendererClientInternal final : public RendererClient {
renderer_->OnVideoOpacityChange(opaque);
}
+ bool IsVideoStreamAvailable() override {
+ return media_resource_->GetFirstStream(::media::DemuxerStream::VIDEO);
+ }
+
private:
DemuxerStream::Type type_;
RendererImpl* renderer_;
+ MediaResource* media_resource_;
};
RendererImpl::RendererImpl(
@@ -369,7 +376,7 @@ void RendererImpl::InitializeAudioRenderer() {
current_audio_stream_ = audio_stream;
audio_renderer_client_.reset(
- new RendererClientInternal(DemuxerStream::AUDIO, this));
+ new RendererClientInternal(DemuxerStream::AUDIO, this, media_resource_));
// Note: After the initialization of a renderer, error events from it may
// happen at any time and all future calls must guard against STATE_ERROR.
audio_renderer_->Initialize(audio_stream, cdm_context_,
@@ -420,7 +427,7 @@ void RendererImpl::InitializeVideoRenderer() {
current_video_stream_ = video_stream;
video_renderer_client_.reset(
- new RendererClientInternal(DemuxerStream::VIDEO, this));
+ new RendererClientInternal(DemuxerStream::VIDEO, this, media_resource_));
video_renderer_->Initialize(
video_stream, cdm_context_, video_renderer_client_.get(),
base::Bind(&RendererImpl::GetWallClockTimes, base::Unretained(this)),
diff --git a/chromium/media/renderers/renderer_impl_unittest.cc b/chromium/media/renderers/renderer_impl_unittest.cc
index 30b46649a46..9e1da061d25 100644
--- a/chromium/media/renderers/renderer_impl_unittest.cc
+++ b/chromium/media/renderers/renderer_impl_unittest.cc
@@ -13,8 +13,8 @@
#include "base/optional.h"
#include "base/run_loop.h"
#include "base/test/gmock_callback_support.h"
-#include "base/test/scoped_task_environment.h"
#include "base/test/simple_test_tick_clock.h"
+#include "base/test/task_environment.h"
#include "base/threading/thread_task_runner_handle.h"
#include "media/base/mock_filters.h"
#include "media/base/test_helpers.h"
@@ -82,7 +82,7 @@ class RendererImplTest : public ::testing::Test {
video_renderer_(new StrictMock<MockVideoRenderer>()),
audio_renderer_(new StrictMock<MockAudioRenderer>()),
renderer_impl_(
- new RendererImpl(scoped_task_environment_.GetMainThreadTaskRunner(),
+ new RendererImpl(task_environment_.GetMainThreadTaskRunner(),
std::unique_ptr<AudioRenderer>(audio_renderer_),
std::unique_ptr<VideoRenderer>(video_renderer_))),
cdm_context_(new StrictMock<MockCdmContext>()),
@@ -346,7 +346,7 @@ class RendererImplTest : public ::testing::Test {
}
// Fixture members.
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
StrictMock<CallbackHelper> callbacks_;
base::SimpleTestTickClock test_tick_clock_;
diff --git a/chromium/media/renderers/video_renderer_impl_unittest.cc b/chromium/media/renderers/video_renderer_impl_unittest.cc
index 3caab785841..c59374cc3f4 100644
--- a/chromium/media/renderers/video_renderer_impl_unittest.cc
+++ b/chromium/media/renderers/video_renderer_impl_unittest.cc
@@ -21,8 +21,8 @@
#include "base/strings/stringprintf.h"
#include "base/synchronization/lock.h"
#include "base/test/gmock_callback_support.h"
-#include "base/test/scoped_task_environment.h"
#include "base/test/simple_test_tick_clock.h"
+#include "base/test/task_environment.h"
#include "base/threading/thread_task_runner_handle.h"
#include "media/base/data_buffer.h"
#include "media/base/limits.h"
@@ -304,7 +304,7 @@ class VideoRendererImplTest : public testing::Test {
MOCK_METHOD0(OnSimulateDecodeDelay, base::TimeDelta(void));
protected:
- base::test::ScopedTaskEnvironment task_environment_;
+ base::test::TaskEnvironment task_environment_;
NullMediaLog media_log_;
// Fixture members.
diff --git a/chromium/media/renderers/video_resource_updater.cc b/chromium/media/renderers/video_resource_updater.cc
index 9108d4f8a47..de1357af5ce 100644
--- a/chromium/media/renderers/video_resource_updater.cc
+++ b/chromium/media/renderers/video_resource_updater.cc
@@ -70,7 +70,13 @@ VideoFrameResourceType ExternalResourceTypeForHardwarePlanes(
case PIXEL_FORMAT_UYVY:
case PIXEL_FORMAT_ABGR:
DCHECK_EQ(num_textures, 1);
- buffer_formats[0] = gfx::BufferFormat::RGBA_8888;
+ // This maps the VideoPixelFormat back to the GMB BufferFormat.
+ // NOTE: ABGR == RGBA and ARGB == BGRA; they differ only in byte order.
+ // See the VideoFormat() function in gpu_memory_buffer_video_frame_pool:
+ // https://cs.chromium.org/chromium/src/media/video/gpu_memory_buffer_video_frame_pool.cc?type=cs&g=0&l=281
+ buffer_formats[0] = (format == PIXEL_FORMAT_ABGR)
+ ? gfx::BufferFormat::RGBA_8888
+ : gfx::BufferFormat::BGRA_8888;
switch (target) {
case GL_TEXTURE_EXTERNAL_OES:
if (use_stream_video_draw_quad)
@@ -87,26 +93,28 @@ VideoFrameResourceType ExternalResourceTypeForHardwarePlanes(
}
break;
case PIXEL_FORMAT_I420:
- DCHECK(num_textures == 3);
+ DCHECK_EQ(num_textures, 3);
buffer_formats[0] = gfx::BufferFormat::R_8;
buffer_formats[1] = gfx::BufferFormat::R_8;
buffer_formats[2] = gfx::BufferFormat::R_8;
return VideoFrameResourceType::YUV;
+
case PIXEL_FORMAT_NV12:
- DCHECK(target == GL_TEXTURE_EXTERNAL_OES || target == GL_TEXTURE_2D ||
- target == GL_TEXTURE_RECTANGLE_ARB)
+ // |target| is set to 0 for Vulkan textures.
+ DCHECK(target == 0 || target == GL_TEXTURE_EXTERNAL_OES ||
+ target == GL_TEXTURE_2D || target == GL_TEXTURE_RECTANGLE_ARB)
<< "Unsupported target " << gl::GLEnums::GetStringEnum(target);
- DCHECK(num_textures <= 2);
- // Single plane textures can be sampled as RGB.
- if (num_textures == 2) {
- buffer_formats[0] = gfx::BufferFormat::R_8;
- buffer_formats[1] = gfx::BufferFormat::RG_88;
- return VideoFrameResourceType::YUV;
+ if (num_textures == 1) {
+ // Single-texture multi-planar frames can be sampled as RGB.
+ buffer_formats[0] = gfx::BufferFormat::YUV_420_BIPLANAR;
+ return VideoFrameResourceType::RGB;
}
- buffer_formats[0] = gfx::BufferFormat::YUV_420_BIPLANAR;
- return VideoFrameResourceType::RGB;
+ buffer_formats[0] = gfx::BufferFormat::R_8;
+ buffer_formats[1] = gfx::BufferFormat::RG_88;
+ return VideoFrameResourceType::YUV;
+
case PIXEL_FORMAT_YV12:
case PIXEL_FORMAT_I422:
case PIXEL_FORMAT_I444:
@@ -115,7 +123,6 @@ VideoFrameResourceType ExternalResourceTypeForHardwarePlanes(
case PIXEL_FORMAT_YUY2:
case PIXEL_FORMAT_RGB24:
case PIXEL_FORMAT_MJPEG:
- case PIXEL_FORMAT_MT21:
case PIXEL_FORMAT_YUV420P9:
case PIXEL_FORMAT_YUV422P9:
case PIXEL_FORMAT_YUV444P9:
diff --git a/chromium/media/renderers/video_resource_updater_unittest.cc b/chromium/media/renderers/video_resource_updater_unittest.cc
index 8e4edf88b2e..cdec9b5faf4 100644
--- a/chromium/media/renderers/video_resource_updater_unittest.cc
+++ b/chromium/media/renderers/video_resource_updater_unittest.cc
@@ -9,7 +9,7 @@
#include "base/bind.h"
#include "base/memory/read_only_shared_memory_region.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "components/viz/client/client_resource_provider.h"
#include "components/viz/client/shared_bitmap_reporter.h"
#include "components/viz/test/fake_output_surface.h"
@@ -245,7 +245,7 @@ class VideoResourceUpdaterTest : public testing::Test {
// VideoResourceUpdater registers as a MemoryDumpProvider, which requires
// a TaskRunner.
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
UploadCounterGLES2Interface* gl_;
scoped_refptr<viz::TestContextProvider> context_provider_;
FakeSharedBitmapReporter shared_bitmap_reporter_;
diff --git a/chromium/media/test/BUILD.gn b/chromium/media/test/BUILD.gn
index 9b58d43acfc..8014803751d 100644
--- a/chromium/media/test/BUILD.gn
+++ b/chromium/media/test/BUILD.gn
@@ -125,8 +125,8 @@ source_set("mojo_pipeline_integration_tests") {
"//media:media_buildflags",
"//media:test_support",
"//media/mojo/clients",
- "//media/mojo/interfaces",
- "//media/mojo/interfaces:constants",
+ "//media/mojo/mojom",
+ "//media/mojo/mojom:constants",
"//media/mojo/services",
"//media/mojo/services:media_manifest",
"//services/service_manager/public/cpp",
diff --git a/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc b/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc
index 80d81ba745f..2ed39255cbb 100644
--- a/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc
+++ b/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc
@@ -239,9 +239,6 @@ gfx::BufferFormat GpuMemoryBufferFormat(
case GpuVideoAcceleratorFactories::OutputFormat::NV12_DUAL_GMB:
DCHECK_LE(plane, 1u);
return plane == 0 ? gfx::BufferFormat::R_8 : gfx::BufferFormat::RG_88;
- case GpuVideoAcceleratorFactories::OutputFormat::UYVY:
- DCHECK_EQ(0u, plane);
- return gfx::BufferFormat::UYVY_422;
case GpuVideoAcceleratorFactories::OutputFormat::XR30:
DCHECK_EQ(0u, plane);
return gfx::BufferFormat::BGRX_1010102;
@@ -265,7 +262,6 @@ gfx::BufferFormat GpuMemoryBufferFormat(
size_t PlanesPerCopy(GpuVideoAcceleratorFactories::OutputFormat format) {
switch (format) {
case GpuVideoAcceleratorFactories::OutputFormat::I420:
- case GpuVideoAcceleratorFactories::OutputFormat::UYVY:
case GpuVideoAcceleratorFactories::OutputFormat::RGBA:
case GpuVideoAcceleratorFactories::OutputFormat::BGRA:
return 1;
@@ -290,8 +286,6 @@ VideoPixelFormat VideoFormat(
case GpuVideoAcceleratorFactories::OutputFormat::NV12_SINGLE_GMB:
case GpuVideoAcceleratorFactories::OutputFormat::NV12_DUAL_GMB:
return PIXEL_FORMAT_NV12;
- case GpuVideoAcceleratorFactories::OutputFormat::UYVY:
- return PIXEL_FORMAT_UYVY;
case GpuVideoAcceleratorFactories::OutputFormat::XR30:
case GpuVideoAcceleratorFactories::OutputFormat::BGRA:
return PIXEL_FORMAT_ARGB;
@@ -314,8 +308,6 @@ size_t NumGpuMemoryBuffers(GpuVideoAcceleratorFactories::OutputFormat format) {
return 1;
case GpuVideoAcceleratorFactories::OutputFormat::NV12_DUAL_GMB:
return 2;
- case GpuVideoAcceleratorFactories::OutputFormat::UYVY:
- return 1;
case GpuVideoAcceleratorFactories::OutputFormat::XR30:
case GpuVideoAcceleratorFactories::OutputFormat::XB30:
return 1;
@@ -413,38 +405,6 @@ void CopyRowsToNV12Buffer(int first_row,
rows);
}
-void CopyRowsToUYVYBuffer(int first_row,
- int rows,
- int width,
- const VideoFrame* source_frame,
- uint8_t* output,
- int dest_stride,
- base::OnceClosure done) {
- base::ScopedClosureRunner done_runner(std::move(done));
- TRACE_EVENT2("media", "CopyRowsToUYVYBuffer", "bytes_per_row", width * 2,
- "rows", rows);
-
- if (!output)
- return;
-
- DCHECK_NE(dest_stride, 0);
- DCHECK_LE(width, std::abs(dest_stride / 2));
- DCHECK_EQ(0, first_row % 2);
- DCHECK(source_frame->format() == PIXEL_FORMAT_I420 ||
- source_frame->format() == PIXEL_FORMAT_YV12);
- libyuv::I420ToUYVY(
- source_frame->visible_data(VideoFrame::kYPlane) +
- first_row * source_frame->stride(VideoFrame::kYPlane),
- source_frame->stride(VideoFrame::kYPlane),
- source_frame->visible_data(VideoFrame::kUPlane) +
- first_row / 2 * source_frame->stride(VideoFrame::kUPlane),
- source_frame->stride(VideoFrame::kUPlane),
- source_frame->visible_data(VideoFrame::kVPlane) +
- first_row / 2 * source_frame->stride(VideoFrame::kVPlane),
- source_frame->stride(VideoFrame::kVPlane),
- output + first_row * dest_stride, dest_stride, width, rows);
-}
-
void CopyRowsToRGB10Buffer(bool is_argb,
int first_row,
int rows,
@@ -558,7 +518,6 @@ gfx::Size CodedSize(const VideoFrame* video_frame,
output = gfx::Size((video_frame->visible_rect().width() + 1) & ~1,
(video_frame->visible_rect().height() + 1) & ~1);
break;
- case GpuVideoAcceleratorFactories::OutputFormat::UYVY:
case GpuVideoAcceleratorFactories::OutputFormat::XR30:
case GpuVideoAcceleratorFactories::OutputFormat::XB30:
case GpuVideoAcceleratorFactories::OutputFormat::RGBA:
@@ -634,7 +593,6 @@ void GpuMemoryBufferVideoFramePool::PoolImpl::CreateHardwareFrame(
case PIXEL_FORMAT_XRGB:
case PIXEL_FORMAT_RGB24:
case PIXEL_FORMAT_MJPEG:
- case PIXEL_FORMAT_MT21:
case PIXEL_FORMAT_YUV422P9:
case PIXEL_FORMAT_YUV444P9:
case PIXEL_FORMAT_YUV422P10:
@@ -852,18 +810,6 @@ void GpuMemoryBufferVideoFramePool::PoolImpl::CopyVideoFrameToGpuMemoryBuffers(
break;
}
- case GpuVideoAcceleratorFactories::OutputFormat::UYVY:
- // Using base::Unretained(video_frame) here is safe because |barrier|
- // keeps refptr of |video_frame| until all copy tasks are done.
- worker_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&CopyRowsToUYVYBuffer, row, rows_to_copy,
- coded_size.width(),
- base::Unretained(video_frame.get()),
- static_cast<uint8_t*>(buffer->memory(0)),
- buffer->stride(0), barrier));
- break;
-
case GpuVideoAcceleratorFactories::OutputFormat::XR30:
case GpuVideoAcceleratorFactories::OutputFormat::XB30: {
const bool is_argb = output_format_ ==
@@ -967,19 +913,21 @@ void GpuMemoryBufferVideoFramePool::PoolImpl::
frame->set_color_space(video_frame->ColorSpace());
bool allow_overlay = false;
+#if defined(OS_WIN)
+ // Windows direct composition path only supports dual GMB NV12 video overlays.
+ allow_overlay = (output_format_ ==
+ GpuVideoAcceleratorFactories::OutputFormat::NV12_DUAL_GMB);
+#else
switch (output_format_) {
case GpuVideoAcceleratorFactories::OutputFormat::I420:
allow_overlay =
video_frame->metadata()->IsTrue(VideoFrameMetadata::ALLOW_OVERLAY);
break;
case GpuVideoAcceleratorFactories::OutputFormat::NV12_SINGLE_GMB:
- case GpuVideoAcceleratorFactories::OutputFormat::UYVY:
allow_overlay = true;
break;
case GpuVideoAcceleratorFactories::OutputFormat::NV12_DUAL_GMB:
-#if defined(OS_WIN)
- allow_overlay = true;
-#endif
+ // Only used on Windows where we can't use single NV12 textures.
break;
case GpuVideoAcceleratorFactories::OutputFormat::XR30:
case GpuVideoAcceleratorFactories::OutputFormat::XB30:
@@ -998,7 +946,7 @@ void GpuMemoryBufferVideoFramePool::PoolImpl::
case GpuVideoAcceleratorFactories::OutputFormat::UNDEFINED:
break;
}
-
+#endif // OS_WIN
frame->metadata()->MergeMetadataFrom(video_frame->metadata());
frame->metadata()->SetBoolean(VideoFrameMetadata::ALLOW_OVERLAY,
allow_overlay);
diff --git a/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc b/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc
index fac4b284e0b..1e6098ec4d7 100644
--- a/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc
+++ b/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc
@@ -269,24 +269,6 @@ TEST_F(GpuMemoryBufferVideoFramePoolTest, DropResourceWhenSizeIsDifferent) {
EXPECT_TRUE(sii_->CheckSharedImageExists(frame->mailbox_holder(i).mailbox));
}
-TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareUYUVFrame) {
- scoped_refptr<VideoFrame> software_frame = CreateTestYUVVideoFrame(10);
- scoped_refptr<VideoFrame> frame;
- mock_gpu_factories_->SetVideoFrameOutputFormat(
- media::GpuVideoAcceleratorFactories::OutputFormat::UYVY);
- gpu_memory_buffer_pool_->MaybeCreateHardwareFrame(
- software_frame, base::BindOnce(MaybeCreateHardwareFrameCallback, &frame));
-
- RunUntilIdle();
-
- EXPECT_NE(software_frame.get(), frame.get());
- EXPECT_EQ(PIXEL_FORMAT_UYVY, frame->format());
- EXPECT_EQ(1u, frame->NumTextures());
- EXPECT_EQ(1u, sii_->shared_image_count());
- EXPECT_TRUE(frame->metadata()->IsTrue(
- media::VideoFrameMetadata::READ_LOCK_FENCES_ENABLED));
-}
-
TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareNV12Frame) {
scoped_refptr<VideoFrame> software_frame = CreateTestYUVVideoFrame(10);
scoped_refptr<VideoFrame> frame;
@@ -544,7 +526,7 @@ TEST_F(GpuMemoryBufferVideoFramePoolTest, StaleFramesAreExpired) {
// copy in flight at any time.
TEST_F(GpuMemoryBufferVideoFramePoolTest, AtMostOneCopyInFlight) {
mock_gpu_factories_->SetVideoFrameOutputFormat(
- media::GpuVideoAcceleratorFactories::OutputFormat::UYVY);
+ media::GpuVideoAcceleratorFactories::OutputFormat::NV12_SINGLE_GMB);
scoped_refptr<VideoFrame> software_frame_1 = CreateTestYUVVideoFrame(10);
scoped_refptr<VideoFrame> frame_1;
diff --git a/chromium/media/video/gpu_video_accelerator_factories.h b/chromium/media/video/gpu_video_accelerator_factories.h
index 36eed9096a3..6ab560711be 100644
--- a/chromium/media/video/gpu_video_accelerator_factories.h
+++ b/chromium/media/video/gpu_video_accelerator_factories.h
@@ -13,6 +13,7 @@
#include "base/macros.h"
#include "base/memory/ref_counted.h"
+#include "base/memory/unsafe_shared_memory_region.h"
#include "base/unguessable_token.h"
#include "gpu/command_buffer/client/gles2_interface.h"
#include "gpu/command_buffer/common/mailbox.h"
@@ -40,7 +41,7 @@ class SharedImageInterface;
}
namespace viz {
-class ContextProviderCommandBuffer;
+class ContextProvider;
} // namespace viz
namespace media {
@@ -60,7 +61,6 @@ class MEDIA_EXPORT GpuVideoAcceleratorFactories {
enum class OutputFormat {
UNDEFINED = 0, // Unset state
I420, // 3 x R8 GMBs
- UYVY, // One 422 GMB
NV12_SINGLE_GMB, // One NV12 GMB
NV12_DUAL_GMB, // One R8, one RG88 GMB
XR30, // 10:10:10:2 BGRX in one GMB (Usually Mac)
@@ -129,6 +129,10 @@ class MEDIA_EXPORT GpuVideoAcceleratorFactories {
virtual std::unique_ptr<base::SharedMemory> CreateSharedMemory(
size_t size) = 0;
+ // Allocates and returns an unsafe shared memory region.
+ virtual base::UnsafeSharedMemoryRegion CreateSharedMemoryRegion(
+ size_t size) = 0;
+
// Returns the task runner the video accelerator runs on.
virtual scoped_refptr<base::SingleThreadTaskRunner> GetTaskRunner() = 0;
@@ -136,8 +140,7 @@ class MEDIA_EXPORT GpuVideoAcceleratorFactories {
virtual VideoEncodeAccelerator::SupportedProfiles
GetVideoEncodeAcceleratorSupportedProfiles() = 0;
- virtual scoped_refptr<viz::ContextProviderCommandBuffer>
- GetMediaContextProvider() = 0;
+ virtual scoped_refptr<viz::ContextProvider> GetMediaContextProvider() = 0;
// Sets the current pipeline rendering color space.
virtual void SetRenderingColorSpace(const gfx::ColorSpace& color_space) = 0;
diff --git a/chromium/media/video/h264_level_limits.cc b/chromium/media/video/h264_level_limits.cc
index 5437e7bd9e9..a51d34a1472 100644
--- a/chromium/media/video/h264_level_limits.cc
+++ b/chromium/media/video/h264_level_limits.cc
@@ -5,6 +5,7 @@
#include "media/video/h264_level_limits.h"
#include "base/logging.h"
+#include "base/stl_util.h"
#include "media/video/h264_parser.h"
namespace media {
@@ -141,4 +142,27 @@ bool CheckH264LevelLimits(VideoCodecProfile profile,
return true;
}
+base::Optional<uint8_t> FindValidH264Level(VideoCodecProfile profile,
+ uint32_t bitrate,
+ uint32_t framerate,
+ uint32_t framesize_in_mbs) {
+ constexpr uint8_t kH264Levels[] = {
+ H264SPS::kLevelIDC1p0, H264SPS::kLevelIDC1B, H264SPS::kLevelIDC1p1,
+ H264SPS::kLevelIDC1p2, H264SPS::kLevelIDC1p3, H264SPS::kLevelIDC2p0,
+ H264SPS::kLevelIDC2p1, H264SPS::kLevelIDC2p2, H264SPS::kLevelIDC3p0,
+ H264SPS::kLevelIDC3p1, H264SPS::kLevelIDC3p2, H264SPS::kLevelIDC4p0,
+ H264SPS::kLevelIDC4p1, H264SPS::kLevelIDC4p2, H264SPS::kLevelIDC5p0,
+ H264SPS::kLevelIDC5p1, H264SPS::kLevelIDC5p2, H264SPS::kLevelIDC6p0,
+ H264SPS::kLevelIDC6p1, H264SPS::kLevelIDC6p2,
+ };
+
+ for (const uint8_t level : kH264Levels) {
+ if (CheckH264LevelLimits(profile, level, bitrate, framerate,
+ framesize_in_mbs)) {
+ return level;
+ }
+ }
+ return base::nullopt;
+}
+
} // namespace media
diff --git a/chromium/media/video/h264_level_limits.h b/chromium/media/video/h264_level_limits.h
index 0772dd28139..fcc8eb8c8b1 100644
--- a/chromium/media/video/h264_level_limits.h
+++ b/chromium/media/video/h264_level_limits.h
@@ -7,6 +7,7 @@
#include <stddef.h>
+#include "base/optional.h"
#include "media/base/media_export.h"
#include "media/base/video_codecs.h"
@@ -37,6 +38,14 @@ bool MEDIA_EXPORT CheckH264LevelLimits(VideoCodecProfile profile,
uint32_t framerate,
uint32_t framesize_in_mbs);
+// Returns the minimum H.264 level that conforms to Table A-1 in the spec for
+// the given |profile|, |bitrate|, |framerate| and |framesize_in_mbs|. Returns
+// base::nullopt if no level is sufficient.
+base::Optional<uint8_t> MEDIA_EXPORT
+FindValidH264Level(VideoCodecProfile profile,
+ uint32_t bitrate,
+ uint32_t framerate,
+ uint32_t framesize_in_mbs);
} // namespace media
#endif // MEDIA_VIDEO_H264_LEVEL_LIMITS_H_
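
As a usage note, a caller can ask FindValidH264Level() for the smallest level that fits its stream parameters. A minimal sketch follows; the 1080p/30fps/8Mbps numbers and the 16x16 macroblock arithmetic are illustrative assumptions, not taken from the patch:

  #include <stdint.h>

  #include "base/optional.h"
  #include "media/base/video_codecs.h"
  #include "media/video/h264_level_limits.h"

  // Illustrative only: pick the lowest H.264 level that can carry a
  // 1920x1080 stream at 30 fps and 8 Mbps.
  base::Optional<uint8_t> PickLevelForFullHd() {
    constexpr uint32_t kWidth = 1920;
    constexpr uint32_t kHeight = 1080;
    // Assumed macroblock math: round each dimension up to a 16x16 grid.
    const uint32_t framesize_in_mbs =
        ((kWidth + 15) / 16) * ((kHeight + 15) / 16);
    return media::FindValidH264Level(media::H264PROFILE_MAIN,
                                     /*bitrate=*/8000000u,
                                     /*framerate=*/30u, framesize_in_mbs);
  }
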
diff --git a/chromium/media/video/mock_gpu_video_accelerator_factories.cc b/chromium/media/video/mock_gpu_video_accelerator_factories.cc
index 6ceef38d9d8..8fe8147146a 100644
--- a/chromium/media/video/mock_gpu_video_accelerator_factories.cc
+++ b/chromium/media/video/mock_gpu_video_accelerator_factories.cc
@@ -7,6 +7,7 @@
#include <memory>
#include "base/memory/ptr_util.h"
+#include "base/memory/unsafe_shared_memory_region.h"
#include "ui/gfx/buffer_format_util.h"
#include "ui/gfx/gpu_memory_buffer.h"
@@ -27,7 +28,6 @@ class GpuMemoryBufferImpl : public gfx::GpuMemoryBuffer {
DCHECK(gfx::BufferFormat::R_8 == format_ ||
gfx::BufferFormat::RG_88 == format_ ||
gfx::BufferFormat::YUV_420_BIPLANAR == format_ ||
- gfx::BufferFormat::UYVY_422 == format_ ||
gfx::BufferFormat::BGRX_1010102 == format_ ||
gfx::BufferFormat::RGBX_1010102 == format_ ||
gfx::BufferFormat::RGBA_8888 == format_ ||
@@ -123,6 +123,11 @@ MockGpuVideoAcceleratorFactories::CreateSharedMemory(size_t size) {
return nullptr;
}
+base::UnsafeSharedMemoryRegion
+MockGpuVideoAcceleratorFactories::CreateSharedMemoryRegion(size_t size) {
+ return base::UnsafeSharedMemoryRegion::Create(size);
+}
+
std::unique_ptr<VideoEncodeAccelerator>
MockGpuVideoAcceleratorFactories::CreateVideoEncodeAccelerator() {
return base::WrapUnique(DoCreateVideoEncodeAccelerator());
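
As a usage note for the new CreateSharedMemoryRegion() override: callers receive a base::UnsafeSharedMemoryRegion that they map themselves. A minimal sketch, with illustrative function and variable names that are not part of the patch:

  #include "base/memory/unsafe_shared_memory_region.h"
  #include "media/video/gpu_video_accelerator_factories.h"

  // Illustrative only: allocate a writable region via the factories interface
  // (e.g. the mock above in tests) and map it into the current process.
  bool AllocateAndMapFrameBuffer(media::GpuVideoAcceleratorFactories* factories,
                                 size_t size) {
    base::UnsafeSharedMemoryRegion region =
        factories->CreateSharedMemoryRegion(size);
    if (!region.IsValid())
      return false;
    base::WritableSharedMemoryMapping mapping = region.Map();
    return mapping.IsValid();  // mapping.memory() is the writable pointer.
  }
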
diff --git a/chromium/media/video/mock_gpu_video_accelerator_factories.h b/chromium/media/video/mock_gpu_video_accelerator_factories.h
index e9711317623..0f8c4fe3825 100644
--- a/chromium/media/video/mock_gpu_video_accelerator_factories.h
+++ b/chromium/media/video/mock_gpu_video_accelerator_factories.h
@@ -50,8 +50,7 @@ class MockGpuVideoAcceleratorFactories : public GpuVideoAcceleratorFactories {
MOCK_METHOD0(GetTaskRunner, scoped_refptr<base::SingleThreadTaskRunner>());
MOCK_METHOD0(GetVideoEncodeAcceleratorSupportedProfiles,
VideoEncodeAccelerator::SupportedProfiles());
- MOCK_METHOD0(GetMediaContextProvider,
- scoped_refptr<viz::ContextProviderCommandBuffer>());
+ MOCK_METHOD0(GetMediaContextProvider, scoped_refptr<viz::ContextProvider>());
MOCK_METHOD1(SetRenderingColorSpace, void(const gfx::ColorSpace&));
std::unique_ptr<gfx::GpuMemoryBuffer> CreateGpuMemoryBuffer(
@@ -83,6 +82,9 @@ class MockGpuVideoAcceleratorFactories : public GpuVideoAcceleratorFactories {
std::unique_ptr<base::SharedMemory> CreateSharedMemory(size_t size) override;
+ // Allocates and returns an unsafe shared memory region.
+ base::UnsafeSharedMemoryRegion CreateSharedMemoryRegion(size_t size) override;
+
std::unique_ptr<VideoEncodeAccelerator> CreateVideoEncodeAccelerator()
override;
diff --git a/chromium/media/video/video_encode_accelerator.cc b/chromium/media/video/video_encode_accelerator.cc
index 67d6cedc3ba..3e61991f53e 100644
--- a/chromium/media/video/video_encode_accelerator.cc
+++ b/chromium/media/video/video_encode_accelerator.cc
@@ -52,8 +52,7 @@ VideoEncodeAccelerator::Config::Config(
initial_framerate(initial_framerate.value_or(
VideoEncodeAccelerator::kDefaultFramerate)),
gop_length(gop_length),
- h264_output_level(h264_output_level.value_or(
- VideoEncodeAccelerator::kDefaultH264Level)),
+ h264_output_level(h264_output_level),
storage_type(storage_type),
content_type(content_type) {}
diff --git a/chromium/media/video/video_encode_accelerator.h b/chromium/media/video/video_encode_accelerator.h
index 90ca6a11fbf..b394f4b995c 100644
--- a/chromium/media/video/video_encode_accelerator.h
+++ b/chromium/media/video/video_encode_accelerator.h
@@ -98,11 +98,8 @@ class MEDIA_EXPORT VideoEncodeAccelerator {
kErrorMax = kPlatformFailureError
};
- // Unified default values for all VEA implementations.
- enum {
- kDefaultFramerate = 30,
- kDefaultH264Level = H264SPS::kLevelIDC4p0,
- };
+ // A default framerate for all VEA implementations.
+ enum { kDefaultFramerate = 30 };
// Parameters required for VEA initialization.
struct MEDIA_EXPORT Config {
@@ -154,11 +151,9 @@ class MEDIA_EXPORT VideoEncodeAccelerator {
base::Optional<uint32_t> gop_length;
// Codec level of encoded output stream for H264 only. This value should
- // be aligned to the H264 standard definition of SPS.level_idc. The only
- // exception is in Main and Baseline profile we still use
- // |h264_output_level|=9 for Level 1b, which should set level_idc to 11 and
- // constraint_set3_flag to 1 (Spec A.3.1 and A.3.2). This is optional and
- // use |kDefaultH264Level| if not given.
+ // be aligned to the H264 standard definition of SPS.level_idc.
+ // If this is not given, the VideoEncodeAccelerator selects a suitable H.264
+ // level for |input_visible_size| and |initial_framerate|.
base::Optional<uint8_t> h264_output_level;
// The storage type of video frame provided on Encode().
diff --git a/chromium/media/webrtc/OWNERS b/chromium/media/webrtc/OWNERS
index 82cf799ac2b..3e75ff446b5 100644
--- a/chromium/media/webrtc/OWNERS
+++ b/chromium/media/webrtc/OWNERS
@@ -1,5 +1,4 @@
olka@chromium.org
-maxmorin@chromium.org
dalecurtis@chromium.org
miu@chromium.org
ossu@chromium.org
diff --git a/chromium/media/webrtc/audio_processor.cc b/chromium/media/webrtc/audio_processor.cc
index be4a916c518..1a281effeef 100644
--- a/chromium/media/webrtc/audio_processor.cc
+++ b/chromium/media/webrtc/audio_processor.cc
@@ -118,12 +118,14 @@ void AudioProcessor::AnalyzePlayout(const AudioBus& audio,
constexpr int kMaxChannels = 2;
DCHECK_GE(parameters.channels(), 1);
- DCHECK_LE(parameters.channels(), kMaxChannels);
const float* channel_ptrs[kMaxChannels];
channel_ptrs[0] = audio.channel(0);
webrtc::AudioProcessing::ChannelLayout webrtc_layout =
webrtc::AudioProcessing::ChannelLayout::kMono;
- if (parameters.channels() == 2) {
+ // Limit the number of channels to two (stereo), even for multi-channel input.
+ // TODO(crbug.com/982276): process all channels when multi-channel AEC is
+ // supported.
+ if (parameters.channels() > 1) {
channel_ptrs[1] = audio.channel(1);
webrtc_layout = webrtc::AudioProcessing::ChannelLayout::kStereo;
}
@@ -148,8 +150,9 @@ void AudioProcessor::StartEchoCancellationDump(base::File file) {
if (!audio_processing_) {
// The destructor of File is blocking. Post it to a task runner to avoid
// blocking the main thread.
- base::PostTaskWithTraits(
- FROM_HERE, {base::TaskPriority::LOWEST, base::MayBlock()},
+ base::PostTask(
+ FROM_HERE,
+ {base::ThreadPool(), base::TaskPriority::LOWEST, base::MayBlock()},
base::BindOnce([](base::File) {}, std::move(file)));
return;
}
diff --git a/chromium/media/webrtc/audio_processor_unittest.cc b/chromium/media/webrtc/audio_processor_unittest.cc
index 31d5d1eeea2..aaa2171f353 100644
--- a/chromium/media/webrtc/audio_processor_unittest.cc
+++ b/chromium/media/webrtc/audio_processor_unittest.cc
@@ -16,7 +16,7 @@
#include "base/memory/aligned_memory.h"
#include "base/path_service.h"
#include "base/stl_util.h"
-#include "base/test/scoped_task_environment.h"
+#include "base/test/task_environment.h"
#include "base/threading/thread_task_runner_handle.h"
#include "base/time/time.h"
#include "build/build_config.h"
@@ -152,7 +152,7 @@ class WebRtcAudioProcessorTest : public ::testing::Test {
return settings;
}
- base::test::ScopedTaskEnvironment scoped_task_environment_;
+ base::test::TaskEnvironment task_environment_;
media::AudioParameters params_;
};