summaryrefslogtreecommitdiff
path: root/chromium/media
diff options
context:
space:
mode:
authorAllan Sandfeld Jensen <allan.jensen@qt.io>2021-03-12 09:13:00 +0100
committerAllan Sandfeld Jensen <allan.jensen@qt.io>2021-03-16 09:58:26 +0000
commit03561cae90f1d99b5c54b1ef3be69f10e882b25e (patch)
treecc5f0958e823c044e7ae51cc0117fe51432abe5e /chromium/media
parentfa98118a45f7e169f8846086dc2c22c49a8ba310 (diff)
downloadqtwebengine-chromium-03561cae90f1d99b5c54b1ef3be69f10e882b25e.tar.gz
BASELINE: Update Chromium to 88.0.4324.208
Change-Id: I3ae87d23e4eff4b4a469685658740a213600c667 Reviewed-by: Allan Sandfeld Jensen <allan.jensen@qt.io>
Diffstat (limited to 'chromium/media')
-rw-r--r--chromium/media/BUILD.gn11
-rw-r--r--chromium/media/DEPS2
-rw-r--r--chromium/media/DIR_METADATA11
-rw-r--r--chromium/media/OWNERS2
-rw-r--r--chromium/media/audio/BUILD.gn10
-rw-r--r--chromium/media/audio/DIR_METADATA11
-rw-r--r--chromium/media/audio/OWNERS2
-rw-r--r--chromium/media/audio/alive_checker.cc2
-rw-r--r--chromium/media/audio/android/audio_track_output_stream.cc2
-rw-r--r--chromium/media/audio/audio_device_thread.cc14
-rw-r--r--chromium/media/audio/audio_encoders_unittest.cc10
-rw-r--r--chromium/media/audio/audio_features.cc5
-rw-r--r--chromium/media/audio/audio_features.h5
-rw-r--r--chromium/media/audio/audio_input_unittest.cc10
-rw-r--r--chromium/media/audio/audio_manager.cc2
-rw-r--r--chromium/media/audio/audio_manager_base.cc5
-rw-r--r--chromium/media/audio/audio_manager_unittest.cc31
-rw-r--r--chromium/media/audio/audio_opus_encoder.cc23
-rw-r--r--chromium/media/audio/audio_opus_encoder.h7
-rw-r--r--chromium/media/audio/audio_output_device_unittest.cc2
-rw-r--r--chromium/media/audio/audio_output_dispatcher_impl.cc14
-rw-r--r--chromium/media/audio/audio_output_dispatcher_impl.h8
-rw-r--r--chromium/media/audio/audio_output_proxy_unittest.cc26
-rw-r--r--chromium/media/audio/audio_output_resampler.cc2
-rw-r--r--chromium/media/audio/audio_output_stream_sink.cc2
-rw-r--r--chromium/media/audio/audio_output_unittest.cc2
-rw-r--r--chromium/media/audio/audio_pcm_encoder.cc4
-rw-r--r--chromium/media/audio/audio_pcm_encoder.h1
-rw-r--r--chromium/media/audio/cras/audio_manager_chromeos.cc4
-rw-r--r--chromium/media/audio/cras/audio_manager_chromeos.h3
-rw-r--r--chromium/media/audio/cras/audio_manager_cras.cc37
-rw-r--r--chromium/media/audio/cras/audio_manager_cras.h3
-rw-r--r--chromium/media/audio/cras/audio_manager_cras_base.h3
-rw-r--r--chromium/media/audio/cras/cras_input.cc2
-rw-r--r--chromium/media/audio/cras/cras_unified.cc2
-rw-r--r--chromium/media/audio/cras/cras_util.cc132
-rw-r--r--chromium/media/audio/cras/cras_util.h40
-rw-r--r--chromium/media/audio/fake_audio_input_stream.cc2
-rw-r--r--chromium/media/audio/fake_audio_output_stream.cc2
-rw-r--r--chromium/media/audio/fuchsia/DIR_METADATA10
-rw-r--r--chromium/media/audio/fuchsia/OWNERS5
-rw-r--r--chromium/media/audio/linux/audio_manager_linux.cc5
-rw-r--r--chromium/media/audio/mac/audio_auhal_mac.cc2
-rw-r--r--chromium/media/audio/mac/audio_device_listener_mac_unittest.cc2
-rw-r--r--chromium/media/audio/mac/audio_low_latency_input_mac_unittest.cc7
-rw-r--r--chromium/media/audio/pulse/audio_manager_pulse.cc3
-rw-r--r--chromium/media/audio/win/audio_device_listener_win_unittest.cc2
-rw-r--r--chromium/media/audio/win/audio_low_latency_input_win.cc307
-rw-r--r--chromium/media/audio/win/audio_low_latency_input_win.h22
-rw-r--r--chromium/media/audio/win/audio_low_latency_input_win_unittest.cc37
-rw-r--r--chromium/media/audio/win/audio_low_latency_output_win_unittest.cc2
-rw-r--r--chromium/media/audio/win/audio_manager_win.cc2
-rw-r--r--chromium/media/audio/win/audio_output_win_unittest.cc2
-rw-r--r--chromium/media/audio/win/audio_session_event_listener_win_unittest.cc2
-rw-r--r--chromium/media/audio/win/waveout_output_win.cc4
-rw-r--r--chromium/media/base/BUILD.gn37
-rw-r--r--chromium/media/base/android/jni_hdr_metadata.cc4
-rw-r--r--chromium/media/base/android/jni_hdr_metadata.h6
-rw-r--r--chromium/media/base/android/media_codec_bridge_impl.h4
-rw-r--r--chromium/media/base/android/media_drm_bridge.cc7
-rw-r--r--chromium/media/base/android/media_drm_bridge.h2
-rw-r--r--chromium/media/base/android/media_drm_bridge_unittest.cc6
-rw-r--r--chromium/media/base/android/media_service_throttler.cc2
-rw-r--r--chromium/media/base/async_destroy_video_encoder.h7
-rw-r--r--chromium/media/base/audio_buffer.cc68
-rw-r--r--chromium/media/base/audio_buffer.h4
-rw-r--r--chromium/media/base/audio_buffer_queue_unittest.cc44
-rw-r--r--chromium/media/base/audio_buffer_unittest.cc102
-rw-r--r--chromium/media/base/audio_converter.cc2
-rw-r--r--chromium/media/base/audio_discard_helper_unittest.cc2
-rw-r--r--chromium/media/base/audio_encoder.cc14
-rw-r--r--chromium/media/base/audio_encoder.h22
-rw-r--r--chromium/media/base/audio_latency.cc3
-rw-r--r--chromium/media/base/audio_pull_fifo_unittest.cc2
-rw-r--r--chromium/media/base/audio_push_fifo.h3
-rw-r--r--chromium/media/base/audio_push_fifo_unittest.cc2
-rw-r--r--chromium/media/base/audio_renderer_mixer_input_unittest.cc2
-rw-r--r--chromium/media/base/audio_renderer_mixer_unittest.cc2
-rw-r--r--chromium/media/base/cdm_context.cc7
-rw-r--r--chromium/media/base/cdm_context.h13
-rw-r--r--chromium/media/base/decoder_factory.cc4
-rw-r--r--chromium/media/base/decoder_factory.h6
-rw-r--r--chromium/media/base/demuxer.h6
-rw-r--r--chromium/media/base/demuxer_memory_limit.h16
-rw-r--r--chromium/media/base/demuxer_memory_limit_android.cc13
-rw-r--r--chromium/media/base/demuxer_memory_limit_cast.cc62
-rw-r--r--chromium/media/base/demuxer_memory_limit_cast_unittest.cc106
-rw-r--r--chromium/media/base/demuxer_memory_limit_default.cc13
-rw-r--r--chromium/media/base/demuxer_memory_limit_low.cc13
-rw-r--r--chromium/media/base/fake_audio_worker.cc2
-rw-r--r--chromium/media/base/fake_single_thread_task_runner.h2
-rw-r--r--chromium/media/base/fallback_video_decoder.cc2
-rw-r--r--chromium/media/base/fallback_video_decoder_unittest.cc2
-rw-r--r--chromium/media/base/ipc/media_param_traits.cc1
-rw-r--r--chromium/media/base/ipc/media_param_traits_macros.h6
-rw-r--r--chromium/media/base/keyboard_event_counter.cc33
-rw-r--r--chromium/media/base/keyboard_event_counter.h48
-rw-r--r--chromium/media/base/keyboard_event_counter_unittest.cc38
-rw-r--r--chromium/media/base/logging_override_if_enabled.h11
-rw-r--r--chromium/media/base/mac/BUILD.gn11
-rw-r--r--chromium/media/base/mac/color_space_util_mac.h24
-rw-r--r--chromium/media/base/mac/color_space_util_mac.mm285
-rw-r--r--chromium/media/base/mac/video_frame_mac.cc3
-rw-r--r--chromium/media/base/media_log.h10
-rw-r--r--chromium/media/base/media_serializers.h6
-rw-r--r--chromium/media/base/media_switches.cc63
-rw-r--r--chromium/media/base/media_switches.h28
-rw-r--r--chromium/media/base/media_types.h2
-rw-r--r--chromium/media/base/media_url_demuxer_unittest.cc2
-rw-r--r--chromium/media/base/mock_filters.cc5
-rw-r--r--chromium/media/base/mock_filters.h38
-rw-r--r--chromium/media/base/multi_channel_resampler.cc2
-rw-r--r--chromium/media/base/multi_channel_resampler_unittest.cc2
-rw-r--r--chromium/media/base/offloading_video_encoder.cc89
-rw-r--r--chromium/media/base/offloading_video_encoder.h67
-rw-r--r--chromium/media/base/offloading_video_encoder_unittest.cc148
-rw-r--r--chromium/media/base/pipeline_impl.cc5
-rw-r--r--chromium/media/base/provision_fetcher.h3
-rw-r--r--chromium/media/base/sinc_resampler_perftest.cc2
-rw-r--r--chromium/media/base/sinc_resampler_unittest.cc2
-rw-r--r--chromium/media/base/status.h63
-rw-r--r--chromium/media/base/status_unittest.cc48
-rw-r--r--chromium/media/base/supported_types.cc15
-rw-r--r--chromium/media/base/supported_types_unittest.cc13
-rw-r--r--chromium/media/base/test_helpers.cc40
-rw-r--r--chromium/media/base/test_helpers.h12
-rw-r--r--chromium/media/base/user_input_monitor_linux.cc262
-rw-r--r--chromium/media/base/user_input_monitor_unittest.cc20
-rw-r--r--chromium/media/base/user_input_monitor_win.cc7
-rw-r--r--chromium/media/base/video_codecs.cc87
-rw-r--r--chromium/media/base/video_codecs.h5
-rw-r--r--chromium/media/base/video_decoder_config.cc45
-rw-r--r--chromium/media/base/video_decoder_config.h12
-rw-r--r--chromium/media/base/video_decoder_config_unittest.cc9
-rw-r--r--chromium/media/base/video_encoder.h13
-rw-r--r--chromium/media/base/video_frame.cc44
-rw-r--r--chromium/media/base/video_frame.h21
-rw-r--r--chromium/media/base/video_frame_feedback.h6
-rw-r--r--chromium/media/base/video_frame_metadata.cc1
-rw-r--r--chromium/media/base/video_frame_metadata.h7
-rw-r--r--chromium/media/base/video_frame_unittest.cc1
-rw-r--r--chromium/media/base/video_thumbnail_decoder.cc2
-rw-r--r--chromium/media/base/video_thumbnail_decoder_unittest.cc2
-rw-r--r--chromium/media/base/win/BUILD.gn9
-rw-r--r--chromium/media/base/win/d3d11_mocks.cc3
-rw-r--r--chromium/media/base/win/d3d11_mocks.h270
-rw-r--r--chromium/media/base/win/test_utils.h3
-rw-r--r--chromium/media/blink/BUILD.gn5
-rw-r--r--chromium/media/blink/DEPS1
-rw-r--r--chromium/media/blink/multibuffer_data_source_unittest.cc1
-rw-r--r--chromium/media/blink/multibuffer_unittest.cc1
-rw-r--r--chromium/media/blink/resource_multibuffer_data_provider.cc6
-rw-r--r--chromium/media/blink/resource_multibuffer_data_provider.h1
-rw-r--r--chromium/media/blink/url_index_unittest.cc1
-rw-r--r--chromium/media/blink/video_frame_compositor.cc7
-rw-r--r--chromium/media/blink/video_frame_compositor.h6
-rw-r--r--chromium/media/blink/video_frame_compositor_unittest.cc136
-rw-r--r--chromium/media/blink/watch_time_component.cc133
-rw-r--r--chromium/media/blink/watch_time_component.h135
-rw-r--r--chromium/media/blink/watch_time_component_unittest.cc302
-rw-r--r--chromium/media/blink/watch_time_reporter.cc685
-rw-r--r--chromium/media/blink/watch_time_reporter.h269
-rw-r--r--chromium/media/blink/watch_time_reporter_unittest.cc55
-rw-r--r--chromium/media/blink/webmediaplayer_impl.cc76
-rw-r--r--chromium/media/blink/webmediaplayer_impl.h13
-rw-r--r--chromium/media/blink/webmediaplayer_impl_unittest.cc50
-rw-r--r--chromium/media/capabilities/in_memory_video_decode_stats_db_unittest.cc2
-rw-r--r--chromium/media/capabilities/video_decode_stats_db_impl_unittest.cc2
-rw-r--r--chromium/media/capture/BUILD.gn89
-rw-r--r--chromium/media/capture/OWNERS2
-rw-r--r--chromium/media/capture/content/DIR_METADATA11
-rw-r--r--chromium/media/capture/content/OWNERS2
-rw-r--r--chromium/media/capture/content/android/screen_capture_machine_android.cc2
-rw-r--r--chromium/media/capture/mojom/video_capture_types.mojom1
-rw-r--r--chromium/media/capture/mojom/video_capture_types_mojom_traits.cc8
-rw-r--r--chromium/media/capture/video/DIR_METADATA12
-rw-r--r--chromium/media/capture/video/OWNERS3
-rw-r--r--chromium/media/capture/video/android/video_capture_device_factory_android.cc1
-rw-r--r--chromium/media/capture/video/chromeos/camera_3a_controller.cc30
-rw-r--r--chromium/media/capture/video/chromeos/camera_3a_controller.h7
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_context.cc84
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_context.h31
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_delegate.cc166
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_delegate.h17
-rw-r--r--chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc13
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_delegate.cc2
-rw-r--r--chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc2
-rw-r--r--chromium/media/capture/video/chromeos/camera_metadata_utils.cc10
-rw-r--r--chromium/media/capture/video/chromeos/mock_video_capture_client.cc1
-rw-r--r--chromium/media/capture/video/chromeos/mock_video_capture_client.h1
-rw-r--r--chromium/media/capture/video/chromeos/request_manager.cc16
-rw-r--r--chromium/media/capture/video/chromeos/request_manager.h7
-rw-r--r--chromium/media/capture/video/chromeos/request_manager_unittest.cc40
-rw-r--r--chromium/media/capture/video/chromeos/stream_buffer_manager.cc14
-rw-r--r--chromium/media/capture/video/chromeos/stream_buffer_manager.h7
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc20
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h5
-rw-r--r--chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.cc2
-rw-r--r--chromium/media/capture/video/create_video_capture_device_factory.cc17
-rw-r--r--chromium/media/capture/video/create_video_capture_device_factory.h5
-rw-r--r--chromium/media/capture/video/fake_video_capture_device.cc3
-rw-r--r--chromium/media/capture/video/fake_video_capture_device_unittest.cc2
-rw-r--r--chromium/media/capture/video/file_video_capture_device.cc3
-rw-r--r--chromium/media/capture/video/fuchsia/video_capture_device_factory_fuchsia_test.cc2
-rw-r--r--chromium/media/capture/video/fuchsia/video_capture_device_fuchsia.cc7
-rw-r--r--chromium/media/capture/video/fuchsia/video_capture_device_fuchsia_test.cc3
-rw-r--r--chromium/media/capture/video/gpu_memory_buffer_utils.cc2
-rw-r--r--chromium/media/capture/video/linux/video_capture_device_factory_linux.cc22
-rw-r--r--chromium/media/capture/video/linux/video_capture_device_factory_linux_unittest.cc2
-rw-r--r--chromium/media/capture/video/mac/gpu_memory_buffer_tracker_mac.cc104
-rw-r--r--chromium/media/capture/video/mac/gpu_memory_buffer_tracker_mac.h53
-rw-r--r--chromium/media/capture/video/mac/pixel_buffer_pool_mac.h17
-rw-r--r--chromium/media/capture/video/mac/pixel_buffer_pool_mac_unittest.mm73
-rw-r--r--chromium/media/capture/video/mac/pixel_buffer_transferer_mac.cc40
-rw-r--r--chromium/media/capture/video/mac/pixel_buffer_transferer_mac.h39
-rw-r--r--chromium/media/capture/video/mac/pixel_buffer_transferer_mac_unittest.mm268
-rw-r--r--chromium/media/capture/video/mac/sample_buffer_transformer_mac.cc822
-rw-r--r--chromium/media/capture/video/mac/sample_buffer_transformer_mac.h147
-rw-r--r--chromium/media/capture/video/mac/sample_buffer_transformer_mac_unittest.mm548
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.h11
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm147
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_avfoundation_protocol_mac.h3
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h2
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.mm6
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_factory_mac_unittest.mm2
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_mac.h3
-rw-r--r--chromium/media/capture/video/mac/video_capture_device_mac.mm8
-rw-r--r--chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc5
-rw-r--r--chromium/media/capture/video/mock_video_capture_device_client.h4
-rw-r--r--chromium/media/capture/video/video_capture_buffer_pool.h17
-rw-r--r--chromium/media/capture/video/video_capture_buffer_pool_impl.cc78
-rw-r--r--chromium/media/capture/video/video_capture_buffer_pool_impl.h8
-rw-r--r--chromium/media/capture/video/video_capture_buffer_tracker.cc45
-rw-r--r--chromium/media/capture/video/video_capture_buffer_tracker.h43
-rw-r--r--chromium/media/capture/video/video_capture_buffer_tracker_factory.h7
-rw-r--r--chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.cc22
-rw-r--r--chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.h3
-rw-r--r--chromium/media/capture/video/video_capture_device.h10
-rw-r--r--chromium/media/capture/video/video_capture_device_client.cc64
-rw-r--r--chromium/media/capture/video/video_capture_device_client.h10
-rw-r--r--chromium/media/capture/video/video_capture_device_client_unittest.cc13
-rw-r--r--chromium/media/capture/video/video_capture_device_factory.cc5
-rw-r--r--chromium/media/capture/video/video_capture_device_factory.h5
-rw-r--r--chromium/media/capture/video/video_capture_device_unittest.cc25
-rw-r--r--chromium/media/capture/video/video_capture_system_impl.cc7
-rw-r--r--chromium/media/capture/video/win/capability_list_win.cc5
-rw-r--r--chromium/media/capture/video/win/video_capture_device_factory_win.cc27
-rw-r--r--chromium/media/capture/video/win/video_capture_device_factory_win_unittest.cc173
-rw-r--r--chromium/media/capture/video/win/video_capture_device_mf_win.cc49
-rw-r--r--chromium/media/capture/video/win/video_capture_device_mf_win.h5
-rw-r--r--chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc78
-rw-r--r--chromium/media/capture/video/win/video_capture_device_utils_win.cc4
-rw-r--r--chromium/media/capture/video/win/video_capture_device_win.cc9
-rw-r--r--chromium/media/capture/video/win/video_capture_device_win.h3
-rw-r--r--chromium/media/capture/video/win/video_capture_dxgi_device_manager.cc8
-rw-r--r--chromium/media/capture/video/win/video_capture_dxgi_device_manager.h3
-rw-r--r--chromium/media/capture/video_capture_types.h3
-rw-r--r--chromium/media/capture/video_capturer_source.cc5
-rw-r--r--chromium/media/capture/video_capturer_source.h5
-rw-r--r--chromium/media/cast/BUILD.gn7
-rw-r--r--chromium/media/cast/DIR_METADATA11
-rw-r--r--chromium/media/cast/OWNERS2
-rw-r--r--chromium/media/cast/cast_sender_impl.cc9
-rw-r--r--chromium/media/cast/cast_sender_impl.h2
-rw-r--r--chromium/media/cast/logging/encoding_event_subscriber.h2
-rw-r--r--chromium/media/cast/logging/log_event_dispatcher.cc2
-rw-r--r--chromium/media/cast/logging/receiver_time_offset_estimator_impl.h3
-rw-r--r--chromium/media/cast/logging/simple_event_subscriber.h2
-rw-r--r--chromium/media/cast/logging/stats_event_subscriber.h2
-rw-r--r--chromium/media/cast/net/cast_transport_impl.cc7
-rw-r--r--chromium/media/cast/net/cast_transport_impl_unittest.cc2
-rw-r--r--chromium/media/cast/net/pacing/paced_sender.h2
-rw-r--r--chromium/media/cast/net/rtcp/receiver_rtcp_event_subscriber.h2
-rw-r--r--chromium/media/cast/net/udp_transport_unittest.cc5
-rw-r--r--chromium/media/cast/receiver/audio_decoder.cc6
-rw-r--r--chromium/media/cast/receiver/audio_decoder_unittest.cc13
-rw-r--r--chromium/media/cast/receiver/cast_receiver_impl.cc7
-rw-r--r--chromium/media/cast/receiver/cast_receiver_impl.h2
-rw-r--r--chromium/media/cast/receiver/frame_receiver.h2
-rw-r--r--chromium/media/cast/receiver/video_decoder.cc6
-rw-r--r--chromium/media/cast/receiver/video_decoder_unittest.cc8
-rw-r--r--chromium/media/cast/sender/audio_encoder.cc6
-rw-r--r--chromium/media/cast/sender/audio_encoder_unittest.cc15
-rw-r--r--chromium/media/cast/sender/audio_sender.h2
-rw-r--r--chromium/media/cast/sender/audio_sender_unittest.cc2
-rw-r--r--chromium/media/cast/sender/congestion_control.cc4
-rw-r--r--chromium/media/cast/sender/external_video_encoder.cc6
-rw-r--r--chromium/media/cast/sender/external_video_encoder.h5
-rw-r--r--chromium/media/cast/sender/fake_software_video_encoder.h2
-rw-r--r--chromium/media/cast/sender/h264_vt_encoder.cc16
-rw-r--r--chromium/media/cast/sender/h264_vt_encoder_unittest.cc14
-rw-r--r--chromium/media/cast/sender/sender_encoded_frame.h2
-rw-r--r--chromium/media/cast/sender/video_encoder_impl.cc2
-rw-r--r--chromium/media/cast/sender/video_encoder_impl.h2
-rw-r--r--chromium/media/cast/sender/video_encoder_unittest.cc4
-rw-r--r--chromium/media/cast/sender/video_sender.cc9
-rw-r--r--chromium/media/cast/sender/video_sender.h6
-rw-r--r--chromium/media/cast/sender/video_sender_unittest.cc23
-rw-r--r--chromium/media/cast/sender/vp8_encoder.h2
-rw-r--r--chromium/media/cdm/BUILD.gn2
-rw-r--r--chromium/media/cdm/cdm_adapter.h10
-rw-r--r--chromium/media/cdm/cdm_context_ref_impl.h2
-rw-r--r--chromium/media/cdm/cdm_helpers.h4
-rw-r--r--chromium/media/cdm/cdm_paths_unittest.cc7
-rw-r--r--chromium/media/cdm/default_cdm_factory.cc2
-rw-r--r--chromium/media/cdm/default_cdm_factory.h2
-rw-r--r--chromium/media/cdm/library_cdm/cdm_paths.gni4
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/cdm_video_decoder.cc11
-rw-r--r--chromium/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_audio_decoder.cc2
-rw-r--r--chromium/media/cdm/win/media_foundation_cdm_factory.cc2
-rw-r--r--chromium/media/device_monitors/BUILD.gn31
-rw-r--r--chromium/media/device_monitors/device_monitor_mac.mm2
-rw-r--r--chromium/media/ffmpeg/ffmpeg_common.cc2
-rw-r--r--chromium/media/filters/BUILD.gn17
-rw-r--r--chromium/media/filters/android/media_codec_audio_decoder.cc1
-rw-r--r--chromium/media/filters/aom_video_decoder.cc314
-rw-r--r--chromium/media/filters/aom_video_decoder.h89
-rw-r--r--chromium/media/filters/aom_video_decoder_unittest.cc288
-rw-r--r--chromium/media/filters/audio_decoder_stream_unittest.cc2
-rw-r--r--chromium/media/filters/audio_decoder_unittest.cc24
-rw-r--r--chromium/media/filters/audio_video_metadata_extractor.cc3
-rw-r--r--chromium/media/filters/blocking_url_protocol.cc2
-rw-r--r--chromium/media/filters/blocking_url_protocol.h5
-rw-r--r--chromium/media/filters/blocking_url_protocol_unittest.cc13
-rw-r--r--chromium/media/filters/chunk_demuxer.cc1
-rw-r--r--chromium/media/filters/chunk_demuxer_unittest.cc20
-rw-r--r--chromium/media/filters/dav1d_video_decoder.cc2
-rw-r--r--chromium/media/filters/dav1d_video_decoder.h2
-rw-r--r--chromium/media/filters/dav1d_video_decoder_unittest.cc2
-rw-r--r--chromium/media/filters/decoder_selector.cc26
-rw-r--r--chromium/media/filters/decoder_selector.h9
-rw-r--r--chromium/media/filters/decoder_stream.cc64
-rw-r--r--chromium/media/filters/decoder_stream.h6
-rw-r--r--chromium/media/filters/decrypting_audio_decoder.cc20
-rw-r--r--chromium/media/filters/decrypting_audio_decoder.h6
-rw-r--r--chromium/media/filters/decrypting_audio_decoder_unittest.cc42
-rw-r--r--chromium/media/filters/decrypting_demuxer_stream.cc23
-rw-r--r--chromium/media/filters/decrypting_demuxer_stream.h8
-rw-r--r--chromium/media/filters/decrypting_demuxer_stream_unittest.cc4
-rw-r--r--chromium/media/filters/decrypting_media_resource.cc6
-rw-r--r--chromium/media/filters/decrypting_media_resource.h17
-rw-r--r--chromium/media/filters/decrypting_media_resource_unittest.cc2
-rw-r--r--chromium/media/filters/decrypting_video_decoder.cc24
-rw-r--r--chromium/media/filters/decrypting_video_decoder.h9
-rw-r--r--chromium/media/filters/decrypting_video_decoder_unittest.cc21
-rw-r--r--chromium/media/filters/demuxer_perftest.cc2
-rw-r--r--chromium/media/filters/fake_video_decoder.cc33
-rw-r--r--chromium/media/filters/fake_video_decoder.h4
-rw-r--r--chromium/media/filters/fake_video_decoder_unittest.cc37
-rw-r--r--chromium/media/filters/ffmpeg_audio_decoder.cc16
-rw-r--r--chromium/media/filters/ffmpeg_audio_decoder.h8
-rw-r--r--chromium/media/filters/ffmpeg_demuxer.cc81
-rw-r--r--chromium/media/filters/ffmpeg_demuxer.h7
-rw-r--r--chromium/media/filters/ffmpeg_demuxer_unittest.cc7
-rw-r--r--chromium/media/filters/ffmpeg_glue_unittest.cc5
-rw-r--r--chromium/media/filters/ffmpeg_video_decoder.cc17
-rw-r--r--chromium/media/filters/ffmpeg_video_decoder.h5
-rw-r--r--chromium/media/filters/ffmpeg_video_decoder_unittest.cc4
-rw-r--r--chromium/media/filters/frame_buffer_pool.cc6
-rw-r--r--chromium/media/filters/frame_buffer_pool.h2
-rw-r--r--chromium/media/filters/frame_buffer_pool_unittest.cc8
-rw-r--r--chromium/media/filters/fuchsia/DEPS3
-rw-r--r--chromium/media/filters/fuchsia/DIR_METADATA10
-rw-r--r--chromium/media/filters/fuchsia/OWNERS5
-rw-r--r--chromium/media/filters/fuchsia/fuchsia_video_decoder.cc186
-rw-r--r--chromium/media/filters/fuchsia/fuchsia_video_decoder.h15
-rw-r--r--chromium/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc146
-rw-r--r--chromium/media/filters/gav1_video_decoder.cc2
-rw-r--r--chromium/media/filters/gav1_video_decoder_unittest.cc2
-rw-r--r--chromium/media/filters/h264_bitstream_buffer.cc6
-rw-r--r--chromium/media/filters/media_file_checker.cc2
-rw-r--r--chromium/media/filters/memory_data_source.h2
-rw-r--r--chromium/media/filters/offloading_video_decoder.cc12
-rw-r--r--chromium/media/filters/offloading_video_decoder.h4
-rw-r--r--chromium/media/filters/offloading_video_decoder_unittest.cc14
-rw-r--r--chromium/media/filters/pipeline_controller_unittest.cc2
-rw-r--r--chromium/media/filters/source_buffer_state_unittest.cc21
-rw-r--r--chromium/media/filters/source_buffer_stream.cc11
-rw-r--r--chromium/media/filters/source_buffer_stream.h5
-rw-r--r--chromium/media/filters/source_buffer_stream_unittest.cc2
-rw-r--r--chromium/media/filters/stream_parser_factory.cc2
-rw-r--r--chromium/media/filters/video_renderer_algorithm.cc80
-rw-r--r--chromium/media/filters/video_renderer_algorithm.h3
-rw-r--r--chromium/media/filters/video_renderer_algorithm_unittest.cc49
-rw-r--r--chromium/media/filters/vp9_parser.cc2
-rw-r--r--chromium/media/filters/vpx_video_decoder_fuzzertest.cc4
-rw-r--r--chromium/media/filters/vpx_video_decoder_unittest.cc20
-rw-r--r--chromium/media/formats/mp2t/es_parser_mpeg1audio.cc8
-rw-r--r--chromium/media/formats/mp2t/es_parser_mpeg1audio.h2
-rw-r--r--chromium/media/formats/mp2t/mp2t_stream_parser.cc64
-rw-r--r--chromium/media/formats/mp2t/mp2t_stream_parser.h6
-rw-r--r--chromium/media/formats/mp2t/mp2t_stream_parser_unittest.cc32
-rw-r--r--chromium/media/formats/mp4/box_definitions.cc7
-rw-r--r--chromium/media/formats/mp4/box_definitions.h1
-rw-r--r--chromium/media/formats/mp4/h264_annex_b_to_avc_bitstream_converter.cc5
-rw-r--r--chromium/media/formats/mp4/mp4_stream_parser.cc14
-rw-r--r--chromium/media/formats/mp4/mp4_stream_parser_unittest.cc27
-rw-r--r--chromium/media/formats/mp4/track_run_iterator.cc4
-rw-r--r--chromium/media/formats/mpeg/adts_stream_parser.cc13
-rw-r--r--chromium/media/formats/mpeg/adts_stream_parser.h4
-rw-r--r--chromium/media/formats/mpeg/mpeg1_audio_stream_parser.cc159
-rw-r--r--chromium/media/formats/mpeg/mpeg1_audio_stream_parser.h9
-rw-r--r--chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.cc2
-rw-r--r--chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.h4
-rw-r--r--chromium/media/formats/webm/webm_colour_parser.cc2
-rw-r--r--chromium/media/formats/webm/webm_colour_parser.h8
-rw-r--r--chromium/media/formats/webm/webm_stream_parser_unittest.cc4
-rw-r--r--chromium/media/fuchsia/OWNERS5
-rw-r--r--chromium/media/fuchsia/audio/BUILD.gn8
-rw-r--r--chromium/media/fuchsia/audio/fake_audio_consumer.cc18
-rw-r--r--chromium/media/fuchsia/audio/fake_audio_consumer.h4
-rw-r--r--chromium/media/fuchsia/audio/fuchsia_audio_capturer_source.cc4
-rw-r--r--chromium/media/fuchsia/audio/fuchsia_audio_output_device.cc492
-rw-r--r--chromium/media/fuchsia/audio/fuchsia_audio_output_device.h169
-rw-r--r--chromium/media/fuchsia/audio/fuchsia_audio_output_device_test.cc228
-rw-r--r--chromium/media/fuchsia/audio/fuchsia_audio_renderer.cc70
-rw-r--r--chromium/media/fuchsia/audio/fuchsia_audio_renderer.h18
-rw-r--r--chromium/media/fuchsia/cdm/fuchsia_cdm.cc18
-rw-r--r--chromium/media/fuchsia/cdm/fuchsia_cdm.h1
-rw-r--r--chromium/media/fuchsia/cdm/fuchsia_cdm_context.h2
-rw-r--r--chromium/media/fuchsia/cdm/fuchsia_decryptor.cc10
-rw-r--r--chromium/media/fuchsia/cdm/fuchsia_decryptor.h13
-rw-r--r--chromium/media/fuchsia/cdm/fuchsia_stream_decryptor.cc75
-rw-r--r--chromium/media/fuchsia/cdm/fuchsia_stream_decryptor.h23
-rw-r--r--chromium/media/fuchsia/cdm/service/BUILD.gn4
-rw-r--r--chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager.cc172
-rw-r--r--chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager.h21
-rw-r--r--chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager_unittest.cc (renamed from chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager_test.cc)164
-rw-r--r--chromium/media/fuchsia/cdm/service/mock_provision_fetcher.h2
-rw-r--r--chromium/media/fuchsia/cdm/service/provisioning_fetcher_impl.cc2
-rw-r--r--chromium/media/fuchsia/cdm/service/provisioning_fetcher_impl_unittest.cc (renamed from chromium/media/fuchsia/cdm/service/provisioning_fetcher_impl_test.cc)4
-rw-r--r--chromium/media/fuchsia/common/stream_processor_helper.cc10
-rw-r--r--chromium/media/fuchsia/common/stream_processor_helper.h5
-rw-r--r--chromium/media/fuchsia/common/sysmem_buffer_reader.cc13
-rw-r--r--chromium/media/fuchsia/common/sysmem_buffer_reader.h8
-rw-r--r--chromium/media/fuchsia/common/sysmem_buffer_writer.cc26
-rw-r--r--chromium/media/fuchsia/common/sysmem_buffer_writer.h3
-rw-r--r--chromium/media/fuchsia/common/sysmem_buffer_writer_queue.cc11
-rw-r--r--chromium/media/fuchsia/common/sysmem_buffer_writer_queue.h14
-rw-r--r--chromium/media/fuchsia/media_unittests.test-cmx (renamed from chromium/media/fuchsia/media_unittests.cmx)0
-rw-r--r--chromium/media/gpu/BUILD.gn37
-rw-r--r--chromium/media/gpu/android/android_video_surface_chooser_impl.cc15
-rw-r--r--chromium/media/gpu/android/android_video_surface_chooser_impl_unittest.cc8
-rw-r--r--chromium/media/gpu/android/codec_allocator.cc2
-rw-r--r--chromium/media/gpu/android/codec_allocator_unittest.cc2
-rw-r--r--chromium/media/gpu/android/codec_image.cc2
-rw-r--r--chromium/media/gpu/android/codec_image_unittest.cc2
-rw-r--r--chromium/media/gpu/android/codec_output_buffer_renderer.cc2
-rw-r--r--chromium/media/gpu/android/direct_shared_image_video_provider.cc2
-rw-r--r--chromium/media/gpu/android/frame_info_helper_unittest.cc4
-rw-r--r--chromium/media/gpu/android/media_codec_video_decoder.cc23
-rw-r--r--chromium/media/gpu/android/media_codec_video_decoder_unittest.cc4
-rw-r--r--chromium/media/gpu/android/video_frame_factory_impl.cc16
-rw-r--r--chromium/media/gpu/android/video_frame_factory_impl_unittest.cc2
-rw-r--r--chromium/media/gpu/args.gni21
-rw-r--r--chromium/media/gpu/chromeos/BUILD.gn1
-rw-r--r--chromium/media/gpu/chromeos/dmabuf_video_frame_pool.h3
-rw-r--r--chromium/media/gpu/chromeos/fourcc.h16
-rw-r--r--chromium/media/gpu/chromeos/image_processor.cc14
-rw-r--r--chromium/media/gpu/chromeos/image_processor_test.cc7
-rw-r--r--chromium/media/gpu/chromeos/image_processor_with_pool.cc3
-rw-r--r--chromium/media/gpu/chromeos/mailbox_video_frame_converter.cc2
-rw-r--r--chromium/media/gpu/chromeos/platform_video_frame_pool.cc60
-rw-r--r--chromium/media/gpu/chromeos/platform_video_frame_pool.h10
-rw-r--r--chromium/media/gpu/chromeos/platform_video_frame_pool_unittest.cc9
-rw-r--r--chromium/media/gpu/chromeos/platform_video_frame_utils.cc1
-rw-r--r--chromium/media/gpu/chromeos/vd_video_decode_accelerator.cc16
-rw-r--r--chromium/media/gpu/chromeos/vda_video_frame_pool.cc14
-rw-r--r--chromium/media/gpu/chromeos/vda_video_frame_pool.h3
-rw-r--r--chromium/media/gpu/chromeos/video_decoder_pipeline.cc36
-rw-r--r--chromium/media/gpu/chromeos/video_decoder_pipeline.h7
-rw-r--r--chromium/media/gpu/chromeos/video_decoder_pipeline_unittest.cc21
-rw-r--r--chromium/media/gpu/gpu_video_decode_accelerator_factory.cc5
-rw-r--r--chromium/media/gpu/h264_decoder.cc1
-rw-r--r--chromium/media/gpu/h265_decoder.cc888
-rw-r--r--chromium/media/gpu/h265_decoder.h329
-rw-r--r--chromium/media/gpu/h265_decoder_fuzzertest.cc81
-rw-r--r--chromium/media/gpu/h265_decoder_unittest.cc459
-rw-r--r--chromium/media/gpu/h265_dpb.cc107
-rw-r--r--chromium/media/gpu/h265_dpb.h147
-rw-r--r--chromium/media/gpu/ipc/service/BUILD.gn1
-rw-r--r--chromium/media/gpu/ipc/service/vda_video_decoder.cc4
-rw-r--r--chromium/media/gpu/mac/vt_config_util.h7
-rw-r--r--chromium/media/gpu/mac/vt_config_util.mm212
-rw-r--r--chromium/media/gpu/mac/vt_config_util_unittest.cc59
-rw-r--r--chromium/media/gpu/mac/vt_video_decode_accelerator_mac.cc26
-rw-r--r--chromium/media/gpu/mac/vt_video_decode_accelerator_mac.h6
-rw-r--r--chromium/media/gpu/mac/vt_video_encode_accelerator_mac.cc3
-rw-r--r--chromium/media/gpu/test/BUILD.gn7
-rw-r--r--chromium/media/gpu/v4l2/BUILD.gn3
-rw-r--r--chromium/media/gpu/v4l2/buffer_affinity_tracker.cc4
-rw-r--r--chromium/media/gpu/v4l2/buffer_affinity_tracker.h1
-rw-r--r--chromium/media/gpu/v4l2/v4l2_device.cc2
-rw-r--r--chromium/media/gpu/v4l2/v4l2_device_poller.cc4
-rw-r--r--chromium/media/gpu/v4l2/v4l2_image_processor_backend.cc52
-rw-r--r--chromium/media/gpu/v4l2/v4l2_jpeg_encode_accelerator.cc123
-rw-r--r--chromium/media/gpu/v4l2/v4l2_jpeg_encode_accelerator.h4
-rw-r--r--chromium/media/gpu/v4l2/v4l2_mjpeg_decode_accelerator.cc1
-rw-r--r--chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc1
-rw-r--r--chromium/media/gpu/v4l2/v4l2_vda_helpers.cc4
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decoder.cc13
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decoder.h2
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.cc82
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.h7
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.cc2
-rw-r--r--chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc25
-rw-r--r--chromium/media/gpu/vaapi/BUILD.gn17
-rw-r--r--chromium/media/gpu/vaapi/fuzzers/jpeg_decoder/BUILD.gn9
-rw-r--r--chromium/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.cc520
-rw-r--r--chromium/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.h87
-rw-r--r--chromium/media/gpu/vaapi/vaapi_common.cc17
-rw-r--r--chromium/media/gpu/vaapi/vaapi_common.h26
-rw-r--r--chromium/media/gpu/vaapi/vaapi_dmabuf_video_frame_mapper.cc2
-rw-r--r--chromium/media/gpu/vaapi/vaapi_image_decode_accelerator_worker.cc4
-rw-r--r--chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc10
-rw-r--r--chromium/media/gpu/vaapi/vaapi_jpeg_encoder.cc10
-rw-r--r--chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.cc11
-rw-r--r--chromium/media/gpu/vaapi/vaapi_picture_factory.cc133
-rw-r--r--chromium/media/gpu/vaapi/vaapi_picture_factory.h29
-rw-r--r--chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_angle.cc16
-rw-r--r--chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_angle.h3
-rw-r--r--chromium/media/gpu/vaapi/vaapi_picture_tfp.cc11
-rw-r--r--chromium/media/gpu/vaapi/vaapi_picture_tfp.h2
-rw-r--r--chromium/media/gpu/vaapi/vaapi_unittest.cc56
-rw-r--r--chromium/media/gpu/vaapi/vaapi_utils.h2
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc41
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc11
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decoder.cc45
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_decoder.h2
-rw-r--r--chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc12
-rw-r--r--chromium/media/gpu/vaapi/vaapi_wrapper.cc76
-rw-r--r--chromium/media/gpu/vaapi/vaapi_wrapper.h4
-rw-r--r--chromium/media/gpu/vaapi/vp9_encoder_unittest.cc2
-rw-r--r--chromium/media/gpu/video_decode_accelerator_perf_tests.cc8
-rw-r--r--chromium/media/gpu/video_decode_accelerator_tests.cc9
-rw-r--r--chromium/media/gpu/video_encode_accelerator_tests.cc18
-rw-r--r--chromium/media/gpu/video_encode_accelerator_unittest.cc19
-rw-r--r--chromium/media/gpu/vp9_decoder.cc5
-rw-r--r--chromium/media/gpu/windows/d3d11_copying_texture_wrapper.cc4
-rw-r--r--chromium/media/gpu/windows/d3d11_copying_texture_wrapper.h6
-rw-r--r--chromium/media/gpu/windows/d3d11_copying_texture_wrapper_unittest.cc24
-rw-r--r--chromium/media/gpu/windows/d3d11_decoder_configurator.cc2
-rw-r--r--chromium/media/gpu/windows/d3d11_decoder_configurator.h6
-rw-r--r--chromium/media/gpu/windows/d3d11_picture_buffer.h13
-rw-r--r--chromium/media/gpu/windows/d3d11_picture_buffer_unittest.cc48
-rw-r--r--chromium/media/gpu/windows/d3d11_texture_selector.cc58
-rw-r--r--chromium/media/gpu/windows/d3d11_texture_selector.h28
-rw-r--r--chromium/media/gpu/windows/d3d11_texture_selector_unittest.cc3
-rw-r--r--chromium/media/gpu/windows/d3d11_texture_wrapper.cc2
-rw-r--r--chromium/media/gpu/windows/d3d11_texture_wrapper.h7
-rw-r--r--chromium/media/gpu/windows/d3d11_texture_wrapper_unittest.cc2
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder.cc70
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder.h5
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder_client.h1
-rw-r--r--chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc18
-rw-r--r--chromium/media/gpu/windows/d3d11_video_processor_proxy.h11
-rw-r--r--chromium/media/gpu/windows/d3d11_video_processor_proxy_unittest.cc6
-rw-r--r--chromium/media/gpu/windows/d3d11_vp9_accelerator.cc2
-rw-r--r--chromium/media/gpu/windows/d3d11_vp9_picture.cc11
-rw-r--r--chromium/media/gpu/windows/d3d11_vp9_picture.h7
-rw-r--r--chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc126
-rw-r--r--chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h4
-rw-r--r--chromium/media/gpu/windows/supported_profile_helpers.cc18
-rw-r--r--chromium/media/gpu/windows/supported_profile_helpers_unittest.cc14
-rw-r--r--chromium/media/learning/impl/learning_session_impl.cc27
-rw-r--r--chromium/media/learning/impl/learning_session_impl_unittest.cc5
-rw-r--r--chromium/media/learning/impl/learning_task_controller_helper.cc10
-rw-r--r--chromium/media/learning/mojo/public/cpp/mojo_learning_task_controller_unittest.cc2
-rw-r--r--chromium/media/media_options.gni17
-rw-r--r--chromium/media/midi/BUILD.gn21
-rw-r--r--chromium/media/midi/DIR_METADATA12
-rw-r--r--chromium/media/midi/OWNERS3
-rw-r--r--chromium/media/midi/midi_manager_win.cc2
-rw-r--r--chromium/media/midi/task_service_unittest.cc2
-rw-r--r--chromium/media/mojo/clients/mojo_android_overlay_unittest.cc14
-rw-r--r--chromium/media/mojo/clients/mojo_audio_decoder.cc28
-rw-r--r--chromium/media/mojo/clients/mojo_audio_decoder.h11
-rw-r--r--chromium/media/mojo/clients/mojo_audio_decoder_unittest.cc2
-rw-r--r--chromium/media/mojo/clients/mojo_cdm.cc2
-rw-r--r--chromium/media/mojo/clients/mojo_cdm.h6
-rw-r--r--chromium/media/mojo/clients/mojo_cdm_factory.h2
-rw-r--r--chromium/media/mojo/clients/mojo_decoder_factory.cc6
-rw-r--r--chromium/media/mojo/clients/mojo_decoder_factory.h6
-rw-r--r--chromium/media/mojo/clients/mojo_decryptor.h2
-rw-r--r--chromium/media/mojo/clients/mojo_decryptor_unittest.cc44
-rw-r--r--chromium/media/mojo/clients/mojo_demuxer_stream_impl.h4
-rw-r--r--chromium/media/mojo/clients/mojo_media_log_service.h2
-rw-r--r--chromium/media/mojo/clients/mojo_renderer.cc4
-rw-r--r--chromium/media/mojo/clients/mojo_renderer_factory.h2
-rw-r--r--chromium/media/mojo/clients/mojo_video_decoder.cc38
-rw-r--r--chromium/media/mojo/clients/mojo_video_decoder.h8
-rw-r--r--chromium/media/mojo/clients/mojo_video_encode_accelerator.cc2
-rw-r--r--chromium/media/mojo/common/mojo_decoder_buffer_converter.cc10
-rw-r--r--chromium/media/mojo/mojom/media_types.mojom6
-rw-r--r--chromium/media/mojo/mojom/provision_fetcher.mojom5
-rw-r--r--chromium/media/mojo/mojom/speech_recognition_service.mojom18
-rw-r--r--chromium/media/mojo/mojom/video_decoder_config_mojom_traits.cc2
-rw-r--r--chromium/media/mojo/mojom/video_decoder_config_mojom_traits.h4
-rw-r--r--chromium/media/mojo/mojom/video_decoder_config_mojom_traits_unittest.cc2
-rw-r--r--chromium/media/mojo/mojom/video_frame_metadata_mojom_traits.cc2
-rw-r--r--chromium/media/mojo/mojom/video_frame_metadata_mojom_traits_unittest.cc2
-rw-r--r--chromium/media/mojo/mojom/video_frame_mojom_traits.cc6
-rw-r--r--chromium/media/mojo/mojom/video_frame_mojom_traits.h2
-rw-r--r--chromium/media/mojo/mojom/video_frame_mojom_traits_unittest.cc2
-rw-r--r--chromium/media/mojo/services/BUILD.gn6
-rw-r--r--chromium/media/mojo/services/cdm_service.cc2
-rw-r--r--chromium/media/mojo/services/cdm_service.h2
-rw-r--r--chromium/media/mojo/services/gpu_mojo_media_client.cc11
-rw-r--r--chromium/media/mojo/services/gpu_mojo_media_client.h2
-rw-r--r--chromium/media/mojo/services/interface_factory_impl.cc4
-rw-r--r--chromium/media/mojo/services/interface_factory_impl.h3
-rw-r--r--chromium/media/mojo/services/media_metrics_provider_unittest.cc2
-rw-r--r--chromium/media/mojo/services/media_resource_shim.cc5
-rw-r--r--chromium/media/mojo/services/media_service.h2
-rw-r--r--chromium/media/mojo/services/mojo_audio_decoder_service.cc2
-rw-r--r--chromium/media/mojo/services/mojo_audio_decoder_service.h3
-rw-r--r--chromium/media/mojo/services/mojo_audio_output_stream_provider_unittest.cc2
-rw-r--r--chromium/media/mojo/services/mojo_cdm_allocator.cc4
-rw-r--r--chromium/media/mojo/services/mojo_cdm_allocator.h2
-rw-r--r--chromium/media/mojo/services/mojo_cdm_promise.h2
-rw-r--r--chromium/media/mojo/services/mojo_cdm_service.h3
-rw-r--r--chromium/media/mojo/services/mojo_decryptor_service.h2
-rw-r--r--chromium/media/mojo/services/mojo_demuxer_stream_adapter.cc6
-rw-r--r--chromium/media/mojo/services/mojo_demuxer_stream_adapter.h4
-rw-r--r--chromium/media/mojo/services/mojo_media_drm_storage.cc2
-rw-r--r--chromium/media/mojo/services/mojo_provision_fetcher.cc2
-rw-r--r--chromium/media/mojo/services/mojo_provision_fetcher.h4
-rw-r--r--chromium/media/mojo/services/mojo_renderer_service.cc2
-rw-r--r--chromium/media/mojo/services/mojo_renderer_service.h4
-rw-r--r--chromium/media/mojo/services/mojo_video_decoder_service.cc2
-rw-r--r--chromium/media/mojo/services/mojo_video_encode_accelerator_service_unittest.cc2
-rw-r--r--chromium/media/mojo/services/playback_events_recorder.h3
-rw-r--r--chromium/media/mojo/services/test_mojo_media_client.h2
-rw-r--r--chromium/media/mojo/services/video_decode_perf_history_unittest.cc2
-rw-r--r--chromium/media/mojo/services/watch_time_recorder_unittest.cc2
-rw-r--r--chromium/media/muxers/DIR_METADATA11
-rw-r--r--chromium/media/muxers/OWNERS2
-rw-r--r--chromium/media/muxers/webm_muxer.cc40
-rw-r--r--chromium/media/muxers/webm_muxer.h11
-rw-r--r--chromium/media/remoting/DIR_METADATA11
-rw-r--r--chromium/media/remoting/OWNERS2
-rw-r--r--chromium/media/remoting/courier_renderer.cc1
-rw-r--r--chromium/media/remoting/courier_renderer.h2
-rw-r--r--chromium/media/remoting/end2end_test_renderer.cc2
-rw-r--r--chromium/media/remoting/fake_remoter.cc2
-rw-r--r--chromium/media/renderers/audio_renderer_impl.cc6
-rw-r--r--chromium/media/renderers/decrypting_renderer.cc2
-rw-r--r--chromium/media/renderers/decrypting_renderer_factory.h2
-rw-r--r--chromium/media/renderers/decrypting_renderer_unittest.cc2
-rw-r--r--chromium/media/renderers/default_decoder_factory.cc16
-rw-r--r--chromium/media/renderers/default_decoder_factory.h6
-rw-r--r--chromium/media/renderers/default_renderer_factory.h2
-rw-r--r--chromium/media/renderers/paint_canvas_video_renderer.cc67
-rw-r--r--chromium/media/renderers/renderer_impl.cc1
-rw-r--r--chromium/media/renderers/renderer_impl.h2
-rw-r--r--chromium/media/renderers/renderer_impl_unittest.cc2
-rw-r--r--chromium/media/renderers/video_frame_yuv_converter.cc67
-rw-r--r--chromium/media/renderers/video_resource_updater.cc37
-rw-r--r--chromium/media/renderers/video_resource_updater.h7
-rw-r--r--chromium/media/renderers/win/media_foundation_renderer.cc1
-rw-r--r--chromium/media/renderers/win/media_foundation_renderer_unittest.cc2
-rw-r--r--chromium/media/video/BUILD.gn10
-rw-r--r--chromium/media/video/gpu_memory_buffer_video_frame_pool.cc77
-rw-r--r--chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc38
-rw-r--r--chromium/media/video/gpu_video_accelerator_factories.h5
-rw-r--r--chromium/media/video/h265_parser.cc1681
-rw-r--r--chromium/media/video/h265_parser.h396
-rw-r--r--chromium/media/video/h265_parser_fuzzertest.cc63
-rw-r--r--chromium/media/video/h265_parser_unittest.cc392
-rw-r--r--chromium/media/video/mock_gpu_video_accelerator_factories.cc1
-rw-r--r--chromium/media/video/mock_gpu_video_accelerator_factories.h4
-rw-r--r--chromium/media/video/openh264_video_encoder.cc47
-rw-r--r--chromium/media/video/openh264_video_encoder.h4
-rw-r--r--chromium/media/video/video_decode_accelerator.h2
-rw-r--r--chromium/media/video/video_encode_accelerator.h2
-rw-r--r--chromium/media/video/video_encode_accelerator_adapter.cc337
-rw-r--r--chromium/media/video/video_encode_accelerator_adapter.h50
-rw-r--r--chromium/media/video/vpx_video_encoder.cc195
-rw-r--r--chromium/media/video/vpx_video_encoder.h8
-rw-r--r--chromium/media/webrtc/DIR_METADATA11
-rw-r--r--chromium/media/webrtc/OWNERS2
681 files changed, 16146 insertions, 5941 deletions
diff --git a/chromium/media/BUILD.gn b/chromium/media/BUILD.gn
index b34e92a644d..2247c8f4808 100644
--- a/chromium/media/BUILD.gn
+++ b/chromium/media/BUILD.gn
@@ -41,6 +41,7 @@ buildflag_header("media_buildflags") {
"ENABLE_PLATFORM_MPEG_H_AUDIO=$enable_platform_mpeg_h_audio",
"ENABLE_MSE_MPEG2TS_STREAM_PARSER=$enable_mse_mpeg2ts_stream_parser",
"USE_CHROMEOS_MEDIA_ACCELERATION=$use_vaapi||$use_v4l2_codec",
+ "USE_CHROMEOS_PROTECTED_MEDIA=$use_chromeos_protected_media",
"USE_PROPRIETARY_CODECS=$proprietary_codecs",
]
}
@@ -122,6 +123,13 @@ component("media") {
if (is_apple) {
public_deps += [ "//media/base/mac" ]
}
+
+ if (use_x11) {
+ deps += [ "//ui/base/x" ]
+ }
+ if (use_ozone) {
+ deps += [ "//ui/ozone" ]
+ }
}
# Note: This can't be a static_library since it does not have any sources.
@@ -191,7 +199,7 @@ test("media_unittests") {
"//media/fuchsia/audio:unittests",
"//media/fuchsia/cdm/service:unittests",
]
- manifest = "//media/fuchsia/media_unittests.cmx"
+ manifest = "//media/fuchsia/media_unittests.test-cmx"
}
if (enable_media_remoting) {
@@ -294,6 +302,7 @@ component("shared_memory_support") {
}
deps = [
"//base",
+ "//build:chromeos_buildflags",
"//ui/gfx/geometry",
]
}
diff --git a/chromium/media/DEPS b/chromium/media/DEPS
index 63da8c9d4e4..4ae2a118691 100644
--- a/chromium/media/DEPS
+++ b/chromium/media/DEPS
@@ -12,6 +12,7 @@ include_rules = [
"+mojo/public/cpp/system/platform_handle.h",
"+services/device/public",
"+services/viz/public/cpp/gpu/context_provider_command_buffer.h",
+ "+skia/ext",
"+third_party/dav1d",
"+third_party/ffmpeg",
"+third_party/libaom",
@@ -22,6 +23,7 @@ include_rules = [
"+third_party/opus",
"+third_party/skia",
"+ui/base/ui_base_features.h",
+ "+ui/base/x/x11_user_input_monitor.h",
"+ui/display",
"+ui/events",
"+ui/gfx",
diff --git a/chromium/media/DIR_METADATA b/chromium/media/DIR_METADATA
new file mode 100644
index 00000000000..9f769ce7f98
--- /dev/null
+++ b/chromium/media/DIR_METADATA
@@ -0,0 +1,11 @@
+# Metadata information for this directory.
+#
+# For more information on DIR_METADATA files, see:
+# https://source.chromium.org/chromium/infra/infra/+/master:go/src/infra/tools/dirmd/README.md
+#
+# For the schema of this file, see Metadata message:
+# https://source.chromium.org/chromium/infra/infra/+/master:go/src/infra/tools/dirmd/proto/dir_metadata.proto
+
+monorail {
+ component: "Internals>Media"
+} \ No newline at end of file
diff --git a/chromium/media/OWNERS b/chromium/media/OWNERS
index 2d26a058a9f..69199030a27 100644
--- a/chromium/media/OWNERS
+++ b/chromium/media/OWNERS
@@ -23,5 +23,3 @@ per-file *_fuchsia*=file://build/fuchsia/OWNERS
# For GpuMemoryBuffer-related changes:
per-file *gpu_memory_buffer*=dcastagna@chromium.org
-
-# COMPONENT: Internals>Media
diff --git a/chromium/media/audio/BUILD.gn b/chromium/media/audio/BUILD.gn
index 5e4dc90597d..d56fc217768 100644
--- a/chromium/media/audio/BUILD.gn
+++ b/chromium/media/audio/BUILD.gn
@@ -2,6 +2,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import("//build/config/chromeos/ui_mode.gni")
import("//build/config/linux/pkg_config.gni")
import("//media/media_options.gni")
import("//testing/libfuzzer/fuzzer_test.gni")
@@ -146,6 +147,7 @@ source_set("audio") {
deps = [
"//base",
"//build:chromecast_buildflags",
+ "//build:chromeos_buildflags",
"//media/base",
"//third_party/opus:opus",
"//url",
@@ -266,7 +268,7 @@ source_set("audio") {
"cras/cras_unified.h",
]
configs += [ ":libcras" ]
- if (is_chromeos) {
+ if (is_ash) {
sources += [
"cras/audio_manager_chromeos.cc",
"cras/audio_manager_chromeos.h",
@@ -276,6 +278,8 @@ source_set("audio") {
sources += [
"cras/audio_manager_cras.cc",
"cras/audio_manager_cras.h",
+ "cras/cras_util.cc",
+ "cras/cras_util.h",
]
}
}
@@ -359,6 +363,7 @@ static_library("test_support") {
deps = [
"//base",
"//base/test:test_support",
+ "//build:chromeos_buildflags",
# Do not add any other //media deps except this; it will automatically pull
# a dep on //media which is required to ensure test_support targets all use
@@ -400,6 +405,7 @@ source_set("unit_tests") {
deps = [
"//base",
"//base/test:test_support",
+ "//build:chromeos_buildflags",
"//media:test_support",
"//testing/gmock",
"//testing/gtest",
@@ -426,7 +432,7 @@ source_set("unit_tests") {
]
}
- if (is_chromeos || is_chromecast) {
+ if (is_ash || is_chromecast) {
sources += [
"test_data.h",
"wav_audio_handler_unittest.cc",
diff --git a/chromium/media/audio/DIR_METADATA b/chromium/media/audio/DIR_METADATA
new file mode 100644
index 00000000000..99031e09f7e
--- /dev/null
+++ b/chromium/media/audio/DIR_METADATA
@@ -0,0 +1,11 @@
+# Metadata information for this directory.
+#
+# For more information on DIR_METADATA files, see:
+# https://source.chromium.org/chromium/infra/infra/+/master:go/src/infra/tools/dirmd/README.md
+#
+# For the schema of this file, see Metadata message:
+# https://source.chromium.org/chromium/infra/infra/+/master:go/src/infra/tools/dirmd/proto/dir_metadata.proto
+
+monorail {
+ component: "Internals>Media>Audio"
+} \ No newline at end of file
diff --git a/chromium/media/audio/OWNERS b/chromium/media/audio/OWNERS
index db515cd2aea..e7ee28564ec 100644
--- a/chromium/media/audio/OWNERS
+++ b/chromium/media/audio/OWNERS
@@ -7,5 +7,3 @@ henrika@chromium.org
# Mirroring (and related glue) OWNERS.
miu@chromium.org
mfoltz@chromium.org
-
-# COMPONENT: Blink>Media>Audio
diff --git a/chromium/media/audio/alive_checker.cc b/chromium/media/audio/alive_checker.cc
index a369d4640e3..8ca4e936c37 100644
--- a/chromium/media/audio/alive_checker.cc
+++ b/chromium/media/audio/alive_checker.cc
@@ -7,7 +7,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/threading/thread_task_runner_handle.h"
namespace media {
diff --git a/chromium/media/audio/android/audio_track_output_stream.cc b/chromium/media/audio/android/audio_track_output_stream.cc
index d6d2e87cd84..5b98e2d923b 100644
--- a/chromium/media/audio/android/audio_track_output_stream.cc
+++ b/chromium/media/audio/android/audio_track_output_stream.cc
@@ -7,7 +7,7 @@
#include <cmath>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/single_thread_task_runner.h"
#include "base/time/default_tick_clock.h"
diff --git a/chromium/media/audio/audio_device_thread.cc b/chromium/media/audio/audio_device_thread.cc
index a9be7d23650..2340cc137b5 100644
--- a/chromium/media/audio/audio_device_thread.cc
+++ b/chromium/media/audio/audio_device_thread.cc
@@ -8,6 +8,7 @@
#include "base/check_op.h"
#include "base/system/sys_info.h"
+#include "build/build_config.h"
namespace media {
@@ -45,8 +46,17 @@ AudioDeviceThread::AudioDeviceThread(Callback* callback,
: callback_(callback),
thread_name_(thread_name),
socket_(std::move(socket)) {
- CHECK(base::PlatformThread::CreateWithPriority(0, this, &thread_handle_,
- thread_priority));
+#if defined(ARCH_CPU_X86)
+ // Audio threads don't need a huge stack, they don't have a message loop and
+ // they are used exclusively for polling the next frame of audio. See
+ // https://crbug.com/1141563 for discussion.
+ constexpr size_t kStackSize = 256 * 1024;
+#else
+ constexpr size_t kStackSize = 0; // Default.
+#endif
+
+ CHECK(base::PlatformThread::CreateWithPriority(
+ kStackSize, this, &thread_handle_, thread_priority));
DCHECK(!thread_handle_.is_null());
}
diff --git a/chromium/media/audio/audio_encoders_unittest.cc b/chromium/media/audio/audio_encoders_unittest.cc
index 78b4d4ef097..4cff1e26cf5 100644
--- a/chromium/media/audio/audio_encoders_unittest.cc
+++ b/chromium/media/audio/audio_encoders_unittest.cc
@@ -82,7 +82,7 @@ class AudioEncodersTest : public ::testing::TestWithParam<TestAudioParams> {
~AudioEncodersTest() override = default;
const AudioParameters& input_params() const { return input_params_; }
- const AudioEncoder* encoder() const { return encoder_.get(); }
+ AudioEncoder* encoder() const { return encoder_.get(); }
int encode_callback_count() const { return encode_callback_count_; }
void SetEncoder(std::unique_ptr<AudioEncoder> encoder) {
@@ -200,6 +200,14 @@ TEST_P(AudioEncodersTest, OpusEncoder) {
EXPECT_EQ(1, encode_callback_count());
+ // If there are remaining frames in the opus encoder FIFO, we need to flush
+ // them before we destroy the encoder. Flushing should trigger the encode
+ // callback and we should be able to decode the resulting encoded frames.
+ if (total_frames > frames_in_60_ms) {
+ encoder()->Flush();
+ EXPECT_EQ(2, encode_callback_count());
+ }
+
opus_decoder_destroy(opus_decoder);
opus_decoder = nullptr;
}
diff --git a/chromium/media/audio/audio_features.cc b/chromium/media/audio/audio_features.cc
index af2faa40fb4..1b78a09df80 100644
--- a/chromium/media/audio/audio_features.cc
+++ b/chromium/media/audio/audio_features.cc
@@ -4,6 +4,7 @@
#include "media/audio/audio_features.h"
#include "base/feature_list.h"
+#include "build/chromeos_buildflags.h"
namespace features {
@@ -31,14 +32,14 @@ const base::Feature kUseAAudioDriver{"UseAAudioDriver",
base::FEATURE_ENABLED_BY_DEFAULT};
#endif
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH) || BUILDFLAG(IS_LACROS)
const base::Feature kCrOSSystemAEC{"CrOSSystemAEC",
base::FEATURE_ENABLED_BY_DEFAULT};
const base::Feature kCrOSSystemAECDeactivatedGroups{
"CrOSSystemAECDeactivatedGroups", base::FEATURE_ENABLED_BY_DEFAULT};
#endif
-#if defined(OS_MAC) || defined(OS_CHROMEOS)
+#if defined(OS_MAC) || BUILDFLAG(IS_ASH)
const base::Feature kForceEnableSystemAec{"ForceEnableSystemAec",
base::FEATURE_DISABLED_BY_DEFAULT};
#endif
diff --git a/chromium/media/audio/audio_features.h b/chromium/media/audio/audio_features.h
index c8ed8cd5696..a250694690c 100644
--- a/chromium/media/audio/audio_features.h
+++ b/chromium/media/audio/audio_features.h
@@ -7,6 +7,7 @@
#include "base/feature_list.h"
#include "build/build_config.h"
+#include "build/chromeos_buildflags.h"
#include "media/base/media_export.h"
namespace features {
@@ -18,12 +19,12 @@ MEDIA_EXPORT extern const base::Feature kDumpOnAudioServiceHang;
MEDIA_EXPORT extern const base::Feature kUseAAudioDriver;
#endif
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH) || BUILDFLAG(IS_LACROS)
MEDIA_EXPORT extern const base::Feature kCrOSSystemAEC;
MEDIA_EXPORT extern const base::Feature kCrOSSystemAECDeactivatedGroups;
#endif
-#if defined(OS_MAC) || defined(OS_CHROMEOS)
+#if defined(OS_MAC) || BUILDFLAG(IS_ASH)
MEDIA_EXPORT extern const base::Feature kForceEnableSystemAec;
#endif
diff --git a/chromium/media/audio/audio_input_unittest.cc b/chromium/media/audio/audio_input_unittest.cc
index f603729856a..ddaba1dd45c 100644
--- a/chromium/media/audio/audio_input_unittest.cc
+++ b/chromium/media/audio/audio_input_unittest.cc
@@ -88,6 +88,10 @@ class AudioInputTest : public testing::Test {
// FuchsiaAudioCapturerStream. It implements AudioCapturerStream interface
// and runs in the renderer process.
return false;
+#elif defined(OS_MAC) && defined(ARCH_CPU_ARM64)
+ // TODO(crbug.com/1128458): macOS on ARM64 says it has devices, but won't
+ // let any of them be opened or listed.
+ return false;
#else
return AudioDeviceInfoAccessorForTests(audio_manager_.get())
.HasAudioInputDevices();
@@ -135,11 +139,12 @@ class AudioInputTest : public testing::Test {
params, AudioDeviceDescription::kDefaultDeviceId,
base::BindRepeating(&AudioInputTest::OnLogMessage,
base::Unretained(this)));
- EXPECT_TRUE(audio_input_stream_);
+ ASSERT_TRUE(audio_input_stream_);
}
void OpenAndClose() {
DCHECK(audio_manager_->GetTaskRunner()->BelongsToCurrentThread());
+ ASSERT_TRUE(audio_input_stream_);
EXPECT_TRUE(audio_input_stream_->Open());
audio_input_stream_->Close();
audio_input_stream_ = nullptr;
@@ -147,12 +152,14 @@ class AudioInputTest : public testing::Test {
void OpenAndStart(AudioInputStream::AudioInputCallback* sink) {
DCHECK(audio_manager_->GetTaskRunner()->BelongsToCurrentThread());
+ ASSERT_TRUE(audio_input_stream_);
EXPECT_TRUE(audio_input_stream_->Open());
audio_input_stream_->Start(sink);
}
void OpenStopAndClose() {
DCHECK(audio_manager_->GetTaskRunner()->BelongsToCurrentThread());
+ ASSERT_TRUE(audio_input_stream_);
EXPECT_TRUE(audio_input_stream_->Open());
audio_input_stream_->Stop();
audio_input_stream_->Close();
@@ -161,6 +168,7 @@ class AudioInputTest : public testing::Test {
void StopAndClose() {
DCHECK(audio_manager_->GetTaskRunner()->BelongsToCurrentThread());
+ ASSERT_TRUE(audio_input_stream_);
audio_input_stream_->Stop();
audio_input_stream_->Close();
audio_input_stream_ = nullptr;
diff --git a/chromium/media/audio/audio_manager.cc b/chromium/media/audio/audio_manager.cc
index 710130e7b95..77a0d4326a3 100644
--- a/chromium/media/audio/audio_manager.cc
+++ b/chromium/media/audio/audio_manager.cc
@@ -9,7 +9,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/command_line.h"
#include "base/logging.h"
#include "base/macros.h"
diff --git a/chromium/media/audio/audio_manager_base.cc b/chromium/media/audio/audio_manager_base.cc
index ff38a76975b..ae5cec1e71f 100644
--- a/chromium/media/audio/audio_manager_base.cc
+++ b/chromium/media/audio/audio_manager_base.cc
@@ -7,7 +7,7 @@
#include <memory>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/command_line.h"
#include "base/macros.h"
#include "base/metrics/histogram_macros.h"
@@ -27,6 +27,7 @@
#include "media/base/media_switches.h"
#include "base/logging.h"
+#include "build/chromeos_buildflags.h"
#include "media/audio/audio_input_stream_data_interceptor.h"
namespace media {
@@ -342,7 +343,7 @@ AudioOutputStream* AudioManagerBase::MakeAudioOutputStreamProxy(
std::string output_device_id =
AudioDeviceDescription::IsDefaultDevice(device_id)
?
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
// On ChromeOS, it is expected that, if the default device is given,
// no specific device ID should be used since the actual output device
// should change dynamically if the system default device changes.
diff --git a/chromium/media/audio/audio_manager_unittest.cc b/chromium/media/audio/audio_manager_unittest.cc
index 1dd8e73618c..f3dad5c1534 100644
--- a/chromium/media/audio/audio_manager_unittest.cc
+++ b/chromium/media/audio/audio_manager_unittest.cc
@@ -22,6 +22,7 @@
#include "base/test/test_message_loop.h"
#include "base/threading/thread_task_runner_handle.h"
#include "build/build_config.h"
+#include "build/chromeos_buildflags.h"
#include "media/audio/audio_device_description.h"
#include "media/audio/audio_device_info_accessor_for_tests.h"
#include "media/audio/audio_device_name.h"
@@ -54,7 +55,7 @@
#include "media/audio/pulse/pulse_util.h"
#endif // defined(USE_PULSEAUDIO)
-#if defined(USE_CRAS) && defined(OS_CHROMEOS)
+#if defined(USE_CRAS) && BUILDFLAG(IS_ASH)
#include "chromeos/audio/audio_devices_pref_handler_stub.h"
#include "chromeos/audio/cras_audio_handler.h"
#include "chromeos/dbus/audio/fake_cras_audio_client.h"
@@ -100,7 +101,7 @@ struct TestAudioManagerFactory<std::nullptr_t> {
}
};
-#if defined(USE_CRAS) && defined(OS_CHROMEOS)
+#if defined(USE_CRAS) && BUILDFLAG(IS_ASH)
using chromeos::AudioNode;
using chromeos::AudioNodeList;
@@ -296,7 +297,7 @@ class AudioManagerTest : public ::testing::Test {
device_info_accessor_->GetAssociatedOutputDeviceID(input_device_id);
}
-#if defined(USE_CRAS) && defined(OS_CHROMEOS)
+#if defined(USE_CRAS) && BUILDFLAG(IS_ASH)
void TearDown() override {
chromeos::CrasAudioHandler::Shutdown();
audio_pref_handler_ = nullptr;
@@ -333,7 +334,7 @@ class AudioManagerTest : public ::testing::Test {
AudioParameters::HardwareCapabilities(limits::kMinAudioBufferSize,
limits::kMaxAudioBufferSize));
}
-#endif // defined(USE_CRAS) && defined(OS_CHROMEOS)
+#endif // defined(USE_CRAS) && BUILDFLAG(IS_ASH)
protected:
AudioManagerTest() {
@@ -377,7 +378,7 @@ class AudioManagerTest : public ::testing::Test {
}
}
-#if defined(USE_CRAS) && defined(OS_CHROMEOS)
+#if defined(USE_CRAS) && BUILDFLAG(IS_ASH)
// Helper method for (USE_CRAS) which verifies that the device list starts
// with a valid default record followed by physical device names.
static void CheckDeviceDescriptionsCras(
@@ -433,10 +434,16 @@ class AudioManagerTest : public ::testing::Test {
EXPECT_NE(it, device_descriptions.end());
return it->group_id;
}
-#endif // defined(USE_CRAS) && defined(OS_CHROMEOS)
+#endif // defined(USE_CRAS) && BUILDFLAG(IS_ASH)
bool InputDevicesAvailable() {
+#if defined(OS_MAC) && defined(ARCH_CPU_ARM64)
+ // TODO(crbug.com/1128458): macOS on ARM64 says it has devices, but won't
+ // let any of them be opened or listed.
+ return false;
+#else
return device_info_accessor_->HasAudioInputDevices();
+#endif
}
bool OutputDevicesAvailable() {
return device_info_accessor_->HasAudioOutputDevices();
@@ -468,13 +475,13 @@ class AudioManagerTest : public ::testing::Test {
std::unique_ptr<AudioManager> audio_manager_;
std::unique_ptr<AudioDeviceInfoAccessorForTests> device_info_accessor_;
-#if defined(USE_CRAS) && defined(OS_CHROMEOS)
+#if defined(USE_CRAS) && BUILDFLAG(IS_ASH)
chromeos::CrasAudioHandler* cras_audio_handler_ = nullptr; // Not owned.
scoped_refptr<chromeos::AudioDevicesPrefHandlerStub> audio_pref_handler_;
-#endif // defined(USE_CRAS) && defined(OS_CHROMEOS)
+#endif // defined(USE_CRAS) && BUILDFLAG(IS_ASH)
};
-#if defined(USE_CRAS) && defined(OS_CHROMEOS)
+#if defined(USE_CRAS) && BUILDFLAG(IS_ASH)
TEST_F(AudioManagerTest, EnumerateInputDevicesCras) {
// Setup the devices without internal mic, so that it doesn't exist
// beamforming capable mic.
@@ -699,7 +706,7 @@ TEST_F(AudioManagerTest, LookupDefaultOutputDeviceWithProperGroupId) {
EXPECT_EQ(default_device_group_id, speaker_group_id);
EXPECT_EQ(base::NumberToString(kInternalSpeaker.id), new_default_device_id);
}
-#else // !(defined(USE_CRAS) && defined(OS_CHROMEOS))
+#else // !(defined(USE_CRAS) && BUILDFLAG(IS_ASH))
TEST_F(AudioManagerTest, HandleDefaultDeviceIDs) {
// Use a fake manager so we can makeup device ids, this will still use the
@@ -843,7 +850,7 @@ TEST_F(AudioManagerTest, GetAssociatedOutputDeviceID) {
EXPECT_TRUE(found_an_associated_device);
#endif // defined(OS_WIN) || defined(OS_MAC)
}
-#endif // defined(USE_CRAS) && defined(OS_CHROMEOS)
+#endif // defined(USE_CRAS) && BUILDFLAG(IS_ASH)
class TestAudioManager : public FakeAudioManager {
// For testing the default implementation of GetGroupId(Input|Output)
@@ -1006,7 +1013,7 @@ TEST_F(AudioManagerTest, CheckMinMaxAudioBufferSizeCallbacks) {
#if defined(OS_MAC)
CreateAudioManagerForTesting<AudioManagerMac>();
-#elif defined(USE_CRAS) && defined(OS_CHROMEOS)
+#elif defined(USE_CRAS) && BUILDFLAG(IS_ASH)
CreateAudioManagerForTesting<AudioManagerChromeOS>();
#endif
diff --git a/chromium/media/audio/audio_opus_encoder.cc b/chromium/media/audio/audio_opus_encoder.cc
index 28616532e7c..fbc2ed52411 100644
--- a/chromium/media/audio/audio_opus_encoder.cc
+++ b/chromium/media/audio/audio_opus_encoder.cc
@@ -207,7 +207,10 @@ AudioOpusEncoder::AudioOpusEncoder(const AudioParameters& input_params,
converted_params_.frames_per_buffer()));
}
-AudioOpusEncoder::~AudioOpusEncoder() = default;
+AudioOpusEncoder::~AudioOpusEncoder() {
+ DCHECK_EQ(fifo_.queued_frames(), 0)
+ << "Must flush the encoder before destroying to avoid dropping frames.";
+}
void AudioOpusEncoder::EncodeAudioImpl(const AudioBus& audio_bus,
base::TimeTicks capture_time) {
@@ -220,6 +223,20 @@ void AudioOpusEncoder::EncodeAudioImpl(const AudioBus& audio_bus,
fifo_.Push(audio_bus);
}
+void AudioOpusEncoder::FlushImpl() {
+ // Initializing the opus encoder may have failed.
+ if (!opus_encoder_)
+ return;
+
+ // This is needed to correctly compute the timestamp, since the number of
+ // frames of |output_bus| provided to OnFifoOutput() will always be equal to
+ // the full frames_per_buffer(), as the fifo's Flush() will pad the remaining
+ // empty frames with zeros.
+ number_of_flushed_frames_ = fifo_.queued_frames();
+ fifo_.Flush();
+ number_of_flushed_frames_ = base::nullopt;
+}
+
void AudioOpusEncoder::OnFifoOutput(const AudioBus& output_bus,
int frame_delay) {
// Provides input to the converter from |output_bus| within this scope only.
@@ -236,7 +253,9 @@ void AudioOpusEncoder::OnFifoOutput(const AudioBus& output_bus,
DCHECK_GT(encoded_data_size, 1u);
encode_callback().Run(EncodedAudioBuffer(
converted_params_, std::move(encoded_data), encoded_data_size,
- ComputeTimestamp(output_bus.frames(), last_capture_time())));
+ ComputeTimestamp(
+ number_of_flushed_frames_.value_or(output_bus.frames()),
+ last_capture_time())));
}
}
diff --git a/chromium/media/audio/audio_opus_encoder.h b/chromium/media/audio/audio_opus_encoder.h
index d4919abdf20..42983c9862f 100644
--- a/chromium/media/audio/audio_opus_encoder.h
+++ b/chromium/media/audio/audio_opus_encoder.h
@@ -8,6 +8,7 @@
#include <memory>
#include <vector>
+#include "base/optional.h"
#include "media/base/audio_bus.h"
#include "media/base/audio_converter.h"
#include "media/base/audio_encoder.h"
@@ -36,6 +37,7 @@ class MEDIA_EXPORT AudioOpusEncoder : public AudioEncoder {
// AudioEncoder:
void EncodeAudioImpl(const AudioBus& audio_bus,
base::TimeTicks capture_time) override;
+ void FlushImpl() override;
private:
// Called synchronously by |fifo_| once enough audio frames have been
@@ -70,6 +72,11 @@ class MEDIA_EXPORT AudioOpusEncoder : public AudioEncoder {
// The actual libopus encoder instance. This is nullptr if creating the
// encoder fails.
OwnedOpusEncoder opus_encoder_;
+
+ // If FlushImpl() was called while |fifo_| has some frames but not full yet,
+ // this will be the number of flushed frames, which is used to compute the
+ // timestamp provided in the output |EncodedAudioBuffer|.
+ base::Optional<int> number_of_flushed_frames_;
};
} // namespace media
diff --git a/chromium/media/audio/audio_output_device_unittest.cc b/chromium/media/audio/audio_output_device_unittest.cc
index a2b7039e7fb..ed5547fe088 100644
--- a/chromium/media/audio/audio_output_device_unittest.cc
+++ b/chromium/media/audio/audio_output_device_unittest.cc
@@ -10,8 +10,8 @@
#include <vector>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback.h"
+#include "base/callback_helpers.h"
#include "base/macros.h"
#include "base/memory/ptr_util.h"
#include "base/memory/shared_memory_mapping.h"
diff --git a/chromium/media/audio/audio_output_dispatcher_impl.cc b/chromium/media/audio/audio_output_dispatcher_impl.cc
index 2a46de2969e..439331d5edd 100644
--- a/chromium/media/audio/audio_output_dispatcher_impl.cc
+++ b/chromium/media/audio/audio_output_dispatcher_impl.cc
@@ -33,6 +33,7 @@ AudioOutputDispatcherImpl::AudioOutputDispatcherImpl(
&AudioOutputDispatcherImpl::CloseAllIdleStreams),
audio_stream_id_(0) {
DCHECK(audio_manager->GetTaskRunner()->BelongsToCurrentThread());
+ audio_manager->AddOutputDeviceChangeListener(this);
}
AudioOutputDispatcherImpl::~AudioOutputDispatcherImpl() {
@@ -47,6 +48,8 @@ AudioOutputDispatcherImpl::~AudioOutputDispatcherImpl() {
// invalidating any outstanding tasks upon its destruction.
CloseAllIdleStreams();
+ audio_manager()->RemoveOutputDeviceChangeListener(this);
+
// All idle physical streams must have been closed during shutdown.
CHECK(idle_streams_.empty());
}
@@ -134,6 +137,17 @@ void AudioOutputDispatcherImpl::CloseStream(AudioOutputProxy* stream_proxy) {
// StopStream().
void AudioOutputDispatcherImpl::FlushStream(AudioOutputProxy* stream_proxy) {}
+void AudioOutputDispatcherImpl::OnDeviceChange() {
+ DCHECK(audio_manager()->GetTaskRunner()->BelongsToCurrentThread());
+
+ // We don't want to end up reusing streams which were opened for the wrong
+ // default device. We need to post this task so it runs after device changes
+ // have been sent to all listeners and they've had time to close streams.
+ audio_manager()->GetTaskRunner()->PostTask(
+ FROM_HERE, base::BindOnce(&AudioOutputDispatcherImpl::CloseAllIdleStreams,
+ weak_factory_.GetWeakPtr()));
+}
+
bool AudioOutputDispatcherImpl::HasOutputProxies() const {
DCHECK(audio_manager()->GetTaskRunner()->BelongsToCurrentThread());
return idle_proxies_ || !proxy_to_physical_map_.empty();
diff --git a/chromium/media/audio/audio_output_dispatcher_impl.h b/chromium/media/audio/audio_output_dispatcher_impl.h
index 994a1d258a0..01b200d3a75 100644
--- a/chromium/media/audio/audio_output_dispatcher_impl.h
+++ b/chromium/media/audio/audio_output_dispatcher_impl.h
@@ -23,13 +23,16 @@
#include "base/macros.h"
#include "base/timer/timer.h"
#include "media/audio/audio_io.h"
+#include "media/audio/audio_manager.h"
#include "media/audio/audio_output_dispatcher.h"
#include "media/base/audio_parameters.h"
namespace media {
class AudioLog;
-class MEDIA_EXPORT AudioOutputDispatcherImpl : public AudioOutputDispatcher {
+class MEDIA_EXPORT AudioOutputDispatcherImpl
+ : public AudioOutputDispatcher,
+ public AudioManager::AudioDeviceListener {
public:
// |close_delay| specifies delay after the stream is idle until the audio
// device is closed.
@@ -49,6 +52,9 @@ class MEDIA_EXPORT AudioOutputDispatcherImpl : public AudioOutputDispatcher {
void CloseStream(AudioOutputProxy* stream_proxy) override;
void FlushStream(AudioOutputProxy* stream_proxy) override;
+ // AudioDeviceListener implementation.
+ void OnDeviceChange() override;
+
// Returns true if there are any open AudioOutputProxy objects.
bool HasOutputProxies() const;
diff --git a/chromium/media/audio/audio_output_proxy_unittest.cc b/chromium/media/audio/audio_output_proxy_unittest.cc
index 7e92fa3dfd8..b3cc8e1b1c4 100644
--- a/chromium/media/audio/audio_output_proxy_unittest.cc
+++ b/chromium/media/audio/audio_output_proxy_unittest.cc
@@ -640,6 +640,32 @@ TEST_F(AudioOutputResamplerTest, DispatcherDestroyed_AfterStop) {
DispatcherDestroyed_AfterStop(std::move(resampler_));
}
+TEST_F(AudioOutputProxyTest, DispatcherDeviceChangeClosesIdleStreams) {
+ // Set close delay so long that it triggers a test timeout if relied upon.
+ InitDispatcher(base::TimeDelta::FromSeconds(1000));
+
+ MockAudioOutputStream stream(&manager_, params_);
+
+ EXPECT_CALL(manager(), MakeAudioOutputStream(_, _, _))
+ .WillOnce(Return(&stream));
+ EXPECT_CALL(stream, Open()).WillOnce(Return(true));
+
+ AudioOutputProxy* proxy = dispatcher_impl_->CreateStreamProxy();
+ EXPECT_TRUE(proxy->Open());
+
+ // Close the stream and verify it doesn't happen immediately.
+ proxy->Close();
+ Mock::VerifyAndClear(&stream);
+
+ // This should trigger a true close on the stream.
+ dispatcher_impl_->OnDeviceChange();
+
+ base::RunLoop run_loop;
+ EXPECT_CALL(stream, Close())
+ .WillOnce(testing::InvokeWithoutArgs(&run_loop, &base::RunLoop::Quit));
+ run_loop.Run();
+}
+
// Simulate AudioOutputStream::Create() failure with a low latency stream and
// ensure AudioOutputResampler falls back to the high latency path.
TEST_F(AudioOutputResamplerTest, LowLatencyCreateFailedFallback) {
diff --git a/chromium/media/audio/audio_output_resampler.cc b/chromium/media/audio/audio_output_resampler.cc
index 49ed554f8df..ffcece8d70c 100644
--- a/chromium/media/audio/audio_output_resampler.cc
+++ b/chromium/media/audio/audio_output_resampler.cc
@@ -12,7 +12,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/compiler_specific.h"
#include "base/logging.h"
#include "base/macros.h"
diff --git a/chromium/media/audio/audio_output_stream_sink.cc b/chromium/media/audio/audio_output_stream_sink.cc
index 682d05e4813..1dff40e1449 100644
--- a/chromium/media/audio/audio_output_stream_sink.cc
+++ b/chromium/media/audio/audio_output_stream_sink.cc
@@ -8,7 +8,7 @@
#include <cmath>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/location.h"
#include "base/threading/sequenced_task_runner_handle.h"
#include "media/audio/audio_manager.h"
diff --git a/chromium/media/audio/audio_output_unittest.cc b/chromium/media/audio/audio_output_unittest.cc
index b4bfe90b862..c69ba66da6f 100644
--- a/chromium/media/audio/audio_output_unittest.cc
+++ b/chromium/media/audio/audio_output_unittest.cc
@@ -9,7 +9,7 @@
#include "base/command_line.h"
#include "base/memory/aligned_memory.h"
#include "base/run_loop.h"
-#include "base/test/bind_test_util.h"
+#include "base/test/bind.h"
#include "base/test/scoped_feature_list.h"
#include "base/test/task_environment.h"
#include "base/test/test_timeouts.h"
diff --git a/chromium/media/audio/audio_pcm_encoder.cc b/chromium/media/audio/audio_pcm_encoder.cc
index 103378a3ab5..a3722ef103b 100644
--- a/chromium/media/audio/audio_pcm_encoder.cc
+++ b/chromium/media/audio/audio_pcm_encoder.cc
@@ -27,4 +27,8 @@ void AudioPcmEncoder::EncodeAudioImpl(const AudioBus& audio_bus,
ComputeTimestamp(audio_bus.frames(), capture_time)));
}
+void AudioPcmEncoder::FlushImpl() {
+ // No buffering is done here, so do nothing.
+}
+
} // namespace media
diff --git a/chromium/media/audio/audio_pcm_encoder.h b/chromium/media/audio/audio_pcm_encoder.h
index af3a519eec0..7bd0da1dc39 100644
--- a/chromium/media/audio/audio_pcm_encoder.h
+++ b/chromium/media/audio/audio_pcm_encoder.h
@@ -24,6 +24,7 @@ class MEDIA_EXPORT AudioPcmEncoder : public AudioEncoder {
// AudioEncoder:
void EncodeAudioImpl(const AudioBus& audio_bus,
base::TimeTicks capture_time) override;
+ void FlushImpl() override;
};
} // namespace media
diff --git a/chromium/media/audio/cras/audio_manager_chromeos.cc b/chromium/media/audio/cras/audio_manager_chromeos.cc
index f2fbb4d995b..cef4e0a3b61 100644
--- a/chromium/media/audio/cras/audio_manager_chromeos.cc
+++ b/chromium/media/audio/cras/audio_manager_chromeos.cc
@@ -537,4 +537,8 @@ void AudioManagerChromeOS::WaitEventOrShutdown(base::WaitableEvent* event) {
base::WaitableEvent::WaitMany(waitables, base::size(waitables));
}
+enum CRAS_CLIENT_TYPE AudioManagerChromeOS::GetClientType() {
+ return CRAS_CLIENT_TYPE_CHROME;
+}
+
} // namespace media
diff --git a/chromium/media/audio/cras/audio_manager_chromeos.h b/chromium/media/audio/cras/audio_manager_chromeos.h
index be0282d05db..72fd689951c 100644
--- a/chromium/media/audio/cras/audio_manager_chromeos.h
+++ b/chromium/media/audio/cras/audio_manager_chromeos.h
@@ -41,8 +41,9 @@ class MEDIA_EXPORT AudioManagerChromeOS : public AudioManagerCrasBase {
std::string GetGroupIDInput(const std::string& input_device_id) override;
bool Shutdown() override;
- // AudioManagerCras implementation.
+ // AudioManagerCrasBase implementation.
bool IsDefault(const std::string& device_id, bool is_input) override;
+ enum CRAS_CLIENT_TYPE GetClientType() override;
protected:
AudioParameters GetPreferredOutputStreamParameters(
diff --git a/chromium/media/audio/cras/audio_manager_cras.cc b/chromium/media/audio/cras/audio_manager_cras.cc
index 0b8bf316fd7..a78ad96743e 100644
--- a/chromium/media/audio/cras/audio_manager_cras.cc
+++ b/chromium/media/audio/cras/audio_manager_cras.cc
@@ -24,6 +24,7 @@
#include "media/audio/audio_features.h"
#include "media/audio/cras/cras_input.h"
#include "media/audio/cras/cras_unified.h"
+#include "media/audio/cras/cras_util.h"
#include "media/base/channel_layout.h"
#include "media/base/limits.h"
#include "media/base/localized_strings.h"
@@ -47,7 +48,7 @@ bool AudioManagerCras::HasAudioOutputDevices() {
}
bool AudioManagerCras::HasAudioInputDevices() {
- return true;
+ return !CrasGetAudioDevices(DeviceType::kInput).empty();
}
AudioManagerCras::AudioManagerCras(
@@ -64,11 +65,17 @@ AudioManagerCras::~AudioManagerCras() = default;
void AudioManagerCras::GetAudioInputDeviceNames(
AudioDeviceNames* device_names) {
device_names->push_back(AudioDeviceName::CreateDefault());
+ for (const auto& device : CrasGetAudioDevices(DeviceType::kInput)) {
+ device_names->emplace_back(device.name, base::NumberToString(device.id));
+ }
}
void AudioManagerCras::GetAudioOutputDeviceNames(
AudioDeviceNames* device_names) {
device_names->push_back(AudioDeviceName::CreateDefault());
+ for (const auto& device : CrasGetAudioDevices(DeviceType::kOutput)) {
+ device_names->emplace_back(device.name, base::NumberToString(device.id));
+ }
}
AudioParameters AudioManagerCras::GetInputStreamParameters(
@@ -84,6 +91,28 @@ AudioParameters AudioManagerCras::GetInputStreamParameters(
kDefaultSampleRate, buffer_size,
AudioParameters::HardwareCapabilities(limits::kMinAudioBufferSize,
limits::kMaxAudioBufferSize));
+
+ // Allow experimentation with system echo cancellation with all devices,
+ // but enable it by default on devices that actually support it.
+ params.set_effects(params.effects() |
+ AudioParameters::EXPERIMENTAL_ECHO_CANCELLER);
+ if (base::FeatureList::IsEnabled(features::kCrOSSystemAEC)) {
+ if (CrasGetAecSupported()) {
+ const int32_t aec_group_id = CrasGetAecGroupId();
+
+ // Check if the system AEC has a group ID which is flagged to be
+ // deactivated by the field trial.
+ const bool system_aec_deactivated =
+ base::GetFieldTrialParamByFeatureAsBool(
+ features::kCrOSSystemAECDeactivatedGroups,
+ base::NumberToString(aec_group_id), false);
+
+ if (!system_aec_deactivated) {
+ params.set_effects(params.effects() | AudioParameters::ECHO_CANCELLER);
+ }
+ }
+ }
+
return params;
}
@@ -135,7 +164,11 @@ uint64_t AudioManagerCras::GetPrimaryActiveOutputNode() {
}
bool AudioManagerCras::IsDefault(const std::string& device_id, bool is_input) {
- return true;
+ return device_id == AudioDeviceDescription::kDefaultDeviceId;
+}
+
+enum CRAS_CLIENT_TYPE AudioManagerCras::GetClientType() {
+ return CRAS_CLIENT_TYPE_LACROS;
}
} // namespace media
diff --git a/chromium/media/audio/cras/audio_manager_cras.h b/chromium/media/audio/cras/audio_manager_cras.h
index ef9c9452ed4..3afb730c530 100644
--- a/chromium/media/audio/cras/audio_manager_cras.h
+++ b/chromium/media/audio/cras/audio_manager_cras.h
@@ -34,8 +34,9 @@ class MEDIA_EXPORT AudioManagerCras : public AudioManagerCrasBase {
std::string GetDefaultInputDeviceID() override;
std::string GetDefaultOutputDeviceID() override;
- // AudioManagerCras implementation.
+ // AudioManagerCrasBase implementation.
bool IsDefault(const std::string& device_id, bool is_input) override;
+ enum CRAS_CLIENT_TYPE GetClientType() override;
protected:
AudioParameters GetPreferredOutputStreamParameters(
diff --git a/chromium/media/audio/cras/audio_manager_cras_base.h b/chromium/media/audio/cras/audio_manager_cras_base.h
index eff016fe122..f9c70233525 100644
--- a/chromium/media/audio/cras/audio_manager_cras_base.h
+++ b/chromium/media/audio/cras/audio_manager_cras_base.h
@@ -48,6 +48,9 @@ class MEDIA_EXPORT AudioManagerCrasBase : public AudioManagerBase {
// Set |is_input| to true for capture devices, false for output.
virtual bool IsDefault(const std::string& device_id, bool is_input) = 0;
+ // Returns CRAS client type.
+ virtual enum CRAS_CLIENT_TYPE GetClientType() = 0;
+
protected:
// Called by MakeLinearOutputStream and MakeLowLatencyOutputStream.
AudioOutputStream* MakeOutputStream(const AudioParameters& params,
diff --git a/chromium/media/audio/cras/cras_input.cc b/chromium/media/audio/cras/cras_input.cc
index 539b327b480..a6a4ef9229d 100644
--- a/chromium/media/audio/cras/cras_input.cc
+++ b/chromium/media/audio/cras/cras_input.cc
@@ -210,7 +210,7 @@ void CrasInputStream::Start(AudioInputCallback* callback) {
}
cras_client_stream_params_set_client_type(stream_params,
- CRAS_CLIENT_TYPE_CHROME);
+ audio_manager_->GetClientType());
if (UseCrasAec())
cras_client_stream_params_enable_aec(stream_params);
diff --git a/chromium/media/audio/cras/cras_unified.cc b/chromium/media/audio/cras/cras_unified.cc
index d946fb871fe..8158ed03f72 100644
--- a/chromium/media/audio/cras/cras_unified.cc
+++ b/chromium/media/audio/cras/cras_unified.cc
@@ -203,7 +203,7 @@ void CrasUnifiedStream::Start(AudioSourceCallback* callback) {
}
cras_client_stream_params_set_client_type(stream_params,
- CRAS_CLIENT_TYPE_CHROME);
+ manager_->GetClientType());
// Before starting the stream, save the number of bytes in a frame for use in
// the callback.
diff --git a/chromium/media/audio/cras/cras_util.cc b/chromium/media/audio/cras/cras_util.cc
new file mode 100644
index 00000000000..f17766d5e38
--- /dev/null
+++ b/chromium/media/audio/cras/cras_util.cc
@@ -0,0 +1,132 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/audio/cras/cras_util.h"
+
+#include "base/logging.h"
+#include "base/stl_util.h"
+#include "base/strings/string_number_conversions.h"
+#include "base/time/time.h"
+#include "media/audio/audio_device_description.h"
+#include "media/audio/cras/audio_manager_cras_base.h"
+
+namespace media {
+
+namespace {
+
+// Returns whether an input or output audio device is for simple usage like
+// playback or recording for the user. In contrast, audio devices such as
+// loopback, always-on keyword recognition (HOTWORD), and keyboard mic are not
+// for simple usage.
+// One special case is ALSA loopback device, which will only exist under
+// testing. We want it visible to users for e2e tests.
+bool IsForSimpleUsage(uint32_t type) {
+ return type == CRAS_NODE_TYPE_INTERNAL_SPEAKER ||
+ type == CRAS_NODE_TYPE_HEADPHONE || type == CRAS_NODE_TYPE_HDMI ||
+ type == CRAS_NODE_TYPE_LINEOUT || type == CRAS_NODE_TYPE_MIC ||
+ type == CRAS_NODE_TYPE_BLUETOOTH_NB_MIC ||
+ type == CRAS_NODE_TYPE_USB || type == CRAS_NODE_TYPE_BLUETOOTH ||
+ type == CRAS_NODE_TYPE_ALSA_LOOPBACK;
+}
+
+// Connects to the CRAS server.
+cras_client* CrasConnect() {
+ cras_client* client;
+ if (cras_client_create(&client)) {
+ LOG(ERROR) << "Couldn't create CRAS client.\n";
+ return nullptr;
+ }
+ if (cras_client_connect(client)) {
+ LOG(ERROR) << "Couldn't connect CRAS client.\n";
+ cras_client_destroy(client);
+ return nullptr;
+ }
+ return client;
+}
+
+// Disconnects from the CRAS server.
+void CrasDisconnect(cras_client** client) {
+ if (*client) {
+ cras_client_stop(*client);
+ cras_client_destroy(*client);
+ *client = nullptr;
+ }
+}
+
+} // namespace
+
+CrasDevice::CrasDevice() = default;
+
+CrasDevice::CrasDevice(const cras_ionode_info* node,
+ const cras_iodev_info* dev,
+ DeviceType type)
+ : type(type) {
+ id = cras_make_node_id(node->iodev_idx, node->ionode_idx);
+ name = std::string(node->name);
+ // If the name of node is not meaningful, use the device name instead.
+ if (name.empty() || name == "(default)")
+ name = dev->name;
+}
+
+std::vector<CrasDevice> CrasGetAudioDevices(DeviceType type) {
+ std::vector<CrasDevice> devices;
+
+ cras_client* client = CrasConnect();
+ if (!client)
+ return devices;
+
+ struct cras_iodev_info devs[CRAS_MAX_IODEVS];
+ struct cras_ionode_info nodes[CRAS_MAX_IONODES];
+ size_t num_devs = CRAS_MAX_IODEVS, num_nodes = CRAS_MAX_IONODES;
+ int rc;
+
+ if (type == DeviceType::kInput) {
+ rc = cras_client_get_input_devices(client, devs, nodes, &num_devs,
+ &num_nodes);
+ } else {
+ rc = cras_client_get_output_devices(client, devs, nodes, &num_devs,
+ &num_nodes);
+ }
+ if (rc < 0) {
+ LOG(ERROR) << "Failed to get devices: " << std::strerror(rc);
+ return devices;
+ }
+
+ for (size_t i = 0; i < num_nodes; i++) {
+ if (!nodes[i].plugged || !IsForSimpleUsage(nodes[i].type_enum))
+ continue;
+ for (size_t j = 0; j < num_devs; j++) {
+ if (nodes[i].iodev_idx == devs[j].idx) {
+ devices.emplace_back(&nodes[i], &devs[j], type);
+ break;
+ }
+ }
+ }
+
+ CrasDisconnect(&client);
+ return devices;
+}
+
+int CrasGetAecSupported() {
+ cras_client* client = CrasConnect();
+ if (!client)
+ return 0;
+
+ int rc = cras_client_get_aec_supported(client);
+ CrasDisconnect(&client);
+ return rc;
+}
+
+int CrasGetAecGroupId() {
+ cras_client* client = CrasConnect();
+ if (!client)
+ return -1;
+
+ int rc = cras_client_get_aec_group_id(client);
+ CrasDisconnect(&client);
+
+ return rc;
+}
+
+} // namespace media
diff --git a/chromium/media/audio/cras/cras_util.h b/chromium/media/audio/cras/cras_util.h
new file mode 100644
index 00000000000..99c791bc4d1
--- /dev/null
+++ b/chromium/media/audio/cras/cras_util.h
@@ -0,0 +1,40 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_AUDIO_CRAS_CRAS_UTIL_H_
+#define MEDIA_AUDIO_CRAS_CRAS_UTIL_H_
+
+#include <cras_client.h>
+
+#include <cstdint>
+#include <string>
+#include <vector>
+
+namespace media {
+
+enum class DeviceType { kInput, kOutput };
+
+struct CrasDevice {
+ CrasDevice();
+ explicit CrasDevice(const cras_ionode_info* node,
+ const cras_iodev_info* dev,
+ DeviceType type);
+ DeviceType type;
+ uint64_t id;
+ std::string name;
+};
+
+// Enumerates all devices of |type|.
+std::vector<CrasDevice> CrasGetAudioDevices(DeviceType type);
+
+// Returns whether system AEC is supported in CRAS.
+int CrasGetAecSupported();
+
+// Returns the system AEC group ID. If no group ID is specified, -1 is
+// returned.
+int CrasGetAecGroupId();
+
+} // namespace media
+
+#endif // MEDIA_AUDIO_CRAS_CRAS_UTIL_H_
diff --git a/chromium/media/audio/fake_audio_input_stream.cc b/chromium/media/audio/fake_audio_input_stream.cc
index 89af0a9f506..ca0ec900067 100644
--- a/chromium/media/audio/fake_audio_input_stream.cc
+++ b/chromium/media/audio/fake_audio_input_stream.cc
@@ -9,7 +9,7 @@
#include "base/atomicops.h"
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/command_line.h"
#include "base/files/file_path.h"
#include "base/logging.h"
diff --git a/chromium/media/audio/fake_audio_output_stream.cc b/chromium/media/audio/fake_audio_output_stream.cc
index 067172b0617..22282b3d40f 100644
--- a/chromium/media/audio/fake_audio_output_stream.cc
+++ b/chromium/media/audio/fake_audio_output_stream.cc
@@ -5,7 +5,7 @@
#include "media/audio/fake_audio_output_stream.h"
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/check.h"
#include "base/single_thread_task_runner.h"
#include "base/time/time.h"
diff --git a/chromium/media/audio/fuchsia/DIR_METADATA b/chromium/media/audio/fuchsia/DIR_METADATA
new file mode 100644
index 00000000000..e88f62328ca
--- /dev/null
+++ b/chromium/media/audio/fuchsia/DIR_METADATA
@@ -0,0 +1,10 @@
+# Metadata information for this directory.
+#
+# For more information on DIR_METADATA files, see:
+# https://source.chromium.org/chromium/infra/infra/+/master:go/src/infra/tools/dirmd/README.md
+#
+# For the schema of this file, see Metadata message:
+# https://source.chromium.org/chromium/infra/infra/+/master:go/src/infra/tools/dirmd/proto/dir_metadata.proto
+
+team_email: "cr-fuchsia@chromium.org"
+os: FUCHSIA
\ No newline at end of file
diff --git a/chromium/media/audio/fuchsia/OWNERS b/chromium/media/audio/fuchsia/OWNERS
index c1b584511a6..3ebcc4268bd 100644
--- a/chromium/media/audio/fuchsia/OWNERS
+++ b/chromium/media/audio/fuchsia/OWNERS
@@ -1,4 +1 @@
-file://build/fuchsia/OWNERS
-# COMPONENT: Fuchsia
-# OS: Fuchsia
-# TEAM: cr-fuchsia@chromium.org
+file://build/fuchsia/OWNERS
\ No newline at end of file
diff --git a/chromium/media/audio/linux/audio_manager_linux.cc b/chromium/media/audio/linux/audio_manager_linux.cc
index efa8732946f..84fc4317017 100644
--- a/chromium/media/audio/linux/audio_manager_linux.cc
+++ b/chromium/media/audio/linux/audio_manager_linux.cc
@@ -7,6 +7,7 @@
#include "base/command_line.h"
#include "base/logging.h"
#include "base/metrics/histogram_macros.h"
+#include "build/chromeos_buildflags.h"
#include "media/audio/fake_audio_manager.h"
#include "media/base/media_switches.h"
@@ -14,7 +15,7 @@
#include "media/audio/alsa/audio_manager_alsa.h"
#endif
-#if defined(USE_CRAS) && defined(OS_CHROMEOS)
+#if defined(USE_CRAS) && BUILDFLAG(IS_ASH)
#include "media/audio/cras/audio_manager_chromeos.h"
#elif defined(USE_CRAS)
#include "media/audio/cras/audio_manager_cras.h"
@@ -47,7 +48,7 @@ std::unique_ptr<media::AudioManager> CreateAudioManager(
#if defined(USE_CRAS)
if (base::CommandLine::ForCurrentProcess()->HasSwitch(switches::kUseCras)) {
UMA_HISTOGRAM_ENUMERATION("Media.LinuxAudioIO", kCras, kAudioIOMax + 1);
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
return std::make_unique<AudioManagerChromeOS>(std::move(audio_thread),
audio_log_factory);
#else
diff --git a/chromium/media/audio/mac/audio_auhal_mac.cc b/chromium/media/audio/mac/audio_auhal_mac.cc
index a54f94ab06a..3663cab37c8 100644
--- a/chromium/media/audio/mac/audio_auhal_mac.cc
+++ b/chromium/media/audio/mac/audio_auhal_mac.cc
@@ -12,7 +12,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/mac/mac_logging.h"
#include "base/metrics/histogram_macros.h"
diff --git a/chromium/media/audio/mac/audio_device_listener_mac_unittest.cc b/chromium/media/audio/mac/audio_device_listener_mac_unittest.cc
index 73d344d73f3..cd7a0fff778 100644
--- a/chromium/media/audio/mac/audio_device_listener_mac_unittest.cc
+++ b/chromium/media/audio/mac/audio_device_listener_mac_unittest.cc
@@ -9,7 +9,7 @@
#include <memory>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/macros.h"
#include "base/run_loop.h"
#include "base/single_thread_task_runner.h"
diff --git a/chromium/media/audio/mac/audio_low_latency_input_mac_unittest.cc b/chromium/media/audio/mac/audio_low_latency_input_mac_unittest.cc
index a00c8211661..341111d2e6b 100644
--- a/chromium/media/audio/mac/audio_low_latency_input_mac_unittest.cc
+++ b/chromium/media/audio/mac/audio_low_latency_input_mac_unittest.cc
@@ -14,6 +14,7 @@
#include "base/test/task_environment.h"
#include "base/test/test_timeouts.h"
#include "base/threading/platform_thread.h"
+#include "build/build_config.h"
#include "media/audio/audio_device_description.h"
#include "media/audio/audio_device_info_accessor_for_tests.h"
#include "media/audio/audio_io.h"
@@ -124,8 +125,14 @@ class MacAudioInputTest : public testing::Test {
~MacAudioInputTest() override { audio_manager_->Shutdown(); }
bool InputDevicesAvailable() {
+#if defined(OS_MAC) && defined(ARCH_CPU_ARM64)
+ // TODO(crbug.com/1128458): macOS on ARM64 says it has devices, but won't
+ // let any of them be opened or listed.
+ return false;
+#else
return AudioDeviceInfoAccessorForTests(audio_manager_.get())
.HasAudioInputDevices();
+#endif
}
// Convenience method which creates a default AudioInputStream object using
diff --git a/chromium/media/audio/pulse/audio_manager_pulse.cc b/chromium/media/audio/pulse/audio_manager_pulse.cc
index 1024ada3997..cc02c2c9655 100644
--- a/chromium/media/audio/pulse/audio_manager_pulse.cc
+++ b/chromium/media/audio/pulse/audio_manager_pulse.cc
@@ -12,6 +12,7 @@
#include "base/logging.h"
#include "base/nix/xdg_util.h"
#include "base/stl_util.h"
+#include "build/chromeos_buildflags.h"
#include "media/audio/audio_device_description.h"
#include "media/audio/pulse/pulse_input.h"
#include "media/audio/pulse/pulse_output.h"
@@ -189,7 +190,7 @@ std::string AudioManagerPulse::GetDefaultOutputDeviceID() {
std::string AudioManagerPulse::GetAssociatedOutputDeviceID(
const std::string& input_device_id) {
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
return AudioManagerBase::GetAssociatedOutputDeviceID(input_device_id);
#else
DCHECK(AudioManager::Get()->GetTaskRunner()->BelongsToCurrentThread());
diff --git a/chromium/media/audio/win/audio_device_listener_win_unittest.cc b/chromium/media/audio/win/audio_device_listener_win_unittest.cc
index e357ef850b9..7311441309f 100644
--- a/chromium/media/audio/win/audio_device_listener_win_unittest.cc
+++ b/chromium/media/audio/win/audio_device_listener_win_unittest.cc
@@ -8,7 +8,7 @@
#include <string>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/macros.h"
#include "base/strings/utf_string_conversions.h"
#include "base/system/system_monitor.h"
diff --git a/chromium/media/audio/win/audio_low_latency_input_win.cc b/chromium/media/audio/win/audio_low_latency_input_win.cc
index 94fd5b48ff5..8aaa71d6a7d 100644
--- a/chromium/media/audio/win/audio_low_latency_input_win.cc
+++ b/chromium/media/audio/win/audio_low_latency_input_win.cc
@@ -6,6 +6,8 @@
#include <objbase.h>
#include <propkey.h>
+#include <windows.devices.enumeration.h>
+#include <windows.media.devices.h>
#include <algorithm>
#include <cmath>
@@ -15,11 +17,15 @@
#include "base/logging.h"
#include "base/metrics/histogram_functions.h"
#include "base/metrics/histogram_macros.h"
+#include "base/stl_util.h"
#include "base/strings/stringprintf.h"
#include "base/strings/utf_string_conversions.h"
#include "base/trace_event/trace_event.h"
+#include "base/win/core_winrt_util.h"
#include "base/win/scoped_propvariant.h"
#include "base/win/scoped_variant.h"
+#include "base/win/vector.h"
+#include "base/win/windows_version.h"
#include "media/audio/audio_device_description.h"
#include "media/audio/audio_features.h"
#include "media/audio/win/avrt_wrapper_win.h"
@@ -31,12 +37,22 @@
#include "media/base/limits.h"
#include "media/base/media_switches.h"
+using ABI::Windows::Foundation::Collections::IVectorView;
+using ABI::Windows::Media::Devices::IMediaDeviceStatics;
+using ABI::Windows::Media::Effects::IAudioCaptureEffectsManager;
+using ABI::Windows::Media::Effects::IAudioEffectsManagerStatics;
+using base::win::GetActivationFactory;
+using base::win::ScopedCoMem;
using base::win::ScopedCOMInitializer;
+using base::win::ScopedHString;
+using Microsoft::WRL::ComPtr;
namespace media {
namespace {
+constexpr char kUwpDeviceIdPrefix[] = "\\\\?\\SWD#MMDEVAPI#";
+
constexpr uint32_t KSAUDIO_SPEAKER_UNSUPPORTED = 0;
// Converts a COM error into a human-readable string.
@@ -135,6 +151,49 @@ const char* StreamOpenResultToString(
return "UNKNOWN";
}
+const char* EffectTypeToString(
+ ABI::Windows::Media::Effects::AudioEffectType type) {
+ switch (type) {
+ case ABI::Windows::Media::Effects::AudioEffectType_Other:
+ return "Other/None";
+ case ABI::Windows::Media::Effects::AudioEffectType_AcousticEchoCancellation:
+ return "AcousticEchoCancellation";
+ case ABI::Windows::Media::Effects::AudioEffectType_NoiseSuppression:
+ return "NoiseSuppression";
+ case ABI::Windows::Media::Effects::AudioEffectType_AutomaticGainControl:
+ return "AutomaticGainControl";
+ case ABI::Windows::Media::Effects::AudioEffectType_BeamForming:
+ return "BeamForming";
+ case ABI::Windows::Media::Effects::AudioEffectType_ConstantToneRemoval:
+ return "ConstantToneRemoval";
+ case ABI::Windows::Media::Effects::AudioEffectType_Equalizer:
+ return "Equalizer";
+ case ABI::Windows::Media::Effects::AudioEffectType_LoudnessEqualizer:
+ return "LoudnessEqualizer";
+ case ABI::Windows::Media::Effects::AudioEffectType_BassBoost:
+ return "BassBoost";
+ case ABI::Windows::Media::Effects::AudioEffectType_VirtualSurround:
+ return "VirtualSurround";
+ case ABI::Windows::Media::Effects::AudioEffectType_VirtualHeadphones:
+ return "VirtualHeadphones";
+ case ABI::Windows::Media::Effects::AudioEffectType_SpeakerFill:
+ return "SpeakerFill";
+ case ABI::Windows::Media::Effects::AudioEffectType_RoomCorrection:
+ return "RoomCorrection";
+ case ABI::Windows::Media::Effects::AudioEffectType_BassManagement:
+ return "BassManagement";
+ case ABI::Windows::Media::Effects::AudioEffectType_EnvironmentalEffects:
+ return "EnvironmentalEffects";
+ case ABI::Windows::Media::Effects::AudioEffectType_SpeakerProtection:
+ return "SpeakerProtection";
+ case ABI::Windows::Media::Effects::AudioEffectType_SpeakerCompensation:
+ return "SpeakerCompensation";
+ case ABI::Windows::Media::Effects::AudioEffectType_DynamicRangeCompression:
+ return "DynamicRangeCompression";
+ }
+ return "Unknown";
+}
+
bool VariantBoolToBool(VARIANT_BOOL var_bool) {
switch (var_bool) {
case VARIANT_TRUE:
@@ -158,6 +217,35 @@ std::string GetOpenLogString(WASAPIAudioInputStream::StreamOpenResult result,
CoreAudioUtil::WaveFormatToString(&output_format).c_str());
}
+bool InitializeUWPSupport() {
+ // Place the actual body of the initialization in a lambda and store the
+ // result as a static since we don't expect this result to change between
+ // runs.
+ static const bool initialization_result = []() {
+    // Windows.Media.Effects and Windows.Media.Devices require Windows 10 build
+ // 10.0.10240.0.
+ if (base::win::GetVersion() < base::win::Version::WIN10) {
+ DLOG(WARNING) << "AudioCaptureEffectsManager requires Windows 10";
+ return false;
+ }
+ DCHECK_GE(base::win::OSInfo::GetInstance()->version_number().build, 10240);
+
+ // Provide access to Core WinRT/UWP functions and load all required HSTRING
+ // functions available from Win8 and onwards. ScopedHString is a wrapper
+ // around an HSTRING and it requires certain functions that need to be
+ // delayloaded to avoid breaking Chrome on Windows 7.
+ if (!(base::win::ResolveCoreWinRTDelayload() &&
+ base::win::ScopedHString::ResolveCoreWinRTStringDelayload())) {
+ // Failed loading functions from combase.dll.
+ DLOG(WARNING) << "Failed to initialize WinRT/UWP";
+ return false;
+ }
+ return true;
+ }();
+
+ return initialization_result;
+}
+
} // namespace
WASAPIAudioInputStream::WASAPIAudioInputStream(
@@ -269,6 +357,24 @@ bool WASAPIAudioInputStream::Open() {
// Check if raw audio processing is supported for the selected capture device.
raw_processing_supported_ = RawProcessingSupported();
+ if (raw_processing_supported_ &&
+ !AudioDeviceDescription::IsLoopbackDevice(device_id_) &&
+ InitializeUWPSupport()) {
+ // Retrieve a unique identifier of the selected audio device but in a
+ // format which can be used by UWP (or Core WinRT) APIs. It can then be
+ // utilized in combination with the Windows.Media.Effects UWP API to
+ // discover the audio processing chain on a device.
+ std::string uwp_device_id = GetUWPDeviceId();
+ if (!uwp_device_id.empty()) {
+ // For the selected device, generate two lists of enabled audio effects
+ // and store them in |default_effect_types_| and |raw_effect_types_|.
+ // Default corresponds to "Normal audio signal processing" and Raw is for
+ // "Minimal audio signal processing". These two lists are used for UMA
+ // stats when the stream is closed.
+ GetAudioCaptureEffects(uwp_device_id);
+ }
+ }
+
// Obtain an IAudioClient interface which enables us to create and initialize
// an audio stream between an audio application and the audio engine.
hr = endpoint_device_->Activate(__uuidof(IAudioClient), CLSCTX_ALL, nullptr,
@@ -450,6 +556,19 @@ void WASAPIAudioInputStream::Close() {
// capture or not. See https://crbug.com/1133643.
base::UmaHistogramBoolean("Media.Audio.RawProcessingSupportedWin",
raw_processing_supported_);
+
+ for (auto const& type : default_effect_types_) {
+ base::UmaHistogramSparse("Media.Audio.Capture.Win.DefaultEffectType",
+ type);
+ SendLogMessage("%s => (Media.Audio.Capture.Win.DefaultEffectType=%s)",
+ __func__, EffectTypeToString(type));
+ }
+
+ for (auto const& type : raw_effect_types_) {
+ base::UmaHistogramSparse("Media.Audio.Capture.Win.RawEffectType", type);
+ SendLogMessage("%s => (Media.Audio.Capture.Win.RawEffectType=%s)",
+ __func__, EffectTypeToString(type));
+ }
}
if (converter_)
@@ -543,7 +662,9 @@ void WASAPIAudioInputStream::SendLogMessage(const char* format, ...) {
return;
va_list args;
va_start(args, format);
- log_callback_.Run("WAIS::" + base::StringPrintV(format, args));
+ std::string msg("WAIS::" + base::StringPrintV(format, args));
+ log_callback_.Run(msg);
+ DVLOG(1) << msg;
va_end(args);
}
@@ -873,6 +994,190 @@ bool WASAPIAudioInputStream::RawProcessingSupported() {
return raw_processing_supported;
}
+std::string WASAPIAudioInputStream::GetUWPDeviceId() {
+ DCHECK(endpoint_device_.Get());
+
+ // The Windows.Media.Devices.IMediaDeviceStatics interface provides access to
+ // the implementation of Windows.Media.Devices.MediaDevice.
+ ComPtr<IMediaDeviceStatics> media_device_statics;
+ HRESULT hr =
+ GetActivationFactory<IMediaDeviceStatics,
+ RuntimeClass_Windows_Media_Devices_MediaDevice>(
+ &media_device_statics);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "IMediaDeviceStatics factory failed: " << ErrorToString(hr);
+ return std::string();
+ }
+
+ // The remaining part of this method builds up the unique device ID needed
+ // by the Windows.Media.Effects.AudioEffectsManager UWP API to enumerate
+ // active capture effects like AEC and NS. The ID contains three parts.
+ // Example:
+ // 1) \\?\SWD#MMDEVAPI#
+ // 2) {0.0.1.00000000}.{7c24467c-94fc-4fa1-a2b2-a3f5d9cb8a5b}
+ // 3) #{2eef81be-33fa-4800-9670-1cd474972c3f}
+ // Where (1) is a constant string, (2) comes from the IMMDevice::GetId() API,
+  // and (3) is a substring of the selector string which can be retrieved by
+ // the IMediaDeviceStatics::GetAudioCaptureSelector UWP API. Knowledge about
+ // the structure of this device ID can be gained by using the
+ // IMediaDeviceStatics::GetDefaultAudioCaptureId UWP API but this method also
+ // adds support for non default devices.
+
+ // (1) Start building the final device ID. Start with the constant prefix.
+ std::string device_id(kUwpDeviceIdPrefix);
+
+ // (2) Next, add the unique ID from IMMDevice::GetId() API.
+ // Example: {0.0.1.00000000}.{7c24467c-94fc-4fa1-a2b2-a3f5d9cb8a5b}.
+ ScopedCoMem<WCHAR> immdevice_id16;
+ hr = endpoint_device_->GetId(&immdevice_id16);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "IMMDevice::GetId failed: " << ErrorToString(hr);
+ return std::string();
+ }
+ std::string immdevice_id8;
+ base::WideToUTF8(immdevice_id16, wcslen(immdevice_id16), &immdevice_id8);
+ device_id.append(immdevice_id8);
+
+ // (3) Finally, add the last part from the selector string.
+ // Example: '#{2eef81be-33fa-4800-9670-1cd474972c3f}'.
+ HSTRING selector;
+ // Returns the identifier string of a device for capturing audio. A substring
+ // will be used when generating the final unique device ID.
+ // Example: part of the selector string can look like
+ // System.Devices.InterfaceClassGuid:="{2eef81be-33fa-4800-9670-1cd474972c3f}"
+ // and we want the {2eef81be-33fa-4800-9670-1cd474972c3f} substring for our
+ // purposes.
+ hr = media_device_statics->GetAudioCaptureSelector(&selector);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "IMediaDeviceStatics::GetAudioCaptureSelector failed: "
+ << ErrorToString(hr);
+ return std::string();
+ }
+ device_id.append("#");
+ std::string selector_string = ScopedHString(selector).GetAsUTF8();
+ std::size_t start = selector_string.find("{");
+ std::size_t stop = selector_string.find("}", start + 1);
+ if (start != std::string::npos && stop != std::string::npos) {
+ // Will extract '{2eef81be-33fa-4800-9670-1cd474972c3f}' in the example
+ // above.
+ device_id.append(selector_string.substr(start, stop - start + 1));
+ } else {
+ DLOG(ERROR) << "Failed to extract System.Devices.InterfaceClassGuid string";
+ return std::string();
+ }
+
+ return device_id;
+}
+
+HRESULT WASAPIAudioInputStream::GetAudioCaptureEffects(
+ const std::string& uwp_device_id) {
+ DCHECK(!AudioDeviceDescription::IsLoopbackDevice(device_id_));
+ DCHECK(raw_processing_supported_);
+ DCHECK(!uwp_device_id.empty());
+ SendLogMessage("%s()", __func__);
+
+ // The Windows.Media.Effects.IAudioEffectsManagerStatics interface provides
+ // access to the implementation of Windows.Media.Effects.AudioEffectsManager.
+ ComPtr<IAudioEffectsManagerStatics> audio_effects_manager;
+ HRESULT hr = GetActivationFactory<
+ IAudioEffectsManagerStatics,
+ RuntimeClass_Windows_Media_Effects_AudioEffectsManager>(
+ &audio_effects_manager);
+ if (FAILED(hr)) {
+ SendLogMessage(
+ "%s => (ERROR: IAudioEffectsManagerStatics factory failed: [%s])",
+ __func__, ErrorToString(hr).c_str());
+ return hr;
+ }
+
+ SendLogMessage("%s => (uwp_device_id=[%s])", __func__, uwp_device_id.c_str());
+ ScopedHString device_id = ScopedHString::Create(uwp_device_id);
+
+ // Check capture effects for two different audio processing modes:
+ // - Default: Normal audio signal processing
+ // - Raw: Minimal audio signal processing
+ // Raw is included since it is not possible to disable all effects on all
+ // devices. In most cases, the number of found capture effects will be zero
+ // for the raw mode.
+ ABI::Windows::Media::AudioProcessing audio_processing_mode[] = {
+ ABI::Windows::Media::AudioProcessing::AudioProcessing_Default,
+ ABI::Windows::Media::AudioProcessing::AudioProcessing_Raw};
+ for (size_t i = 0; i < base::size(audio_processing_mode); ++i) {
+ // Create an AudioCaptureEffectsManager manager which can be used to
+ // discover the audio processing chain on a device for a specific media
+ // category and audio processing mode. The media category is fixed and set
+ // to Communications since that is what we aim at using when audio effects
+ // later are disabled.
+ ComPtr<IAudioCaptureEffectsManager> capture_effects_manager;
+ hr = audio_effects_manager->CreateAudioCaptureEffectsManagerWithMode(
+ device_id.get(),
+ ABI::Windows::Media::Capture::MediaCategory::
+ MediaCategory_Communications,
+ audio_processing_mode[i], &capture_effects_manager);
+ if (FAILED(hr)) {
+ SendLogMessage(
+ "%s => (ERROR: IAudioEffectsManagerStatics::"
+ "CreateAudioCaptureEffectsManager=[%s])",
+ __func__, ErrorToString(hr).c_str());
+ return hr;
+ }
+
+ // Get a list of audio effects on the device. Based on tests on different
+ // devices, only enabled effects will be included. Hence, if a user has
+ // explicitly disabled an effect using the System Sound Settings, that
+ // component will not show up here.
+ ComPtr<IVectorView<ABI::Windows::Media::Effects::AudioEffect*>> effects;
+ hr = capture_effects_manager->GetAudioCaptureEffects(&effects);
+ if (FAILED(hr)) {
+ SendLogMessage(
+ "%s => (ERROR: IAudioCaptureEffectsManager::"
+ "GetAudioCaptureEffects=[%s])",
+ __func__, ErrorToString(hr).c_str());
+ return hr;
+ }
+
+ unsigned int count = 0;
+ if (effects) {
+ // Returns number of supported effects.
+ effects->get_Size(&count);
+ }
+
+ // Store all supported and active effect types in |default_effect_types_|
+ // or |raw_effect_types_| depending on selected audio processing mode.
+ // These will be utilized later for UMA histograms.
+ for (unsigned int j = 0; j < count; ++j) {
+ ComPtr<ABI::Windows::Media::Effects::IAudioEffect> effect;
+ hr = effects->GetAt(j, &effect);
+ if (SUCCEEDED(hr)) {
+ ABI::Windows::Media::Effects::AudioEffectType type;
+ hr = effect->get_AudioEffectType(&type);
+ if (SUCCEEDED(hr)) {
+ audio_processing_mode[i] ==
+ ABI::Windows::Media::AudioProcessing::AudioProcessing_Default
+ ? default_effect_types_.push_back(type)
+ : raw_effect_types_.push_back(type);
+ }
+ }
+ }
+
+ // For cases when no audio effects were found (common in raw mode), add a
+ // dummy effect type called AudioEffectType_Other so that the vector
+    // contains at least one value. This is done to ensure that a UMA histogram
+ // is uploaded also for the empty case. Hence, AudioEffectType_Other is
+ // used to indicate an unknown audio effect and "no audio effect found".
+ if (count == 0) {
+ const ABI::Windows::Media::Effects::AudioEffectType no_effect_found =
+ ABI::Windows::Media::Effects::AudioEffectType::AudioEffectType_Other;
+ audio_processing_mode[i] ==
+ ABI::Windows::Media::AudioProcessing::AudioProcessing_Default
+ ? default_effect_types_.push_back(no_effect_found)
+ : raw_effect_types_.push_back(no_effect_found);
+ }
+ }
+
+ return hr;
+}
+
HRESULT WASAPIAudioInputStream::SetCommunicationsCategoryAndRawCaptureMode() {
DCHECK(audio_client_.Get());
DCHECK(!AudioDeviceDescription::IsLoopbackDevice(device_id_));
diff --git a/chromium/media/audio/win/audio_low_latency_input_win.h b/chromium/media/audio/win/audio_low_latency_input_win.h
index c2d87b102c7..f3e45d6343a 100644
--- a/chromium/media/audio/win/audio_low_latency_input_win.h
+++ b/chromium/media/audio/win/audio_low_latency_input_win.h
@@ -61,10 +61,12 @@
#include <endpointvolume.h>
#include <stddef.h>
#include <stdint.h>
+#include <windows.media.effects.h>
#include <wrl/client.h>
#include <memory>
#include <string>
+#include <vector>
#include "base/compiler_specific.h"
#include "base/macros.h"
@@ -157,6 +159,18 @@ class MEDIA_EXPORT WASAPIAudioInputStream
// Returns whether raw audio processing is supported or not for the selected
// capture device.
bool RawProcessingSupported();
+ // The Windows.Media.Effects.AudioEffectsManager UWP API contains a method
+ // called CreateAudioCaptureEffectsManagerWithMode() which is needed to
+ // enumerate active audio effects on the capture stream. This UWP method
+ // needs a device ID which differs from what can be derived from the default
+ // Win32 API in CoreAudio. The GetUWPDeviceId() method builds up the required
+ // device ID that the audio effects manager needs. Note that it is also
+ // possible to get the ID directly from the Windows.Devices.Enumeration UWP
+ // API but that is rather complex and requires use of asynchronous methods.
+ std::string GetUWPDeviceId();
+ // For the selected |uwp_device_id|, generate two lists of enabled audio
+ // effects and store them in |default_effect_types_| and |raw_effect_types_|.
+ HRESULT GetAudioCaptureEffects(const std::string& uwp_device_id);
HRESULT SetCommunicationsCategoryAndRawCaptureMode();
HRESULT GetAudioEngineStreamFormat();
// Returns whether the desired format is supported or not and writes the
@@ -306,6 +320,14 @@ class MEDIA_EXPORT WASAPIAudioInputStream
// Also added to a UMS histogram.
bool raw_processing_supported_ = false;
+ // List of supported and active capture effects for the selected device in
+ // default (normal) audio processing mode.
+ std::vector<ABI::Windows::Media::Effects::AudioEffectType>
+ default_effect_types_;
+ // List of supported and active capture effects for the selected device in
+ // raw (minimal) audio processing mode. Will be empty in most cases.
+ std::vector<ABI::Windows::Media::Effects::AudioEffectType> raw_effect_types_;
+
SEQUENCE_CHECKER(sequence_checker_);
DISALLOW_COPY_AND_ASSIGN(WASAPIAudioInputStream);
diff --git a/chromium/media/audio/win/audio_low_latency_input_win_unittest.cc b/chromium/media/audio/win/audio_low_latency_input_win_unittest.cc
index 8d29d018c61..2eed0cc87ea 100644
--- a/chromium/media/audio/win/audio_low_latency_input_win_unittest.cc
+++ b/chromium/media/audio/win/audio_low_latency_input_win_unittest.cc
@@ -175,7 +175,7 @@ class AudioInputStreamWrapper {
explicit AudioInputStreamWrapper(AudioManager* audio_manager)
: audio_man_(audio_manager) {
EXPECT_TRUE(SUCCEEDED(CoreAudioUtil::GetPreferredAudioParameters(
- AudioDeviceDescription::kDefaultDeviceId, false, &default_params_)));
+ device_id_, false, &default_params_)));
EXPECT_EQ(format(), AudioParameters::AUDIO_PCM_LOW_LATENCY);
frames_per_buffer_ = default_params_.frames_per_buffer();
}
@@ -187,6 +187,15 @@ class AudioInputStreamWrapper {
frames_per_buffer_ = default_params_.frames_per_buffer();
}
+ AudioInputStreamWrapper(AudioManager* audio_manager,
+ const std::string& device_id)
+ : audio_man_(audio_manager), device_id_(device_id) {
+ EXPECT_TRUE(SUCCEEDED(CoreAudioUtil::GetPreferredAudioParameters(
+ device_id_, false, &default_params_)));
+ EXPECT_EQ(format(), AudioParameters::AUDIO_PCM_LOW_LATENCY);
+ frames_per_buffer_ = default_params_.frames_per_buffer();
+ }
+
~AudioInputStreamWrapper() {}
// Creates AudioInputStream object using default parameters.
@@ -205,20 +214,21 @@ class AudioInputStreamWrapper {
}
int sample_rate() const { return default_params_.sample_rate(); }
int frames_per_buffer() const { return frames_per_buffer_; }
+ std::string device_id() const { return device_id_; }
private:
AudioInputStream* CreateInputStream() {
AudioParameters params = default_params_;
params.set_frames_per_buffer(frames_per_buffer_);
AudioInputStream* ais = audio_man_->MakeAudioInputStream(
- params, AudioDeviceDescription::kDefaultDeviceId,
- base::BindRepeating(&LogCallbackDummy));
+ params, device_id_, base::BindRepeating(&LogCallbackDummy));
EXPECT_TRUE(ais);
return ais;
}
AudioManager* audio_man_;
AudioParameters default_params_;
+ std::string device_id_ = AudioDeviceDescription::kDefaultDeviceId;
int frames_per_buffer_;
};
@@ -341,6 +351,26 @@ TEST_F(WinAudioInputTest, WASAPIAudioInputStreamOpenAndClose) {
ais.Close();
}
+// Test Open(), Close() calling sequences for all available devices.
+TEST_F(WinAudioInputTest, WASAPIAudioInputStreamOpenAndCloseForAllDevices) {
+ AudioDeviceInfoAccessorForTests device_info_accessor(audio_manager_.get());
+ ABORT_AUDIO_TEST_IF_NOT(device_info_accessor.HasAudioInputDevices() &&
+ CoreAudioUtil::IsSupported());
+
+ // Retrieve a list of all available input devices.
+ media::AudioDeviceDescriptions device_descriptions;
+ device_info_accessor.GetAudioInputDeviceDescriptions(&device_descriptions);
+
+ // Open and close an audio input stream for all available devices.
+ for (const auto& device : device_descriptions) {
+ AudioInputStreamWrapper aisw(audio_manager_.get(), device.unique_id);
+ {
+ ScopedAudioInputStream ais(aisw.Create());
+ EXPECT_TRUE(ais->Open());
+ }
+ }
+}
+
// Test Open(), Start(), Close() calling sequence.
TEST_F(WinAudioInputTest, WASAPIAudioInputStreamOpenStartAndClose) {
ABORT_AUDIO_TEST_IF_NOT(HasCoreAudioAndInputDevices(audio_manager_.get()));
@@ -362,6 +392,7 @@ TEST_P(WinAudioInputTest, WASAPIAudioInputStreamOpenStartStopAndClose) {
: feature_list.InitAndDisableFeature(media::kWasapiRawAudioCapture);
ScopedAudioInputStream ais(
CreateDefaultAudioInputStream(audio_manager_.get()));
+ EXPECT_TRUE(ais->SetAutomaticGainControl(true));
EXPECT_TRUE(ais->Open());
MockAudioInputCallback sink;
ais->Start(&sink);
diff --git a/chromium/media/audio/win/audio_low_latency_output_win_unittest.cc b/chromium/media/audio/win/audio_low_latency_output_win_unittest.cc
index c796ab32a0c..177bf469de4 100644
--- a/chromium/media/audio/win/audio_low_latency_output_win_unittest.cc
+++ b/chromium/media/audio/win/audio_low_latency_output_win_unittest.cc
@@ -70,7 +70,7 @@ class ReadFromFileAudioSource : public AudioOutputStream::AudioSourceCallback {
explicit ReadFromFileAudioSource(const std::string& name)
: pos_(0),
previous_call_time_(base::TimeTicks::Now()),
- text_file_(NULL),
+ text_file_(nullptr),
elements_to_write_(0) {
// Reads a test file from media/test/data directory.
file_ = ReadTestDataFile(name);
diff --git a/chromium/media/audio/win/audio_manager_win.cc b/chromium/media/audio/win/audio_manager_win.cc
index e5824cbefea..370edec9e9f 100644
--- a/chromium/media/audio/win/audio_manager_win.cc
+++ b/chromium/media/audio/win/audio_manager_win.cc
@@ -17,7 +17,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/command_line.h"
#include "base/strings/string_number_conversions.h"
#include "base/win/windows_version.h"
diff --git a/chromium/media/audio/win/audio_output_win_unittest.cc b/chromium/media/audio/win/audio_output_win_unittest.cc
index e3c0377f163..346cfde42b9 100644
--- a/chromium/media/audio/win/audio_output_win_unittest.cc
+++ b/chromium/media/audio/win/audio_output_win_unittest.cc
@@ -117,7 +117,7 @@ class TestSourceLaggy : public TestSourceBasic {
class ReadOnlyMappedFile {
public:
explicit ReadOnlyMappedFile(const wchar_t* file_name)
- : fmap_(NULL), start_(NULL), size_(0) {
+ : fmap_(NULL), start_(nullptr), size_(0) {
HANDLE file = ::CreateFileW(file_name, GENERIC_READ, FILE_SHARE_READ, NULL,
OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, NULL);
if (INVALID_HANDLE_VALUE == file)
diff --git a/chromium/media/audio/win/audio_session_event_listener_win_unittest.cc b/chromium/media/audio/win/audio_session_event_listener_win_unittest.cc
index 128e7425877..8ee545e808f 100644
--- a/chromium/media/audio/win/audio_session_event_listener_win_unittest.cc
+++ b/chromium/media/audio/win/audio_session_event_listener_win_unittest.cc
@@ -6,7 +6,7 @@
#include <memory>
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/win/scoped_com_initializer.h"
#include "media/audio/audio_unittest_util.h"
#include "media/audio/win/core_audio_util_win.h"
diff --git a/chromium/media/audio/win/waveout_output_win.cc b/chromium/media/audio/win/waveout_output_win.cc
index 4b30a770216..d36d8571af0 100644
--- a/chromium/media/audio/win/waveout_output_win.cc
+++ b/chromium/media/audio/win/waveout_output_win.cc
@@ -82,7 +82,7 @@ PCMWaveOutAudioOutputStream::PCMWaveOutAudioOutputStream(
UINT device_id)
: state_(PCMA_BRAND_NEW),
manager_(manager),
- callback_(NULL),
+ callback_(nullptr),
num_buffers_(num_buffers),
buffer_size_(params.GetBytesPerBuffer(kSampleFormat)),
volume_(1),
@@ -270,7 +270,7 @@ void PCMWaveOutAudioOutputStream::Stop() {
GetBuffer(ix)->dwFlags = WHDR_PREPARED;
// Don't use callback after Stop().
- callback_ = NULL;
+ callback_ = nullptr;
state_ = PCMA_READY;
}
diff --git a/chromium/media/base/BUILD.gn b/chromium/media/base/BUILD.gn
index 676eb651405..f3ec9d91fd9 100644
--- a/chromium/media/base/BUILD.gn
+++ b/chromium/media/base/BUILD.gn
@@ -6,6 +6,7 @@ import("//build/config/android/config.gni")
import("//build/config/arm.gni")
import("//build/config/features.gni")
import("//build/config/linux/pkg_config.gni")
+import("//build/config/ozone.gni")
import("//build/config/ui.gni")
import("//media/media_options.gni")
import("//testing/libfuzzer/fuzzer_test.gni")
@@ -229,6 +230,8 @@ source_set("base") {
"multi_channel_resampler.h",
"null_video_sink.cc",
"null_video_sink.h",
+ "offloading_video_encoder.cc",
+ "offloading_video_encoder.h",
"output_device_info.cc",
"output_device_info.h",
"overlay_info.cc",
@@ -355,6 +358,7 @@ source_set("base") {
deps = [
"//base/allocator:buildflags",
"//base/util/values:values_util",
+ "//build:chromeos_buildflags",
"//components/system_media_controls/linux/buildflags",
"//gpu/command_buffer/client:interface_base",
"//gpu/command_buffer/common",
@@ -362,6 +366,7 @@ source_set("base") {
"//third_party/libyuv",
"//third_party/widevine/cdm:headers",
"//ui/display:display",
+ "//ui/events:events",
"//ui/events:events_base",
"//url:url",
]
@@ -387,7 +392,7 @@ source_set("base") {
deps += [ "//third_party/libvpx" ]
}
- if (use_x11) {
+ if (use_x11 || ozone_platform_x11) {
sources += [ "user_input_monitor_linux.cc" ]
deps += [
"//ui/base:features",
@@ -408,12 +413,11 @@ source_set("base") {
} else {
defines += [ "DISABLE_USER_INPUT_MONITOR" ]
}
-
- if (is_linux || is_chromeos || is_win) {
- sources += [
- "keyboard_event_counter.cc",
- "keyboard_event_counter.h",
- ]
+ if (use_x11) {
+ deps += [ "//ui/base/x" ]
+ }
+ if (use_ozone) {
+ deps += [ "//ui/ozone" ]
}
# Note: should also work on is_posix || is_fuchsia
@@ -428,10 +432,14 @@ source_set("base") {
public_deps += [ "//media/base/win:mf_cdm_proxy" ]
}
- if (is_chromecast || is_fuchsia) {
- sources += [ "demuxer_memory_limit_low.cc" ]
- } else if (is_android) {
+ # TODO(ziyangch): Check |is_chromecast| first when using cast media pipeline
+ # on Android cast devices.
+ if (is_android) {
sources += [ "demuxer_memory_limit_android.cc" ]
+ } else if (is_chromecast) {
+ sources += [ "demuxer_memory_limit_cast.cc" ]
+ } else if (is_fuchsia) {
+ sources += [ "demuxer_memory_limit_low.cc" ]
} else {
sources += [ "demuxer_memory_limit_default.cc" ]
}
@@ -570,6 +578,7 @@ source_set("unit_tests") {
"moving_average_unittest.cc",
"multi_channel_resampler_unittest.cc",
"null_video_sink_unittest.cc",
+ "offloading_video_encoder_unittest.cc",
"pipeline_impl_unittest.cc",
"ranges_unittest.cc",
"reentrancy_checker_unittest.cc",
@@ -608,6 +617,7 @@ source_set("unit_tests") {
]
deps = [
"//base/test:test_support",
+ "//build:chromeos_buildflags",
"//components/viz/common",
"//gpu/command_buffer/common",
"//media:test_support",
@@ -622,9 +632,6 @@ source_set("unit_tests") {
deps += [ "//media/base/mac:unit_tests" ]
}
- if (is_linux || is_chromeos || is_win) {
- sources += [ "keyboard_event_counter_unittest.cc" ]
- }
if (is_win) {
sources += [ "win/dxgi_device_scope_handle_unittest.cc" ]
deps += [ "//media/base/win:media_foundation_util" ]
@@ -633,6 +640,10 @@ source_set("unit_tests") {
"mfplat.lib",
]
}
+
+ if (is_chromecast && !is_android) {
+ sources += [ "demuxer_memory_limit_cast_unittest.cc" ]
+ }
}
source_set("perftests") {
diff --git a/chromium/media/base/android/jni_hdr_metadata.cc b/chromium/media/base/android/jni_hdr_metadata.cc
index 939577a88c8..de677849648 100644
--- a/chromium/media/base/android/jni_hdr_metadata.cc
+++ b/chromium/media/base/android/jni_hdr_metadata.cc
@@ -6,12 +6,12 @@
#include "media/base/android/media_jni_headers/HdrMetadata_jni.h"
#include "media/base/video_color_space.h"
-#include "ui/gl/hdr_metadata.h"
+#include "ui/gfx/hdr_metadata.h"
namespace media {
JniHdrMetadata::JniHdrMetadata(const VideoColorSpace& color_space,
- const gl::HDRMetadata& hdr_metadata)
+ const gfx::HDRMetadata& hdr_metadata)
: color_space_(color_space), hdr_metadata_(hdr_metadata) {
JNIEnv* env = base::android::AttachCurrentThread();
jobject_ = Java_HdrMetadata_create(env, reinterpret_cast<jlong>(this));
diff --git a/chromium/media/base/android/jni_hdr_metadata.h b/chromium/media/base/android/jni_hdr_metadata.h
index af3510f3bf8..f7ea00533c2 100644
--- a/chromium/media/base/android/jni_hdr_metadata.h
+++ b/chromium/media/base/android/jni_hdr_metadata.h
@@ -7,7 +7,7 @@
#include "base/android/jni_android.h"
#include "base/macros.h"
-#include "ui/gl/hdr_metadata.h"
+#include "ui/gfx/hdr_metadata.h"
namespace media {
@@ -16,7 +16,7 @@ class VideoColorSpace;
class JniHdrMetadata {
public:
JniHdrMetadata(const VideoColorSpace& color_space,
- const gl::HDRMetadata& hdr_metadata);
+ const gfx::HDRMetadata& hdr_metadata);
~JniHdrMetadata();
base::android::ScopedJavaLocalRef<jobject> obj() { return jobject_; }
@@ -58,7 +58,7 @@ class JniHdrMetadata {
private:
const VideoColorSpace& color_space_;
- const gl::HDRMetadata& hdr_metadata_;
+ const gfx::HDRMetadata& hdr_metadata_;
base::android::ScopedJavaLocalRef<jobject> jobject_;
DISALLOW_COPY_AND_ASSIGN(JniHdrMetadata);
diff --git a/chromium/media/base/android/media_codec_bridge_impl.h b/chromium/media/base/android/media_codec_bridge_impl.h
index c21ec38bab6..1d4ce6da9d8 100644
--- a/chromium/media/base/android/media_codec_bridge_impl.h
+++ b/chromium/media/base/android/media_codec_bridge_impl.h
@@ -20,7 +20,7 @@
#include "media/base/media_export.h"
#include "media/base/video_decoder_config.h"
#include "ui/gfx/geometry/size.h"
-#include "ui/gl/hdr_metadata.h"
+#include "ui/gfx/hdr_metadata.h"
namespace media {
@@ -54,7 +54,7 @@ class MEDIA_EXPORT VideoCodecConfig {
// VP9 HDR metadata is only embedded in the container. HDR10 metadata is
// embedded in the video stream.
- base::Optional<gl::HDRMetadata> hdr_metadata;
+ base::Optional<gfx::HDRMetadata> hdr_metadata;
// Enables the async MediaCodec.Callback API. |on_buffers_available_cb|
// will be called when input or output buffers are available. This will be
diff --git a/chromium/media/base/android/media_drm_bridge.cc b/chromium/media/base/android/media_drm_bridge.cc
index d71a30826da..e5d7978a28f 100644
--- a/chromium/media/base/android/media_drm_bridge.cc
+++ b/chromium/media/base/android/media_drm_bridge.cc
@@ -689,10 +689,11 @@ void MediaDrmBridge::OnProvisionRequest(
std::string request_data;
JavaByteArrayToString(env, j_request_data, &request_data);
+ std::string default_url;
+ ConvertJavaStringToUTF8(env, j_default_url, &default_url);
task_runner_->PostTask(
FROM_HERE, base::BindOnce(&MediaDrmBridge::SendProvisioningRequest,
- weak_factory_.GetWeakPtr(),
- ConvertJavaStringToUTF8(env, j_default_url),
+ weak_factory_.GetWeakPtr(), GURL(default_url),
std::move(request_data)));
}
@@ -944,7 +945,7 @@ void MediaDrmBridge::NotifyMediaCryptoReady(JavaObjectPtr j_media_crypto) {
IsSecureCodecRequired());
}
-void MediaDrmBridge::SendProvisioningRequest(const std::string& default_url,
+void MediaDrmBridge::SendProvisioningRequest(const GURL& default_url,
const std::string& request_data) {
DCHECK(task_runner_->BelongsToCurrentThread());
DVLOG(1) << __func__;
diff --git a/chromium/media/base/android/media_drm_bridge.h b/chromium/media/base/android/media_drm_bridge.h
index 56104ea9748..abbbf7a6d61 100644
--- a/chromium/media/base/android/media_drm_bridge.h
+++ b/chromium/media/base/android/media_drm_bridge.h
@@ -287,7 +287,7 @@ class MEDIA_EXPORT MediaDrmBridge : public ContentDecryptionModule,
void NotifyMediaCryptoReady(JavaObjectPtr j_media_crypto);
// Sends HTTP provisioning request to a provisioning server.
- void SendProvisioningRequest(const std::string& default_url,
+ void SendProvisioningRequest(const GURL& default_url,
const std::string& request_data);
// Process the data received by provisioning server.
diff --git a/chromium/media/base/android/media_drm_bridge_unittest.cc b/chromium/media/base/android/media_drm_bridge_unittest.cc
index 81243581abe..40c31575c49 100644
--- a/chromium/media/base/android/media_drm_bridge_unittest.cc
+++ b/chromium/media/base/android/media_drm_bridge_unittest.cc
@@ -6,7 +6,7 @@
#include "base/android/build_info.h"
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/memory/ptr_util.h"
#include "base/run_loop.h"
@@ -66,7 +66,7 @@ class ProvisionFetcherWrapper : public ProvisionFetcher {
: provision_fetcher_(provision_fetcher) {}
// ProvisionFetcher implementation.
- void Retrieve(const std::string& default_url,
+ void Retrieve(const GURL& default_url,
const std::string& request_data,
ResponseCB response_cb) override {
provision_fetcher_->Retrieve(default_url, request_data,
@@ -96,7 +96,7 @@ class MediaDrmBridgeTest : public ProvisionFetcher, public testing::Test {
// ProvisionFetcher implementation. Done as a mock method so we can properly
// check if |media_drm_bridge_| invokes it or not.
MOCK_METHOD3(Retrieve,
- void(const std::string& default_url,
+ void(const GURL& default_url,
const std::string& request_data,
ResponseCB response_cb));
diff --git a/chromium/media/base/android/media_service_throttler.cc b/chromium/media/base/android/media_service_throttler.cc
index ca54bb24204..d950218eebd 100644
--- a/chromium/media/base/android/media_service_throttler.cc
+++ b/chromium/media/base/android/media_service_throttler.cc
@@ -7,7 +7,7 @@
#include <memory>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/threading/thread_task_runner_handle.h"
#include "base/time/default_tick_clock.h"
#include "media/base/android/media_server_crash_listener.h"
diff --git a/chromium/media/base/async_destroy_video_encoder.h b/chromium/media/base/async_destroy_video_encoder.h
index 8576205af49..1a94cf5b339 100644
--- a/chromium/media/base/async_destroy_video_encoder.h
+++ b/chromium/media/base/async_destroy_video_encoder.h
@@ -52,9 +52,12 @@ class AsyncDestroyVideoEncoder final : public VideoEncoder {
wrapped_encoder_->Encode(std::move(frame), key_frame, std::move(done_cb));
}
- void ChangeOptions(const Options& options, StatusCB done_cb) override {
+ void ChangeOptions(const Options& options,
+ OutputCB output_cb,
+ StatusCB done_cb) override {
DCHECK(wrapped_encoder_);
- wrapped_encoder_->ChangeOptions(options, std::move(done_cb));
+ wrapped_encoder_->ChangeOptions(options, std::move(output_cb),
+ std::move(done_cb));
}
void Flush(StatusCB done_cb) override {
diff --git a/chromium/media/base/audio_buffer.cc b/chromium/media/base/audio_buffer.cc
index 622c9154f4f..ad6bd58c9c7 100644
--- a/chromium/media/base/audio_buffer.cc
+++ b/chromium/media/base/audio_buffer.cc
@@ -39,8 +39,12 @@ AudioBufferMemoryPool::AudioMemory AudioBufferMemoryPool::CreateBuffer(
return std::move(entry.first);
}
- return AudioMemory(static_cast<uint8_t*>(
+ // FFmpeg may not always initialize the entire output memory, so just like
+ // for VideoFrames we need to zero out the memory. https://crbug.com/1144070.
+ auto memory = AudioMemory(static_cast<uint8_t*>(
base::AlignedAlloc(size, AudioBuffer::kChannelAlignment)));
+ memset(memory.get(), 0, size);
+ return memory;
}
void AudioBufferMemoryPool::ReturnBuffer(AudioMemory memory, size_t size) {
@@ -228,12 +232,6 @@ scoped_refptr<AudioBuffer> AudioBuffer::CreateEOSBuffer() {
nullptr, 0, kNoTimestamp, nullptr));
}
-// Convert int16_t values in the range [INT16_MIN, INT16_MAX] to [-1.0, 1.0].
-inline float ConvertSample(int16_t value) {
- return value * (value < 0 ? -1.0f / std::numeric_limits<int16_t>::min()
- : 1.0f / std::numeric_limits<int16_t>::max());
-}
-
void AudioBuffer::AdjustSampleRate(int sample_rate) {
DCHECK(!end_of_stream_);
sample_rate_ = sample_rate;
@@ -281,14 +279,16 @@ void AudioBuffer::ReadFrames(int frames_to_copy,
return;
}
+ // Note: The conversion steps below will clip values to [-1.0, 1.0].
+
if (sample_format_ == kSampleFormatPlanarF32) {
- // Format is planar float32. Copy the data from each channel as a block.
for (int ch = 0; ch < channel_count_; ++ch) {
+ float* dest_data = dest->channel(ch) + dest_frame_offset;
const float* source_data =
reinterpret_cast<const float*>(channel_data_[ch]) +
source_frame_offset;
- memcpy(dest->channel(ch) + dest_frame_offset, source_data,
- sizeof(float) * frames_to_copy);
+ for (int i = 0; i < frames_to_copy; ++i)
+ dest_data[i] = Float32SampleTypeTraits::FromFloat(source_data[i]);
}
return;
}
@@ -301,37 +301,35 @@ void AudioBuffer::ReadFrames(int frames_to_copy,
reinterpret_cast<const int16_t*>(channel_data_[ch]) +
source_frame_offset;
float* dest_data = dest->channel(ch) + dest_frame_offset;
- for (int i = 0; i < frames_to_copy; ++i) {
- dest_data[i] = ConvertSample(source_data[i]);
- }
+ for (int i = 0; i < frames_to_copy; ++i)
+ dest_data[i] = SignedInt16SampleTypeTraits::ToFloat(source_data[i]);
}
return;
}
+ const int bytes_per_channel = SampleFormatToBytesPerChannel(sample_format_);
+ const int frame_size = channel_count_ * bytes_per_channel;
+ const uint8_t* source_data = data_.get() + source_frame_offset * frame_size;
+
if (sample_format_ == kSampleFormatF32) {
- // Format is interleaved float32. Copy the data into each channel.
- const float* source_data = reinterpret_cast<const float*>(data_.get()) +
- source_frame_offset * channel_count_;
- for (int ch = 0; ch < channel_count_; ++ch) {
- float* dest_data = dest->channel(ch) + dest_frame_offset;
- for (int i = 0, offset = ch; i < frames_to_copy;
- ++i, offset += channel_count_) {
- dest_data[i] = source_data[offset];
- }
- }
- return;
+ dest->FromInterleavedPartial<Float32SampleTypeTraits>(
+ reinterpret_cast<const float*>(source_data), dest_frame_offset,
+ frames_to_copy);
+ } else if (sample_format_ == kSampleFormatU8) {
+ dest->FromInterleavedPartial<UnsignedInt8SampleTypeTraits>(
+ source_data, dest_frame_offset, frames_to_copy);
+ } else if (sample_format_ == kSampleFormatS16) {
+ dest->FromInterleavedPartial<SignedInt16SampleTypeTraits>(
+ reinterpret_cast<const int16_t*>(source_data), dest_frame_offset,
+ frames_to_copy);
+ } else if (sample_format_ == kSampleFormatS24 ||
+ sample_format_ == kSampleFormatS32) {
+ dest->FromInterleavedPartial<SignedInt32SampleTypeTraits>(
+ reinterpret_cast<const int32_t*>(source_data), dest_frame_offset,
+ frames_to_copy);
+ } else {
+ NOTREACHED() << "Unsupported audio sample type: " << sample_format_;
}
-
- // Remaining formats are integer interleaved data. Use the deinterleaving code
- // in AudioBus to copy the data.
- DCHECK(
- sample_format_ == kSampleFormatU8 || sample_format_ == kSampleFormatS16 ||
- sample_format_ == kSampleFormatS24 || sample_format_ == kSampleFormatS32);
- int bytes_per_channel = SampleFormatToBytesPerChannel(sample_format_);
- int frame_size = channel_count_ * bytes_per_channel;
- const uint8_t* source_data = data_.get() + source_frame_offset * frame_size;
- dest->FromInterleavedPartial(source_data, dest_frame_offset, frames_to_copy,
- bytes_per_channel);
}
void AudioBuffer::TrimStart(int frames_to_trim) {
diff --git a/chromium/media/base/audio_buffer.h b/chromium/media/base/audio_buffer.h
index 71f5c87e8b7..55adc82837f 100644
--- a/chromium/media/base/audio_buffer.h
+++ b/chromium/media/base/audio_buffer.h
@@ -124,8 +124,8 @@ class MEDIA_EXPORT AudioBuffer
// Copy frames into |dest|. |frames_to_copy| is the number of frames to copy.
// |source_frame_offset| specifies how many frames in the buffer to skip
// first. |dest_frame_offset| is the frame offset in |dest|. The frames are
- // converted from their source format into planar float32 data (which is all
- // that AudioBus handles).
+ // converted and clipped from their source format into planar float32 data
+ // (which is all that AudioBus handles).
void ReadFrames(int frames_to_copy,
int source_frame_offset,
int dest_frame_offset,
diff --git a/chromium/media/base/audio_buffer_queue_unittest.cc b/chromium/media/base/audio_buffer_queue_unittest.cc
index 3cf5e120c56..94e20d581a5 100644
--- a/chromium/media/base/audio_buffer_queue_unittest.cc
+++ b/chromium/media/base/audio_buffer_queue_unittest.cc
@@ -18,18 +18,24 @@
namespace media {
-const int kSampleRate = 44100;
+constexpr int kSampleRate = 44100;
+enum class ValueType { kNormal, kFloat };
static void VerifyBus(AudioBus* bus,
int offset,
int frames,
int buffer_size,
float start,
- float increment) {
+ float increment,
+ ValueType type = ValueType::kNormal) {
for (int ch = 0; ch < bus->channels(); ++ch) {
const float v = start + ch * buffer_size * increment;
for (int i = offset; i < offset + frames; ++i) {
- ASSERT_FLOAT_EQ(v + (i - offset) * increment, bus->channel(ch)[i])
+ float expected_value = v + (i - offset) * increment;
+ if (type == ValueType::kFloat)
+ expected_value /= std::numeric_limits<uint16_t>::max();
+
+ ASSERT_FLOAT_EQ(expected_value, bus->channel(ch)[i])
<< "i=" << i << ", ch=" << ch;
}
}
@@ -94,7 +100,7 @@ TEST(AudioBufferQueueTest, IteratorCheck) {
EXPECT_EQ(4, buffer.ReadFrames(4, 0, bus.get()));
EXPECT_EQ(4, buffer.frames());
- VerifyBus(bus.get(), 0, 4, bus->frames(), 10, 1);
+ VerifyBus(bus.get(), 0, 4, bus->frames(), 10, 1, ValueType::kFloat);
buffer.Append(MakeTestBuffer<float>(
kSampleFormatF32, channel_layout, 20.0f, 1.0f, 8));
@@ -106,7 +112,7 @@ TEST(AudioBufferQueueTest, IteratorCheck) {
buffer.SeekFrames(16);
EXPECT_EQ(4, buffer.ReadFrames(4, 0, bus.get()));
EXPECT_EQ(0, buffer.frames());
- VerifyBus(bus.get(), 0, 4, bus->frames(), 34, 1);
+ VerifyBus(bus.get(), 0, 4, bus->frames(), 34, 1, ValueType::kFloat);
buffer.Append(MakeTestBuffer<float>(
kSampleFormatF32, channel_layout, 40.0f, 1.0f, 8));
@@ -116,13 +122,13 @@ TEST(AudioBufferQueueTest, IteratorCheck) {
EXPECT_EQ(16, buffer.frames());
EXPECT_EQ(4, buffer.ReadFrames(4, 0, bus.get()));
- VerifyBus(bus.get(), 0, 4, bus->frames(), 40, 1);
+ VerifyBus(bus.get(), 0, 4, bus->frames(), 40, 1, ValueType::kFloat);
// Read off the end of the buffer.
EXPECT_EQ(12, buffer.frames());
buffer.SeekFrames(8);
EXPECT_EQ(4, buffer.ReadFrames(100, 0, bus.get()));
- VerifyBus(bus.get(), 0, 4, bus->frames(), 54, 1);
+ VerifyBus(bus.get(), 0, 4, bus->frames(), 54, 1, ValueType::kFloat);
}
TEST(AudioBufferQueueTest, Seek) {
@@ -188,13 +194,13 @@ TEST(AudioBufferQueueTest, ReadF32) {
std::unique_ptr<AudioBus> bus = AudioBus::Create(channels, 100);
EXPECT_EQ(3, buffer.ReadFrames(3, 0, bus.get()));
EXPECT_EQ(73, buffer.frames());
- VerifyBus(bus.get(), 0, 3, 6, 1, 1);
+ VerifyBus(bus.get(), 0, 3, 6, 1, 1, ValueType::kFloat);
// Now read 5 frames, which will span buffers. Append the data into AudioBus.
EXPECT_EQ(5, buffer.ReadFrames(5, 3, bus.get()));
EXPECT_EQ(68, buffer.frames());
- VerifyBus(bus.get(), 0, 6, 6, 1, 1);
- VerifyBus(bus.get(), 6, 2, 10, 13, 1);
+ VerifyBus(bus.get(), 0, 6, 6, 1, 1, ValueType::kFloat);
+ VerifyBus(bus.get(), 6, 2, 10, 13, 1, ValueType::kFloat);
// Now skip into the third buffer.
buffer.SeekFrames(20);
@@ -202,7 +208,7 @@ TEST(AudioBufferQueueTest, ReadF32) {
// Now read 2 frames, which are in the third buffer.
EXPECT_EQ(2, buffer.ReadFrames(2, 0, bus.get()));
- VerifyBus(bus.get(), 0, 2, 60, 45, 1);
+ VerifyBus(bus.get(), 0, 2, 60, 45, 1, ValueType::kFloat);
}
TEST(AudioBufferQueueTest, ReadU8) {
@@ -289,8 +295,8 @@ TEST(AudioBufferQueueTest, ReadF32Planar) {
std::unique_ptr<AudioBus> bus = AudioBus::Create(channels, 100);
EXPECT_EQ(6, buffer.ReadFrames(6, 0, bus.get()));
EXPECT_EQ(8, buffer.frames());
- VerifyBus(bus.get(), 0, 4, 4, 1, 1);
- VerifyBus(bus.get(), 4, 2, 10, 50, 1);
+ VerifyBus(bus.get(), 0, 4, 4, 1, 1, ValueType::kFloat);
+ VerifyBus(bus.get(), 4, 2, 10, 50, 1, ValueType::kFloat);
}
TEST(AudioBufferQueueTest, ReadS16Planar) {
@@ -333,9 +339,9 @@ TEST(AudioBufferQueueTest, ReadManyChannels) {
std::unique_ptr<AudioBus> bus = AudioBus::Create(channels, 100);
EXPECT_EQ(30, buffer.ReadFrames(30, 0, bus.get()));
EXPECT_EQ(46, buffer.frames());
- VerifyBus(bus.get(), 0, 6, 6, 0, 1);
- VerifyBus(bus.get(), 6, 10, 10, 6 * channels, 1);
- VerifyBus(bus.get(), 16, 14, 60, 16 * channels, 1);
+ VerifyBus(bus.get(), 0, 6, 6, 0, 1, ValueType::kFloat);
+ VerifyBus(bus.get(), 6, 10, 10, 6 * channels, 1, ValueType::kFloat);
+ VerifyBus(bus.get(), 16, 14, 60, 16 * channels, 1, ValueType::kFloat);
}
TEST(AudioBufferQueueTest, Peek) {
@@ -355,17 +361,17 @@ TEST(AudioBufferQueueTest, Peek) {
EXPECT_EQ(frames, buffer.PeekFrames(60, 0, 0, bus1.get()));
EXPECT_EQ(30, buffer.PeekFrames(30, 0, 0, bus1.get()));
EXPECT_EQ(frames, buffer.frames());
- VerifyBus(bus1.get(), 0, 30, bus1->frames(), 0, 1);
+ VerifyBus(bus1.get(), 0, 30, bus1->frames(), 0, 1, ValueType::kFloat);
// Now read the next 30 frames (which should be the same as those peeked at).
std::unique_ptr<AudioBus> bus2 = AudioBus::Create(channels, frames);
EXPECT_EQ(30, buffer.ReadFrames(30, 0, bus2.get()));
- VerifyBus(bus2.get(), 0, 30, bus2->frames(), 0, 1);
+ VerifyBus(bus2.get(), 0, 30, bus2->frames(), 0, 1, ValueType::kFloat);
// Peek 10 frames forward
bus1->Zero();
EXPECT_EQ(5, buffer.PeekFrames(5, 10, 0, bus1.get()));
- VerifyBus(bus1.get(), 0, 5, bus1->frames(), 40, 1);
+ VerifyBus(bus1.get(), 0, 5, bus1->frames(), 40, 1, ValueType::kFloat);
// Peek to the end of the buffer.
EXPECT_EQ(30, buffer.frames());
diff --git a/chromium/media/base/audio_buffer_unittest.cc b/chromium/media/base/audio_buffer_unittest.cc
index 31b5f4d0470..c769bd5adc2 100644
--- a/chromium/media/base/audio_buffer_unittest.cc
+++ b/chromium/media/base/audio_buffer_unittest.cc
@@ -14,25 +14,34 @@
namespace media {
-static const int kSampleRate = 4800;
+constexpr int kSampleRate = 4800;
+enum class ValueType { kNormal, kFloat };
static void VerifyBusWithOffset(AudioBus* bus,
int offset,
int frames,
float start,
float start_offset,
- float increment) {
+ float increment,
+ ValueType type = ValueType::kNormal) {
for (int ch = 0; ch < bus->channels(); ++ch) {
const float v = start_offset + start + ch * bus->frames() * increment;
for (int i = offset; i < offset + frames; ++i) {
- ASSERT_FLOAT_EQ(v + i * increment, bus->channel(ch)[i]) << "i=" << i
- << ", ch=" << ch;
+ float expected_value = v + i * increment;
+ if (type == ValueType::kFloat)
+ expected_value /= std::numeric_limits<uint16_t>::max();
+ ASSERT_FLOAT_EQ(expected_value, bus->channel(ch)[i])
+ << "i=" << i << ", ch=" << ch;
}
}
}
-static void VerifyBus(AudioBus* bus, int frames, float start, float increment) {
- VerifyBusWithOffset(bus, 0, frames, start, 0, increment);
+static void VerifyBus(AudioBus* bus,
+ int frames,
+ float start,
+ float increment,
+ ValueType type = ValueType::kNormal) {
+ VerifyBusWithOffset(bus, 0, frames, start, 0, increment, type);
}
static void TrimRangeTest(SampleFormat sample_format) {
@@ -57,7 +66,7 @@ static void TrimRangeTest(SampleFormat sample_format) {
// Verify all frames before trimming.
buffer->ReadFrames(frames, 0, 0, bus.get());
- VerifyBus(bus.get(), frames, 0, 1);
+ VerifyBus(bus.get(), frames, 0, 1, ValueType::kFloat);
// Trim 10ms of frames from the middle of the buffer.
int trim_start = frames / 2;
@@ -69,13 +78,9 @@ static void TrimRangeTest(SampleFormat sample_format) {
EXPECT_EQ(duration - trim_duration, buffer->duration());
bus->Zero();
buffer->ReadFrames(buffer->frame_count(), 0, 0, bus.get());
- VerifyBus(bus.get(), trim_start, 0, 1);
- VerifyBusWithOffset(bus.get(),
- trim_start,
- buffer->frame_count() - trim_start,
- 0,
- trim_length,
- 1);
+ VerifyBus(bus.get(), trim_start, 0, 1, ValueType::kFloat);
+ VerifyBusWithOffset(bus.get(), trim_start, buffer->frame_count() - trim_start,
+ 0, trim_length, 1, ValueType::kFloat);
// Trim 10ms of frames from the start, which just adjusts the buffer's
// internal start offset.
@@ -86,13 +91,9 @@ static void TrimRangeTest(SampleFormat sample_format) {
EXPECT_EQ(duration - 2 * trim_duration, buffer->duration());
bus->Zero();
buffer->ReadFrames(buffer->frame_count(), 0, 0, bus.get());
- VerifyBus(bus.get(), trim_start, trim_length, 1);
- VerifyBusWithOffset(bus.get(),
- trim_start,
- buffer->frame_count() - trim_start,
- trim_length,
- trim_length,
- 1);
+ VerifyBus(bus.get(), trim_start, trim_length, 1, ValueType::kFloat);
+ VerifyBusWithOffset(bus.get(), trim_start, buffer->frame_count() - trim_start,
+ trim_length, trim_length, 1, ValueType::kFloat);
// Trim 10ms of frames from the end, which just adjusts the buffer's frame
// count.
@@ -102,13 +103,9 @@ static void TrimRangeTest(SampleFormat sample_format) {
EXPECT_EQ(duration - 3 * trim_duration, buffer->duration());
bus->Zero();
buffer->ReadFrames(buffer->frame_count(), 0, 0, bus.get());
- VerifyBus(bus.get(), trim_start, trim_length, 1);
- VerifyBusWithOffset(bus.get(),
- trim_start,
- buffer->frame_count() - trim_start,
- trim_length,
- trim_length,
- 1);
+ VerifyBus(bus.get(), trim_start, trim_length, 1, ValueType::kFloat);
+ VerifyBusWithOffset(bus.get(), trim_start, buffer->frame_count() - trim_start,
+ trim_length, trim_length, 1, ValueType::kFloat);
// Trim another 10ms from the inner portion of the buffer.
buffer->TrimRange(trim_start, trim_start + trim_length);
@@ -117,13 +114,9 @@ static void TrimRangeTest(SampleFormat sample_format) {
EXPECT_EQ(duration - 4 * trim_duration, buffer->duration());
bus->Zero();
buffer->ReadFrames(buffer->frame_count(), 0, 0, bus.get());
- VerifyBus(bus.get(), trim_start, trim_length, 1);
- VerifyBusWithOffset(bus.get(),
- trim_start,
- buffer->frame_count() - trim_start,
- trim_length,
- trim_length * 2,
- 1);
+ VerifyBus(bus.get(), trim_start, trim_length, 1, ValueType::kFloat);
+ VerifyBusWithOffset(bus.get(), trim_start, buffer->frame_count() - trim_start,
+ trim_length, trim_length * 2, 1, ValueType::kFloat);
// Trim off the end using TrimRange() to ensure end index is exclusive.
buffer->TrimRange(buffer->frame_count() - trim_length, buffer->frame_count());
@@ -132,13 +125,9 @@ static void TrimRangeTest(SampleFormat sample_format) {
EXPECT_EQ(duration - 5 * trim_duration, buffer->duration());
bus->Zero();
buffer->ReadFrames(buffer->frame_count(), 0, 0, bus.get());
- VerifyBus(bus.get(), trim_start, trim_length, 1);
- VerifyBusWithOffset(bus.get(),
- trim_start,
- buffer->frame_count() - trim_start,
- trim_length,
- trim_length * 2,
- 1);
+ VerifyBus(bus.get(), trim_start, trim_length, 1, ValueType::kFloat);
+ VerifyBusWithOffset(bus.get(), trim_start, buffer->frame_count() - trim_start,
+ trim_length, trim_length * 2, 1, ValueType::kFloat);
// Trim off the start using TrimRange() to ensure start index is inclusive.
buffer->TrimRange(0, trim_length);
@@ -148,13 +137,9 @@ static void TrimRangeTest(SampleFormat sample_format) {
EXPECT_EQ(duration - 6 * trim_duration, buffer->duration());
bus->Zero();
buffer->ReadFrames(buffer->frame_count(), 0, 0, bus.get());
- VerifyBus(bus.get(), trim_start, 2 * trim_length, 1);
- VerifyBusWithOffset(bus.get(),
- trim_start,
- buffer->frame_count() - trim_start,
- trim_length * 2,
- trim_length * 2,
- 1);
+ VerifyBus(bus.get(), trim_start, 2 * trim_length, 1, ValueType::kFloat);
+ VerifyBusWithOffset(bus.get(), trim_start, buffer->frame_count() - trim_start,
+ trim_length * 2, trim_length * 2, 1, ValueType::kFloat);
}
TEST(AudioBufferTest, CopyFrom) {
@@ -345,12 +330,12 @@ TEST(AudioBufferTest, ReadF32) {
start_time);
std::unique_ptr<AudioBus> bus = AudioBus::Create(channels, frames);
buffer->ReadFrames(10, 0, 0, bus.get());
- VerifyBus(bus.get(), 10, 1, 1);
+ VerifyBus(bus.get(), 10, 1, 1, ValueType::kFloat);
// Read second 10 frames.
bus->Zero();
buffer->ReadFrames(10, 10, 0, bus.get());
- VerifyBus(bus.get(), 10, 11, 1);
+ VerifyBus(bus.get(), 10, 11, 1, ValueType::kFloat);
}
TEST(AudioBufferTest, ReadS16Planar) {
@@ -406,12 +391,12 @@ TEST(AudioBufferTest, ReadF32Planar) {
// channels.
std::unique_ptr<AudioBus> bus = AudioBus::Create(channels, 100);
buffer->ReadFrames(frames, 0, 0, bus.get());
- VerifyBus(bus.get(), frames, 1, 1);
+ VerifyBus(bus.get(), frames, 1, 1, ValueType::kFloat);
// Now read 20 frames from the middle of the buffer.
bus->Zero();
buffer->ReadFrames(20, 50, 0, bus.get());
- VerifyBus(bus.get(), 20, 51, 1);
+ VerifyBus(bus.get(), 20, 51, 1, ValueType::kFloat);
}
TEST(AudioBufferTest, EmptyBuffer) {
@@ -487,7 +472,7 @@ TEST(AudioBufferTest, Trim) {
std::unique_ptr<AudioBus> bus = AudioBus::Create(channels, frames);
buffer->ReadFrames(buffer->frame_count(), 0, 0, bus.get());
- VerifyBus(bus.get(), buffer->frame_count(), 0.0f, 1.0f);
+ VerifyBus(bus.get(), buffer->frame_count(), 0.0f, 1.0f, ValueType::kFloat);
// Trim off 10ms of frames from the start.
buffer->TrimStart(ten_ms_of_frames);
@@ -495,7 +480,8 @@ TEST(AudioBufferTest, Trim) {
EXPECT_EQ(frames - ten_ms_of_frames, buffer->frame_count());
EXPECT_EQ(duration - ten_ms, buffer->duration());
buffer->ReadFrames(buffer->frame_count(), 0, 0, bus.get());
- VerifyBus(bus.get(), buffer->frame_count(), ten_ms_of_frames, 1.0f);
+ VerifyBus(bus.get(), buffer->frame_count(), ten_ms_of_frames, 1.0f,
+ ValueType::kFloat);
// Trim off 10ms of frames from the end.
buffer->TrimEnd(ten_ms_of_frames);
@@ -503,7 +489,8 @@ TEST(AudioBufferTest, Trim) {
EXPECT_EQ(frames - 2 * ten_ms_of_frames, buffer->frame_count());
EXPECT_EQ(duration - 2 * ten_ms, buffer->duration());
buffer->ReadFrames(buffer->frame_count(), 0, 0, bus.get());
- VerifyBus(bus.get(), buffer->frame_count(), ten_ms_of_frames, 1.0f);
+ VerifyBus(bus.get(), buffer->frame_count(), ten_ms_of_frames, 1.0f,
+ ValueType::kFloat);
// Trim off 40ms more from the start.
buffer->TrimStart(4 * ten_ms_of_frames);
@@ -511,7 +498,8 @@ TEST(AudioBufferTest, Trim) {
EXPECT_EQ(frames - 6 * ten_ms_of_frames, buffer->frame_count());
EXPECT_EQ(duration - 6 * ten_ms, buffer->duration());
buffer->ReadFrames(buffer->frame_count(), 0, 0, bus.get());
- VerifyBus(bus.get(), buffer->frame_count(), 5 * ten_ms_of_frames, 1.0f);
+ VerifyBus(bus.get(), buffer->frame_count(), 5 * ten_ms_of_frames, 1.0f,
+ ValueType::kFloat);
// Trim off the final 40ms from the end.
buffer->TrimEnd(4 * ten_ms_of_frames);
diff --git a/chromium/media/base/audio_converter.cc b/chromium/media/base/audio_converter.cc
index 8d56906b31f..5b000656c3d 100644
--- a/chromium/media/base/audio_converter.cc
+++ b/chromium/media/base/audio_converter.cc
@@ -13,7 +13,7 @@
#include <algorithm>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/trace_event/trace_event.h"
#include "media/base/audio_bus.h"
diff --git a/chromium/media/base/audio_discard_helper_unittest.cc b/chromium/media/base/audio_discard_helper_unittest.cc
index 324439d5f82..9300fea296c 100644
--- a/chromium/media/base/audio_discard_helper_unittest.cc
+++ b/chromium/media/base/audio_discard_helper_unittest.cc
@@ -40,7 +40,7 @@ static float ExtractDecodedData(const AudioBuffer& buffer, int index) {
std::unique_ptr<AudioBus> temp_bus =
AudioBus::Create(buffer.channel_count(), 1);
buffer.ReadFrames(1, index, 0, temp_bus.get());
- return temp_bus->channel(0)[0];
+ return temp_bus->channel(0)[0] * std::numeric_limits<uint16_t>::max();
}
TEST(AudioDiscardHelperTest, TimeDeltaToFrames) {
diff --git a/chromium/media/base/audio_encoder.cc b/chromium/media/base/audio_encoder.cc
index 0a9ca22e96f..64b5cab9f0a 100644
--- a/chromium/media/base/audio_encoder.cc
+++ b/chromium/media/base/audio_encoder.cc
@@ -38,14 +38,16 @@ AudioEncoder::AudioEncoder(const AudioParameters& input_params,
DCHECK(audio_input_params_.IsValid());
DCHECK(!encode_callback_.is_null());
DCHECK(!status_callback_.is_null());
- DETACH_FROM_THREAD(thread_checker_);
+ DETACH_FROM_SEQUENCE(sequence_checker_);
}
-AudioEncoder::~AudioEncoder() = default;
+AudioEncoder::~AudioEncoder() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+}
void AudioEncoder::EncodeAudio(const AudioBus& audio_bus,
base::TimeTicks capture_time) {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK_EQ(audio_bus.channels(), audio_input_params_.channels());
DCHECK(!capture_time.is_null());
@@ -61,6 +63,12 @@ void AudioEncoder::EncodeAudio(const AudioBus& audio_bus,
EncodeAudioImpl(audio_bus, capture_time);
}
+void AudioEncoder::Flush() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+
+ FlushImpl();
+}
+
base::TimeTicks AudioEncoder::ComputeTimestamp(
int num_frames,
base::TimeTicks capture_time) const {
diff --git a/chromium/media/base/audio_encoder.h b/chromium/media/base/audio_encoder.h
index e7ad866fcb0..c25218c9ee8 100644
--- a/chromium/media/base/audio_encoder.h
+++ b/chromium/media/base/audio_encoder.h
@@ -8,7 +8,7 @@
#include <memory>
#include "base/callback.h"
-#include "base/threading/thread_checker.h"
+#include "base/sequence_checker.h"
#include "base/time/time.h"
#include "media/base/audio_bus.h"
#include "media/base/audio_parameters.h"
@@ -41,7 +41,8 @@ struct MEDIA_EXPORT EncodedAudioBuffer {
// number of encoded bytes may not be known in advance.
const size_t encoded_data_size;
- // The capture time of the first sample of the current AudioBus.
+ // The capture time of the first sample of the current AudioBus, or a previous
+ // AudioBus if this output was generated because of a call to Flush().
const base::TimeTicks timestamp;
};
@@ -50,9 +51,9 @@ struct MEDIA_EXPORT EncodedAudioBuffer {
class MEDIA_EXPORT AudioEncoder {
public:
// Signature of the callback invoked to provide the encoded audio data. It is
- // invoked on the same thread on which EncodeAudio() is called. The utility
+ // invoked on the same sequence on which EncodeAudio() is called. The utility
// media::BindToCurrentLoop() can be used to create a callback that will be
- // invoked on the same thread it is constructed on.
+ // invoked on the same sequence it is constructed on.
using EncodeCB = base::RepeatingCallback<void(EncodedAudioBuffer output)>;
// Signature of the callback to report errors.
@@ -62,8 +63,8 @@ class MEDIA_EXPORT AudioEncoder {
// encoder, and a callback to trigger to provide the encoded audio data.
// |input_params| must be valid, and |encode_callback| and |status_callback|
// must not be null callbacks. All calls to EncodeAudio() must happen on the
- // same thread (usually an encoder thread), but the encoder itself can be
- // constructed on any thread.
+ // same sequence (usually an encoder blocking pool sequence), but the encoder
+ // itself can be constructed on any sequence.
AudioEncoder(const AudioParameters& input_params,
EncodeCB encode_callback,
StatusCB status_callback);
@@ -79,6 +80,11 @@ class MEDIA_EXPORT AudioEncoder {
// actual encoding.
void EncodeAudio(const AudioBus& audio_bus, base::TimeTicks capture_time);
+ // Some encoders may choose to buffer audio frames before they encode them.
+ // This function provides a mechanism to drain and encode any buffered frames
+ // (if any). Must be called on the encoder sequence.
+ void Flush();
+
protected:
const EncodeCB& encode_callback() const { return encode_callback_; }
const StatusCB& status_callback() const { return status_callback_; }
@@ -87,6 +93,8 @@ class MEDIA_EXPORT AudioEncoder {
virtual void EncodeAudioImpl(const AudioBus& audio_bus,
base::TimeTicks capture_time) = 0;
+ virtual void FlushImpl() = 0;
+
// Computes the timestamp of an AudioBus which has |num_frames| and was
// captured at |capture_time|. This timestamp is the capture time of the first
// sample in that AudioBus.
@@ -104,7 +112,7 @@ class MEDIA_EXPORT AudioEncoder {
// EncodeAudio().
base::TimeTicks last_capture_time_;
- THREAD_CHECKER(thread_checker_);
+ SEQUENCE_CHECKER(sequence_checker_);
};
} // namespace media
diff --git a/chromium/media/base/audio_latency.cc b/chromium/media/base/audio_latency.cc
index c181ec49343..b55a2da37e1 100644
--- a/chromium/media/base/audio_latency.cc
+++ b/chromium/media/base/audio_latency.cc
@@ -11,6 +11,7 @@
#include "base/logging.h"
#include "base/time/time.h"
#include "build/build_config.h"
+#include "build/chromeos_buildflags.h"
#include "media/base/limits.h"
#if defined(OS_ANDROID)
@@ -42,7 +43,7 @@ uint32_t RoundUpToPowerOfTwo(uint32_t v) {
// static
bool AudioLatency::IsResamplingPassthroughSupported(LatencyType type) {
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
return true;
#elif defined(OS_ANDROID)
// Only N MR1+ has support for OpenSLES performance modes which allow for
diff --git a/chromium/media/base/audio_pull_fifo_unittest.cc b/chromium/media/base/audio_pull_fifo_unittest.cc
index 2a83e3fb68f..358485a1831 100644
--- a/chromium/media/base/audio_pull_fifo_unittest.cc
+++ b/chromium/media/base/audio_pull_fifo_unittest.cc
@@ -5,7 +5,7 @@
#include <memory>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/macros.h"
#include "base/strings/stringprintf.h"
#include "media/base/audio_bus.h"
diff --git a/chromium/media/base/audio_push_fifo.h b/chromium/media/base/audio_push_fifo.h
index 2eb6bfc467a..fb9c615c397 100644
--- a/chromium/media/base/audio_push_fifo.h
+++ b/chromium/media/base/audio_push_fifo.h
@@ -42,6 +42,9 @@ class MEDIA_EXPORT AudioPushFifo final {
// OutputCallback.
int frames_per_buffer() const { return frames_per_buffer_; }
+ // The number of frames currently queued in this FIFO.
+ int queued_frames() const { return queued_frames_; }
+
// Must be called at least once before the first call to Push(). May be
// called later (e.g., to support an audio format change).
void Reset(int frames_per_buffer);
diff --git a/chromium/media/base/audio_push_fifo_unittest.cc b/chromium/media/base/audio_push_fifo_unittest.cc
index 5ee4a186489..2adf285f37e 100644
--- a/chromium/media/base/audio_push_fifo_unittest.cc
+++ b/chromium/media/base/audio_push_fifo_unittest.cc
@@ -7,7 +7,7 @@
#include <vector>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/macros.h"
#include "media/base/audio_bus.h"
#include "media/base/audio_push_fifo.h"
diff --git a/chromium/media/base/audio_renderer_mixer_input_unittest.cc b/chromium/media/base/audio_renderer_mixer_input_unittest.cc
index 3cfb45f9de6..01f4491e66b 100644
--- a/chromium/media/base/audio_renderer_mixer_input_unittest.cc
+++ b/chromium/media/base/audio_renderer_mixer_input_unittest.cc
@@ -6,7 +6,7 @@
#include <memory>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/macros.h"
#include "base/run_loop.h"
#include "base/test/task_environment.h"
diff --git a/chromium/media/base/audio_renderer_mixer_unittest.cc b/chromium/media/base/audio_renderer_mixer_unittest.cc
index 7b195f599dd..60ddd9df47a 100644
--- a/chromium/media/base/audio_renderer_mixer_unittest.cc
+++ b/chromium/media/base/audio_renderer_mixer_unittest.cc
@@ -13,7 +13,7 @@
#include <vector>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/stl_util.h"
#include "base/synchronization/waitable_event.h"
#include "base/test/task_environment.h"
diff --git a/chromium/media/base/cdm_context.cc b/chromium/media/base/cdm_context.cc
index cbc0c9d7ddf..8f8cd843f64 100644
--- a/chromium/media/base/cdm_context.cc
+++ b/chromium/media/base/cdm_context.cc
@@ -3,6 +3,7 @@
// found in the LICENSE file.
#include "media/base/cdm_context.h"
+#include "build/chromeos_buildflags.h"
#include "media/base/callback_registry.h"
@@ -52,4 +53,10 @@ FuchsiaCdmContext* CdmContext::GetFuchsiaCdmContext() {
}
#endif
+#if BUILDFLAG(IS_ASH)
+chromeos::ChromeOsCdmContext* CdmContext::GetChromeOsCdmContext() {
+ return nullptr;
+}
+#endif
+
} // namespace media
diff --git a/chromium/media/base/cdm_context.h b/chromium/media/base/cdm_context.h
index d2d6965592f..d555e21e360 100644
--- a/chromium/media/base/cdm_context.h
+++ b/chromium/media/base/cdm_context.h
@@ -10,6 +10,7 @@
#include "base/optional.h"
#include "base/unguessable_token.h"
#include "build/build_config.h"
+#include "build/chromeos_buildflags.h"
#include "media/base/media_export.h"
#include "media/media_buildflags.h"
@@ -18,6 +19,12 @@
struct IMFCdmProxy;
#endif
+#if BUILDFLAG(IS_ASH)
+namespace chromeos {
+class ChromeOsCdmContext;
+}
+#endif
+
namespace media {
class CallbackRegistration;
@@ -115,6 +122,12 @@ class MEDIA_EXPORT CdmContext {
virtual FuchsiaCdmContext* GetFuchsiaCdmContext();
#endif
+#if BUILDFLAG(IS_ASH)
+ // Returns a ChromeOsCdmContext interface when the context is backed by the
+ // ChromeOS CdmFactoryDaemon. Otherwise return nullptr.
+ virtual chromeos::ChromeOsCdmContext* GetChromeOsCdmContext();
+#endif
+
protected:
CdmContext();
diff --git a/chromium/media/base/decoder_factory.cc b/chromium/media/base/decoder_factory.cc
index 47aa49bab02..52028b6901c 100644
--- a/chromium/media/base/decoder_factory.cc
+++ b/chromium/media/base/decoder_factory.cc
@@ -13,12 +13,12 @@ DecoderFactory::DecoderFactory() = default;
DecoderFactory::~DecoderFactory() = default;
void DecoderFactory::CreateAudioDecoders(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
MediaLog* media_log,
std::vector<std::unique_ptr<AudioDecoder>>* audio_decoders) {}
void DecoderFactory::CreateVideoDecoders(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
GpuVideoAcceleratorFactories* gpu_factories,
MediaLog* media_log,
RequestOverlayInfoCB request_overlay_info_cb,
diff --git a/chromium/media/base/decoder_factory.h b/chromium/media/base/decoder_factory.h
index d01d55e6e56..2af1af58f9d 100644
--- a/chromium/media/base/decoder_factory.h
+++ b/chromium/media/base/decoder_factory.h
@@ -14,7 +14,7 @@
#include "media/base/overlay_info.h"
namespace base {
-class SingleThreadTaskRunner;
+class SequencedTaskRunner;
} // namespace base
namespace gfx {
@@ -37,14 +37,14 @@ class MEDIA_EXPORT DecoderFactory {
// Creates audio decoders and append them to the end of |audio_decoders|.
// Decoders are single-threaded, each decoder should run on |task_runner|.
virtual void CreateAudioDecoders(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
MediaLog* media_log,
std::vector<std::unique_ptr<AudioDecoder>>* audio_decoders);
// Creates video decoders and append them to the end of |video_decoders|.
// Decoders are single-threaded, each decoder should run on |task_runner|.
virtual void CreateVideoDecoders(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
GpuVideoAcceleratorFactories* gpu_factories,
MediaLog* media_log,
RequestOverlayInfoCB request_overlay_info_cb,
diff --git a/chromium/media/base/demuxer.h b/chromium/media/base/demuxer.h
index a06793ab07a..c9a4ebbd6b8 100644
--- a/chromium/media/base/demuxer.h
+++ b/chromium/media/base/demuxer.h
@@ -73,6 +73,12 @@ class MEDIA_EXPORT Demuxer : public MediaResource {
base::OnceCallback<void(DemuxerStream::Type type,
const std::vector<DemuxerStream*>&)>;
+ enum DemuxerTypes {
+ kChunkDemuxer,
+ kFFmpegDemuxer,
+ kMediaUrlDemuxer,
+ };
+
Demuxer();
~Demuxer() override;
diff --git a/chromium/media/base/demuxer_memory_limit.h b/chromium/media/base/demuxer_memory_limit.h
index 6fac9409f7c..2492eca1dd4 100644
--- a/chromium/media/base/demuxer_memory_limit.h
+++ b/chromium/media/base/demuxer_memory_limit.h
@@ -8,17 +8,23 @@
#include <stddef.h>
#include "build/build_config.h"
+#include "media/base/audio_decoder_config.h"
+#include "media/base/demuxer.h"
#include "media/base/media_export.h"
+#include "media/base/video_decoder_config.h"
namespace media {
// The maximum amount of data (in bytes) a demuxer can keep in memory, for a
// particular type of stream.
-MEDIA_EXPORT size_t GetDemuxerStreamAudioMemoryLimit();
-MEDIA_EXPORT size_t GetDemuxerStreamVideoMemoryLimit();
+MEDIA_EXPORT size_t
+GetDemuxerStreamAudioMemoryLimit(const AudioDecoderConfig* audio_config);
+MEDIA_EXPORT size_t
+GetDemuxerStreamVideoMemoryLimit(Demuxer::DemuxerTypes demuxer_type,
+ const VideoDecoderConfig* video_config);
// The maximum amount of data (in bytes) a demuxer can keep in memory overall.
-MEDIA_EXPORT size_t GetDemuxerMemoryLimit();
+MEDIA_EXPORT size_t GetDemuxerMemoryLimit(Demuxer::DemuxerTypes demuxer_type);
namespace internal {
@@ -26,13 +32,17 @@ namespace internal {
// above based on platform capabilities.
// Default audio memory limit: 12MB (5 minutes of 320Kbps content).
+// Medium audio memory limit: 5MB.
// Low audio memory limit: 2MB (1 minute of 256Kbps content).
constexpr size_t kDemuxerStreamAudioMemoryLimitDefault = 12 * 1024 * 1024;
+constexpr size_t kDemuxerStreamAudioMemoryLimitMedium = 5 * 1024 * 1024;
constexpr size_t kDemuxerStreamAudioMemoryLimitLow = 2 * 1024 * 1024;
// Default video memory limit: 150MB (5 minutes of 4Mbps content).
+// Medium video memory limit: 80MB.
// Low video memory limit: 30MB (1 minute of 4Mbps content).
constexpr size_t kDemuxerStreamVideoMemoryLimitDefault = 150 * 1024 * 1024;
+constexpr size_t kDemuxerStreamVideoMemoryLimitMedium = 80 * 1024 * 1024;
constexpr size_t kDemuxerStreamVideoMemoryLimitLow = 30 * 1024 * 1024;
#if defined(OS_ANDROID)
diff --git a/chromium/media/base/demuxer_memory_limit_android.cc b/chromium/media/base/demuxer_memory_limit_android.cc
index b2f35ecd0d8..3ea849942e2 100644
--- a/chromium/media/base/demuxer_memory_limit_android.cc
+++ b/chromium/media/base/demuxer_memory_limit_android.cc
@@ -28,7 +28,8 @@ size_t SelectLimit(size_t default_limit,
} // namespace
-size_t GetDemuxerStreamAudioMemoryLimit() {
+size_t GetDemuxerStreamAudioMemoryLimit(
+ const AudioDecoderConfig* /*audio_config*/) {
static const size_t limit =
SelectLimit(internal::kDemuxerStreamAudioMemoryLimitDefault,
internal::kDemuxerStreamAudioMemoryLimitLow,
@@ -36,7 +37,9 @@ size_t GetDemuxerStreamAudioMemoryLimit() {
return limit;
}
-size_t GetDemuxerStreamVideoMemoryLimit() {
+size_t GetDemuxerStreamVideoMemoryLimit(
+ Demuxer::DemuxerTypes /*demuxer_type*/,
+ const VideoDecoderConfig* /*video_config*/) {
static const size_t limit =
SelectLimit(internal::kDemuxerStreamVideoMemoryLimitDefault,
internal::kDemuxerStreamVideoMemoryLimitLow,
@@ -44,9 +47,9 @@ size_t GetDemuxerStreamVideoMemoryLimit() {
return limit;
}
-size_t GetDemuxerMemoryLimit() {
- return GetDemuxerStreamAudioMemoryLimit() +
- GetDemuxerStreamVideoMemoryLimit();
+size_t GetDemuxerMemoryLimit(Demuxer::DemuxerTypes demuxer_type) {
+ return GetDemuxerStreamAudioMemoryLimit(nullptr) +
+ GetDemuxerStreamVideoMemoryLimit(demuxer_type, nullptr);
}
} // namespace media
diff --git a/chromium/media/base/demuxer_memory_limit_cast.cc b/chromium/media/base/demuxer_memory_limit_cast.cc
new file mode 100644
index 00000000000..a4c2775c686
--- /dev/null
+++ b/chromium/media/base/demuxer_memory_limit_cast.cc
@@ -0,0 +1,62 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/demuxer_memory_limit.h"
+
+#include "base/check.h"
+#include "media/base/channel_layout.h"
+
+namespace media {
+
+size_t GetDemuxerStreamAudioMemoryLimit(
+ const AudioDecoderConfig* audio_config) {
+ if (!audio_config) {
+ return internal::kDemuxerStreamAudioMemoryLimitLow;
+ }
+
+ DCHECK(audio_config->IsValidConfig());
+ switch (audio_config->codec()) {
+ case kCodecEAC3:
+ case kCodecAC3:
+ return internal::kDemuxerStreamAudioMemoryLimitMedium;
+ case kCodecAAC:
+ if (ChannelLayoutToChannelCount(audio_config->channel_layout()) >= 5) {
+ return internal::kDemuxerStreamAudioMemoryLimitMedium;
+ }
+ return internal::kDemuxerStreamAudioMemoryLimitLow;
+ default:
+ return internal::kDemuxerStreamAudioMemoryLimitLow;
+ }
+}
+
+size_t GetDemuxerStreamVideoMemoryLimit(
+ Demuxer::DemuxerTypes demuxer_type,
+ const VideoDecoderConfig* video_config) {
+ switch (demuxer_type) {
+ case Demuxer::DemuxerTypes::kFFmpegDemuxer:
+ return internal::kDemuxerStreamVideoMemoryLimitDefault;
+ case Demuxer::DemuxerTypes::kChunkDemuxer:
+ if (!video_config) {
+ return internal::kDemuxerStreamVideoMemoryLimitLow;
+ }
+ DCHECK(video_config->IsValidConfig());
+ switch (video_config->codec()) {
+ case kCodecVP9:
+ case kCodecHEVC:
+ case kCodecDolbyVision:
+ return internal::kDemuxerStreamVideoMemoryLimitMedium;
+ default:
+ return internal::kDemuxerStreamVideoMemoryLimitLow;
+ }
+ case Demuxer::DemuxerTypes::kMediaUrlDemuxer:
+ return internal::kDemuxerStreamVideoMemoryLimitLow;
+ }
+}
+
+size_t GetDemuxerMemoryLimit(Demuxer::DemuxerTypes demuxer_type) {
+ return GetDemuxerStreamAudioMemoryLimit(nullptr) +
+ GetDemuxerStreamVideoMemoryLimit(demuxer_type, nullptr);
+}
+
+} // namespace media
diff --git a/chromium/media/base/demuxer_memory_limit_cast_unittest.cc b/chromium/media/base/demuxer_memory_limit_cast_unittest.cc
new file mode 100644
index 00000000000..41b68a20aed
--- /dev/null
+++ b/chromium/media/base/demuxer_memory_limit_cast_unittest.cc
@@ -0,0 +1,106 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/optional.h"
+#include "media/base/audio_decoder_config.h"
+#include "media/base/demuxer.h"
+#include "media/base/demuxer_memory_limit.h"
+#include "media/base/media_util.h"
+#include "media/base/video_decoder_config.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+
+static const gfx::Size kCodedSize(320, 240);
+static const gfx::Rect kVisibleRect(320, 240);
+static const gfx::Size kNaturalSize(320, 240);
+
+TEST(DemuxerMemoryLimitCastTest, GetDemuxerStreamAudioMemoryLimit) {
+ EXPECT_EQ(GetDemuxerStreamAudioMemoryLimit(nullptr),
+ internal::kDemuxerStreamAudioMemoryLimitLow);
+
+ AudioDecoderConfig audio_config_opus(
+ AudioCodec::kCodecOpus, SampleFormat::kSampleFormatS16,
+ ChannelLayout::CHANNEL_LAYOUT_STEREO, 5000 /* samples_per_second */,
+ EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ EXPECT_EQ(GetDemuxerStreamAudioMemoryLimit(&audio_config_opus),
+ internal::kDemuxerStreamAudioMemoryLimitLow);
+
+ AudioDecoderConfig audio_config_ac3(
+ AudioCodec::kCodecAC3, SampleFormat::kSampleFormatS16,
+ ChannelLayout::CHANNEL_LAYOUT_STEREO, 5000 /* samples_per_second */,
+ EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ EXPECT_EQ(GetDemuxerStreamAudioMemoryLimit(&audio_config_ac3),
+ internal::kDemuxerStreamAudioMemoryLimitMedium);
+
+ AudioDecoderConfig audio_config_aac_1(
+ AudioCodec::kCodecAAC, SampleFormat::kSampleFormatS16,
+ ChannelLayout::CHANNEL_LAYOUT_5_0, 5000 /* samples_per_second */,
+ EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ EXPECT_EQ(GetDemuxerStreamAudioMemoryLimit(&audio_config_aac_1),
+ internal::kDemuxerStreamAudioMemoryLimitMedium);
+
+ AudioDecoderConfig audio_config_aac_2(
+ AudioCodec::kCodecAAC, SampleFormat::kSampleFormatS16,
+ ChannelLayout::CHANNEL_LAYOUT_STEREO, 5000 /* samples_per_second */,
+ EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ EXPECT_EQ(GetDemuxerStreamAudioMemoryLimit(&audio_config_aac_2),
+ internal::kDemuxerStreamAudioMemoryLimitLow);
+}
+
+TEST(DemuxerMemoryLimitCastTest, GetDemuxerStreamVideoMemoryLimit) {
+ EXPECT_EQ(GetDemuxerStreamVideoMemoryLimit(
+ Demuxer::DemuxerTypes::kFFmpegDemuxer, nullptr),
+ internal::kDemuxerStreamVideoMemoryLimitDefault);
+ EXPECT_EQ(GetDemuxerStreamVideoMemoryLimit(
+ Demuxer::DemuxerTypes::kChunkDemuxer, nullptr),
+ internal::kDemuxerStreamVideoMemoryLimitLow);
+ EXPECT_EQ(GetDemuxerStreamVideoMemoryLimit(
+ Demuxer::DemuxerTypes::kMediaUrlDemuxer, nullptr),
+ internal::kDemuxerStreamVideoMemoryLimitLow);
+
+ VideoDecoderConfig video_config(
+ kCodecVP8, VIDEO_CODEC_PROFILE_UNKNOWN,
+ VideoDecoderConfig::AlphaMode::kIsOpaque, VideoColorSpace(),
+ kNoTransformation, kCodedSize, kVisibleRect, kNaturalSize,
+ EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ EXPECT_EQ(GetDemuxerStreamVideoMemoryLimit(
+ Demuxer::DemuxerTypes::kFFmpegDemuxer, &video_config),
+ internal::kDemuxerStreamVideoMemoryLimitDefault);
+ EXPECT_EQ(GetDemuxerStreamVideoMemoryLimit(
+ Demuxer::DemuxerTypes::kChunkDemuxer, &video_config),
+ internal::kDemuxerStreamVideoMemoryLimitLow);
+ EXPECT_EQ(GetDemuxerStreamVideoMemoryLimit(
+ Demuxer::DemuxerTypes::kMediaUrlDemuxer, &video_config),
+ internal::kDemuxerStreamVideoMemoryLimitLow);
+
+ video_config.Initialize(kCodecVP9, VIDEO_CODEC_PROFILE_UNKNOWN,
+ VideoDecoderConfig::AlphaMode::kIsOpaque,
+ VideoColorSpace(), kNoTransformation, kCodedSize,
+ kVisibleRect, kNaturalSize, EmptyExtraData(),
+ EncryptionScheme::kUnencrypted);
+ EXPECT_EQ(GetDemuxerStreamVideoMemoryLimit(
+ Demuxer::DemuxerTypes::kFFmpegDemuxer, &video_config),
+ internal::kDemuxerStreamVideoMemoryLimitDefault);
+ EXPECT_EQ(GetDemuxerStreamVideoMemoryLimit(
+ Demuxer::DemuxerTypes::kChunkDemuxer, &video_config),
+ internal::kDemuxerStreamVideoMemoryLimitMedium);
+ EXPECT_EQ(GetDemuxerStreamVideoMemoryLimit(
+ Demuxer::DemuxerTypes::kMediaUrlDemuxer, &video_config),
+ internal::kDemuxerStreamVideoMemoryLimitLow);
+}
+
+TEST(DemuxerMemoryLimitCastTest, GetDemuxerMemoryLimit) {
+ EXPECT_EQ(GetDemuxerMemoryLimit(Demuxer::DemuxerTypes::kFFmpegDemuxer),
+ internal::kDemuxerStreamAudioMemoryLimitLow +
+ internal::kDemuxerStreamVideoMemoryLimitDefault);
+ EXPECT_EQ(GetDemuxerMemoryLimit(Demuxer::DemuxerTypes::kChunkDemuxer),
+ internal::kDemuxerStreamAudioMemoryLimitLow +
+ internal::kDemuxerStreamVideoMemoryLimitLow);
+ EXPECT_EQ(GetDemuxerMemoryLimit(Demuxer::DemuxerTypes::kMediaUrlDemuxer),
+ internal::kDemuxerStreamAudioMemoryLimitLow +
+ internal::kDemuxerStreamVideoMemoryLimitLow);
+}
+
+} // namespace media
diff --git a/chromium/media/base/demuxer_memory_limit_default.cc b/chromium/media/base/demuxer_memory_limit_default.cc
index 4fb4551e60b..896d534098a 100644
--- a/chromium/media/base/demuxer_memory_limit_default.cc
+++ b/chromium/media/base/demuxer_memory_limit_default.cc
@@ -6,17 +6,20 @@
namespace media {
-size_t GetDemuxerStreamAudioMemoryLimit() {
+size_t GetDemuxerStreamAudioMemoryLimit(
+ const AudioDecoderConfig* /*audio_config*/) {
return internal::kDemuxerStreamAudioMemoryLimitDefault;
}
-size_t GetDemuxerStreamVideoMemoryLimit() {
+size_t GetDemuxerStreamVideoMemoryLimit(
+ Demuxer::DemuxerTypes /*demuxer_type*/,
+ const VideoDecoderConfig* /*video_config*/) {
return internal::kDemuxerStreamVideoMemoryLimitDefault;
}
-size_t GetDemuxerMemoryLimit() {
- return GetDemuxerStreamAudioMemoryLimit() +
- GetDemuxerStreamVideoMemoryLimit();
+size_t GetDemuxerMemoryLimit(Demuxer::DemuxerTypes demuxer_type) {
+ return GetDemuxerStreamAudioMemoryLimit(nullptr) +
+ GetDemuxerStreamVideoMemoryLimit(demuxer_type, nullptr);
}
} // namespace media
diff --git a/chromium/media/base/demuxer_memory_limit_low.cc b/chromium/media/base/demuxer_memory_limit_low.cc
index 054fafe2081..71dc442d42a 100644
--- a/chromium/media/base/demuxer_memory_limit_low.cc
+++ b/chromium/media/base/demuxer_memory_limit_low.cc
@@ -8,17 +8,20 @@
namespace media {
-size_t GetDemuxerStreamAudioMemoryLimit() {
+size_t GetDemuxerStreamAudioMemoryLimit(
+ const AudioDecoderConfig* /*audio_config*/) {
return internal::kDemuxerStreamAudioMemoryLimitLow;
}
-size_t GetDemuxerStreamVideoMemoryLimit() {
+size_t GetDemuxerStreamVideoMemoryLimit(
+ Demuxer::DemuxerTypes /*demuxer_type*/,
+ const VideoDecoderConfig* /*video_config*/) {
return internal::kDemuxerStreamVideoMemoryLimitLow;
}
-size_t GetDemuxerMemoryLimit() {
- return GetDemuxerStreamAudioMemoryLimit() +
- GetDemuxerStreamVideoMemoryLimit();
+size_t GetDemuxerMemoryLimit(Demuxer::DemuxerTypes demuxer_type) {
+ return GetDemuxerStreamAudioMemoryLimit(nullptr) +
+ GetDemuxerStreamVideoMemoryLimit(demuxer_type, nullptr);
}
} // namespace media
diff --git a/chromium/media/base/fake_audio_worker.cc b/chromium/media/base/fake_audio_worker.cc
index bcaf73ebef5..2d0bbefd537 100644
--- a/chromium/media/base/fake_audio_worker.cc
+++ b/chromium/media/base/fake_audio_worker.cc
@@ -7,7 +7,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/cancelable_callback.h"
#include "base/check_op.h"
#include "base/location.h"
diff --git a/chromium/media/base/fake_single_thread_task_runner.h b/chromium/media/base/fake_single_thread_task_runner.h
index a56ac6f2b1f..68c8e45f62c 100644
--- a/chromium/media/base/fake_single_thread_task_runner.h
+++ b/chromium/media/base/fake_single_thread_task_runner.h
@@ -14,7 +14,7 @@
namespace media {
-class FakeSingleThreadTaskRunner : public base::SingleThreadTaskRunner {
+class FakeSingleThreadTaskRunner final : public base::SingleThreadTaskRunner {
public:
explicit FakeSingleThreadTaskRunner(base::SimpleTestTickClock* clock);
diff --git a/chromium/media/base/fallback_video_decoder.cc b/chromium/media/base/fallback_video_decoder.cc
index a4796c2ef4e..a10ac08bec8 100644
--- a/chromium/media/base/fallback_video_decoder.cc
+++ b/chromium/media/base/fallback_video_decoder.cc
@@ -5,7 +5,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/threading/sequenced_task_runner_handle.h"
#include "media/base/decoder_buffer.h"
#include "media/base/fallback_video_decoder.h"
diff --git a/chromium/media/base/fallback_video_decoder_unittest.cc b/chromium/media/base/fallback_video_decoder_unittest.cc
index ecf80f36b23..e996eb5b50c 100644
--- a/chromium/media/base/fallback_video_decoder_unittest.cc
+++ b/chromium/media/base/fallback_video_decoder_unittest.cc
@@ -5,7 +5,7 @@
#include <tuple>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/run_loop.h"
#include "base/test/gmock_callback_support.h"
#include "base/test/task_environment.h"
diff --git a/chromium/media/base/ipc/media_param_traits.cc b/chromium/media/base/ipc/media_param_traits.cc
index 46c977a708f..271754aff43 100644
--- a/chromium/media/base/ipc/media_param_traits.cc
+++ b/chromium/media/base/ipc/media_param_traits.cc
@@ -13,7 +13,6 @@
#include "media/base/limits.h"
#include "ui/gfx/ipc/geometry/gfx_param_traits.h"
#include "ui/gfx/ipc/gfx_param_traits.h"
-#include "ui/gfx/ipc/skia/gfx_skia_param_traits.h"
using media::AudioParameters;
using media::AudioLatency;
diff --git a/chromium/media/base/ipc/media_param_traits_macros.h b/chromium/media/base/ipc/media_param_traits_macros.h
index 9379933883f..ecebc25c85b 100644
--- a/chromium/media/base/ipc/media_param_traits_macros.h
+++ b/chromium/media/base/ipc/media_param_traits_macros.h
@@ -38,8 +38,8 @@
#include "media/base/watch_time_keys.h"
#include "media/media_buildflags.h"
#include "media/video/supported_video_decoder_config.h"
+#include "ui/gfx/hdr_metadata.h"
#include "ui/gfx/ipc/color/gfx_param_traits_macros.h"
-#include "ui/gl/hdr_metadata.h"
#if BUILDFLAG(ENABLE_MEDIA_DRM_STORAGE)
#include "media/base/media_drm_key_type.h"
@@ -183,7 +183,7 @@ IPC_STRUCT_TRAITS_BEGIN(media::VideoColorSpace)
IPC_STRUCT_TRAITS_MEMBER(range)
IPC_STRUCT_TRAITS_END()
-IPC_STRUCT_TRAITS_BEGIN(gl::MasteringMetadata)
+IPC_STRUCT_TRAITS_BEGIN(gfx::MasteringMetadata)
IPC_STRUCT_TRAITS_MEMBER(primary_r)
IPC_STRUCT_TRAITS_MEMBER(primary_g)
IPC_STRUCT_TRAITS_MEMBER(primary_b)
@@ -192,7 +192,7 @@ IPC_STRUCT_TRAITS_BEGIN(gl::MasteringMetadata)
IPC_STRUCT_TRAITS_MEMBER(luminance_min)
IPC_STRUCT_TRAITS_END()
-IPC_STRUCT_TRAITS_BEGIN(gl::HDRMetadata)
+IPC_STRUCT_TRAITS_BEGIN(gfx::HDRMetadata)
IPC_STRUCT_TRAITS_MEMBER(mastering_metadata)
IPC_STRUCT_TRAITS_MEMBER(max_content_light_level)
IPC_STRUCT_TRAITS_MEMBER(max_frame_average_light_level)
diff --git a/chromium/media/base/keyboard_event_counter.cc b/chromium/media/base/keyboard_event_counter.cc
deleted file mode 100644
index 6c17f676e8c..00000000000
--- a/chromium/media/base/keyboard_event_counter.cc
+++ /dev/null
@@ -1,33 +0,0 @@
-// Copyright 2013 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/base/keyboard_event_counter.h"
-
-#include "base/check_op.h"
-
-namespace media {
-
-KeyboardEventCounter::KeyboardEventCounter() : total_key_presses_(0) {}
-
-KeyboardEventCounter::~KeyboardEventCounter() = default;
-
-void KeyboardEventCounter::OnKeyboardEvent(ui::EventType event,
- ui::KeyboardCode key_code) {
- // Updates the pressed keys and the total count of key presses.
- if (event == ui::ET_KEY_PRESSED) {
- if (pressed_keys_.find(key_code) != pressed_keys_.end())
- return;
- pressed_keys_.insert(key_code);
- ++total_key_presses_;
- } else {
- DCHECK_EQ(ui::ET_KEY_RELEASED, event);
- pressed_keys_.erase(key_code);
- }
-}
-
-uint32_t KeyboardEventCounter::GetKeyPressCount() const {
- return total_key_presses_.load();
-}
-
-} // namespace media
diff --git a/chromium/media/base/keyboard_event_counter.h b/chromium/media/base/keyboard_event_counter.h
deleted file mode 100644
index d9f4e56d24f..00000000000
--- a/chromium/media/base/keyboard_event_counter.h
+++ /dev/null
@@ -1,48 +0,0 @@
-// Copyright 2013 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_BASE_KEYBOARD_EVENT_COUNTER_H_
-#define MEDIA_BASE_KEYBOARD_EVENT_COUNTER_H_
-
-#include <stddef.h>
-
-#include <atomic>
-#include <set>
-
-#include "base/macros.h"
-#include "media/base/media_export.h"
-#include "ui/events/keycodes/keyboard_codes.h"
-#include "ui/events/types/event_type.h"
-
-namespace media {
-
-// This class tracks the total number of keypresses based on the OnKeyboardEvent
-// calls it receives from the client.
-// Multiple key down events for the same key are counted as one keypress until
-// the same key is released.
-class MEDIA_EXPORT KeyboardEventCounter {
- public:
- KeyboardEventCounter();
- ~KeyboardEventCounter();
-
- // Returns the total number of keypresses since its creation or last Reset()
- // call. Can be called on any thread.
- uint32_t GetKeyPressCount() const;
-
- // The client should call this method on key down or key up events.
- // Must be called on a single thread.
- void OnKeyboardEvent(ui::EventType event, ui::KeyboardCode key_code);
-
- private:
- // The set of keys currently held down.
- std::set<ui::KeyboardCode> pressed_keys_;
-
- std::atomic<uint32_t> total_key_presses_;
-
- DISALLOW_COPY_AND_ASSIGN(KeyboardEventCounter);
-};
-
-} // namespace media
-
-#endif // MEDIA_BASE_KEYBOARD_EVENT_COUNTER_H_
diff --git a/chromium/media/base/keyboard_event_counter_unittest.cc b/chromium/media/base/keyboard_event_counter_unittest.cc
deleted file mode 100644
index 7d104dc76c8..00000000000
--- a/chromium/media/base/keyboard_event_counter_unittest.cc
+++ /dev/null
@@ -1,38 +0,0 @@
-// Copyright 2016 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/base/keyboard_event_counter.h"
-
-#include <memory>
-
-#include "base/run_loop.h"
-#include "build/build_config.h"
-#include "testing/gmock/include/gmock/gmock.h"
-#include "testing/gtest/include/gtest/gtest.h"
-#include "third_party/skia/include/core/SkPoint.h"
-
-namespace media {
-
-TEST(KeyboardEventCounterTest, KeyPressCounter) {
- KeyboardEventCounter counter;
-
- EXPECT_EQ(0u, counter.GetKeyPressCount());
-
- counter.OnKeyboardEvent(ui::ET_KEY_PRESSED, ui::VKEY_0);
- EXPECT_EQ(1u, counter.GetKeyPressCount());
-
- // Holding the same key without releasing it does not increase the count.
- counter.OnKeyboardEvent(ui::ET_KEY_PRESSED, ui::VKEY_0);
- EXPECT_EQ(1u, counter.GetKeyPressCount());
-
- // Releasing the key does not affect the total count.
- counter.OnKeyboardEvent(ui::ET_KEY_RELEASED, ui::VKEY_0);
- EXPECT_EQ(1u, counter.GetKeyPressCount());
-
- counter.OnKeyboardEvent(ui::ET_KEY_PRESSED, ui::VKEY_0);
- counter.OnKeyboardEvent(ui::ET_KEY_RELEASED, ui::VKEY_0);
- EXPECT_EQ(2u, counter.GetKeyPressCount());
-}
-
-} // namespace media
diff --git a/chromium/media/base/logging_override_if_enabled.h b/chromium/media/base/logging_override_if_enabled.h
index 302a5f0ea20..ee274e79f9f 100644
--- a/chromium/media/base/logging_override_if_enabled.h
+++ b/chromium/media/base/logging_override_if_enabled.h
@@ -15,8 +15,17 @@
#if !defined(DVLOG)
#error This file must be included after base/logging.h.
#endif
+
+#define __DVLOG_0 LOG(INFO)
+#define __DVLOG_1 LOG(INFO)
+#define __DVLOG_2 LOG(INFO)
+#define __DVLOG_3 EAT_STREAM_PARAMETERS
+#define __DVLOG_4 EAT_STREAM_PARAMETERS
+#define __DVLOG_5 EAT_STREAM_PARAMETERS
+
#undef DVLOG
-#define DVLOG(verboselevel) LOG(INFO)
+#define DVLOG(verboselevel) __DVLOG_##verboselevel
+
#endif // BUILDFLAG(ENABLE_LOGGING_OVERRIDE)
#endif // MEDIA_BASE_LOGGING_OVERRIDE_IF_ENABLED_H_
diff --git a/chromium/media/base/mac/BUILD.gn b/chromium/media/base/mac/BUILD.gn
index 7b16b9833cd..9dd95bbc171 100644
--- a/chromium/media/base/mac/BUILD.gn
+++ b/chromium/media/base/mac/BUILD.gn
@@ -2,13 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-# This file depends on the legacy global sources assignment filter. It should
-# be converted to check target platform before assigning source files to the
-# sources variable. Remove this import and set_sources_assignment_filter call
-# when the file has been converted. See https://crbug.com/1018739 for details.
-import("//build/config/deprecated_default_sources_assignment_filter.gni")
-set_sources_assignment_filter(deprecated_default_sources_assignment_filter)
-
assert(is_apple)
source_set("mac") {
@@ -18,8 +11,9 @@ source_set("mac") {
# run-time.
visibility = [ "//media" ]
- set_sources_assignment_filter([])
sources = [
+ "color_space_util_mac.h",
+ "color_space_util_mac.mm",
"video_frame_mac.cc",
"video_frame_mac.h",
"videotoolbox_helpers.cc",
@@ -33,7 +27,6 @@ source_set("mac") {
"VideoToolbox.framework",
]
}
- set_sources_assignment_filter(deprecated_default_sources_assignment_filter)
configs += [ "//media:subcomponent_config" ]
deps = [
diff --git a/chromium/media/base/mac/color_space_util_mac.h b/chromium/media/base/mac/color_space_util_mac.h
new file mode 100644
index 00000000000..938b7fd46ab
--- /dev/null
+++ b/chromium/media/base/mac/color_space_util_mac.h
@@ -0,0 +1,24 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_MAC_COLOR_SPACE_UTIL_MAC_H_
+#define MEDIA_BASE_MAC_COLOR_SPACE_UTIL_MAC_H_
+
+#include <CoreMedia/CoreMedia.h>
+#include <CoreVideo/CoreVideo.h>
+
+#include "media/base/media_export.h"
+#include "ui/gfx/color_space.h"
+
+namespace media {
+
+MEDIA_EXPORT gfx::ColorSpace GetImageBufferColorSpace(
+ CVImageBufferRef image_buffer);
+
+MEDIA_EXPORT gfx::ColorSpace GetFormatDescriptionColorSpace(
+ CMFormatDescriptionRef format_description) API_AVAILABLE(macos(10.11));
+
+} // namespace media
+
+#endif // MEDIA_BASE_MAC_COLOR_SPACE_UTIL_MAC_H_
diff --git a/chromium/media/base/mac/color_space_util_mac.mm b/chromium/media/base/mac/color_space_util_mac.mm
new file mode 100644
index 00000000000..8e76a7ef56d
--- /dev/null
+++ b/chromium/media/base/mac/color_space_util_mac.mm
@@ -0,0 +1,285 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/mac/color_space_util_mac.h"
+
+#include <vector>
+
+#include "base/mac/foundation_util.h"
+#include "base/no_destructor.h"
+
+namespace media {
+
+namespace {
+
+// Read the value for the key in |key| to CFString and convert it to IdType.
+// Use the list of pairs in |cfstr_id_pairs| to do the conversion (by doing a
+// linear lookup).
+template <typename IdType, typename StringIdPair>
+bool GetImageBufferProperty(CFTypeRef value_untyped,
+ const std::vector<StringIdPair>& cfstr_id_pairs,
+ IdType* value_as_id) {
+ CFStringRef value_as_string = base::mac::CFCast<CFStringRef>(value_untyped);
+ if (!value_as_string)
+ return false;
+
+ for (const auto& p : cfstr_id_pairs) {
+ if (p.cfstr_cm)
+ DCHECK(!CFStringCompare(p.cfstr_cv, p.cfstr_cm, 0));
+ if (!CFStringCompare(value_as_string, p.cfstr_cv, 0)) {
+ *value_as_id = p.id;
+ return true;
+ }
+ }
+
+ return false;
+}
+
+gfx::ColorSpace::PrimaryID GetCoreVideoPrimary(CFTypeRef primaries_untyped) {
+ struct CVImagePrimary {
+ const CFStringRef cfstr_cv;
+ const CFStringRef cfstr_cm;
+ const gfx::ColorSpace::PrimaryID id;
+ };
+ static const base::NoDestructor<std::vector<CVImagePrimary>>
+ kSupportedPrimaries([] {
+ std::vector<CVImagePrimary> supported_primaries;
+ supported_primaries.push_back(
+ {kCVImageBufferColorPrimaries_ITU_R_709_2,
+ kCMFormatDescriptionColorPrimaries_ITU_R_709_2,
+ gfx::ColorSpace::PrimaryID::BT709});
+ supported_primaries.push_back(
+ {kCVImageBufferColorPrimaries_EBU_3213,
+ kCMFormatDescriptionColorPrimaries_EBU_3213,
+ gfx::ColorSpace::PrimaryID::BT470BG});
+ supported_primaries.push_back(
+ {kCVImageBufferColorPrimaries_SMPTE_C,
+ kCMFormatDescriptionColorPrimaries_SMPTE_C,
+
+ gfx::ColorSpace::PrimaryID::SMPTE240M});
+ if (@available(macos 10.11, *)) {
+ supported_primaries.push_back(
+ {kCVImageBufferColorPrimaries_ITU_R_2020,
+ kCMFormatDescriptionColorPrimaries_ITU_R_2020,
+ gfx::ColorSpace::PrimaryID::BT2020});
+ }
+ return supported_primaries;
+ }());
+
+ // The named primaries. Default to BT709.
+ auto primary_id = gfx::ColorSpace::PrimaryID::BT709;
+ if (!GetImageBufferProperty(primaries_untyped, *kSupportedPrimaries,
+ &primary_id)) {
+ DLOG(ERROR) << "Failed to find CVImageBufferRef primaries.";
+ }
+ return primary_id;
+}
+
+gfx::ColorSpace::TransferID GetCoreVideoTransferFn(CFTypeRef transfer_untyped,
+ CFTypeRef gamma_untyped,
+ double* gamma) {
+ struct CVImageTransferFn {
+ const CFStringRef cfstr_cv;
+ const CFStringRef cfstr_cm;
+ const gfx::ColorSpace::TransferID id;
+ };
+ static const base::NoDestructor<std::vector<CVImageTransferFn>>
+ kSupportedTransferFuncs([] {
+ std::vector<CVImageTransferFn> supported_transfer_funcs;
+ // The constants kCMFormatDescriptionTransferFunction_ITU_R_709_2,
+ // SMPTE_240M_1995, and UseGamma will compile against macOS 10.10
+ // because they are #defined to their kCVImageBufferTransferFunction
+ // equivalents. They are technically not present until macOS 10.11.
+ supported_transfer_funcs.push_back(
+ {kCVImageBufferTransferFunction_ITU_R_709_2,
+ kCMFormatDescriptionTransferFunction_ITU_R_709_2,
+ gfx::ColorSpace::TransferID::BT709_APPLE});
+ supported_transfer_funcs.push_back(
+ {kCVImageBufferTransferFunction_SMPTE_240M_1995,
+ kCMFormatDescriptionTransferFunction_SMPTE_240M_1995,
+ gfx::ColorSpace::TransferID::SMPTE240M});
+ supported_transfer_funcs.push_back(
+ {kCVImageBufferTransferFunction_UseGamma,
+ kCMFormatDescriptionTransferFunction_UseGamma,
+ gfx::ColorSpace::TransferID::CUSTOM});
+ if (@available(macos 10.11, *)) {
+ supported_transfer_funcs.push_back(
+ {kCVImageBufferTransferFunction_ITU_R_2020,
+ kCMFormatDescriptionTransferFunction_ITU_R_2020,
+ gfx::ColorSpace::TransferID::BT2020_10});
+ }
+ if (@available(macos 10.12, *)) {
+ supported_transfer_funcs.push_back(
+ {kCVImageBufferTransferFunction_SMPTE_ST_428_1,
+ kCMFormatDescriptionTransferFunction_SMPTE_ST_428_1,
+ gfx::ColorSpace::TransferID::SMPTEST428_1});
+ }
+ if (@available(macos 10.13, *)) {
+ supported_transfer_funcs.push_back(
+ {kCVImageBufferTransferFunction_SMPTE_ST_2084_PQ,
+ kCMFormatDescriptionTransferFunction_SMPTE_ST_2084_PQ,
+ gfx::ColorSpace::TransferID::SMPTEST2084});
+ supported_transfer_funcs.push_back(
+ {kCVImageBufferTransferFunction_ITU_R_2100_HLG,
+ kCMFormatDescriptionTransferFunction_ITU_R_2100_HLG,
+ gfx::ColorSpace::TransferID::ARIB_STD_B67});
+ supported_transfer_funcs.push_back(
+ {kCVImageBufferTransferFunction_sRGB, nullptr,
+ gfx::ColorSpace::TransferID::IEC61966_2_1});
+ }
+ if (@available(macos 10.14, *)) {
+ supported_transfer_funcs.push_back(
+ {kCVImageBufferTransferFunction_Linear,
+ kCMFormatDescriptionTransferFunction_Linear,
+ gfx::ColorSpace::TransferID::LINEAR});
+ }
+ if (@available(macos 10.15, *)) {
+ supported_transfer_funcs.push_back(
+ {kCVImageBufferTransferFunction_sRGB,
+ kCMFormatDescriptionTransferFunction_sRGB,
+ gfx::ColorSpace::TransferID::IEC61966_2_1});
+ }
+
+ return supported_transfer_funcs;
+ }());
+
+ // The named transfer function.
+ auto transfer_id = gfx::ColorSpace::TransferID::BT709;
+ if (!GetImageBufferProperty(transfer_untyped, *kSupportedTransferFuncs,
+ &transfer_id)) {
+ DLOG(ERROR) << "Failed to find CVImageBufferRef transfer.";
+ }
+
+ if (transfer_id != gfx::ColorSpace::TransferID::CUSTOM)
+ return transfer_id;
+
+ // If we fail to retrieve the gamma parameter, fall back to BT709.
+ constexpr auto kDefaultTransferFn = gfx::ColorSpace::TransferID::BT709;
+ CFNumberRef gamma_number = base::mac::CFCast<CFNumberRef>(gamma_untyped);
+ if (!gamma_number) {
+ DLOG(ERROR) << "Failed to get gamma level.";
+ return kDefaultTransferFn;
+ }
+
+ // CGFloat is a double on 64-bit systems.
+ CGFloat gamma_double = 0;
+ if (!CFNumberGetValue(gamma_number, kCFNumberCGFloatType, &gamma_double)) {
+ DLOG(ERROR) << "Failed to get CVImageBufferRef gamma level as float.";
+ return kDefaultTransferFn;
+ }
+
+ if (gamma_double == 2.2)
+ return gfx::ColorSpace::TransferID::GAMMA22;
+ if (gamma_double == 2.8)
+ return gfx::ColorSpace::TransferID::GAMMA28;
+
+ *gamma = gamma_double;
+ return transfer_id;
+}
+
+gfx::ColorSpace::MatrixID GetCoreVideoMatrix(CFTypeRef matrix_untyped) {
+ struct CVImageMatrix {
+ const CFStringRef cfstr_cv;
+ const CFStringRef cfstr_cm;
+ gfx::ColorSpace::MatrixID id;
+ };
+ static const base::NoDestructor<std::vector<CVImageMatrix>>
+ kSupportedMatrices([] {
+ std::vector<CVImageMatrix> supported_matrices;
+ supported_matrices.push_back(
+ {kCVImageBufferYCbCrMatrix_ITU_R_709_2,
+ kCMFormatDescriptionYCbCrMatrix_ITU_R_709_2,
+ gfx::ColorSpace::MatrixID::BT709});
+ supported_matrices.push_back(
+ {kCVImageBufferYCbCrMatrix_ITU_R_601_4,
+ kCMFormatDescriptionYCbCrMatrix_ITU_R_601_4,
+ gfx::ColorSpace::MatrixID::SMPTE170M});
+ supported_matrices.push_back(
+ {kCVImageBufferYCbCrMatrix_SMPTE_240M_1995,
+ kCMFormatDescriptionYCbCrMatrix_SMPTE_240M_1995,
+ gfx::ColorSpace::MatrixID::SMPTE240M});
+ if (@available(macos 10.11, *)) {
+ supported_matrices.push_back(
+ {kCVImageBufferYCbCrMatrix_ITU_R_2020,
+ kCMFormatDescriptionYCbCrMatrix_ITU_R_2020,
+ gfx::ColorSpace::MatrixID::BT2020_NCL});
+ }
+ return supported_matrices;
+ }());
+
+ auto matrix_id = gfx::ColorSpace::MatrixID::INVALID;
+ if (!GetImageBufferProperty(matrix_untyped, *kSupportedMatrices,
+ &matrix_id)) {
+ DLOG(ERROR) << "Failed to find CVImageBufferRef YUV matrix.";
+ }
+ return matrix_id;
+}
+
+gfx::ColorSpace GetCoreVideoColorSpaceInternal(CFTypeRef primaries_untyped,
+ CFTypeRef transfer_untyped,
+ CFTypeRef gamma_untyped,
+ CFTypeRef matrix_untyped) {
+ double gamma;
+ auto primary_id = GetCoreVideoPrimary(primaries_untyped);
+ auto matrix_id = GetCoreVideoMatrix(matrix_untyped);
+ auto transfer_id =
+ GetCoreVideoTransferFn(transfer_untyped, gamma_untyped, &gamma);
+
+ // Use a matrix id that is coherent with a primary id. Useful when we fail to
+ // parse the matrix. Previously it was always defaulting to MatrixID::BT709
+ // See http://crbug.com/788236.
+ if (matrix_id == gfx::ColorSpace::MatrixID::INVALID) {
+ if (primary_id == gfx::ColorSpace::PrimaryID::BT470BG)
+ matrix_id = gfx::ColorSpace::MatrixID::BT470BG;
+ else
+ matrix_id = gfx::ColorSpace::MatrixID::BT709;
+ }
+
+ // It is specified to the decoder to use luma=[16,235] chroma=[16,240] via
+ // the kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange.
+ //
+ // TODO(crbug.com/1103432): We'll probably need support for more than limited
+ // range content if we want this to be used for more than video sites.
+ auto range_id = gfx::ColorSpace::RangeID::LIMITED;
+
+ if (transfer_id == gfx::ColorSpace::TransferID::CUSTOM) {
+ // Transfer functions can also be specified as a gamma value.
+ skcms_TransferFunction custom_tr_fn = {2.2f, 1, 0, 1, 0, 0, 0};
+ if (transfer_id == gfx::ColorSpace::TransferID::CUSTOM)
+ custom_tr_fn.g = gamma;
+
+ return gfx::ColorSpace(primary_id, gfx::ColorSpace::TransferID::CUSTOM,
+ matrix_id, range_id, nullptr, &custom_tr_fn);
+ }
+
+ return gfx::ColorSpace(primary_id, transfer_id, matrix_id, range_id);
+}
+
+} // anonymous namespace
+
+gfx::ColorSpace GetImageBufferColorSpace(CVImageBufferRef image_buffer) {
+ return GetCoreVideoColorSpaceInternal(
+ CVBufferGetAttachment(image_buffer, kCVImageBufferColorPrimariesKey,
+ nullptr),
+ CVBufferGetAttachment(image_buffer, kCVImageBufferTransferFunctionKey,
+ nullptr),
+ CVBufferGetAttachment(image_buffer, kCVImageBufferGammaLevelKey, nullptr),
+ CVBufferGetAttachment(image_buffer, kCVImageBufferYCbCrMatrixKey,
+ nullptr));
+}
+
+gfx::ColorSpace GetFormatDescriptionColorSpace(
+ CMFormatDescriptionRef format_description) {
+ return GetCoreVideoColorSpaceInternal(
+ CMFormatDescriptionGetExtension(
+ format_description, kCMFormatDescriptionExtension_ColorPrimaries),
+ CMFormatDescriptionGetExtension(
+ format_description, kCMFormatDescriptionExtension_TransferFunction),
+ CMFormatDescriptionGetExtension(format_description,
+ kCMFormatDescriptionExtension_GammaLevel),
+ CMFormatDescriptionGetExtension(
+ format_description, kCMFormatDescriptionExtension_YCbCrMatrix));
+}
+
+} // namespace media
diff --git a/chromium/media/base/mac/video_frame_mac.cc b/chromium/media/base/mac/video_frame_mac.cc
index 458f0274bdb..ced632c600e 100644
--- a/chromium/media/base/mac/video_frame_mac.cc
+++ b/chromium/media/base/mac/video_frame_mac.cc
@@ -48,8 +48,7 @@ WrapVideoFrameInCVPixelBuffer(const VideoFrame& frame) {
gfx::GpuMemoryBufferHandle handle =
frame.GetGpuMemoryBuffer()->CloneHandle();
if (handle.type == gfx::GpuMemoryBufferType::IO_SURFACE_BUFFER) {
- base::ScopedCFTypeRef<IOSurfaceRef> io_surface =
- gfx::IOSurfaceMachPortToIOSurface(std::move(handle.mach_port));
+ gfx::ScopedIOSurface io_surface = handle.io_surface;
if (io_surface) {
const CVReturn cv_return = CVPixelBufferCreateWithIOSurface(
nullptr, io_surface, nullptr, pixel_buffer.InitializeInto());
diff --git a/chromium/media/base/media_log.h b/chromium/media/base/media_log.h
index 48879f26e69..dc3ea25be66 100644
--- a/chromium/media/base/media_log.h
+++ b/chromium/media/base/media_log.h
@@ -17,6 +17,7 @@
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "base/thread_annotations.h"
+#include "build/build_config.h"
#include "media/base/buffering_state.h"
#include "media/base/media_export.h"
#include "media/base/media_log_events.h"
@@ -43,6 +44,15 @@ class MEDIA_EXPORT MediaLog {
static const char kEventKey[];
static const char kStatusText[];
+// Maximum limit for the total number of logs kept per renderer. At the time of
+// writing, 512 events of the kind: { "property": value } together consume ~88kb
+// of memory on linux.
+#if defined(OS_ANDROID)
+ static constexpr size_t kLogLimit = 128;
+#else
+ static constexpr size_t kLogLimit = 512;
+#endif
+
// Constructor is protected, see below.
virtual ~MediaLog();
diff --git a/chromium/media/base/media_serializers.h b/chromium/media/base/media_serializers.h
index 1a2a6bd5856..42f89c07122 100644
--- a/chromium/media/base/media_serializers.h
+++ b/chromium/media/base/media_serializers.h
@@ -18,7 +18,7 @@
#include "media/base/text_track_config.h"
#include "media/base/video_decoder_config.h"
#include "ui/gfx/geometry/size.h"
-#include "ui/gl/hdr_metadata.h"
+#include "ui/gfx/hdr_metadata.h"
namespace media {
@@ -212,8 +212,8 @@ struct MediaSerializer<VideoColorSpace> {
// Class (complex)
template <>
-struct MediaSerializer<gl::HDRMetadata> {
- static base::Value Serialize(const gl::HDRMetadata& value) {
+struct MediaSerializer<gfx::HDRMetadata> {
+ static base::Value Serialize(const gfx::HDRMetadata& value) {
// TODO(tmathmeyer) serialize more fields here potentially.
base::Value result(base::Value::Type::DICTIONARY);
FIELD_SERIALIZE("luminance range",
diff --git a/chromium/media/base/media_switches.cc b/chromium/media/base/media_switches.cc
index 0c20d23e273..07419d183fc 100644
--- a/chromium/media/base/media_switches.cc
+++ b/chromium/media/base/media_switches.cc
@@ -6,6 +6,7 @@
#include "base/command_line.h"
#include "build/build_config.h"
+#include "build/chromeos_buildflags.h"
#include "components/system_media_controls/linux/buildflags/buildflags.h"
namespace switches {
@@ -191,14 +192,6 @@ const char kOverrideHardwareSecureCodecsForTesting[] =
const char kEnableLiveCaptionPrefForTesting[] =
"enable-live-caption-pref-for-testing";
-#if defined(OS_CHROMEOS)
-// ChromeOS uses one of two VideoDecoder implementations based on SoC/board
-// specific configurations that are signalled via this command line flag.
-// TODO(b/159825227): remove when the "old" video decoder is fully launched.
-const char kPlatformDisallowsChromeOSDirectVideoDecoder[] =
- "platform-disallows-chromeos-direct-video-decoder";
-#endif
-
namespace autoplay {
// Autoplay policy that requires a document user activation.
@@ -326,11 +319,6 @@ const base::Feature kD3D11VideoDecoderIgnoreWorkarounds{
const base::Feature kD3D11VideoDecoderVP9Profile2{
"D3D11VideoDecoderEnableVP9Profile2", base::FEATURE_DISABLED_BY_DEFAULT};
-// Enable D3D11VideoDecoder to copy pictures based on workarounds, rather
-// than binding them.
-const base::Feature kD3D11VideoDecoderCopyPictures{
- "D3D11VideoDecoderCopyPictures", base::FEATURE_DISABLED_BY_DEFAULT};
-
// Tell D3D11VideoDecoder not to switch the D3D11 device to multi-threaded mode.
// This is to help us track down IGD crashes.
const base::Feature kD3D11VideoDecoderSkipMultithreaded{
@@ -360,8 +348,8 @@ const base::Feature kGav1VideoDecoder{"Gav1VideoDecoder",
// Show toolbar button that opens dialog for controlling media sessions.
const base::Feature kGlobalMediaControls {
"GlobalMediaControls",
-#if defined(OS_WIN) || defined(OS_MAC) || \
- (defined(OS_LINUX) && !defined(OS_CHROMEOS))
+#if defined(OS_WIN) || defined(OS_MAC) || defined(OS_LINUX) || \
+ BUILDFLAG(IS_LACROS)
base::FEATURE_ENABLED_BY_DEFAULT
#else
base::FEATURE_DISABLED_BY_DEFAULT
@@ -381,6 +369,19 @@ const base::Feature kGlobalMediaControlsForCast{
const base::Feature kGlobalMediaControlsForChromeOS{
"GlobalMediaControlsForChromeOS", base::FEATURE_DISABLED_BY_DEFAULT};
+constexpr base::FeatureParam<kCrosGlobalMediaControlsPinOptions>::Option
+ kCrosGlobalMediaControlsParamOptions[] = {
+ {kCrosGlobalMediaControlsPinOptions::kPin, "default-pinned"},
+ {kCrosGlobalMediaControlsPinOptions::kNotPin, "default-unpinned"},
+ {kCrosGlobalMediaControlsPinOptions::kHeuristic, "heuristic"}};
+
+constexpr base::FeatureParam<kCrosGlobalMediaControlsPinOptions>
+ kCrosGlobalMediaControlsPinParam(
+ &kGlobalMediaControlsForChromeOS,
+ "CrosGlobalMediaControlsPinParam",
+ kCrosGlobalMediaControlsPinOptions::kHeuristic,
+ &kCrosGlobalMediaControlsParamOptions);
+
// Allow global media controls notifications to be dragged out into overlay
// notifications. It is no-op if kGlobalMediaControls is not enabled.
const base::Feature kGlobalMediaControlsOverlayControls{
@@ -389,8 +390,8 @@ const base::Feature kGlobalMediaControlsOverlayControls{
// Show picture-in-picture button in Global Media Controls.
const base::Feature kGlobalMediaControlsPictureInPicture {
"GlobalMediaControlsPictureInPicture",
-#if defined(OS_WIN) || defined(OS_MAC) || \
- (defined(OS_LINUX) && !defined(OS_CHROMEOS))
+#if defined(OS_WIN) || defined(OS_MAC) || defined(OS_LINUX) || \
+ BUILDFLAG(IS_LACROS)
base::FEATURE_ENABLED_BY_DEFAULT
#else
base::FEATURE_DISABLED_BY_DEFAULT
@@ -427,6 +428,10 @@ const base::Feature kUseR16Texture{"use-r16-texture",
const base::Feature kUnifiedAutoplay{"UnifiedAutoplay",
base::FEATURE_ENABLED_BY_DEFAULT};
+// Enable VA-API hardware decode acceleration for AV1.
+const base::Feature kVaapiAV1Decoder{"VaapiAV1Decoder",
+ base::FEATURE_DISABLED_BY_DEFAULT};
+
// Enable VA-API hardware low power encoder for all codecs on intel Gen9x gpu.
const base::Feature kVaapiLowPowerEncoderGen9x{
"VaapiLowPowerEncoderGen9x", base::FEATURE_DISABLED_BY_DEFAULT};
@@ -439,11 +444,11 @@ const base::Feature kVaapiVP8Encoder{"VaapiVP8Encoder",
const base::Feature kVaapiVP9Encoder{"VaapiVP9Encoder",
base::FEATURE_ENABLED_BY_DEFAULT};
-#if defined(ARCH_CPU_X86_FAMILY) && defined(OS_CHROMEOS)
+#if defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_ASH)
// Enable VP9 k-SVC decoding with HW decoder for webrtc use case on ChromeOS.
const base::Feature kVp9kSVCHWDecoding{"Vp9kSVCHWDecoding",
base::FEATURE_ENABLED_BY_DEFAULT};
-#endif // defined(ARCH_CPU_X86_FAMILY) && defined(OS_CHROMEOS)
+#endif // defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_ASH)
// Inform video blitter of video color space.
const base::Feature kVideoBlitColorAccuracy{"video-blit-color-accuracy",
@@ -497,7 +502,7 @@ const base::Feature kWidevineAv1ForceSupportForTesting{
// Enables handling of hardware media keys for controlling media.
const base::Feature kHardwareMediaKeyHandling {
"HardwareMediaKeyHandling",
-#if defined(OS_CHROMEOS) || defined(OS_WIN) || defined(OS_MAC) || \
+#if BUILDFLAG(IS_ASH) || defined(OS_WIN) || defined(OS_MAC) || \
BUILDFLAG(USE_MPRIS)
base::FEATURE_ENABLED_BY_DEFAULT
#else
@@ -603,13 +608,9 @@ const base::Feature kUseAudioLatencyFromHAL{"UseAudioLatencyFromHAL",
// the GPU main thread during VideoFrame construction.
const base::Feature kUsePooledSharedImageVideoProvider{
"UsePooledSharedImageVideoProvider", base::FEATURE_ENABLED_BY_DEFAULT};
-
-// Used to enable/disable zero copy video path on webview for MCVD.
-const base::Feature kWebViewZeroCopyVideo{"WebViewZeroCopyVideo",
- base::FEATURE_DISABLED_BY_DEFAULT};
#endif // defined(OS_ANDROID)
-#if defined(OS_CHROMEOS) && BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
+#if BUILDFLAG(IS_ASH) && BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
// Enable the hardware-accelerated direct video decoder instead of the one
// needing the VdaVideoDecoder adapter. This flag is used mainly as a
// chrome:flag for developers debugging issues. TODO(b/159825227): remove when
@@ -624,7 +625,7 @@ const base::Feature kUseChromeOSDirectVideoDecoder{
const base::Feature kUseAlternateVideoDecoderImplementation{
"UseAlternateVideoDecoderImplementation",
base::FEATURE_DISABLED_BY_DEFAULT};
-#endif // defined(OS_CHROMEOS) && BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
+#endif // BUILDFLAG(IS_ASH) && BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
#if defined(OS_WIN)
// Does NV12->NV12 video copy on the main thread right before the texture's
@@ -675,7 +676,7 @@ const base::Feature MEDIA_EXPORT kAVFoundationCaptureV2{
// Controls whether or not the V2 capturer exports IOSurfaces for zero-copy.
// This feature only has any effect if kAVFoundationCaptureV2 is also enabled.
const base::Feature MEDIA_EXPORT kAVFoundationCaptureV2ZeroCopy{
- "AVFoundationCaptureV2ZeroCopy", base::FEATURE_DISABLED_BY_DEFAULT};
+ "AVFoundationCaptureV2ZeroCopy", base::FEATURE_ENABLED_BY_DEFAULT};
const base::Feature MEDIA_EXPORT kVideoToolboxVp9Decoding{
"VideoToolboxVp9Decoding", base::FEATURE_DISABLED_BY_DEFAULT};
@@ -740,10 +741,6 @@ const base::Feature kMediaFeedsBackgroundFetching{
const base::Feature kMediaFeedsSafeSearch{"MediaFeedsSafeSearch",
base::FEATURE_ENABLED_BY_DEFAULT};
-// Send events to devtools rather than to chrome://media-internals
-const base::Feature kMediaInspectorLogging{"MediaInspectorLogging",
- base::FEATURE_ENABLED_BY_DEFAULT};
-
// Enables experimental local learning for media. Used in the context of media
// capabilities only. Adds reporting only; does not change media behavior.
const base::Feature kMediaLearningExperiment{"MediaLearningExperiment",
@@ -772,7 +769,7 @@ const base::Feature kMediaPowerExperiment{"MediaPowerExperiment",
// has audio focus enabled.
const base::Feature kAudioFocusDuckFlash {
"AudioFocusDuckFlash",
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
base::FEATURE_ENABLED_BY_DEFAULT
#else
base::FEATURE_DISABLED_BY_DEFAULT
@@ -827,7 +824,7 @@ bool IsVideoCaptureAcceleratedJpegDecodingEnabled() {
switches::kUseFakeMjpegDecodeAccelerator)) {
return true;
}
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
return true;
#endif
return false;
diff --git a/chromium/media/base/media_switches.h b/chromium/media/base/media_switches.h
index 0f47b38a949..e08ba0324d0 100644
--- a/chromium/media/base/media_switches.h
+++ b/chromium/media/base/media_switches.h
@@ -11,6 +11,7 @@
#include "base/feature_list.h"
#include "build/build_config.h"
+#include "build/chromeos_buildflags.h"
#include "media/base/media_export.h"
#include "media/media_buildflags.h"
@@ -55,10 +56,6 @@ MEDIA_EXPORT extern const char kDisableAudioInput[];
MEDIA_EXPORT extern const char kUseOverlaysForVideo[];
#endif
-#if defined(OS_CHROMEOS)
-MEDIA_EXPORT extern const char kPlatformDisallowsChromeOSDirectVideoDecoder[];
-#endif
-
#if defined(USE_CRAS)
MEDIA_EXPORT extern const char kUseCras[];
#endif
@@ -117,7 +114,6 @@ MEDIA_EXPORT extern const base::Feature kD3D11PrintCodecOnCrash;
MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoder;
MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoderIgnoreWorkarounds;
MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoderVP9Profile2;
-MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoderCopyPictures;
MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoderSkipMultithreaded;
MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoderAlwaysCopy;
MEDIA_EXPORT extern const base::Feature kD3D11VideoDecoderAllowOverlay;
@@ -153,7 +149,6 @@ MEDIA_EXPORT extern const base::Feature kMediaEngagementHTTPSOnly;
MEDIA_EXPORT extern const base::Feature kMediaFeeds;
MEDIA_EXPORT extern const base::Feature kMediaFeedsBackgroundFetching;
MEDIA_EXPORT extern const base::Feature kMediaFeedsSafeSearch;
-MEDIA_EXPORT extern const base::Feature kMediaInspectorLogging;
MEDIA_EXPORT extern const base::Feature kMediaLearningExperiment;
MEDIA_EXPORT extern const base::Feature kMediaLearningFramework;
MEDIA_EXPORT extern const base::Feature kMediaLearningSmoothnessExperiment;
@@ -177,6 +172,7 @@ MEDIA_EXPORT extern const base::Feature kUseFakeDeviceForMediaStream;
MEDIA_EXPORT extern const base::Feature kUseMediaHistoryStore;
MEDIA_EXPORT extern const base::Feature kUseR16Texture;
MEDIA_EXPORT extern const base::Feature kUseSodaForLiveCaption;
+MEDIA_EXPORT extern const base::Feature kVaapiAV1Decoder;
MEDIA_EXPORT extern const base::Feature kVaapiLowPowerEncoderGen9x;
MEDIA_EXPORT extern const base::Feature kVaapiVP8Encoder;
MEDIA_EXPORT extern const base::Feature kVaapiVP9Encoder;
@@ -188,9 +184,9 @@ MEDIA_EXPORT extern const base::Feature kResolutionBasedDecoderPriority;
MEDIA_EXPORT extern const base::Feature kForceHardwareVideoDecoders;
MEDIA_EXPORT extern const base::Feature kForceHardwareAudioDecoders;
-#if defined(ARCH_CPU_X86_FAMILY) && defined(OS_CHROMEOS)
+#if defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_ASH)
MEDIA_EXPORT extern const base::Feature kVp9kSVCHWDecoding;
-#endif // defined(ARCH_CPU_X86_FAMILY) && defined(OS_CHROMEOS)
+#endif // defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_ASH)
#if defined(OS_ANDROID)
MEDIA_EXPORT extern const base::Feature kAllowNonSecureOverlays;
@@ -205,14 +201,13 @@ MEDIA_EXPORT extern const base::Feature kHlsPlayer;
MEDIA_EXPORT extern const base::Feature kRequestSystemAudioFocus;
MEDIA_EXPORT extern const base::Feature kUseAudioLatencyFromHAL;
MEDIA_EXPORT extern const base::Feature kUsePooledSharedImageVideoProvider;
-MEDIA_EXPORT extern const base::Feature kWebViewZeroCopyVideo;
#endif // defined(OS_ANDROID)
-#if defined(OS_CHROMEOS) && BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
+#if BUILDFLAG(IS_ASH) && BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
MEDIA_EXPORT extern const base::Feature kUseChromeOSDirectVideoDecoder;
MEDIA_EXPORT extern const base::Feature kUseAlternateVideoDecoderImplementation;
-#endif // defined(OS_CHROMEOS) && BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
+#endif // BUILDFLAG(IS_ASH) && BUILDFLAG(USE_CHROMEOS_MEDIA_ACCELERATION)
#if defined(OS_WIN)
MEDIA_EXPORT extern const base::Feature kDelayCopyNV12Textures;
@@ -240,6 +235,17 @@ MEDIA_EXPORT std::string GetEffectiveAutoplayPolicy(
MEDIA_EXPORT bool IsVideoCaptureAcceleratedJpegDecodingEnabled();
+enum class kCrosGlobalMediaControlsPinOptions {
+ kPin,
+ kNotPin,
+ kHeuristic,
+};
+
+// Feature param used to force default pin/unpin for global media controls in
+// CrOS.
+MEDIA_EXPORT extern const base::FeatureParam<kCrosGlobalMediaControlsPinOptions>
+ kCrosGlobalMediaControlsPinParam;
+
} // namespace media
#endif // MEDIA_BASE_MEDIA_SWITCHES_H_
diff --git a/chromium/media/base/media_types.h b/chromium/media/base/media_types.h
index b96ee7bb4f4..f0b910ab1a9 100644
--- a/chromium/media/base/media_types.h
+++ b/chromium/media/base/media_types.h
@@ -33,7 +33,7 @@ struct MEDIA_EXPORT VideoType {
VideoCodecProfile profile;
int level;
VideoColorSpace color_space;
- gl::HdrMetadataType hdr_metadata_type;
+ gfx::HdrMetadataType hdr_metadata_type;
};
MEDIA_EXPORT bool operator==(const AudioType& x, const AudioType& y);
diff --git a/chromium/media/base/media_url_demuxer_unittest.cc b/chromium/media/base/media_url_demuxer_unittest.cc
index 65869054e4e..136f0ac4794 100644
--- a/chromium/media/base/media_url_demuxer_unittest.cc
+++ b/chromium/media/base/media_url_demuxer_unittest.cc
@@ -5,7 +5,7 @@
#include "media/base/media_url_demuxer.h"
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/macros.h"
#include "base/run_loop.h"
#include "base/test/task_environment.h"
diff --git a/chromium/media/base/mock_filters.cc b/chromium/media/base/mock_filters.cc
index 34821e901da..f7fc5c0cf91 100644
--- a/chromium/media/base/mock_filters.cc
+++ b/chromium/media/base/mock_filters.cc
@@ -100,6 +100,11 @@ std::string MockVideoDecoder::GetDisplayName() const {
return decoder_name_;
}
+MockVideoEncoder::MockVideoEncoder() = default;
+MockVideoEncoder::~MockVideoEncoder() {
+ Dtor();
+}
+
MockAudioDecoder::MockAudioDecoder() : MockAudioDecoder("MockAudioDecoder") {}
MockAudioDecoder::MockAudioDecoder(std::string decoder_name)
diff --git a/chromium/media/base/mock_filters.h b/chromium/media/base/mock_filters.h
index 25a78b098af..832c7b4650d 100644
--- a/chromium/media/base/mock_filters.h
+++ b/chromium/media/base/mock_filters.h
@@ -42,6 +42,7 @@
#include "media/base/time_source.h"
#include "media/base/video_decoder.h"
#include "media/base/video_decoder_config.h"
+#include "media/base/video_encoder.h"
#include "media/base/video_frame.h"
#include "media/base/video_renderer.h"
#include "testing/gmock/include/gmock/gmock.h"
@@ -261,6 +262,43 @@ class MockVideoDecoder : public VideoDecoder {
DISALLOW_COPY_AND_ASSIGN(MockVideoDecoder);
};
+class MockVideoEncoder : public VideoEncoder {
+ public:
+ MockVideoEncoder();
+ ~MockVideoEncoder() override;
+
+ // VideoEncoder implementation.
+ MOCK_METHOD(void,
+ Initialize,
+ (VideoCodecProfile profile,
+ const VideoEncoder::Options& options,
+ VideoEncoder::OutputCB output_cb,
+ VideoEncoder::StatusCB done_cb),
+ (override));
+
+ MOCK_METHOD(void,
+ Encode,
+ (scoped_refptr<VideoFrame> frame,
+ bool key_frame,
+ VideoEncoder::StatusCB done_cb),
+ (override));
+
+ MOCK_METHOD(void,
+ ChangeOptions,
+ (const VideoEncoder::Options& options,
+ VideoEncoder::OutputCB output_cb,
+ VideoEncoder::StatusCB done_cb),
+ (override));
+
+ MOCK_METHOD(void, Flush, (VideoEncoder::StatusCB done_cb), (override));
+
+ // A function for mocking destructor calls
+ MOCK_METHOD(void, Dtor, ());
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(MockVideoEncoder);
+};
+
class MockAudioDecoder : public AudioDecoder {
public:
MockAudioDecoder();
diff --git a/chromium/media/base/multi_channel_resampler.cc b/chromium/media/base/multi_channel_resampler.cc
index f7e52a70278..b37b72632a0 100644
--- a/chromium/media/base/multi_channel_resampler.cc
+++ b/chromium/media/base/multi_channel_resampler.cc
@@ -8,7 +8,7 @@
#include <memory>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/check_op.h"
#include "media/base/audio_bus.h"
diff --git a/chromium/media/base/multi_channel_resampler_unittest.cc b/chromium/media/base/multi_channel_resampler_unittest.cc
index dae0c72a805..1741286ad4f 100644
--- a/chromium/media/base/multi_channel_resampler_unittest.cc
+++ b/chromium/media/base/multi_channel_resampler_unittest.cc
@@ -8,7 +8,7 @@
#include <memory>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/macros.h"
#include "media/base/audio_bus.h"
#include "testing/gtest/include/gtest/gtest.h"
diff --git a/chromium/media/base/offloading_video_encoder.cc b/chromium/media/base/offloading_video_encoder.cc
new file mode 100644
index 00000000000..02acf7135fe
--- /dev/null
+++ b/chromium/media/base/offloading_video_encoder.cc
@@ -0,0 +1,89 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/offloading_video_encoder.h"
+
+#include "base/sequenced_task_runner.h"
+#include "base/task/task_traits.h"
+#include "base/task/thread_pool.h"
+#include "media/base/bind_to_current_loop.h"
+#include "media/base/video_frame.h"
+
+namespace media {
+
+OffloadingVideoEncoder::OffloadingVideoEncoder(
+ std::unique_ptr<VideoEncoder> wrapped_encoder,
+ const scoped_refptr<base::SequencedTaskRunner> work_runner,
+ const scoped_refptr<base::SequencedTaskRunner> callback_runner)
+ : wrapped_encoder_(std::move(wrapped_encoder)),
+ work_runner_(std::move(work_runner)),
+ callback_runner_(std::move(callback_runner)) {
+ DCHECK(wrapped_encoder_);
+ DCHECK(work_runner_);
+ DCHECK(callback_runner_);
+ DCHECK_NE(callback_runner_, work_runner_);
+}
+
+OffloadingVideoEncoder::OffloadingVideoEncoder(
+ std::unique_ptr<VideoEncoder> wrapped_encoder)
+ : OffloadingVideoEncoder(std::move(wrapped_encoder),
+ base::ThreadPool::CreateSequencedTaskRunner(
+ {base::TaskPriority::USER_BLOCKING}),
+ base::SequencedTaskRunnerHandle::Get()) {}
+
+void OffloadingVideoEncoder::Initialize(VideoCodecProfile profile,
+ const Options& options,
+ OutputCB output_cb,
+ StatusCB done_cb) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ work_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&VideoEncoder::Initialize,
+ base::Unretained(wrapped_encoder_.get()), profile, options,
+ WrapCallback(std::move(output_cb)),
+ WrapCallback(std::move(done_cb))));
+}
+
+void OffloadingVideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
+ bool key_frame,
+ StatusCB done_cb) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ work_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&VideoEncoder::Encode,
+ base::Unretained(wrapped_encoder_.get()), std::move(frame),
+ key_frame, WrapCallback(std::move(done_cb))));
+}
+
+void OffloadingVideoEncoder::ChangeOptions(const Options& options,
+ OutputCB output_cb,
+ StatusCB done_cb) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ work_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&VideoEncoder::ChangeOptions,
+ base::Unretained(wrapped_encoder_.get()),
+ options, WrapCallback(std::move(output_cb)),
+ WrapCallback(std::move(done_cb))));
+}
+
+void OffloadingVideoEncoder::Flush(StatusCB done_cb) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ work_runner_->PostTask(
+ FROM_HERE, base::BindOnce(&VideoEncoder::Flush,
+ base::Unretained(wrapped_encoder_.get()),
+ WrapCallback(std::move(done_cb))));
+}
+
+OffloadingVideoEncoder::~OffloadingVideoEncoder() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ work_runner_->DeleteSoon(FROM_HERE, std::move(wrapped_encoder_));
+}
+
+template <class T>
+T OffloadingVideoEncoder::WrapCallback(T cb) {
+ DCHECK(callback_runner_);
+ return media::BindToLoop(callback_runner_.get(), std::move(cb));
+}
+
+} // namespace media \ No newline at end of file
diff --git a/chromium/media/base/offloading_video_encoder.h b/chromium/media/base/offloading_video_encoder.h
new file mode 100644
index 00000000000..6000c4ccb09
--- /dev/null
+++ b/chromium/media/base/offloading_video_encoder.h
@@ -0,0 +1,67 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_OFFLOADING_VIDEO_ENCODER_H_
+#define MEDIA_BASE_OFFLOADING_VIDEO_ENCODER_H_
+
+#include <memory>
+#include <type_traits>
+
+#include "base/sequence_checker.h"
+#include "media/base/video_encoder.h"
+
+namespace base {
+class SequencedTaskRunner;
+}
+
+namespace media {
+
+// A wrapper around video encoder that offloads all the calls to a dedicated
+// task runner. It's used to move synchronous software encoding work off the
+// current (main) thread.
+class MEDIA_EXPORT OffloadingVideoEncoder final : public VideoEncoder {
+ public:
+ // |work_runner| - task runner for encoding work
+ // |callback_runner| - all encoder's callbacks will be executed on this task
+ // runner.
+ OffloadingVideoEncoder(
+ std::unique_ptr<VideoEncoder> wrapped_encoder,
+ const scoped_refptr<base::SequencedTaskRunner> work_runner,
+ const scoped_refptr<base::SequencedTaskRunner> callback_runner);
+
+ // Uses current task runner for callbacks and asks thread pool for a new task
+ // runner to do actual encoding work.
+ explicit OffloadingVideoEncoder(
+ std::unique_ptr<VideoEncoder> wrapped_encoder);
+
+ ~OffloadingVideoEncoder() override;
+
+ void Initialize(VideoCodecProfile profile,
+ const Options& options,
+ OutputCB output_cb,
+ StatusCB done_cb) override;
+
+ void Encode(scoped_refptr<VideoFrame> frame,
+ bool key_frame,
+ StatusCB done_cb) override;
+
+ void ChangeOptions(const Options& options,
+ OutputCB output_cb,
+ StatusCB done_cb) override;
+
+ void Flush(StatusCB done_cb) override;
+
+ private:
+ template <class T>
+ T WrapCallback(T cb);
+
+ std::unique_ptr<VideoEncoder> wrapped_encoder_;
+ const scoped_refptr<base::SequencedTaskRunner> work_runner_;
+ const scoped_refptr<base::SequencedTaskRunner> callback_runner_;
+ SEQUENCE_CHECKER(sequence_checker_);
+};
+
+} // namespace media
+
+#endif // MEDIA_BASE_OFFLOADING_VIDEO_ENCODER_H_
diff --git a/chromium/media/base/offloading_video_encoder_unittest.cc b/chromium/media/base/offloading_video_encoder_unittest.cc
new file mode 100644
index 00000000000..3bccecf3552
--- /dev/null
+++ b/chromium/media/base/offloading_video_encoder_unittest.cc
@@ -0,0 +1,148 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <memory>
+#include <vector>
+
+#include "base/bind.h"
+#include "base/callback_helpers.h"
+#include "base/run_loop.h"
+#include "base/sequenced_task_runner.h"
+#include "base/test/bind.h"
+#include "base/test/gmock_callback_support.h"
+#include "base/test/task_environment.h"
+#include "media/base/media_util.h"
+#include "media/base/mock_filters.h"
+#include "media/base/offloading_video_encoder.h"
+#include "media/base/video_types.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ::base::test::RunCallback;
+using ::base::test::RunOnceCallback;
+using ::testing::_;
+using ::testing::DoAll;
+using ::testing::Invoke;
+using ::testing::Return;
+
+namespace media {
+
+class OffloadingVideoEncoderTest : public testing::Test {
+ protected:
+ void SetUp() override {
+ auto mock_video_encoder = std::make_unique<MockVideoEncoder>();
+ mock_video_encoder_ = mock_video_encoder.get();
+ work_runner_ = base::ThreadPool::CreateSequencedTaskRunner({});
+ callback_runner_ = base::SequencedTaskRunnerHandle::Get();
+ offloading_encoder_ = std::make_unique<OffloadingVideoEncoder>(
+ std::move(mock_video_encoder), work_runner_, callback_runner_);
+ EXPECT_CALL(*mock_video_encoder_, Dtor()).WillOnce(Invoke([this]() {
+ EXPECT_TRUE(work_runner_->RunsTasksInCurrentSequence());
+ }));
+ }
+
+ void RunLoop() { task_environment_.RunUntilIdle(); }
+
+ base::test::TaskEnvironment task_environment_;
+ scoped_refptr<base::SequencedTaskRunner> work_runner_;
+ scoped_refptr<base::SequencedTaskRunner> callback_runner_;
+ MockVideoEncoder* mock_video_encoder_;
+ std::unique_ptr<OffloadingVideoEncoder> offloading_encoder_;
+};
+
+TEST_F(OffloadingVideoEncoderTest, Initialize) {
+ bool called_done = false;
+ bool called_output = false;
+ VideoEncoder::Options options;
+ VideoCodecProfile profile = VIDEO_CODEC_PROFILE_UNKNOWN;
+ VideoEncoder::OutputCB output_cb = base::BindLambdaForTesting(
+ [&](VideoEncoderOutput, base::Optional<VideoEncoder::CodecDescription>) {
+ EXPECT_TRUE(callback_runner_->RunsTasksInCurrentSequence());
+ called_output = true;
+ });
+ VideoEncoder::StatusCB done_cb = base::BindLambdaForTesting([&](Status s) {
+ EXPECT_TRUE(callback_runner_->RunsTasksInCurrentSequence());
+ called_done = true;
+ });
+
+ EXPECT_CALL(*mock_video_encoder_, Initialize(_, _, _, _))
+ .WillOnce(Invoke([this](VideoCodecProfile profile,
+ const VideoEncoder::Options& options,
+ VideoEncoder::OutputCB output_cb,
+ VideoEncoder::StatusCB done_cb) {
+ EXPECT_TRUE(work_runner_->RunsTasksInCurrentSequence());
+ std::move(done_cb).Run(Status());
+ std::move(output_cb).Run(VideoEncoderOutput(), {});
+ }));
+
+ offloading_encoder_->Initialize(profile, options, std::move(output_cb),
+ std::move(done_cb));
+ RunLoop();
+ EXPECT_TRUE(called_done);
+ EXPECT_TRUE(called_output);
+}
+
+TEST_F(OffloadingVideoEncoderTest, Encode) {
+ bool called_done = false;
+ VideoEncoder::StatusCB done_cb = base::BindLambdaForTesting([&](Status s) {
+ EXPECT_TRUE(callback_runner_->RunsTasksInCurrentSequence());
+ called_done = true;
+ });
+
+ EXPECT_CALL(*mock_video_encoder_, Encode(_, _, _))
+ .WillOnce(Invoke([this](scoped_refptr<VideoFrame> frame, bool key_frame,
+ VideoEncoder::StatusCB done_cb) {
+ EXPECT_TRUE(work_runner_->RunsTasksInCurrentSequence());
+ std::move(done_cb).Run(Status());
+ }));
+
+ offloading_encoder_->Encode(nullptr, false, std::move(done_cb));
+ RunLoop();
+ EXPECT_TRUE(called_done);
+}
+
+TEST_F(OffloadingVideoEncoderTest, ChangeOptions) {
+ bool called_done = false;
+ VideoEncoder::Options options;
+ VideoEncoder::StatusCB done_cb = base::BindLambdaForTesting([&](Status s) {
+ EXPECT_TRUE(callback_runner_->RunsTasksInCurrentSequence());
+ called_done = true;
+ });
+
+ VideoEncoder::OutputCB output_cb = base::BindRepeating(
+ [](VideoEncoderOutput, base::Optional<VideoEncoder::CodecDescription>) {
+ });
+
+ EXPECT_CALL(*mock_video_encoder_, ChangeOptions(_, _, _))
+ .WillOnce(Invoke([this](const VideoEncoder::Options& options,
+ VideoEncoder::OutputCB output_cb,
+ VideoEncoder::StatusCB done_cb) {
+ EXPECT_TRUE(work_runner_->RunsTasksInCurrentSequence());
+ std::move(done_cb).Run(Status());
+ }));
+
+ offloading_encoder_->ChangeOptions(options, std::move(output_cb),
+ std::move(done_cb));
+ RunLoop();
+ EXPECT_TRUE(called_done);
+}
+
+TEST_F(OffloadingVideoEncoderTest, Flush) {
+ bool called_done = false;
+ VideoEncoder::StatusCB done_cb = base::BindLambdaForTesting([&](Status s) {
+ EXPECT_TRUE(callback_runner_->RunsTasksInCurrentSequence());
+ called_done = true;
+ });
+
+ EXPECT_CALL(*mock_video_encoder_, Flush(_))
+ .WillOnce(Invoke([this](VideoEncoder::StatusCB done_cb) {
+ EXPECT_TRUE(work_runner_->RunsTasksInCurrentSequence());
+ std::move(done_cb).Run(Status());
+ }));
+
+ offloading_encoder_->Flush(std::move(done_cb));
+ RunLoop();
+ EXPECT_TRUE(called_done);
+}
+
+} // namespace media
diff --git a/chromium/media/base/pipeline_impl.cc b/chromium/media/base/pipeline_impl.cc
index 8104646400b..67f988b9dde 100644
--- a/chromium/media/base/pipeline_impl.cc
+++ b/chromium/media/base/pipeline_impl.cc
@@ -8,7 +8,6 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback.h"
#include "base/callback_helpers.h"
#include "base/command_line.h"
@@ -51,8 +50,8 @@ gfx::Size GetRotatedVideoSize(VideoRotation rotation, gfx::Size natural_size) {
// |default_renderer| in Start() and Resume() helps avoid a round trip to the
// render main task runner for Renderer creation in most cases which could add
// latency to start-to-play time.
-class PipelineImpl::RendererWrapper : public DemuxerHost,
- public RendererClient {
+class PipelineImpl::RendererWrapper final : public DemuxerHost,
+ public RendererClient {
public:
RendererWrapper(scoped_refptr<base::SingleThreadTaskRunner> media_task_runner,
scoped_refptr<base::SingleThreadTaskRunner> main_task_runner,
diff --git a/chromium/media/base/provision_fetcher.h b/chromium/media/base/provision_fetcher.h
index e63b4786be2..48135306722 100644
--- a/chromium/media/base/provision_fetcher.h
+++ b/chromium/media/base/provision_fetcher.h
@@ -9,6 +9,7 @@
#include <string>
#include "base/callback.h"
+#include "url/gurl.h"
namespace media {
@@ -29,7 +30,7 @@ class ProvisionFetcher {
// MediaDrm.ProvisionRequest.
// The implementation must call |response_cb| asynchronously on the same
// thread that this method is called.
- virtual void Retrieve(const std::string& default_url,
+ virtual void Retrieve(const GURL& default_url,
const std::string& request_data,
ResponseCB response_cb) = 0;
};
diff --git a/chromium/media/base/sinc_resampler_perftest.cc b/chromium/media/base/sinc_resampler_perftest.cc
index 70436975874..905d8b98c2e 100644
--- a/chromium/media/base/sinc_resampler_perftest.cc
+++ b/chromium/media/base/sinc_resampler_perftest.cc
@@ -3,7 +3,7 @@
// found in the LICENSE file.
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/time/time.h"
#include "build/build_config.h"
#include "media/base/sinc_resampler.h"
diff --git a/chromium/media/base/sinc_resampler_unittest.cc b/chromium/media/base/sinc_resampler_unittest.cc
index 0197ffdec6d..195da5d1f9e 100644
--- a/chromium/media/base/sinc_resampler_unittest.cc
+++ b/chromium/media/base/sinc_resampler_unittest.cc
@@ -5,7 +5,7 @@
#include <memory>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/macros.h"
#include "base/numerics/math_constants.h"
#include "base/strings/string_number_conversions.h"
diff --git a/chromium/media/base/status.h b/chromium/media/base/status.h
index 96e26e6e421..ef49cc32781 100644
--- a/chromium/media/base/status.h
+++ b/chromium/media/base/status.h
@@ -152,9 +152,14 @@ MEDIA_EXPORT Status OkStatus();
// TODO(liberato): Add more helper functions for common error returns.
-// Helper class to allow returning a |T| or a Status. Typical usage:
+// Helper class to allow returning a `T` or a Status.
//
-// ErrorOr<std::unique_ptr<MyObject>> FactoryFn() {
+// It is not okay to send a StatusOr with a status code of `kOk`. `kOk` is
+// reserved for cases where there is a `T` rather than a Status.
+//
+// Typical usage:
+//
+// StatusOr<std::unique_ptr<MyObject>> FactoryFn() {
// if (success)
// return std::make_unique<MyObject>();
// return Status(StatusCodes::kSomethingBadHappened);
@@ -164,12 +169,24 @@ MEDIA_EXPORT Status OkStatus();
// if (result.has_error()) return std::move(result.error());
// my_object_ = std::move(result.value());
//
+// Can also be combined into a single switch using `code()`:
+//
+// switch (result.code()) {
+// case StatusCode::kOk:
+// // `kOk` is special; it means the StatusOr has a `T`.
+// // Do something with result.value()
+// break;
+// // Maybe switch on specific non-kOk codes for special processing.
+// default: // Send unknown errors upwards.
+// return std::move(result.error());
+// }
+//
// Also useful if one would like to get an enum class return value, unless an
// error occurs:
//
// enum class ResultType { kNeedMoreInput, kOutputIsReady, kFormatChanged };
//
-// ErrorOr<ResultType> Foo() { ... }
+// StatusOr<ResultType> Foo() { ... }
//
// auto result = Foo();
// if (result.has_error()) return std::move(result.error());
@@ -178,26 +195,32 @@ MEDIA_EXPORT Status OkStatus();
// ...
// }
template <typename T>
-class ErrorOr {
+class StatusOr {
public:
// All of these may be implicit, so that one may just return Status or
// the value in question.
- ErrorOr(Status&& error) : error_(std::move(error)) {}
- ErrorOr(const Status& error) : error_(error) {}
- ErrorOr(StatusCode code,
- const base::Location& location = base::Location::Current())
- : error_(Status(code, "", location)) {}
+ StatusOr(Status&& error) : error_(std::move(error)) {
+ DCHECK(!this->error().is_ok());
+ }
+ StatusOr(const Status& error) : error_(error) {
+ DCHECK(!this->error().is_ok());
+ }
+ StatusOr(StatusCode code,
+ const base::Location& location = base::Location::Current())
+ : error_(Status(code, "", location)) {
+ DCHECK(!error().is_ok());
+ }
- ErrorOr(T&& value) : value_(std::move(value)) {}
- ErrorOr(const T& value) : value_(value) {}
+ StatusOr(T&& value) : value_(std::move(value)) {}
+ StatusOr(const T& value) : value_(value) {}
- ~ErrorOr() = default;
+ ~StatusOr() = default;
// Move- and copy- construction and assignment are okay.
- ErrorOr(const ErrorOr&) = default;
- ErrorOr(ErrorOr&&) = default;
- ErrorOr& operator=(ErrorOr&) = default;
- ErrorOr& operator=(ErrorOr&&) = default;
+ StatusOr(const StatusOr&) = default;
+ StatusOr(StatusOr&&) = default;
+ StatusOr& operator=(StatusOr&) = default;
+ StatusOr& operator=(StatusOr&&) = default;
// Do we have a value?
bool has_value() const { return value_.has_value(); }
@@ -209,10 +232,18 @@ class ErrorOr {
// have one via |!has_value()|.
Status& error() { return *error_; }
+ const Status& error() const { return *error_; }
+
// Return a ref to the value. It's up to the caller to verify that we have a
// value before calling this.
T& value() { return std::get<0>(*value_); }
+ // Returns the error code we have, if any, or `kOk` if we have a value. If
+ // this returns `kOk`, then it is equivalent to has_value().
+ StatusCode code() const {
+ return has_error() ? error().code() : StatusCode::kOk;
+ }
+
private:
base::Optional<Status> error_;
// We wrap |T| in a container so that windows COM wrappers work. They
diff --git a/chromium/media/base/status_unittest.cc b/chromium/media/base/status_unittest.cc
index b9e6604c7ac..0a2f7cc5e21 100644
--- a/chromium/media/base/status_unittest.cc
+++ b/chromium/media/base/status_unittest.cc
@@ -65,8 +65,8 @@ class StatusTest : public testing::Test {
return me;
}
- // Make sure that the typical usage of ErrorOr actually compiles.
- ErrorOr<std::unique_ptr<int>> TypicalErrorOrUsage(bool succeed) {
+ // Make sure that the typical usage of StatusOr actually compiles.
+ StatusOr<std::unique_ptr<int>> TypicalStatusOrUsage(bool succeed) {
if (succeed)
return std::make_unique<int>(123);
return Status(StatusCode::kCodeOnlyForTesting);
@@ -192,16 +192,16 @@ TEST_F(StatusTest, CanCopyEasily) {
ASSERT_EQ(actual.FindDictPath("data")->DictSize(), 1ul);
}
-TEST_F(StatusTest, ErrorOrTypicalUsage) {
+TEST_F(StatusTest, StatusOrTypicalUsage) {
// Mostly so we have some code coverage on the default usage.
- EXPECT_TRUE(TypicalErrorOrUsage(true).has_value());
- EXPECT_FALSE(TypicalErrorOrUsage(true).has_error());
- EXPECT_FALSE(TypicalErrorOrUsage(false).has_value());
- EXPECT_TRUE(TypicalErrorOrUsage(false).has_error());
+ EXPECT_TRUE(TypicalStatusOrUsage(true).has_value());
+ EXPECT_FALSE(TypicalStatusOrUsage(true).has_error());
+ EXPECT_FALSE(TypicalStatusOrUsage(false).has_value());
+ EXPECT_TRUE(TypicalStatusOrUsage(false).has_error());
}
-TEST_F(StatusTest, ErrorOrWithMoveOnlyType) {
- ErrorOr<std::unique_ptr<int>> error_or(std::make_unique<int>(123));
+TEST_F(StatusTest, StatusOrWithMoveOnlyType) {
+ StatusOr<std::unique_ptr<int>> error_or(std::make_unique<int>(123));
EXPECT_TRUE(error_or.has_value());
EXPECT_FALSE(error_or.has_error());
std::unique_ptr<int> result = std::move(error_or.value());
@@ -210,8 +210,8 @@ TEST_F(StatusTest, ErrorOrWithMoveOnlyType) {
EXPECT_EQ(*result, 123);
}
-TEST_F(StatusTest, ErrorOrWithCopyableType) {
- ErrorOr<int> error_or(123);
+TEST_F(StatusTest, StatusOrWithCopyableType) {
+ StatusOr<int> error_or(123);
EXPECT_TRUE(error_or.has_value());
EXPECT_FALSE(error_or.has_error());
int result = std::move(error_or.value());
@@ -220,14 +220,14 @@ TEST_F(StatusTest, ErrorOrWithCopyableType) {
EXPECT_EQ(error_or.value(), 123);
}
-TEST_F(StatusTest, ErrorOrMoveConstructionAndAssignment) {
+TEST_F(StatusTest, StatusOrMoveConstructionAndAssignment) {
// Make sure that we can move-construct and move-assign a move-only value.
- ErrorOr<std::unique_ptr<int>> error_or_0(std::make_unique<int>(123));
+ StatusOr<std::unique_ptr<int>> error_or_0(std::make_unique<int>(123));
- ErrorOr<std::unique_ptr<int>> error_or_1(std::move(error_or_0));
+ StatusOr<std::unique_ptr<int>> error_or_1(std::move(error_or_0));
EXPECT_EQ(error_or_0.value(), nullptr);
- ErrorOr<std::unique_ptr<int>> error_or_2 = std::move(error_or_1);
+ StatusOr<std::unique_ptr<int>> error_or_2 = std::move(error_or_1);
EXPECT_EQ(error_or_1.value(), nullptr);
// |error_or_2| should have gotten the original.
@@ -235,12 +235,22 @@ TEST_F(StatusTest, ErrorOrMoveConstructionAndAssignment) {
EXPECT_EQ(*value, 123);
}
-TEST_F(StatusTest, ErrorOrCopyWorks) {
+TEST_F(StatusTest, StatusOrCopyWorks) {
// Make sure that we can move-construct and move-assign a move-only value.
- ErrorOr<int> error_or_0(123);
- ErrorOr<int> error_or_1(std::move(error_or_0));
- ErrorOr<int> error_or_2 = std::move(error_or_1);
+ StatusOr<int> error_or_0(123);
+ StatusOr<int> error_or_1(std::move(error_or_0));
+ StatusOr<int> error_or_2 = std::move(error_or_1);
EXPECT_EQ(error_or_2.value(), 123);
}
+TEST_F(StatusTest, StatusOrCodeIsOkWithValue) {
+ StatusOr<int> error_or(123);
+ EXPECT_EQ(error_or.code(), StatusCode::kOk);
+}
+
+TEST_F(StatusTest, StatusOrCodeIsNotOkWithoutValue) {
+ StatusOr<int> error_or(StatusCode::kCodeOnlyForTesting);
+ EXPECT_EQ(error_or.code(), StatusCode::kCodeOnlyForTesting);
+}
+
} // namespace media
diff --git a/chromium/media/base/supported_types.cc b/chromium/media/base/supported_types.cc
index 2047bb52af4..05de5ab4464 100644
--- a/chromium/media/base/supported_types.cc
+++ b/chromium/media/base/supported_types.cc
@@ -9,6 +9,7 @@
#include "base/no_destructor.h"
#include "base/notreached.h"
#include "build/build_config.h"
+#include "build/chromeos_buildflags.h"
#include "media/base/media.h"
#include "media/base/media_client.h"
#include "media/base/media_switches.h"
@@ -35,14 +36,14 @@ namespace media {
namespace {
-bool IsSupportedHdrMetadata(const gl::HdrMetadataType& hdr_metadata_type) {
+bool IsSupportedHdrMetadata(const gfx::HdrMetadataType& hdr_metadata_type) {
switch (hdr_metadata_type) {
- case gl::HdrMetadataType::kNone:
+ case gfx::HdrMetadataType::kNone:
return true;
- case gl::HdrMetadataType::kSmpteSt2086:
- case gl::HdrMetadataType::kSmpteSt2094_10:
- case gl::HdrMetadataType::kSmpteSt2094_40:
+ case gfx::HdrMetadataType::kSmpteSt2086:
+ case gfx::HdrMetadataType::kSmpteSt2094_10:
+ case gfx::HdrMetadataType::kSmpteSt2094_40:
return false;
}
@@ -242,7 +243,7 @@ bool IsDefaultSupportedAudioType(const AudioType& type) {
case kCodecAMR_NB:
case kCodecAMR_WB:
case kCodecGSM_MS:
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
return true;
#else
return false;
@@ -324,7 +325,7 @@ bool IsDefaultSupportedVideoType(const VideoType& type) {
return false;
case kCodecMPEG4:
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
return true;
#else
return false;
diff --git a/chromium/media/base/supported_types_unittest.cc b/chromium/media/base/supported_types_unittest.cc
index 15232a77bb2..f7e04b945c7 100644
--- a/chromium/media/base/supported_types_unittest.cc
+++ b/chromium/media/base/supported_types_unittest.cc
@@ -5,6 +5,7 @@
#include "media/base/supported_types.h"
#include "build/build_config.h"
+#include "build/chromeos_buildflags.h"
#include "testing/gtest/include/gtest/gtest.h"
#if defined(OS_ANDROID)
@@ -19,7 +20,7 @@ const bool kPropCodecsEnabled = true;
const bool kPropCodecsEnabled = false;
#endif
-#if defined(OS_CHROMEOS) && BUILDFLAG(USE_PROPRIETARY_CODECS)
+#if BUILDFLAG(IS_ASH) && BUILDFLAG(USE_PROPRIETARY_CODECS)
const bool kMpeg4Supported = true;
#else
const bool kMpeg4Supported = false;
@@ -170,8 +171,8 @@ TEST(SupportedTypesTest, IsSupportedVideoType_VP9Profiles) {
// VP9 Profile2 are supported on x86, ChromeOS on ARM and Mac/Win on ARM64.
// See third_party/libvpx/BUILD.gn.
-#if defined(ARCH_CPU_X86_FAMILY) || \
- (defined(ARCH_CPU_ARM_FAMILY) && defined(OS_CHROMEOS)) || \
+#if defined(ARCH_CPU_X86_FAMILY) || \
+ (defined(ARCH_CPU_ARM_FAMILY) && BUILDFLAG(IS_ASH)) || \
(defined(ARCH_CPU_ARM64) && (defined(OS_MAC) || defined(OS_WIN)))
EXPECT_TRUE(IsSupportedVideoType(
{kCodecVP9, VP9PROFILE_PROFILE2, kUnspecifiedLevel, kColorSpace}));
@@ -276,14 +277,14 @@ TEST(SupportedTypesTest, IsSupportedVideoTypeWithHdrMetadataBasics) {
// No HDR metadata types are supported.
EXPECT_FALSE(
IsSupportedVideoType({kCodecVP8, VP8PROFILE_ANY, kUnspecifiedLevel,
- color_space, gl::HdrMetadataType::kSmpteSt2086}));
+ color_space, gfx::HdrMetadataType::kSmpteSt2086}));
EXPECT_FALSE(IsSupportedVideoType({kCodecVP8, VP8PROFILE_ANY,
kUnspecifiedLevel, color_space,
- gl::HdrMetadataType::kSmpteSt2094_10}));
+ gfx::HdrMetadataType::kSmpteSt2094_10}));
EXPECT_FALSE(IsSupportedVideoType({kCodecVP8, VP8PROFILE_ANY,
kUnspecifiedLevel, color_space,
- gl::HdrMetadataType::kSmpteSt2094_40}));
+ gfx::HdrMetadataType::kSmpteSt2094_40}));
}
} // namespace media
diff --git a/chromium/media/base/test_helpers.cc b/chromium/media/base/test_helpers.cc
index 73ac060dc52..84f8a2047cf 100644
--- a/chromium/media/base/test_helpers.cc
+++ b/chromium/media/base/test_helpers.cc
@@ -350,6 +350,45 @@ scoped_refptr<AudioBuffer> MakeAudioBuffer(SampleFormat format,
return output;
}
+template <>
+scoped_refptr<AudioBuffer> MakeAudioBuffer<float>(SampleFormat format,
+ ChannelLayout channel_layout,
+ size_t channel_count,
+ int sample_rate,
+ float start,
+ float increment,
+ size_t frames,
+ base::TimeDelta timestamp) {
+ const size_t channels = ChannelLayoutToChannelCount(channel_layout);
+ scoped_refptr<AudioBuffer> output = AudioBuffer::CreateBuffer(
+ format, channel_layout, static_cast<int>(channel_count), sample_rate,
+ static_cast<int>(frames));
+ output->set_timestamp(timestamp);
+
+ const bool is_planar =
+ format == kSampleFormatPlanarS16 || format == kSampleFormatPlanarF32;
+
+ // Values in channel 0 will be:
+ // (start) / max_value
+ // (start + increment) / max_value
+ // (start + 2 * increment) / max_value, ...
+ // While, values in channel 1 will be:
+ // (start + frames * increment) / max_value
+ // (start + (frames + 1) * increment) / max_value
+ // (start + (frames + 2) * increment) / max_value, ...
+ for (size_t ch = 0; ch < channels; ++ch) {
+ float* buffer =
+ reinterpret_cast<float*>(output->channel_data()[is_planar ? ch : 0]);
+ const float v = static_cast<float>(start + ch * frames * increment);
+ for (size_t i = 0; i < frames; ++i) {
+ buffer[is_planar ? i : ch + i * channels] =
+ static_cast<float>(v + i * increment) /
+ std::numeric_limits<uint16_t>::max();
+ }
+ }
+ return output;
+}
+
scoped_refptr<AudioBuffer> MakeBitstreamAudioBuffer(
SampleFormat format,
ChannelLayout channel_layout,
@@ -408,7 +447,6 @@ void VerifyBitstreamAudioBus(AudioBus* bus,
DEFINE_MAKE_AUDIO_BUFFER_INSTANCE(uint8_t);
DEFINE_MAKE_AUDIO_BUFFER_INSTANCE(int16_t);
DEFINE_MAKE_AUDIO_BUFFER_INSTANCE(int32_t);
-DEFINE_MAKE_AUDIO_BUFFER_INSTANCE(float);
static const char kFakeVideoBufferHeader[] = "FakeVideoBufferForTest";
diff --git a/chromium/media/base/test_helpers.h b/chromium/media/base/test_helpers.h
index 3a0df8eda59..d7bcf89cfc8 100644
--- a/chromium/media/base/test_helpers.h
+++ b/chromium/media/base/test_helpers.h
@@ -177,6 +177,18 @@ scoped_refptr<AudioBuffer> MakeAudioBuffer(SampleFormat format,
size_t frames,
base::TimeDelta timestamp);
+// Similar to above, but for float types, scaled so values stay within
+// [-1.0f, 1.0f]. Here the stored values will be divided by 65535 (the
+template <>
+scoped_refptr<AudioBuffer> MakeAudioBuffer<float>(SampleFormat format,
+ ChannelLayout channel_layout,
+ size_t channel_count,
+ int sample_rate,
+ float start,
+ float increment,
+ size_t frames,
+ base::TimeDelta timestamp);
+
// Create an AudioBuffer containing bitstream data. |start| and |increment| are
// used to specify the values for the data. The value is determined by:
// start + frames * increment
diff --git a/chromium/media/base/user_input_monitor_linux.cc b/chromium/media/base/user_input_monitor_linux.cc
index 13c373fadfd..58b7d3033fe 100644
--- a/chromium/media/base/user_input_monitor_linux.cc
+++ b/chromium/media/base/user_input_monitor_linux.cc
@@ -19,57 +19,84 @@
#include "base/single_thread_task_runner.h"
#include "base/synchronization/lock.h"
#include "base/task/current_thread.h"
-#include "media/base/keyboard_event_counter.h"
#include "third_party/skia/include/core/SkPoint.h"
-#include "ui/base/ui_base_features.h"
#include "ui/events/devices/x11/xinput_util.h"
+#include "ui/events/keyboard_event_counter.h"
#include "ui/events/keycodes/keyboard_code_conversion_x.h"
-#include "ui/gfx/x/x11.h"
-#include "ui/gfx/x/x11_types.h"
#include "ui/gfx/x/xinput.h"
+#if defined(USE_X11)
+#include "ui/base/x/x11_user_input_monitor.h" // nogncheck
+#endif
+
+#if defined(USE_OZONE)
+#include "ui/base/ui_base_features.h" // nogncheck
+#include "ui/ozone/public/ozone_platform.h" // nogncheck
+#include "ui/ozone/public/platform_user_input_monitor.h" // nogncheck
+#endif
+
namespace media {
namespace {
-// This is the actual implementation of event monitoring. It's separated from
-// UserInputMonitorLinux since it needs to be deleted on the IO thread.
-class UserInputMonitorLinuxCore
- : public base::SupportsWeakPtr<UserInputMonitorLinuxCore>,
- public base::CurrentThread::DestructionObserver,
- public x11::Connection::Delegate {
- public:
- explicit UserInputMonitorLinuxCore(
- const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner);
- ~UserInputMonitorLinuxCore() override;
-
- // base::CurrentThread::DestructionObserver:
- void WillDestroyCurrentMessageLoop() override;
+using WriteKeyPressCallback =
+ base::RepeatingCallback<void(const base::WritableSharedMemoryMapping& shmem,
+ uint32_t count)>;
- // x11::Connection::Delegate:
- bool ShouldContinueStream() const override;
- void DispatchXEvent(x11::Event* event) override;
+// Provides a unified interface for using user input monitors of unrelated
+// classes.
+// TODO(crbug.com/1096425): remove this when non-Ozone path is deprecated.
+class UserInputMonitorAdapter
+ : public base::SupportsWeakPtr<UserInputMonitorAdapter> {
+ public:
+ UserInputMonitorAdapter() = default;
+ UserInputMonitorAdapter(const UserInputMonitorAdapter&) = delete;
+ UserInputMonitorAdapter& operator=(const UserInputMonitorAdapter&) = delete;
+ virtual ~UserInputMonitorAdapter() = default;
+
+ virtual uint32_t GetKeyPressCount() const = 0;
+ virtual void StartMonitor(WriteKeyPressCallback callback) = 0;
+ virtual void StartMonitorWithMapping(
+ WriteKeyPressCallback callback,
+ base::WritableSharedMemoryMapping mapping) = 0;
+ virtual void StopMonitor() = 0;
+};
- uint32_t GetKeyPressCount() const;
- void StartMonitor();
- void StartMonitorWithMapping(base::WritableSharedMemoryMapping mapping);
- void StopMonitor();
+// Wraps a specific user input monitor into UserInputMonitorAdapter interface.
+template <typename Impl>
+class UserInputMonitorLinuxCore : public UserInputMonitorAdapter {
+ public:
+ explicit UserInputMonitorLinuxCore(std::unique_ptr<Impl> user_input_monitor)
+ : user_input_monitor_(std::move(user_input_monitor)) {}
+ UserInputMonitorLinuxCore(const UserInputMonitorLinuxCore&) = delete;
+ UserInputMonitorLinuxCore operator=(const UserInputMonitorLinuxCore&) =
+ delete;
+ ~UserInputMonitorLinuxCore() override = default;
+
+ uint32_t GetKeyPressCount() const override {
+ if (!user_input_monitor_)
+ return 0;
+ return user_input_monitor_->GetKeyPressCount();
+ }
+ void StartMonitor(WriteKeyPressCallback callback) override {
+ if (!user_input_monitor_)
+ return;
+ user_input_monitor_->StartMonitor(callback);
+ }
+ void StartMonitorWithMapping(
+ WriteKeyPressCallback callback,
+ base::WritableSharedMemoryMapping mapping) override {
+ if (!user_input_monitor_)
+ return;
+ user_input_monitor_->StartMonitorWithMapping(callback, std::move(mapping));
+ }
+ void StopMonitor() override {
+ if (!user_input_monitor_)
+ return;
+ user_input_monitor_->StopMonitor();
+ }
private:
- void OnConnectionData();
-
- scoped_refptr<base::SingleThreadTaskRunner> io_task_runner_;
-
- // Used for sharing key press count value.
- std::unique_ptr<base::WritableSharedMemoryMapping> key_press_count_mapping_;
-
- //
- // The following members should only be accessed on the IO thread.
- //
- std::unique_ptr<base::FileDescriptorWatcher::Controller> watch_controller_;
- std::unique_ptr<x11::Connection> connection_;
- KeyboardEventCounter counter_;
-
- DISALLOW_COPY_AND_ASSIGN(UserInputMonitorLinuxCore);
+ std::unique_ptr<Impl> user_input_monitor_;
};
class UserInputMonitorLinux : public UserInputMonitorBase {
@@ -89,130 +116,27 @@ class UserInputMonitorLinux : public UserInputMonitorBase {
void StopKeyboardMonitoring() override;
scoped_refptr<base::SingleThreadTaskRunner> io_task_runner_;
- UserInputMonitorLinuxCore* core_;
+ UserInputMonitorAdapter* core_;
DISALLOW_COPY_AND_ASSIGN(UserInputMonitorLinux);
};
-UserInputMonitorLinuxCore::UserInputMonitorLinuxCore(
- const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner)
- : io_task_runner_(io_task_runner) {}
-
-UserInputMonitorLinuxCore::~UserInputMonitorLinuxCore() {
- DCHECK(!connection_);
-}
-
-void UserInputMonitorLinuxCore::WillDestroyCurrentMessageLoop() {
- DCHECK(io_task_runner_->BelongsToCurrentThread());
- StopMonitor();
-}
-
-bool UserInputMonitorLinuxCore::ShouldContinueStream() const {
- return true;
-}
-
-void UserInputMonitorLinuxCore::DispatchXEvent(x11::Event* event) {
- DCHECK(io_task_runner_->BelongsToCurrentThread());
-
- auto* raw = event->As<x11::Input::RawDeviceEvent>();
- if (!raw || (raw->opcode != x11::Input::RawDeviceEvent::RawKeyPress &&
- raw->opcode != x11::Input::RawDeviceEvent::RawKeyRelease)) {
- return;
- }
-
- ui::EventType type = raw->opcode == x11::Input::RawDeviceEvent::RawKeyPress
- ? ui::ET_KEY_PRESSED
- : ui::ET_KEY_RELEASED;
-
- auto key_sym = connection_->KeycodeToKeysym(raw->detail, 0);
- ui::KeyboardCode key_code =
- ui::KeyboardCodeFromXKeysym(static_cast<uint32_t>(key_sym));
- counter_.OnKeyboardEvent(type, key_code);
-
- // Update count value in shared memory.
- if (key_press_count_mapping_)
- WriteKeyPressMonitorCount(*key_press_count_mapping_, GetKeyPressCount());
-}
-
-uint32_t UserInputMonitorLinuxCore::GetKeyPressCount() const {
- return counter_.GetKeyPressCount();
-}
-
-void UserInputMonitorLinuxCore::StartMonitor() {
- DCHECK(io_task_runner_->BelongsToCurrentThread());
-
- // TODO(https://crbug.com/1116414): support UserInputMonitorLinux on
- // Ozone/Linux.
+UserInputMonitorAdapter* CreateUserInputMonitor(
+ const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner) {
+#if defined(USE_OZONE)
if (features::IsUsingOzonePlatform()) {
- NOTIMPLEMENTED_LOG_ONCE();
- StopMonitor();
- return;
+ return new UserInputMonitorLinuxCore<ui::PlatformUserInputMonitor>(
+ ui::OzonePlatform::GetInstance()->GetPlatformUserInputMonitor(
+ io_task_runner));
}
-
- if (!connection_) {
- // TODO(jamiewalch): We should pass the connection in.
- if (auto* connection = x11::Connection::Get()) {
- connection_ = x11::Connection::Get()->Clone();
- } else {
- LOG(ERROR) << "Couldn't open X connection";
- StopMonitor();
- return;
- }
- }
-
- if (!connection_->xinput().present()) {
- LOG(ERROR) << "X Input extension not available.";
- StopMonitor();
- return;
- }
- // Let the server know the client XInput version.
- connection_->xinput().XIQueryVersion(
- {x11::Input::major_version, x11::Input::minor_version});
-
- x11::Input::XIEventMask mask;
- ui::SetXinputMask(&mask, x11::Input::RawDeviceEvent::RawKeyPress);
- ui::SetXinputMask(&mask, x11::Input::RawDeviceEvent::RawKeyRelease);
- connection_->xinput().XISelectEvents(
- {connection_->default_root(),
- {{x11::Input::DeviceId::AllMaster, {mask}}}});
- connection_->Flush();
-
- // Register OnConnectionData() to be called every time there is something to
- // read from |connection_|.
- watch_controller_ = base::FileDescriptorWatcher::WatchReadable(
- connection_->GetFd(),
- base::BindRepeating(&UserInputMonitorLinuxCore::OnConnectionData,
- base::Unretained(this)));
-
- // Start observing message loop destruction if we start monitoring the first
- // event.
- base::CurrentThread::Get()->AddDestructionObserver(this);
-
- // Fetch pending events if any.
- OnConnectionData();
-}
-
-void UserInputMonitorLinuxCore::StartMonitorWithMapping(
- base::WritableSharedMemoryMapping mapping) {
- StartMonitor();
- key_press_count_mapping_ =
- std::make_unique<base::WritableSharedMemoryMapping>(std::move(mapping));
-}
-
-void UserInputMonitorLinuxCore::StopMonitor() {
- DCHECK(io_task_runner_->BelongsToCurrentThread());
-
- watch_controller_.reset();
- connection_.reset();
- key_press_count_mapping_.reset();
-
- // Stop observing message loop destruction if no event is being monitored.
- base::CurrentThread::Get()->RemoveDestructionObserver(this);
-}
-
-void UserInputMonitorLinuxCore::OnConnectionData() {
- DCHECK(io_task_runner_->BelongsToCurrentThread());
- connection_->Dispatch(this);
+#endif
+#if defined(USE_X11)
+ return new UserInputMonitorLinuxCore<ui::XUserInputMonitor>(
+ std::make_unique<ui::XUserInputMonitor>(io_task_runner));
+#else
+ NOTREACHED();
+ return nullptr;
+#endif
}
//
@@ -222,34 +146,44 @@ void UserInputMonitorLinuxCore::OnConnectionData() {
UserInputMonitorLinux::UserInputMonitorLinux(
const scoped_refptr<base::SingleThreadTaskRunner>& io_task_runner)
: io_task_runner_(io_task_runner),
- core_(new UserInputMonitorLinuxCore(io_task_runner)) {}
+ core_(CreateUserInputMonitor(io_task_runner_)) {}
UserInputMonitorLinux::~UserInputMonitorLinux() {
- if (!io_task_runner_->DeleteSoon(FROM_HERE, core_))
+ if (core_ && !io_task_runner_->DeleteSoon(FROM_HERE, core_))
delete core_;
}
uint32_t UserInputMonitorLinux::GetKeyPressCount() const {
+ if (!core_)
+ return 0;
return core_->GetKeyPressCount();
}
void UserInputMonitorLinux::StartKeyboardMonitoring() {
+ if (!core_)
+ return;
io_task_runner_->PostTask(
- FROM_HERE, base::BindOnce(&UserInputMonitorLinuxCore::StartMonitor,
- core_->AsWeakPtr()));
+ FROM_HERE,
+ base::BindOnce(&UserInputMonitorAdapter::StartMonitor, core_->AsWeakPtr(),
+ base::BindRepeating(&WriteKeyPressMonitorCount)));
}
void UserInputMonitorLinux::StartKeyboardMonitoring(
base::WritableSharedMemoryMapping mapping) {
+ if (!core_)
+ return;
io_task_runner_->PostTask(
FROM_HERE,
- base::BindOnce(&UserInputMonitorLinuxCore::StartMonitorWithMapping,
- core_->AsWeakPtr(), std::move(mapping)));
+ base::BindOnce(
+ &UserInputMonitorAdapter::StartMonitorWithMapping, core_->AsWeakPtr(),
+ base::BindRepeating(&WriteKeyPressMonitorCount), std::move(mapping)));
}
void UserInputMonitorLinux::StopKeyboardMonitoring() {
+ if (!core_)
+ return;
io_task_runner_->PostTask(
- FROM_HERE, base::BindOnce(&UserInputMonitorLinuxCore::StopMonitor,
+ FROM_HERE, base::BindOnce(&UserInputMonitorAdapter::StopMonitor,
core_->AsWeakPtr()));
}
diff --git a/chromium/media/base/user_input_monitor_unittest.cc b/chromium/media/base/user_input_monitor_unittest.cc
index fafc98e2db2..5e85052bad6 100644
--- a/chromium/media/base/user_input_monitor_unittest.cc
+++ b/chromium/media/base/user_input_monitor_unittest.cc
@@ -17,9 +17,21 @@
#include "base/files/file_descriptor_watcher_posix.h"
#endif
+#if defined(USE_OZONE)
+#include "ui/base/ui_base_features.h" // nogncheck
+#endif
+
namespace media {
TEST(UserInputMonitorTest, CreatePlatformSpecific) {
+#if defined(USE_OZONE)
+ // TODO(crbug.com/1109112): enable these tests for Ozone.
+ // They currently fail only because the Ozone platform is not initialised
+ // in this test environment.
+ if (features::IsUsingOzonePlatform())
+ return;
+#endif
+
#if defined(OS_LINUX) || defined(OS_CHROMEOS)
base::test::TaskEnvironment task_environment(
base::test::TaskEnvironment::MainThreadType::IO);
@@ -42,6 +54,14 @@ TEST(UserInputMonitorTest, CreatePlatformSpecific) {
}
TEST(UserInputMonitorTest, CreatePlatformSpecificWithMapping) {
+#if defined(USE_OZONE)
+ // TODO(crbug.com/1109112): enable these tests for Ozone.
+ // They currently fail only because the Ozone platform is not initialised
+ // in this test environment.
+ if (features::IsUsingOzonePlatform())
+ return;
+#endif
+
#if defined(OS_LINUX) || defined(OS_CHROMEOS)
base::test::TaskEnvironment task_environment(
base::test::TaskEnvironment::MainThreadType::IO);
diff --git a/chromium/media/base/user_input_monitor_win.cc b/chromium/media/base/user_input_monitor_win.cc
index 48abe735741..8187d23f282 100644
--- a/chromium/media/base/user_input_monitor_win.cc
+++ b/chromium/media/base/user_input_monitor_win.cc
@@ -17,8 +17,8 @@
#include "base/synchronization/lock.h"
#include "base/task/current_thread.h"
#include "base/win/message_window.h"
-#include "media/base/keyboard_event_counter.h"
#include "third_party/skia/include/core/SkPoint.h"
+#include "ui/events/keyboard_event_counter.h"
#include "ui/events/keycodes/keyboard_code_conversion_win.h"
namespace media {
@@ -77,7 +77,7 @@ class UserInputMonitorWinCore
// These members are only accessed on the UI thread.
std::unique_ptr<base::win::MessageWindow> window_;
- KeyboardEventCounter counter_;
+ ui::KeyboardEventCounter counter_;
DISALLOW_COPY_AND_ASSIGN(UserInputMonitorWinCore);
};
@@ -164,8 +164,7 @@ void UserInputMonitorWinCore::StopMonitor() {
// Stop receiving raw input.
std::unique_ptr<RAWINPUTDEVICE> device(
- GetRawInputDevices(window_->hwnd(), RIDEV_REMOVE));
-
+ GetRawInputDevices(nullptr, RIDEV_REMOVE));
if (!RegisterRawInputDevices(device.get(), 1, sizeof(*device))) {
PLOG(INFO) << "RegisterRawInputDevices() failed for RIDEV_REMOVE";
}
diff --git a/chromium/media/base/video_codecs.cc b/chromium/media/base/video_codecs.cc
index 78a829ce194..6c58bbabd66 100644
--- a/chromium/media/base/video_codecs.cc
+++ b/chromium/media/base/video_codecs.cc
@@ -9,6 +9,7 @@
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_split.h"
#include "base/strings/string_util.h"
+#include "base/strings/stringprintf.h"
#include "media/base/video_color_space.h"
namespace media {
@@ -110,6 +111,50 @@ std::string GetProfileName(VideoCodecProfile profile) {
return "";
}
+std::string BuildH264MimeSuffix(media::VideoCodecProfile profile,
+ uint8_t level) {
+ std::string profile_str;
+ switch (profile) {
+ case media::VideoCodecProfile::H264PROFILE_BASELINE:
+ profile_str = "42";
+ break;
+ case media::VideoCodecProfile::H264PROFILE_MAIN:
+ profile_str = "4d";
+ break;
+ case media::VideoCodecProfile::H264PROFILE_SCALABLEBASELINE:
+ profile_str = "53";
+ break;
+ case media::VideoCodecProfile::H264PROFILE_SCALABLEHIGH:
+ profile_str = "56";
+ break;
+ case media::VideoCodecProfile::H264PROFILE_EXTENDED:
+ profile_str = "58";
+ break;
+ case media::VideoCodecProfile::H264PROFILE_HIGH:
+ profile_str = "64";
+ break;
+ case media::VideoCodecProfile::H264PROFILE_HIGH10PROFILE:
+ profile_str = "6e";
+ break;
+ case media::VideoCodecProfile::H264PROFILE_MULTIVIEWHIGH:
+ profile_str = "76";
+ break;
+ case media::VideoCodecProfile::H264PROFILE_HIGH422PROFILE:
+ profile_str = "7a";
+ break;
+ case media::VideoCodecProfile::H264PROFILE_STEREOHIGH:
+ profile_str = "80";
+ break;
+ case media::VideoCodecProfile::H264PROFILE_HIGH444PREDICTIVEPROFILE:
+ profile_str = "f4";
+ break;
+ default:
+ return "";
+ }
+
+ return base::StringPrintf(".%s%04x", profile_str.c_str(), level);
+}
+
bool ParseNewStyleVp9CodecID(const std::string& codec_id,
VideoCodecProfile* profile,
uint8_t* level_idc,
@@ -889,4 +934,46 @@ void ParseCodec(const std::string& codec_id,
codec = kUnknownVideoCodec;
}
+VideoCodec VideoCodecProfileToVideoCodec(VideoCodecProfile profile) {
+ switch (profile) {
+ case VIDEO_CODEC_PROFILE_UNKNOWN:
+ return kUnknownVideoCodec;
+ case H264PROFILE_BASELINE:
+ case H264PROFILE_MAIN:
+ case H264PROFILE_EXTENDED:
+ case H264PROFILE_HIGH:
+ case H264PROFILE_HIGH10PROFILE:
+ case H264PROFILE_HIGH422PROFILE:
+ case H264PROFILE_HIGH444PREDICTIVEPROFILE:
+ case H264PROFILE_SCALABLEBASELINE:
+ case H264PROFILE_SCALABLEHIGH:
+ case H264PROFILE_STEREOHIGH:
+ case H264PROFILE_MULTIVIEWHIGH:
+ return kCodecH264;
+ case HEVCPROFILE_MAIN:
+ case HEVCPROFILE_MAIN10:
+ case HEVCPROFILE_MAIN_STILL_PICTURE:
+ return kCodecHEVC;
+ case VP8PROFILE_ANY:
+ return kCodecVP8;
+ case VP9PROFILE_PROFILE0:
+ case VP9PROFILE_PROFILE1:
+ case VP9PROFILE_PROFILE2:
+ case VP9PROFILE_PROFILE3:
+ return kCodecVP9;
+ case DOLBYVISION_PROFILE0:
+ case DOLBYVISION_PROFILE4:
+ case DOLBYVISION_PROFILE5:
+ case DOLBYVISION_PROFILE7:
+ case DOLBYVISION_PROFILE8:
+ case DOLBYVISION_PROFILE9:
+ return kCodecDolbyVision;
+ case THEORAPROFILE_ANY:
+ return kCodecTheora;
+ case AV1PROFILE_PROFILE_MAIN:
+ case AV1PROFILE_PROFILE_HIGH:
+ case AV1PROFILE_PROFILE_PRO:
+ return kCodecAV1;
+ }
+}
} // namespace media
diff --git a/chromium/media/base/video_codecs.h b/chromium/media/base/video_codecs.h
index a28472dcef5..0e7dce197d8 100644
--- a/chromium/media/base/video_codecs.h
+++ b/chromium/media/base/video_codecs.h
@@ -107,6 +107,8 @@ struct CodecProfileLevel {
std::string MEDIA_EXPORT GetCodecName(VideoCodec codec);
std::string MEDIA_EXPORT GetProfileName(VideoCodecProfile profile);
+std::string MEDIA_EXPORT BuildH264MimeSuffix(VideoCodecProfile profile,
+ uint8_t level);
// ParseNewStyleVp9CodecID handles parsing of new style vp9 codec IDs per
// proposed VP Codec ISO Media File Format Binding specification:
@@ -154,6 +156,9 @@ MEDIA_EXPORT void ParseCodec(const std::string& codec_id,
VideoColorSpace& color_space);
MEDIA_EXPORT VideoCodec StringToVideoCodec(const std::string& codec_id);
+MEDIA_EXPORT VideoCodec
+VideoCodecProfileToVideoCodec(VideoCodecProfile profile);
+
#if BUILDFLAG(ENABLE_MSE_MPEG2TS_STREAM_PARSER)
// Translate legacy avc1 codec ids (like avc1.66.30 or avc1.77.31) into a new
// style standard avc1 codec ids like avc1.4D002F. If the input codec is not
diff --git a/chromium/media/base/video_decoder_config.cc b/chromium/media/base/video_decoder_config.cc
index f59b7394a03..85cc6a64396 100644
--- a/chromium/media/base/video_decoder_config.cc
+++ b/chromium/media/base/video_decoder_config.cc
@@ -24,51 +24,6 @@ static bool IsValidSize(const gfx::Size& size) {
size.height() <= limits::kMaxDimension;
}
-VideoCodec VideoCodecProfileToVideoCodec(VideoCodecProfile profile) {
- switch (profile) {
- case VIDEO_CODEC_PROFILE_UNKNOWN:
- return kUnknownVideoCodec;
- case H264PROFILE_BASELINE:
- case H264PROFILE_MAIN:
- case H264PROFILE_EXTENDED:
- case H264PROFILE_HIGH:
- case H264PROFILE_HIGH10PROFILE:
- case H264PROFILE_HIGH422PROFILE:
- case H264PROFILE_HIGH444PREDICTIVEPROFILE:
- case H264PROFILE_SCALABLEBASELINE:
- case H264PROFILE_SCALABLEHIGH:
- case H264PROFILE_STEREOHIGH:
- case H264PROFILE_MULTIVIEWHIGH:
- return kCodecH264;
- case HEVCPROFILE_MAIN:
- case HEVCPROFILE_MAIN10:
- case HEVCPROFILE_MAIN_STILL_PICTURE:
- return kCodecHEVC;
- case VP8PROFILE_ANY:
- return kCodecVP8;
- case VP9PROFILE_PROFILE0:
- case VP9PROFILE_PROFILE1:
- case VP9PROFILE_PROFILE2:
- case VP9PROFILE_PROFILE3:
- return kCodecVP9;
- case DOLBYVISION_PROFILE0:
- case DOLBYVISION_PROFILE4:
- case DOLBYVISION_PROFILE5:
- case DOLBYVISION_PROFILE7:
- case DOLBYVISION_PROFILE8:
- case DOLBYVISION_PROFILE9:
- return kCodecDolbyVision;
- case THEORAPROFILE_ANY:
- return kCodecTheora;
- case AV1PROFILE_PROFILE_MAIN:
- case AV1PROFILE_PROFILE_HIGH:
- case AV1PROFILE_PROFILE_PRO:
- return kCodecAV1;
- }
- NOTREACHED();
- return kUnknownVideoCodec;
-}
-
VideoDecoderConfig::VideoDecoderConfig() = default;
VideoDecoderConfig::VideoDecoderConfig(VideoCodec codec,
diff --git a/chromium/media/base/video_decoder_config.h b/chromium/media/base/video_decoder_config.h
index 01ad01d48ca..5d04d524bad 100644
--- a/chromium/media/base/video_decoder_config.h
+++ b/chromium/media/base/video_decoder_config.h
@@ -20,13 +20,10 @@
#include "media/base/video_types.h"
#include "ui/gfx/geometry/rect.h"
#include "ui/gfx/geometry/size.h"
-#include "ui/gl/hdr_metadata.h"
+#include "ui/gfx/hdr_metadata.h"
namespace media {
-MEDIA_EXPORT VideoCodec
-VideoCodecProfileToVideoCodec(VideoCodecProfile profile);
-
// Describes the content of a video stream, as described by the media container
// (or otherwise determined by the demuxer).
class MEDIA_EXPORT VideoDecoderConfig {
@@ -80,6 +77,7 @@ class MEDIA_EXPORT VideoDecoderConfig {
VideoCodec codec() const { return codec_; }
VideoCodecProfile profile() const { return profile_; }
+ void set_profile(VideoCodecProfile profile) { profile_ = profile; }
AlphaMode alpha_mode() const { return alpha_mode_; }
// Difference between encoded and display orientation.
@@ -154,10 +152,10 @@ class MEDIA_EXPORT VideoDecoderConfig {
const VideoColorSpace& color_space_info() const { return color_space_info_; }
// Dynamic range of the image data.
- void set_hdr_metadata(const gl::HDRMetadata& hdr_metadata) {
+ void set_hdr_metadata(const gfx::HDRMetadata& hdr_metadata) {
hdr_metadata_ = hdr_metadata;
}
- const base::Optional<gl::HDRMetadata>& hdr_metadata() const {
+ const base::Optional<gfx::HDRMetadata>& hdr_metadata() const {
return hdr_metadata_;
}
@@ -192,7 +190,7 @@ class MEDIA_EXPORT VideoDecoderConfig {
EncryptionScheme encryption_scheme_ = EncryptionScheme::kUnencrypted;
VideoColorSpace color_space_info_;
- base::Optional<gl::HDRMetadata> hdr_metadata_;
+ base::Optional<gfx::HDRMetadata> hdr_metadata_;
// Not using DISALLOW_COPY_AND_ASSIGN here intentionally to allow the compiler
// generated copy constructor and assignment operator. Since the extra data is
diff --git a/chromium/media/base/video_decoder_config_unittest.cc b/chromium/media/base/video_decoder_config_unittest.cc
index 6c083a0da02..20929bc4036 100644
--- a/chromium/media/base/video_decoder_config_unittest.cc
+++ b/chromium/media/base/video_decoder_config_unittest.cc
@@ -91,4 +91,13 @@ TEST(VideoDecoderConfigTest, Invalid_AspectRatioDenominatorVeryLarge) {
EXPECT_FALSE(config.IsValidConfig());
}
+TEST(VideoDecoderConfigTest, SetProfile) {
+ VideoDecoderConfig config(
+ kCodecVP9, VP9PROFILE_PROFILE0, VideoDecoderConfig::AlphaMode::kIsOpaque,
+ VideoColorSpace(), kNoTransformation, kCodedSize, kVisibleRect,
+ kNaturalSize, EmptyExtraData(), EncryptionScheme::kUnencrypted);
+ config.set_profile(VP9PROFILE_PROFILE2);
+ EXPECT_EQ(config.profile(), VP9PROFILE_PROFILE2);
+}
+
} // namespace media
diff --git a/chromium/media/base/video_encoder.h b/chromium/media/base/video_encoder.h
index 9b1badf5835..1513367c15c 100644
--- a/chromium/media/base/video_encoder.h
+++ b/chromium/media/base/video_encoder.h
@@ -39,10 +39,9 @@ class MEDIA_EXPORT VideoEncoder {
Options(const Options&);
~Options();
base::Optional<uint64_t> bitrate;
- double framerate = 30.0;
+ base::Optional<double> framerate;
- int width = 0;
- int height = 0;
+ gfx::Size frame_size;
base::Optional<int> keyframe_interval = 10000;
};
@@ -92,14 +91,16 @@ class MEDIA_EXPORT VideoEncoder {
bool key_frame,
StatusCB done_cb) = 0;
- // Adjust encoder options for future frames, executing the
- // |done_cb| upon completion.
+ // Adjust encoder options and the output callback for future frames, executing
+ // the |done_cb| upon completion.
//
// Note:
// 1. Not all options can be changed on the fly.
// 2. ChangeOptions() should be called after calling Flush() and waiting
// for it to finish.
- virtual void ChangeOptions(const Options& options, StatusCB done_cb) = 0;
+ virtual void ChangeOptions(const Options& options,
+ OutputCB output_cb,
+ StatusCB done_cb) = 0;
// Requests all outputs for already encoded frames to be
// produced via |output_cb| and calls |dene_cb| after that.
diff --git a/chromium/media/base/video_frame.cc b/chromium/media/base/video_frame.cc
index e889678706f..f3833229a48 100644
--- a/chromium/media/base/video_frame.cc
+++ b/chromium/media/base/video_frame.cc
@@ -606,6 +606,11 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalGpuMemoryBuffer(
return nullptr;
}
+ const size_t num_planes =
+ NumberOfPlanesForLinearBufferFormat(gpu_memory_buffer->GetFormat());
+ std::vector<ColorPlaneLayout> planes(num_planes);
+ for (size_t i = 0; i < num_planes; ++i)
+ planes[i].stride = gpu_memory_buffer->stride(i);
uint64_t modifier = gfx::NativePixmapHandle::kNoModifier;
#if defined(OS_LINUX) || defined(OS_CHROMEOS)
if (gpu_memory_buffer->GetType() == gfx::NATIVE_PIXMAP) {
@@ -615,17 +620,24 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalGpuMemoryBuffer(
DLOG(ERROR) << "Failed to clone the GpuMemoryBufferHandle";
return nullptr;
}
+ if (gmb_handle.native_pixmap_handle.planes.size() != num_planes) {
+ DLOG(ERROR) << "Invalid number of planes="
+ << gmb_handle.native_pixmap_handle.planes.size()
+ << ", expected num_planes=" << num_planes;
+ return nullptr;
+ }
+ for (size_t i = 0; i < num_planes; ++i) {
+ const auto& plane = gmb_handle.native_pixmap_handle.planes[i];
+ planes[i].stride = plane.stride;
+ planes[i].offset = plane.offset;
+ planes[i].size = plane.size;
+ }
modifier = gmb_handle.native_pixmap_handle.modifier;
}
#endif
- const size_t num_planes =
- NumberOfPlanesForLinearBufferFormat(gpu_memory_buffer->GetFormat());
- std::vector<int32_t> strides;
- for (size_t i = 0; i < num_planes; ++i)
- strides.push_back(gpu_memory_buffer->stride(i));
- const auto layout = VideoFrameLayout::CreateWithStrides(
- *format, coded_size, std::move(strides),
+ const auto layout = VideoFrameLayout::CreateWithPlanes(
+ *format, coded_size, std::move(planes),
VideoFrameLayout::kBufferAddressAlignment, modifier);
if (!layout) {
DLOG(ERROR) << __func__ << " Invalid layout";
@@ -689,7 +701,7 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalDmabufs(
#if defined(OS_MAC)
// static
-scoped_refptr<VideoFrame> VideoFrame::WrapIOSurface(
+scoped_refptr<VideoFrame> VideoFrame::WrapUnacceleratedIOSurface(
gfx::GpuMemoryBufferHandle handle,
const gfx::Rect& visible_rect,
base::TimeDelta timestamp) {
@@ -697,8 +709,7 @@ scoped_refptr<VideoFrame> VideoFrame::WrapIOSurface(
DLOG(ERROR) << "Non-IOSurface handle.";
return nullptr;
}
- base::ScopedCFTypeRef<IOSurfaceRef> io_surface =
- gfx::IOSurfaceMachPortToIOSurface(std::move(handle.mach_port));
+ gfx::ScopedIOSurface io_surface = handle.io_surface;
if (!io_surface) {
return nullptr;
}
@@ -1102,12 +1113,9 @@ bool VideoFrame::IsMappable() const {
bool VideoFrame::HasTextures() const {
// A SharedImage can be turned into a texture, and so it counts as a texture
// in the context of this call.
- if (mailbox_holders_[0].mailbox.IsSharedImage())
- return true;
-
- DCHECK(!wrapped_frame_ || !wrapped_frame_->HasTextures());
return wrapped_frame_ ? wrapped_frame_->HasTextures()
- : !mailbox_holders_[0].mailbox.IsZero();
+ : (mailbox_holders_[0].mailbox.IsSharedImage() ||
+ !mailbox_holders_[0].mailbox.IsZero());
}
size_t VideoFrame::NumTextures() const {
@@ -1299,12 +1307,6 @@ VideoFrame::~VideoFrame() {
std::move(mailbox_holders_release_cb_).Run(release_sync_token);
}
- // Someone might be monitoring original wrapped frame for feedback.
- // Ensure all accumulated feedback is propagated to the original frame.
- if (wrapped_frame_) {
- wrapped_frame_->feedback()->Combine(feedback_);
- }
-
for (auto& callback : done_callbacks_)
std::move(callback).Run();
}
diff --git a/chromium/media/base/video_frame.h b/chromium/media/base/video_frame.h
index e31d70dfe0f..4185e3ce506 100644
--- a/chromium/media/base/video_frame.h
+++ b/chromium/media/base/video_frame.h
@@ -27,14 +27,13 @@
#include "build/build_config.h"
#include "gpu/command_buffer/common/mailbox_holder.h"
#include "gpu/ipc/common/vulkan_ycbcr_info.h"
-#include "media/base/video_frame_feedback.h"
#include "media/base/video_frame_layout.h"
#include "media/base/video_frame_metadata.h"
#include "media/base/video_types.h"
#include "ui/gfx/color_space.h"
#include "ui/gfx/geometry/rect.h"
#include "ui/gfx/geometry/size.h"
-#include "ui/gl/hdr_metadata.h"
+#include "ui/gfx/hdr_metadata.h"
#if defined(OS_MAC)
#include <CoreVideo/CVPixelBuffer.h>
@@ -293,8 +292,11 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
base::TimeDelta timestamp);
// Wraps a provided IOSurface with a VideoFrame. The IOSurface is retained
- // and locked for the lifetime of the VideoFrame.
- static scoped_refptr<VideoFrame> WrapIOSurface(
+ // and locked for the lifetime of the VideoFrame. This is for unaccelerated
+ // (CPU-only) access to the IOSurface, and is not efficient. It is the path
+ // that video capture uses when hardware acceleration is disabled.
+ // https://crbug.com/1125879
+ static scoped_refptr<VideoFrame> WrapUnacceleratedIOSurface(
gfx::GpuMemoryBufferHandle handle,
const gfx::Rect& visible_rect,
base::TimeDelta timestamp);
@@ -440,11 +442,11 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
color_space_ = color_space;
}
- const base::Optional<gl::HDRMetadata>& hdr_metadata() const {
+ const base::Optional<gfx::HDRMetadata>& hdr_metadata() const {
return hdr_metadata_;
}
- void set_hdr_metadata(const base::Optional<gl::HDRMetadata>& hdr_metadata) {
+ void set_hdr_metadata(const base::Optional<gfx::HDRMetadata>& hdr_metadata) {
hdr_metadata_ = hdr_metadata;
}
@@ -568,9 +570,6 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
// Resets |metadata_|.
void clear_metadata() { set_metadata(VideoFrameMetadata()); }
- const VideoFrameFeedback* feedback() const { return &feedback_; }
- VideoFrameFeedback* feedback() { return &feedback_; }
-
// The time span between the current frame and the first frame of the stream.
// This is the media timestamp, and not the reference time.
// See VideoFrameMetadata::REFERENCE_TIME for details.
@@ -724,13 +723,11 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
VideoFrameMetadata metadata_;
- VideoFrameFeedback feedback_;
-
// Generated at construction time.
const int unique_id_;
gfx::ColorSpace color_space_;
- base::Optional<gl::HDRMetadata> hdr_metadata_;
+ base::Optional<gfx::HDRMetadata> hdr_metadata_;
// Sampler conversion information which is used in vulkan context for android.
base::Optional<gpu::VulkanYCbCrInfo> ycbcr_info_;
diff --git a/chromium/media/base/video_frame_feedback.h b/chromium/media/base/video_frame_feedback.h
index efb89f5034a..3e64b3fded9 100644
--- a/chromium/media/base/video_frame_feedback.h
+++ b/chromium/media/base/video_frame_feedback.h
@@ -7,10 +7,16 @@
#include <limits>
+#include "base/callback.h"
#include "media/base/media_export.h"
namespace media {
+struct VideoFrameFeedback;
+
+using VideoCaptureFeedbackCB =
+ base::RepeatingCallback<void(const VideoFrameFeedback&)>;
+
// Feedback from the frames consumer.
// This class is passed from the frames sink to the capturer to limit
// incoming video feed frame-rate and/or resolution.
diff --git a/chromium/media/base/video_frame_metadata.cc b/chromium/media/base/video_frame_metadata.cc
index 584bdf6bc43..6cd1c5dcfb0 100644
--- a/chromium/media/base/video_frame_metadata.cc
+++ b/chromium/media/base/video_frame_metadata.cc
@@ -56,6 +56,7 @@ void VideoFrameMetadata::MergeMetadataFrom(
MERGE_FIELD(rtp_timestamp, metadata_source);
MERGE_FIELD(receive_time, metadata_source);
MERGE_FIELD(wallclock_frame_duration, metadata_source);
+ MERGE_FIELD(maximum_composition_delay_in_frames, metadata_source);
}
} // namespace media
diff --git a/chromium/media/base/video_frame_metadata.h b/chromium/media/base/video_frame_metadata.h
index eed8b133a9b..ded4b13adf6 100644
--- a/chromium/media/base/video_frame_metadata.h
+++ b/chromium/media/base/video_frame_metadata.h
@@ -178,6 +178,13 @@ struct MEDIA_EXPORT VideoFrameMetadata {
// expected to spend on the screen during playback. Unlike FRAME_DURATION
// this field takes into account current playback rate.
base::Optional<base::TimeDelta> wallclock_frame_duration;
+
+ // WebRTC streams only: if present, this field represents the maximum
+ // composition delay that is allowed for this frame. This is respected
+ // in a best effort manner.
+ // This is an experimental feature, see crbug.com/1138888 for more
+ // information.
+ base::Optional<int> maximum_composition_delay_in_frames;
};
} // namespace media
diff --git a/chromium/media/base/video_frame_unittest.cc b/chromium/media/base/video_frame_unittest.cc
index da2f4a9120f..30f2bbc64e0 100644
--- a/chromium/media/base/video_frame_unittest.cc
+++ b/chromium/media/base/video_frame_unittest.cc
@@ -9,7 +9,6 @@
#include <memory>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback_helpers.h"
#include "base/format_macros.h"
#include "base/memory/aligned_memory.h"
diff --git a/chromium/media/base/video_thumbnail_decoder.cc b/chromium/media/base/video_thumbnail_decoder.cc
index e1ba856efe0..ab3033cc166 100644
--- a/chromium/media/base/video_thumbnail_decoder.cc
+++ b/chromium/media/base/video_thumbnail_decoder.cc
@@ -5,7 +5,7 @@
#include "media/base/video_thumbnail_decoder.h"
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "media/base/decoder_buffer.h"
#include "media/base/video_frame.h"
diff --git a/chromium/media/base/video_thumbnail_decoder_unittest.cc b/chromium/media/base/video_thumbnail_decoder_unittest.cc
index 93e343345b9..10aa6e34f6b 100644
--- a/chromium/media/base/video_thumbnail_decoder_unittest.cc
+++ b/chromium/media/base/video_thumbnail_decoder_unittest.cc
@@ -6,7 +6,7 @@
#include <vector>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/run_loop.h"
#include "base/test/gmock_callback_support.h"
#include "base/test/task_environment.h"
diff --git a/chromium/media/base/win/BUILD.gn b/chromium/media/base/win/BUILD.gn
index b3b33bd42d2..b4a7cb764a1 100644
--- a/chromium/media/base/win/BUILD.gn
+++ b/chromium/media/base/win/BUILD.gn
@@ -2,13 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-# This file depends on the legacy global sources assignment filter. It should
-# be converted to check target platform before assigning source files to the
-# sources variable. Remove this import and set_sources_assignment_filter call
-# when the file has been converted. See https://crbug.com/1018739 for details.
-import("//build/config/deprecated_default_sources_assignment_filter.gni")
-set_sources_assignment_filter(deprecated_default_sources_assignment_filter)
-
assert(is_win)
config("delay_load_mf") {
@@ -21,7 +14,6 @@ config("delay_load_mf") {
component("media_foundation_util") {
defines = [ "MF_INITIALIZER_IMPLEMENTATION" ]
- set_sources_assignment_filter([])
sources = [
"mf_helpers.cc",
"mf_helpers.h",
@@ -29,7 +21,6 @@ component("media_foundation_util") {
"mf_initializer.h",
"mf_initializer_export.h",
]
- set_sources_assignment_filter(deprecated_default_sources_assignment_filter)
configs += [
# TODO(crbug.com/167187): Fix size_t to int truncations.
"//build/config/compiler:no_size_t_to_int_warning",
diff --git a/chromium/media/base/win/d3d11_mocks.cc b/chromium/media/base/win/d3d11_mocks.cc
index 8d41b19a940..41853a4fa24 100644
--- a/chromium/media/base/win/d3d11_mocks.cc
+++ b/chromium/media/base/win/d3d11_mocks.cc
@@ -50,4 +50,7 @@ D3D11VideoProcessorEnumeratorMock::~D3D11VideoProcessorEnumeratorMock() =
D3D11DeviceContextMock::D3D11DeviceContextMock() = default;
D3D11DeviceContextMock::~D3D11DeviceContextMock() = default;
+D3D11FenceMock::D3D11FenceMock() = default;
+D3D11FenceMock::~D3D11FenceMock() = default;
+
} // namespace media \ No newline at end of file
diff --git a/chromium/media/base/win/d3d11_mocks.h b/chromium/media/base/win/d3d11_mocks.h
index 71314f08199..d2ad6b8b426 100644
--- a/chromium/media/base/win/d3d11_mocks.h
+++ b/chromium/media/base/win/d3d11_mocks.h
@@ -5,8 +5,7 @@
#ifndef MEDIA_BASE_WIN_D3D11_MOCKS_H_
#define MEDIA_BASE_WIN_D3D11_MOCKS_H_
-#include <d3d11.h>
-#include <d3d11_1.h>
+#include <d3d11_4.h>
#include <dxgi1_4.h>
#include <dxgi1_6.h>
#include <wrl/client.h>
@@ -58,7 +57,7 @@ class D3D11BufferMock
class D3D11DeviceMock
: public Microsoft::WRL::RuntimeClass<
Microsoft::WRL::RuntimeClassFlags<Microsoft::WRL::ClassicCom>,
- ID3D11Device> {
+ ID3D11Device5> {
public:
D3D11DeviceMock();
~D3D11DeviceMock() override;
@@ -211,6 +210,108 @@ class D3D11DeviceMock
MOCK_STDCALL_METHOD1(SetExceptionMode, HRESULT(UINT));
MOCK_STDCALL_METHOD0(GetExceptionMode, UINT());
+
+ // ID3D11Device1
+ MOCK_STDCALL_METHOD1(GetImmediateContext1, void(ID3D11DeviceContext1**));
+
+ MOCK_STDCALL_METHOD2(CreateDeferredContext1,
+ HRESULT(UINT, ID3D11DeviceContext1**));
+
+ MOCK_STDCALL_METHOD2(CreateBlendState1,
+ HRESULT(const D3D11_BLEND_DESC1*, ID3D11BlendState1**));
+
+ MOCK_STDCALL_METHOD2(CreateRasterizerState1,
+ HRESULT(const D3D11_RASTERIZER_DESC1*,
+ ID3D11RasterizerState1**));
+
+ MOCK_STDCALL_METHOD7(CreateDeviceContextState,
+ HRESULT(UINT,
+ const D3D_FEATURE_LEVEL*,
+ UINT,
+ UINT,
+ REFIID,
+ D3D_FEATURE_LEVEL*,
+ ID3DDeviceContextState**));
+
+ MOCK_STDCALL_METHOD3(OpenSharedResource1, HRESULT(HANDLE, REFIID, void**));
+
+ MOCK_STDCALL_METHOD4(OpenSharedResourceByName,
+ HRESULT(LPCWSTR, DWORD, REFIID, void**));
+
+ // ID3D11Device2
+ MOCK_STDCALL_METHOD4(CheckMultisampleQualityLevels1,
+ HRESULT(DXGI_FORMAT, UINT, UINT, UINT*));
+
+ MOCK_STDCALL_METHOD2(CreateDeferredContext2,
+ HRESULT(UINT, ID3D11DeviceContext2**));
+
+ MOCK_STDCALL_METHOD1(GetImmediateContext2, void(ID3D11DeviceContext2**));
+
+ MOCK_STDCALL_METHOD7(GetResourceTiling,
+ void(ID3D11Resource*,
+ UINT*,
+ D3D11_PACKED_MIP_DESC*,
+ D3D11_TILE_SHAPE*,
+ UINT*,
+ UINT,
+ D3D11_SUBRESOURCE_TILING*));
+
+ // ID3D11Device3
+ MOCK_STDCALL_METHOD3(CreateTexture2D1,
+ HRESULT(const D3D11_TEXTURE2D_DESC1*,
+ const D3D11_SUBRESOURCE_DATA*,
+ ID3D11Texture2D1**));
+
+ MOCK_STDCALL_METHOD3(CreateTexture3D1,
+ HRESULT(const D3D11_TEXTURE3D_DESC1*,
+ const D3D11_SUBRESOURCE_DATA*,
+ ID3D11Texture3D1**));
+
+ MOCK_STDCALL_METHOD2(CreateRasterizerState2,
+ HRESULT(const D3D11_RASTERIZER_DESC2*,
+ ID3D11RasterizerState2**));
+
+ MOCK_STDCALL_METHOD3(CreateShaderResourceView1,
+ HRESULT(ID3D11Resource*,
+ const D3D11_SHADER_RESOURCE_VIEW_DESC1*,
+ ID3D11ShaderResourceView1**));
+
+ MOCK_STDCALL_METHOD3(CreateUnorderedAccessView1,
+ HRESULT(ID3D11Resource*,
+ const D3D11_UNORDERED_ACCESS_VIEW_DESC1*,
+ ID3D11UnorderedAccessView1**));
+
+ MOCK_STDCALL_METHOD3(CreateRenderTargetView1,
+ HRESULT(ID3D11Resource*,
+ const D3D11_RENDER_TARGET_VIEW_DESC1*,
+ ID3D11RenderTargetView1**));
+
+ MOCK_STDCALL_METHOD2(CreateQuery1,
+ HRESULT(const D3D11_QUERY_DESC1*, ID3D11Query1**));
+
+ MOCK_STDCALL_METHOD1(GetImmediateContext3, void(ID3D11DeviceContext3**));
+
+ MOCK_STDCALL_METHOD2(CreateDeferredContext3,
+ HRESULT(UINT, ID3D11DeviceContext3**));
+
+ MOCK_STDCALL_METHOD6(
+ WriteToSubresource,
+ void(ID3D11Resource*, UINT, const D3D11_BOX*, const void*, UINT, UINT));
+
+ MOCK_STDCALL_METHOD6(
+ ReadFromSubresource,
+ void(void*, UINT, UINT, ID3D11Resource*, UINT, const D3D11_BOX*));
+
+ // ID3D11Device4
+ MOCK_STDCALL_METHOD2(RegisterDeviceRemovedEvent, HRESULT(HANDLE, DWORD*));
+
+ MOCK_STDCALL_METHOD1(UnregisterDeviceRemoved, void(DWORD));
+
+ // ID3D11Device5
+ MOCK_STDCALL_METHOD3(OpenSharedFence, HRESULT(HANDLE, REFIID, void**));
+
+ MOCK_STDCALL_METHOD4(CreateFence,
+ HRESULT(UINT64, D3D11_FENCE_FLAG, REFIID, void**));
};
class DXGIFactoryMock
@@ -773,7 +874,7 @@ class D3D11VideoDecoderMock
class D3D11DeviceContextMock
: public Microsoft::WRL::RuntimeClass<
Microsoft::WRL::RuntimeClassFlags<Microsoft::WRL::ClassicCom>,
- ID3D11DeviceContext> {
+ ID3D11DeviceContext4> {
public:
D3D11DeviceContextMock();
~D3D11DeviceContextMock() override;
@@ -1299,6 +1400,167 @@ class D3D11DeviceContextMock
MOCK_STDCALL_METHOD2(FinishCommandList,
HRESULT(BOOL RestoreDeferredContextState,
ID3D11CommandList** ppCommandList));
+
+ // ID3D11DeviceContext1
+ MOCK_STDCALL_METHOD9(CopySubresourceRegion1,
+ void(ID3D11Resource*,
+ UINT,
+ UINT,
+ UINT,
+ UINT,
+ ID3D11Resource*,
+ UINT,
+ const D3D11_BOX*,
+ UINT));
+
+ MOCK_STDCALL_METHOD7(UpdateSubresource1,
+ void(ID3D11Resource*,
+ UINT,
+ const D3D11_BOX*,
+ const void*,
+ UINT,
+ UINT,
+ UINT));
+
+ MOCK_STDCALL_METHOD1(DiscardResource, void(ID3D11Resource*));
+
+ MOCK_STDCALL_METHOD1(DiscardView, void(ID3D11View*));
+
+ MOCK_STDCALL_METHOD5(
+ VSSetConstantBuffers1,
+ void(UINT, UINT, ID3D11Buffer* const*, const UINT*, const UINT*));
+
+ MOCK_STDCALL_METHOD5(
+ HSSetConstantBuffers1,
+ void(UINT, UINT, ID3D11Buffer* const*, const UINT*, const UINT*));
+
+ MOCK_STDCALL_METHOD5(
+ DSSetConstantBuffers1,
+ void(UINT, UINT, ID3D11Buffer* const*, const UINT*, const UINT*));
+
+ MOCK_STDCALL_METHOD5(
+ GSSetConstantBuffers1,
+ void(UINT, UINT, ID3D11Buffer* const*, const UINT*, const UINT*));
+
+ MOCK_STDCALL_METHOD5(
+ PSSetConstantBuffers1,
+ void(UINT, UINT, ID3D11Buffer* const*, const UINT*, const UINT*));
+
+ MOCK_STDCALL_METHOD5(
+ CSSetConstantBuffers1,
+ void(UINT, UINT, ID3D11Buffer* const*, const UINT*, const UINT*));
+
+ MOCK_STDCALL_METHOD5(VSGetConstantBuffers1,
+ void(UINT, UINT, ID3D11Buffer**, UINT*, UINT*));
+
+ MOCK_STDCALL_METHOD5(HSGetConstantBuffers1,
+ void(UINT, UINT, ID3D11Buffer**, UINT*, UINT*));
+
+ MOCK_STDCALL_METHOD5(DSGetConstantBuffers1,
+ void(UINT, UINT, ID3D11Buffer**, UINT*, UINT*));
+
+ MOCK_STDCALL_METHOD5(GSGetConstantBuffers1,
+ void(UINT, UINT, ID3D11Buffer**, UINT*, UINT*));
+
+ MOCK_STDCALL_METHOD5(PSGetConstantBuffers1,
+ void(UINT, UINT, ID3D11Buffer**, UINT*, UINT*));
+
+ MOCK_STDCALL_METHOD5(CSGetConstantBuffers1,
+ void(UINT, UINT, ID3D11Buffer**, UINT*, UINT*));
+
+ MOCK_STDCALL_METHOD2(SwapDeviceContextState,
+ void(ID3DDeviceContextState*, ID3DDeviceContextState**));
+
+ MOCK_STDCALL_METHOD4(
+ ClearView,
+ void(ID3D11View*, const FLOAT[4], const D3D11_RECT*, UINT));
+
+ MOCK_STDCALL_METHOD3(DiscardView1,
+ void(ID3D11View*, const D3D11_RECT*, UINT));
+
+ // ID3D11DeviceContext2
+ MOCK_STDCALL_METHOD10(UpdateTileMappings,
+ HRESULT(ID3D11Resource*,
+ UINT,
+ const D3D11_TILED_RESOURCE_COORDINATE*,
+ const D3D11_TILE_REGION_SIZE*,
+ ID3D11Buffer*,
+ UINT,
+ const UINT*,
+ const UINT*,
+ const UINT*,
+ UINT));
+
+ MOCK_STDCALL_METHOD6(CopyTileMappings,
+ HRESULT(ID3D11Resource*,
+ const D3D11_TILED_RESOURCE_COORDINATE*,
+ ID3D11Resource*,
+ const D3D11_TILED_RESOURCE_COORDINATE*,
+ const D3D11_TILE_REGION_SIZE*,
+ UINT));
+
+ MOCK_STDCALL_METHOD6(CopyTiles,
+ void(ID3D11Resource*,
+ const D3D11_TILED_RESOURCE_COORDINATE*,
+ const D3D11_TILE_REGION_SIZE*,
+ ID3D11Buffer*,
+ UINT64,
+ UINT));
+
+ MOCK_STDCALL_METHOD5(UpdateTiles,
+ void(ID3D11Resource*,
+ const D3D11_TILED_RESOURCE_COORDINATE*,
+ const D3D11_TILE_REGION_SIZE*,
+ const void*,
+ UINT));
+
+ MOCK_STDCALL_METHOD2(ResizeTilePool, HRESULT(ID3D11Buffer*, UINT64));
+
+ MOCK_STDCALL_METHOD2(TiledResourceBarrier,
+ void(ID3D11DeviceChild*, ID3D11DeviceChild*));
+
+ MOCK_STDCALL_METHOD0(IsAnnotationEnabled, BOOL());
+
+ MOCK_STDCALL_METHOD2(SetMarkerInt, void(LPCWSTR, INT));
+
+ MOCK_STDCALL_METHOD2(BeginEventInt, void(LPCWSTR, INT));
+
+ MOCK_STDCALL_METHOD0(EndEvent, void());
+
+ // ID3D11DeviceContext3
+ MOCK_STDCALL_METHOD2(Flush1, void(D3D11_CONTEXT_TYPE, HANDLE));
+
+ MOCK_STDCALL_METHOD1(SetHardwareProtectionState, void(BOOL));
+
+ MOCK_STDCALL_METHOD1(GetHardwareProtectionState, void(BOOL*));
+
+ // ID3D11DeviceContext4
+ MOCK_STDCALL_METHOD2(Signal, HRESULT(ID3D11Fence*, UINT64));
+
+ MOCK_STDCALL_METHOD2(Wait, HRESULT(ID3D11Fence*, UINT64));
+};
+
+class D3D11FenceMock
+ : public Microsoft::WRL::RuntimeClass<
+ Microsoft::WRL::RuntimeClassFlags<Microsoft::WRL::ClassicCom>,
+ ID3D11Fence> {
+ public:
+ D3D11FenceMock();
+ ~D3D11FenceMock() override;
+
+ MOCK_STDCALL_METHOD1(GetDevice, void(ID3D11Device**));
+ MOCK_STDCALL_METHOD3(GetPrivateData, HRESULT(const GUID&, UINT*, void*));
+ MOCK_STDCALL_METHOD3(SetPrivateData, HRESULT(const GUID&, UINT, const void*));
+ MOCK_STDCALL_METHOD2(SetPrivateDataInterface,
+ HRESULT(const GUID&, const IUnknown*));
+
+ MOCK_STDCALL_METHOD4(
+ CreateSharedHandle,
+ HRESULT(const SECURITY_ATTRIBUTES*, DWORD, LPCWSTR, HANDLE*));
+
+ MOCK_STDCALL_METHOD0(GetCompletedValue, UINT64());
+
+ MOCK_STDCALL_METHOD2(SetEventOnCompletion, HRESULT(UINT64, HANDLE));
};
} // namespace media
diff --git a/chromium/media/base/win/test_utils.h b/chromium/media/base/win/test_utils.h
index 9af55425019..8c7243af34a 100644
--- a/chromium/media/base/win/test_utils.h
+++ b/chromium/media/base/win/test_utils.h
@@ -43,6 +43,9 @@
#define MOCK_STDCALL_METHOD9(Name, Types) \
MOCK_METHOD9_WITH_CALLTYPE(STDMETHODCALLTYPE, Name, Types)
+#define MOCK_STDCALL_METHOD10(Name, Types) \
+ MOCK_METHOD10_WITH_CALLTYPE(STDMETHODCALLTYPE, Name, Types)
+
// Helper ON_CALL and EXPECT_CALL for Microsoft::WRL::ComPtr, e.g.
// COM_EXPECT_CALL(foo_, Bar());
// where |foo_| is ComPtr<D3D11FooMock>.
diff --git a/chromium/media/blink/BUILD.gn b/chromium/media/blink/BUILD.gn
index fa583550b88..5e5ede151da 100644
--- a/chromium/media/blink/BUILD.gn
+++ b/chromium/media/blink/BUILD.gn
@@ -51,10 +51,6 @@ component("blink") {
"video_decode_stats_reporter.h",
"video_frame_compositor.cc",
"video_frame_compositor.h",
- "watch_time_component.cc",
- "watch_time_component.h",
- "watch_time_reporter.cc",
- "watch_time_reporter.h",
"webcontentdecryptionmodule_impl.cc",
"webcontentdecryptionmodule_impl.h",
"webcontentdecryptionmoduleaccess_impl.cc",
@@ -161,7 +157,6 @@ test("media_blink_unittests") {
"url_index_unittest.cc",
"video_decode_stats_reporter_unittest.cc",
"video_frame_compositor_unittest.cc",
- "watch_time_component_unittest.cc",
"watch_time_reporter_unittest.cc",
"webmediaplayer_impl_unittest.cc",
]
diff --git a/chromium/media/blink/DEPS b/chromium/media/blink/DEPS
index 87f8a9e3926..35711a7450e 100644
--- a/chromium/media/blink/DEPS
+++ b/chromium/media/blink/DEPS
@@ -18,6 +18,7 @@ include_rules = [
"+services/network/public/cpp",
"+services/network/public/mojom",
"+services/service_manager/public/cpp",
+ "+third_party/blink/public/common",
"+third_party/blink/public/mojom",
"+third_party/blink/public/platform",
"+third_party/blink/public/strings/grit/blink_strings.h",
diff --git a/chromium/media/blink/multibuffer_data_source_unittest.cc b/chromium/media/blink/multibuffer_data_source_unittest.cc
index c5f644f59c8..45d0fbc481e 100644
--- a/chromium/media/blink/multibuffer_data_source_unittest.cc
+++ b/chromium/media/blink/multibuffer_data_source_unittest.cc
@@ -9,7 +9,6 @@
#include "base/run_loop.h"
#include "base/stl_util.h"
#include "base/strings/string_number_conversions.h"
-#include "base/test/scoped_feature_list.h"
#include "media/base/media_switches.h"
#include "media/base/media_util.h"
#include "media/base/mock_filters.h"
diff --git a/chromium/media/blink/multibuffer_unittest.cc b/chromium/media/blink/multibuffer_unittest.cc
index 8e3c095d34d..29d81549b42 100644
--- a/chromium/media/blink/multibuffer_unittest.cc
+++ b/chromium/media/blink/multibuffer_unittest.cc
@@ -9,7 +9,6 @@
#include <vector>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback_helpers.h"
#include "base/containers/circular_deque.h"
#include "base/logging.h"
diff --git a/chromium/media/blink/resource_multibuffer_data_provider.cc b/chromium/media/blink/resource_multibuffer_data_provider.cc
index 17f9a748510..4d252387b5e 100644
--- a/chromium/media/blink/resource_multibuffer_data_provider.cc
+++ b/chromium/media/blink/resource_multibuffer_data_provider.cc
@@ -425,12 +425,6 @@ void ResourceMultiBufferDataProvider::DidDownloadData(uint64_t dataLength) {
NOTIMPLEMENTED();
}
-void ResourceMultiBufferDataProvider::DidReceiveCachedMetadata(
- const char* data,
- int data_length) {
- NOTIMPLEMENTED();
-}
-
void ResourceMultiBufferDataProvider::DidFinishLoading() {
DVLOG(1) << "didFinishLoading";
DCHECK(active_loader_.get());
diff --git a/chromium/media/blink/resource_multibuffer_data_provider.h b/chromium/media/blink/resource_multibuffer_data_provider.h
index 999b67546a5..7e9271f2c08 100644
--- a/chromium/media/blink/resource_multibuffer_data_provider.h
+++ b/chromium/media/blink/resource_multibuffer_data_provider.h
@@ -56,7 +56,6 @@ class MEDIA_BLINK_EXPORT ResourceMultiBufferDataProvider
void DidReceiveResponse(const blink::WebURLResponse& response) override;
void DidDownloadData(uint64_t data_length) override;
void DidReceiveData(const char* data, int data_length) override;
- void DidReceiveCachedMetadata(const char* data, int dataLength) override;
void DidFinishLoading() override;
void DidFail(const blink::WebURLError&) override;
diff --git a/chromium/media/blink/url_index_unittest.cc b/chromium/media/blink/url_index_unittest.cc
index 0e4992c808d..018a23a02d8 100644
--- a/chromium/media/blink/url_index_unittest.cc
+++ b/chromium/media/blink/url_index_unittest.cc
@@ -10,7 +10,6 @@
#include "base/run_loop.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/stringprintf.h"
-#include "base/test/scoped_feature_list.h"
#include "media/base/media_switches.h"
#include "media/blink/url_index.h"
#include "testing/gtest/include/gtest/gtest.h"
diff --git a/chromium/media/blink/video_frame_compositor.cc b/chromium/media/blink/video_frame_compositor.cc
index 700d193cb33..ffb1eedfc60 100644
--- a/chromium/media/blink/video_frame_compositor.cc
+++ b/chromium/media/blink/video_frame_compositor.cc
@@ -6,6 +6,7 @@
#include "base/bind.h"
#include "base/callback_helpers.h"
+#include "base/synchronization/waitable_event.h"
#include "base/time/default_tick_clock.h"
#include "base/trace_event/trace_event.h"
#include "components/viz/common/frame_sinks/begin_frame_args.h"
@@ -57,9 +58,13 @@ VideoFrameCompositor::GetUpdateSubmissionStateCallback() {
return update_submission_state_callback_;
}
-void VideoFrameCompositor::SetIsSurfaceVisible(bool is_visible) {
+void VideoFrameCompositor::SetIsSurfaceVisible(
+ bool is_visible,
+ base::WaitableEvent* done_event) {
DCHECK(task_runner_->BelongsToCurrentThread());
submitter_->SetIsSurfaceVisible(is_visible);
+ if (done_event)
+ done_event->Signal();
}
void VideoFrameCompositor::InitializeSubmitter() {
diff --git a/chromium/media/blink/video_frame_compositor.h b/chromium/media/blink/video_frame_compositor.h
index e848f2825a0..c6c3e0976e0 100644
--- a/chromium/media/blink/video_frame_compositor.h
+++ b/chromium/media/blink/video_frame_compositor.h
@@ -25,6 +25,10 @@
#include "third_party/blink/public/platform/web_video_frame_submitter.h"
#include "ui/gfx/geometry/size.h"
+namespace base {
+class WaitableEvent;
+}
+
namespace viz {
class SurfaceId;
}
@@ -177,7 +181,7 @@ class MEDIA_BLINK_EXPORT VideoFrameCompositor : public VideoRendererSink,
// Signals the VideoFrameSubmitter to stop submitting frames. Sets whether the
// video surface is visible within the view port.
- void SetIsSurfaceVisible(bool is_visible);
+ void SetIsSurfaceVisible(bool is_visible, base::WaitableEvent* done_event);
// Indicates whether the endpoint for the VideoFrame exists.
bool IsClientSinkAvailable();
diff --git a/chromium/media/blink/video_frame_compositor_unittest.cc b/chromium/media/blink/video_frame_compositor_unittest.cc
index 2ef6497c88b..de0ba3c05b0 100644
--- a/chromium/media/blink/video_frame_compositor_unittest.cc
+++ b/chromium/media/blink/video_frame_compositor_unittest.cc
@@ -7,7 +7,6 @@
#include "base/macros.h"
#include "base/run_loop.h"
#include "base/test/gmock_callback_support.h"
-#include "base/test/scoped_feature_list.h"
#include "base/test/simple_test_tick_clock.h"
#include "components/viz/common/frame_sinks/begin_frame_args.h"
#include "components/viz/common/surfaces/frame_sink_id.h"
@@ -49,38 +48,24 @@ class MockWebVideoFrameSubmitter : public blink::WebVideoFrameSubmitter {
};
class VideoFrameCompositorTest : public VideoRendererSink::RenderCallback,
- public ::testing::TestWithParam<bool> {
+ public testing::Test {
public:
VideoFrameCompositorTest()
: client_(new StrictMock<MockWebVideoFrameSubmitter>()) {}
void SetUp() override {
- if (IsSurfaceLayerForVideoEnabled()) {
- feature_list_.InitFromCommandLine("UseSurfaceLayerForVideo", "");
-
- // When SurfaceLayerForVideo is enabled, |compositor_| owns the
- // |submitter_|. Otherwise, the |compositor_| treats the |submitter_| as
- // if it were a VideoFrameProviderClient in the VideoLayer code path,
- // holding only a bare pointer.
- }
submitter_ = client_.get();
- if (!IsSurfaceLayerForVideoEnabled()) {
- compositor_ = std::make_unique<VideoFrameCompositor>(
- base::ThreadTaskRunnerHandle::Get(), nullptr);
- compositor_->SetVideoFrameProviderClient(client_.get());
- } else {
- EXPECT_CALL(*submitter_, Initialize(_, _));
- compositor_ = std::make_unique<VideoFrameCompositor>(
- base::ThreadTaskRunnerHandle::Get(), std::move(client_));
- base::RunLoop().RunUntilIdle();
- EXPECT_CALL(*submitter_,
- SetRotation(Eq(media::VideoRotation::VIDEO_ROTATION_90)));
- EXPECT_CALL(*submitter_, SetForceSubmit(false));
- EXPECT_CALL(*submitter_, EnableSubmission(Eq(viz::SurfaceId())));
- compositor_->EnableSubmission(
- viz::SurfaceId(), media::VideoRotation::VIDEO_ROTATION_90, false);
- }
+ EXPECT_CALL(*submitter_, Initialize(_, _));
+ compositor_ = std::make_unique<VideoFrameCompositor>(
+ base::ThreadTaskRunnerHandle::Get(), std::move(client_));
+ base::RunLoop().RunUntilIdle();
+ EXPECT_CALL(*submitter_,
+ SetRotation(Eq(media::VideoRotation::VIDEO_ROTATION_90)));
+ EXPECT_CALL(*submitter_, SetForceSubmit(false));
+ EXPECT_CALL(*submitter_, EnableSubmission(Eq(viz::SurfaceId())));
+ compositor_->EnableSubmission(
+ viz::SurfaceId(), media::VideoRotation::VIDEO_ROTATION_90, false);
compositor_->set_tick_clock_for_testing(&tick_clock_);
// Disable background rendering by default.
@@ -109,8 +94,6 @@ class VideoFrameCompositorTest : public VideoRendererSink::RenderCallback,
}
protected:
- bool IsSurfaceLayerForVideoEnabled() { return GetParam(); }
-
// VideoRendererSink::RenderCallback implementation.
MOCK_METHOD3(Render,
scoped_refptr<VideoFrame>(base::TimeTicks,
@@ -154,34 +137,59 @@ class VideoFrameCompositorTest : public VideoRendererSink::RenderCallback,
std::unique_ptr<VideoFrameCompositor> compositor_;
private:
- base::test::ScopedFeatureList feature_list_;
-
DISALLOW_COPY_AND_ASSIGN(VideoFrameCompositorTest);
};
-TEST_P(VideoFrameCompositorTest, InitialValues) {
+TEST_F(VideoFrameCompositorTest, InitialValues) {
EXPECT_FALSE(compositor()->GetCurrentFrame().get());
}
-TEST_P(VideoFrameCompositorTest, SetIsSurfaceVisible) {
- if (!IsSurfaceLayerForVideoEnabled())
- return;
-
+TEST_F(VideoFrameCompositorTest, SetIsSurfaceVisible) {
auto cb = compositor()->GetUpdateSubmissionStateCallback();
- EXPECT_CALL(*submitter_, SetIsSurfaceVisible(true));
- cb.Run(true);
- base::RunLoop().RunUntilIdle();
+ {
+ base::RunLoop run_loop;
+ EXPECT_CALL(*submitter_, SetIsSurfaceVisible(true));
+ cb.Run(true, nullptr);
+ base::ThreadTaskRunnerHandle::Get()->PostTask(FROM_HERE,
+ run_loop.QuitClosure());
+ run_loop.Run();
+ }
- EXPECT_CALL(*submitter_, SetIsSurfaceVisible(false));
- cb.Run(false);
- base::RunLoop().RunUntilIdle();
-}
+ {
+ base::RunLoop run_loop;
+ EXPECT_CALL(*submitter_, SetIsSurfaceVisible(false));
+ cb.Run(false, nullptr);
+ base::ThreadTaskRunnerHandle::Get()->PostTask(FROM_HERE,
+ run_loop.QuitClosure());
+ run_loop.Run();
+ }
+
+ {
+ base::RunLoop run_loop;
+ base::WaitableEvent true_event;
+ EXPECT_CALL(*submitter_, SetIsSurfaceVisible(true));
+ cb.Run(true, &true_event);
+ base::ThreadTaskRunnerHandle::Get()->PostTask(FROM_HERE,
+ run_loop.QuitClosure());
+ run_loop.Run();
+ EXPECT_TRUE(true_event.IsSignaled());
+ }
-TEST_P(VideoFrameCompositorTest, SetIsPageVisible) {
- if (!IsSurfaceLayerForVideoEnabled())
- return;
+ {
+ base::RunLoop run_loop;
+ base::WaitableEvent false_event;
+ EXPECT_CALL(*submitter_, SetIsSurfaceVisible(false));
+ cb.Run(false, &false_event);
+ base::ThreadTaskRunnerHandle::Get()->PostTask(FROM_HERE,
+ run_loop.QuitClosure());
+ run_loop.Run();
+ EXPECT_TRUE(false_event.IsSignaled());
+ }
+}
+
+TEST_F(VideoFrameCompositorTest, SetIsPageVisible) {
EXPECT_CALL(*submitter_, SetIsPageVisible(true));
compositor()->SetIsPageVisible(true);
@@ -189,7 +197,7 @@ TEST_P(VideoFrameCompositorTest, SetIsPageVisible) {
compositor()->SetIsPageVisible(false);
}
-TEST_P(VideoFrameCompositorTest, PaintSingleFrame) {
+TEST_F(VideoFrameCompositorTest, PaintSingleFrame) {
scoped_refptr<VideoFrame> expected = VideoFrame::CreateEOSFrame();
// Should notify compositor synchronously.
@@ -200,7 +208,7 @@ TEST_P(VideoFrameCompositorTest, PaintSingleFrame) {
EXPECT_EQ(1, submitter_->did_receive_frame_count());
}
-TEST_P(VideoFrameCompositorTest, RenderFiresPresentationCallback) {
+TEST_F(VideoFrameCompositorTest, RenderFiresPresentationCallback) {
// Advance the clock so we can differentiate between base::TimeTicks::Now()
// and base::TimeTicks().
tick_clock_.Advance(base::TimeDelta::FromSeconds(1));
@@ -218,10 +226,7 @@ TEST_P(VideoFrameCompositorTest, RenderFiresPresentationCallback) {
EXPECT_NE(base::TimeTicks(), metadata->expected_display_time);
}
-TEST_P(VideoFrameCompositorTest, PresentationCallbackForcesBeginFrames) {
- if (!IsSurfaceLayerForVideoEnabled())
- return;
-
+TEST_F(VideoFrameCompositorTest, PresentationCallbackForcesBeginFrames) {
// A call to the requestVideoFrameCallback() API should set ForceBeginFrames.
EXPECT_CALL(*submitter_, SetForceBeginFrames(true));
compositor()->SetOnFramePresentedCallback(GetNewFramePresentedCB());
@@ -238,7 +243,7 @@ TEST_P(VideoFrameCompositorTest, PresentationCallbackForcesBeginFrames) {
testing::Mock::VerifyAndClear(submitter_);
}
-TEST_P(VideoFrameCompositorTest, MultiplePresentationCallbacks) {
+TEST_F(VideoFrameCompositorTest, MultiplePresentationCallbacks) {
// Advance the clock so we can differentiate between base::TimeTicks::Now()
// and base::TimeTicks().
tick_clock_.Advance(base::TimeDelta::FromSeconds(1));
@@ -279,7 +284,7 @@ TEST_P(VideoFrameCompositorTest, MultiplePresentationCallbacks) {
EXPECT_EQ(metadata->width, kSize3);
}
-TEST_P(VideoFrameCompositorTest, VideoRendererSinkFrameDropped) {
+TEST_F(VideoFrameCompositorTest, VideoRendererSinkFrameDropped) {
scoped_refptr<VideoFrame> opaque_frame = CreateOpaqueFrame();
EXPECT_CALL(*this, Render(_, _, _)).WillRepeatedly(Return(opaque_frame));
@@ -313,23 +318,14 @@ TEST_P(VideoFrameCompositorTest, VideoRendererSinkFrameDropped) {
StopVideoRendererSink(true);
}
-TEST_P(VideoFrameCompositorTest, VideoLayerShutdownWhileRendering) {
- if (!IsSurfaceLayerForVideoEnabled()) {
- EXPECT_CALL(*this, Render(_, _, true)).WillOnce(Return(nullptr));
- StartVideoRendererSink();
- compositor_->SetVideoFrameProviderClient(nullptr);
- StopVideoRendererSink(false);
- }
-}
-
-TEST_P(VideoFrameCompositorTest, StartFiresBackgroundRender) {
+TEST_F(VideoFrameCompositorTest, StartFiresBackgroundRender) {
scoped_refptr<VideoFrame> opaque_frame = CreateOpaqueFrame();
EXPECT_CALL(*this, Render(_, _, true)).WillRepeatedly(Return(opaque_frame));
StartVideoRendererSink();
StopVideoRendererSink(true);
}
-TEST_P(VideoFrameCompositorTest, BackgroundRenderTicks) {
+TEST_F(VideoFrameCompositorTest, BackgroundRenderTicks) {
scoped_refptr<VideoFrame> opaque_frame = CreateOpaqueFrame();
compositor_->set_background_rendering_for_testing(true);
@@ -350,7 +346,7 @@ TEST_P(VideoFrameCompositorTest, BackgroundRenderTicks) {
StopVideoRendererSink(true);
}
-TEST_P(VideoFrameCompositorTest,
+TEST_F(VideoFrameCompositorTest,
UpdateCurrentFrameWorksWhenBackgroundRendered) {
scoped_refptr<VideoFrame> opaque_frame = CreateOpaqueFrame();
compositor_->set_background_rendering_for_testing(true);
@@ -378,7 +374,7 @@ TEST_P(VideoFrameCompositorTest,
StopVideoRendererSink(true);
}
-TEST_P(VideoFrameCompositorTest, UpdateCurrentFrameIfStale) {
+TEST_F(VideoFrameCompositorTest, UpdateCurrentFrameIfStale) {
scoped_refptr<VideoFrame> opaque_frame_1 = CreateOpaqueFrame();
scoped_refptr<VideoFrame> opaque_frame_2 = CreateOpaqueFrame();
compositor_->set_background_rendering_for_testing(true);
@@ -437,7 +433,7 @@ TEST_P(VideoFrameCompositorTest, UpdateCurrentFrameIfStale) {
StopVideoRendererSink(false);
}
-TEST_P(VideoFrameCompositorTest, UpdateCurrentFrameIfStale_ClientBypass) {
+TEST_F(VideoFrameCompositorTest, UpdateCurrentFrameIfStale_ClientBypass) {
scoped_refptr<VideoFrame> opaque_frame_1 = CreateOpaqueFrame();
scoped_refptr<VideoFrame> opaque_frame_2 = CreateOpaqueFrame();
compositor_->set_background_rendering_for_testing(true);
@@ -466,7 +462,7 @@ TEST_P(VideoFrameCompositorTest, UpdateCurrentFrameIfStale_ClientBypass) {
StopVideoRendererSink(true);
}
-TEST_P(VideoFrameCompositorTest, PreferredRenderInterval) {
+TEST_F(VideoFrameCompositorTest, PreferredRenderInterval) {
preferred_render_interval_ = base::TimeDelta::FromSeconds(1);
compositor_->Start(this);
EXPECT_EQ(compositor_->GetPreferredRenderInterval(),
@@ -476,8 +472,4 @@ TEST_P(VideoFrameCompositorTest, PreferredRenderInterval) {
viz::BeginFrameArgs::MinInterval());
}
-INSTANTIATE_TEST_SUITE_P(SubmitterEnabled,
- VideoFrameCompositorTest,
- ::testing::Bool());
-
} // namespace media
diff --git a/chromium/media/blink/watch_time_component.cc b/chromium/media/blink/watch_time_component.cc
deleted file mode 100644
index c7555dbdb77..00000000000
--- a/chromium/media/blink/watch_time_component.cc
+++ /dev/null
@@ -1,133 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/blink/watch_time_component.h"
-
-#include "media/blink/media_blink_export.h"
-#include "third_party/blink/public/platform/web_media_player.h"
-
-namespace media {
-
-template <typename T>
-WatchTimeComponent<T>::WatchTimeComponent(
- T initial_value,
- std::vector<WatchTimeKey> keys_to_finalize,
- ValueToKeyCB value_to_key_cb,
- GetMediaTimeCB get_media_time_cb,
- mojom::WatchTimeRecorder* recorder)
- : keys_to_finalize_(std::move(keys_to_finalize)),
- value_to_key_cb_(std::move(value_to_key_cb)),
- get_media_time_cb_(std::move(get_media_time_cb)),
- recorder_(recorder),
- current_value_(initial_value),
- pending_value_(initial_value) {}
-
-template <typename T>
-WatchTimeComponent<T>::~WatchTimeComponent() = default;
-
-template <typename T>
-void WatchTimeComponent<T>::OnReportingStarted(
- base::TimeDelta start_timestamp) {
- start_timestamp_ = start_timestamp;
- end_timestamp_ = last_timestamp_ = kNoTimestamp;
-}
-
-template <typename T>
-void WatchTimeComponent<T>::SetPendingValue(T new_value) {
- pending_value_ = new_value;
- if (current_value_ != new_value) {
- // Don't trample an existing finalize; the first takes precedence.
- //
- // Note: For components with trinary or higher state, which experience
- // multiple state changes during an existing finalize, this will drop all
- // watch time between the current and final state. E.g., state=0 {0ms} ->
- // state=1 {1ms} -> state=2 {2ms} will result in loss of state=1 watch time.
- if (end_timestamp_ != kNoTimestamp)
- return;
-
- end_timestamp_ = get_media_time_cb_.Run();
- return;
- }
-
- // Clear any pending finalize since we returned to the previous value before
- // the finalize could completed. I.e., assume this is a continuation.
- end_timestamp_ = kNoTimestamp;
-}
-
-template <typename T>
-void WatchTimeComponent<T>::SetCurrentValue(T new_value) {
- current_value_ = new_value;
-}
-
-template <typename T>
-void WatchTimeComponent<T>::RecordWatchTime(base::TimeDelta current_timestamp) {
- DCHECK_NE(current_timestamp, kNoTimestamp);
- DCHECK_NE(current_timestamp, kInfiniteDuration);
- DCHECK_GE(current_timestamp, base::TimeDelta());
-
- // If we're finalizing, use the media time at time of finalization. We only
- // use the |end_timestamp_| if it's less than the current timestamp, otherwise
- // we may report more watch time than expected.
- if (NeedsFinalize() && end_timestamp_ < current_timestamp)
- current_timestamp = end_timestamp_;
-
- // Don't update watch time if media time hasn't changed since the last run;
- // this may occur if a seek is taking some time to complete or the playback
- // is stalled for some reason.
- if (last_timestamp_ == current_timestamp)
- return;
-
- last_timestamp_ = current_timestamp;
- const base::TimeDelta elapsed = last_timestamp_ - start_timestamp_;
- if (elapsed <= base::TimeDelta())
- return;
-
- // If no value to key callback has been provided, record |elapsed| to every
- // key in the |keys_to_finalize_| list.
- if (!value_to_key_cb_) {
- for (auto k : keys_to_finalize_)
- recorder_->RecordWatchTime(k, elapsed);
- return;
- }
-
- // A conversion callback has been specified, so only report elapsed to the
- // key provided by the callback.
- //
- // Record watch time using |current_value_| and not |pending_value_| since
- // that transition should not happen until Finalize().
- recorder_->RecordWatchTime(value_to_key_cb_.Run(current_value_), elapsed);
-}
-
-template <typename T>
-void WatchTimeComponent<T>::Finalize(
- std::vector<WatchTimeKey>* keys_to_finalize) {
- DCHECK(NeedsFinalize());
- // Update |current_value_| and |start_timestamp_| to |end_timestamp_| since
- // that's when the |pending_value_| was set.
- current_value_ = pending_value_;
- start_timestamp_ = end_timestamp_;
-
- // Complete the finalize and indicate which keys need to be finalized.
- end_timestamp_ = kNoTimestamp;
- keys_to_finalize->insert(keys_to_finalize->end(), keys_to_finalize_.begin(),
- keys_to_finalize_.end());
- DCHECK(!NeedsFinalize());
-}
-
-template <typename T>
-bool WatchTimeComponent<T>::NeedsFinalize() const {
- return end_timestamp_ != kNoTimestamp;
-}
-
-// Required to avoid linking errors since we've split this file into a .cc + .h
-// file set instead of putting the function definitions in the header file. Any
-// new component type must be added here.
-//
-// Note: These must be the last line in this file, otherwise you will also see
-// linking errors since the templates won't have been fully defined prior.
-template class MEDIA_BLINK_EXPORT WatchTimeComponent<bool>;
-template class MEDIA_BLINK_EXPORT
- WatchTimeComponent<blink::WebMediaPlayer::DisplayType>;
-
-} // namespace media
diff --git a/chromium/media/blink/watch_time_component.h b/chromium/media/blink/watch_time_component.h
deleted file mode 100644
index be25696490a..00000000000
--- a/chromium/media/blink/watch_time_component.h
+++ /dev/null
@@ -1,135 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_BLINK_WATCH_TIME_COMPONENT_H_
-#define MEDIA_BLINK_WATCH_TIME_COMPONENT_H_
-
-#include <vector>
-
-#include "base/callback.h"
-#include "base/macros.h"
-#include "base/time/time.h"
-#include "media/base/timestamp_constants.h"
-#include "media/base/watch_time_keys.h"
-#include "media/blink/media_blink_export.h"
-#include "media/mojo/mojom/watch_time_recorder.mojom.h"
-
-namespace media {
-
-// Every input used to calculate watch time functions the same way, so we use a
-// common WatchTimeComponent class to avoid lots of copy/paste and enforce rigor
-// in the reporter. Components are not thread-safe.
-//
-// E.g., each component does something like flip pending value, record timestamp
-// of that value change, wait for next reporting cycle, finalize the elapsed
-// time, flip the actual value, and then start recording from that previous
-// finalize time. They may also clear the pending value flip if the value
-// changes back to the previous value.
-template <typename T>
-class WatchTimeComponent {
- public:
- // Callback used to convert |current_value_| into a WatchTimeKey which will be
- // given to WatchTimeRecorder::RecordWatchTime().
- using ValueToKeyCB = base::RepeatingCallback<WatchTimeKey(T value)>;
-
- // Mirror of WatchTimeReporter::GetMediaTimeCB to avoid circular dependency.
- using GetMediaTimeCB = base::RepeatingCallback<base::TimeDelta(void)>;
-
- // |initial_value| is the starting value for |current_value_| and
- // |pending_value_|.
- //
- // |keys_to_finalize| is the list of keys which should be finalized.
- //
- // |value_to_key_cb| is optional, if unspecified every time RecordWatchTime()
- // is called, |keys_to_finalize| will also be treated as the list of keys to
- // record watch time too.
- //
- // See WatchTimeReporter constructor for |get_media_time_cb| and |recorder|.
- WatchTimeComponent(T initial_value,
- std::vector<WatchTimeKey> keys_to_finalize,
- ValueToKeyCB value_to_key_cb,
- GetMediaTimeCB get_media_time_cb,
- mojom::WatchTimeRecorder* recorder);
- ~WatchTimeComponent();
-
- // Called when the main WatchTimeReporter timer is started. Reinitializes
- // tracking variables and sets |start_timestamp_|. May be called at any time.
- void OnReportingStarted(base::TimeDelta start_timestamp);
-
- // Called when the primary value tracked by this component changes but the
- // change shouldn't take effect until the next Finalize() call.
- //
- // |pending_value_| is set to |new_value| when different than |current_value_|
- // and a finalize is marked at the current media time. If the |current_value_|
- // is unchanged any pending finalize is cleared.
- void SetPendingValue(T new_value);
-
- // Called when the primary value tracked by this component changes and the
- // change should take effect immediately. This is typically only called when
- // the watch time timer is not running.
- void SetCurrentValue(T new_value);
-
- // If there's no pending finalize, records the amount of watch time which has
- // elapsed between |current_timestamp| and |start_timestamp_| by calling into
- // mojom::WatchTimeRecorder::RecordWatchTime(). The key to be recorded to is
- // determined by the |value_to_key_cb_|; or if none is present, all keys in
- // |keys_to_finalize_| are recorded to.
- //
- // If there's a pending finalize it records the delta between |end_timestamp_|
- // and |start_timestamp_| if |end_timestamp_| < |current_timestamp|. Does not
- // complete any pending finalize. May be called multiple times even if a
- // finalize is pending.
- void RecordWatchTime(base::TimeDelta current_timestamp);
-
- // Completes any pending finalize. Which means setting |current_value_| to
- // |pending_value_| and setting |start_timestamp_| to |end_timestamp_| so that
- // reporting may continue on a new key if desired. Adds all keys that should
- // be finalized to |keys_to_finalize|.
- //
- // Callers must call mojom::WatchTimeRecorder::FinalizeWatchTime() for the
- // resulting keys in order to actually complete the finalize. We rely on the
- // calling class to perform the actual finalization since it may desire to
- // batch a set of keys into one finalize call to the recorder.
- //
- // E.g., some components may stop reporting upon Finalize() while others want
- // to report to a new key for all watch time going forward.
- void Finalize(std::vector<WatchTimeKey>* keys_to_finalize);
-
- // Returns true if Finalize() should be called.
- bool NeedsFinalize() const;
-
- // Returns the current value for |end_timestamp_|.
- base::TimeDelta end_timestamp() const { return end_timestamp_; }
-
- T current_value_for_testing() const { return current_value_; }
-
- private:
- // Initialized during construction. See constructor for details.
- const std::vector<WatchTimeKey> keys_to_finalize_;
- const ValueToKeyCB value_to_key_cb_;
- const GetMediaTimeCB get_media_time_cb_;
- mojom::WatchTimeRecorder* const recorder_;
-
- // The current value which will be used to select keys for reporting WatchTime
- // during the next RecordWatchTime() call.
- T current_value_;
-
- // A pending value which will be used to set |current_value_| once Finalize()
- // has been called.
- T pending_value_;
-
- // The starting and ending timestamps used for reporting watch time. The end
- // timestamp may be kNoTimestamp if reporting is ongoing.
- base::TimeDelta start_timestamp_;
- base::TimeDelta end_timestamp_ = kNoTimestamp;
-
- // The last media timestamp seen by RecordWatchTime().
- base::TimeDelta last_timestamp_ = kNoTimestamp;
-
- DISALLOW_COPY_AND_ASSIGN(WatchTimeComponent);
-};
-
-} // namespace media
-
-#endif // MEDIA_BLINK_WATCH_TIME_COMPONENT_H_
diff --git a/chromium/media/blink/watch_time_component_unittest.cc b/chromium/media/blink/watch_time_component_unittest.cc
deleted file mode 100644
index 6646239c200..00000000000
--- a/chromium/media/blink/watch_time_component_unittest.cc
+++ /dev/null
@@ -1,302 +0,0 @@
-// Copyright 2018 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/blink/watch_time_component.h"
-
-#include "base/bind.h"
-#include "base/bind_helpers.h"
-#include "testing/gmock/include/gmock/gmock.h"
-#include "testing/gtest/include/gtest/gtest.h"
-#include "third_party/blink/public/platform/web_media_player.h"
-
-namespace media {
-
-class WatchTimeInterceptor : public mojom::WatchTimeRecorder {
- public:
- WatchTimeInterceptor() = default;
- ~WatchTimeInterceptor() override = default;
-
- // mojom::WatchTimeRecorder implementation:
- MOCK_METHOD2(RecordWatchTime, void(WatchTimeKey, base::TimeDelta));
- MOCK_METHOD1(FinalizeWatchTime, void(const std::vector<WatchTimeKey>&));
- MOCK_METHOD1(OnError, void(PipelineStatus));
- MOCK_METHOD1(SetAutoplayInitiated, void(bool));
- MOCK_METHOD1(OnDurationChanged, void(base::TimeDelta));
- MOCK_METHOD2(UpdateVideoDecodeStats, void(uint32_t, uint32_t));
- MOCK_METHOD1(UpdateUnderflowCount, void(int32_t));
- MOCK_METHOD2(UpdateUnderflowDuration, void(int32_t, base::TimeDelta));
- MOCK_METHOD1(UpdateSecondaryProperties,
- void(mojom::SecondaryPlaybackPropertiesPtr));
- MOCK_METHOD1(OnCurrentTimestampChanged, void(base::TimeDelta));
-};
-
-class WatchTimeComponentTest : public testing::Test {
- public:
- WatchTimeComponentTest() = default;
- ~WatchTimeComponentTest() override = default;
-
- protected:
- template <typename T>
- std::unique_ptr<WatchTimeComponent<T>> CreateComponent(
- T initial_value,
- std::vector<WatchTimeKey> keys_to_finalize,
- typename WatchTimeComponent<T>::ValueToKeyCB value_to_key_cb) {
- return std::make_unique<WatchTimeComponent<T>>(
- initial_value, std::move(keys_to_finalize), std::move(value_to_key_cb),
- base::BindRepeating(&WatchTimeComponentTest::GetMediaTime,
- base::Unretained(this)),
- &recorder_);
- }
-
- MOCK_METHOD0(GetMediaTime, base::TimeDelta(void));
-
- // Usage of StrictMock is intentional here. This ensures all mock method calls
- // are accounted for in tests.
- testing::StrictMock<WatchTimeInterceptor> recorder_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(WatchTimeComponentTest);
-};
-
-// Components should be key agnostic so just choose an arbitrary key for running
-// most of the tests.
-constexpr WatchTimeKey kTestKey = WatchTimeKey::kAudioAll;
-
-// This is a test of the standard flow for most components. Most components will
-// be created, be enabled, start reporting, record watch time, be disabled,
-// report a finalize, and then record watch time again.
-TEST_F(WatchTimeComponentTest, BasicFlow) {
- auto test_component = CreateComponent<bool>(
- false, {kTestKey}, WatchTimeComponent<bool>::ValueToKeyCB());
- EXPECT_FALSE(test_component->current_value_for_testing());
- EXPECT_FALSE(test_component->NeedsFinalize());
- EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
-
- // Simulate flag enabled after construction, but before timer is running; this
- // should set the current value immediately.
- test_component->SetCurrentValue(true);
- EXPECT_TRUE(test_component->current_value_for_testing());
- EXPECT_FALSE(test_component->NeedsFinalize());
- EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
-
- // Notify the start of reporting to set the starting timestamp.
- const base::TimeDelta kStartTime = base::TimeDelta::FromSeconds(1);
- test_component->OnReportingStarted(kStartTime);
- EXPECT_TRUE(test_component->current_value_for_testing());
- EXPECT_FALSE(test_component->NeedsFinalize());
- EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
-
- // Simulate a single recording tick.
- const base::TimeDelta kWatchTime = base::TimeDelta::FromSeconds(2);
- EXPECT_CALL(recorder_, RecordWatchTime(kTestKey, kWatchTime - kStartTime));
- test_component->RecordWatchTime(kWatchTime);
- EXPECT_TRUE(test_component->current_value_for_testing());
- EXPECT_FALSE(test_component->NeedsFinalize());
- EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
-
- // Simulate the flag being flipped to false while the timer is running; which
- // should trigger a finalize, but not yet set the current value.
- const base::TimeDelta kFinalWatchTime = base::TimeDelta::FromSeconds(3);
- EXPECT_CALL(*this, GetMediaTime()).WillOnce(testing::Return(kFinalWatchTime));
- test_component->SetPendingValue(false);
- EXPECT_TRUE(test_component->current_value_for_testing());
- EXPECT_TRUE(test_component->NeedsFinalize());
- EXPECT_EQ(test_component->end_timestamp(), kFinalWatchTime);
-
- // If record is called again it should use the finalize timestamp instead of
- // whatever timestamp we provide.
- EXPECT_CALL(recorder_,
- RecordWatchTime(kTestKey, kFinalWatchTime - kStartTime));
- test_component->RecordWatchTime(base::TimeDelta::FromSeconds(1234));
- EXPECT_TRUE(test_component->current_value_for_testing());
- EXPECT_TRUE(test_component->NeedsFinalize());
- EXPECT_EQ(test_component->end_timestamp(), kFinalWatchTime);
-
- // Calling it twice or more should not change anything; nor even generate a
- // report since that time has already been recorded.
- for (int i = 0; i < 2; ++i) {
- test_component->RecordWatchTime(base::TimeDelta::FromSeconds(1234 + i));
- EXPECT_TRUE(test_component->current_value_for_testing());
- EXPECT_TRUE(test_component->NeedsFinalize());
- EXPECT_EQ(test_component->end_timestamp(), kFinalWatchTime);
- }
-
- // Trigger finalize which should transition the pending value to the current
- // value as well as clear the finalize.
- std::vector<WatchTimeKey> finalize_keys;
- test_component->Finalize(&finalize_keys);
- EXPECT_FALSE(test_component->current_value_for_testing());
- EXPECT_FALSE(test_component->NeedsFinalize());
- EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
- ASSERT_EQ(finalize_keys.size(), 1u);
- EXPECT_EQ(finalize_keys[0], kTestKey);
-
- // The start timestamps should be equal to the previous end timestamp now, so
- // if we call RecordWatchTime again, the value should be relative.
- const base::TimeDelta kNewWatchTime = base::TimeDelta::FromSeconds(4);
- EXPECT_CALL(recorder_,
- RecordWatchTime(kTestKey, kNewWatchTime - kFinalWatchTime));
- test_component->RecordWatchTime(kNewWatchTime);
- EXPECT_FALSE(test_component->current_value_for_testing());
- EXPECT_FALSE(test_component->NeedsFinalize());
- EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
-}
-
-TEST_F(WatchTimeComponentTest, SetCurrentValue) {
- auto test_component = CreateComponent<bool>(
- true, {kTestKey}, WatchTimeComponent<bool>::ValueToKeyCB());
- EXPECT_TRUE(test_component->current_value_for_testing());
- EXPECT_FALSE(test_component->NeedsFinalize());
- EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
-
- // An update when the timer isn't running should take effect immediately.
- test_component->SetCurrentValue(false);
- EXPECT_FALSE(test_component->current_value_for_testing());
- EXPECT_FALSE(test_component->NeedsFinalize());
- EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
-
- test_component->SetCurrentValue(true);
- EXPECT_TRUE(test_component->current_value_for_testing());
- EXPECT_FALSE(test_component->NeedsFinalize());
- EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
-}
-
-TEST_F(WatchTimeComponentTest, RecordDuringFinalizeRespectsCurrentTime) {
- auto test_component = CreateComponent<bool>(
- true, {kTestKey}, WatchTimeComponent<bool>::ValueToKeyCB());
- EXPECT_TRUE(test_component->current_value_for_testing());
- EXPECT_FALSE(test_component->NeedsFinalize());
- EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
-
- // Simulate the flag being flipped to false while the timer is running; which
- // should trigger a finalize, but not yet set the current value.
- const base::TimeDelta kWatchTime1 = base::TimeDelta::FromSeconds(3);
- EXPECT_CALL(*this, GetMediaTime()).WillOnce(testing::Return(kWatchTime1));
- test_component->SetPendingValue(false);
- EXPECT_TRUE(test_component->current_value_for_testing());
- EXPECT_TRUE(test_component->NeedsFinalize());
- EXPECT_EQ(test_component->end_timestamp(), kWatchTime1);
-
- // Now issue a RecordWatchTime() call with a media time before the finalize
- // time. This can happen when the TimeDelta provided to RecordWatchTime has
- // been clamped for some reason (e.g., a superseding finalize).
- const base::TimeDelta kWatchTime2 = base::TimeDelta::FromSeconds(2);
- EXPECT_CALL(recorder_, RecordWatchTime(kTestKey, kWatchTime2));
- test_component->RecordWatchTime(kWatchTime2);
-}
-
-TEST_F(WatchTimeComponentTest, SetPendingValue) {
- auto test_component = CreateComponent<bool>(
- true, {kTestKey}, WatchTimeComponent<bool>::ValueToKeyCB());
- EXPECT_TRUE(test_component->current_value_for_testing());
- EXPECT_FALSE(test_component->NeedsFinalize());
- EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
-
- // A change when running should trigger a finalize.
- const base::TimeDelta kFinalWatchTime = base::TimeDelta::FromSeconds(1);
- EXPECT_CALL(*this, GetMediaTime()).WillOnce(testing::Return(kFinalWatchTime));
- test_component->SetPendingValue(false);
- EXPECT_TRUE(test_component->current_value_for_testing());
- EXPECT_TRUE(test_component->NeedsFinalize());
- EXPECT_EQ(test_component->end_timestamp(), kFinalWatchTime);
-
- // Issuing the same property change again should do nothing since there's a
- // pending finalize already.
- test_component->SetPendingValue(false);
- EXPECT_TRUE(test_component->current_value_for_testing());
- EXPECT_TRUE(test_component->NeedsFinalize());
- EXPECT_EQ(test_component->end_timestamp(), kFinalWatchTime);
-
- // Changing the value back, should cancel the finalize.
- test_component->SetPendingValue(true);
- EXPECT_TRUE(test_component->current_value_for_testing());
- EXPECT_FALSE(test_component->NeedsFinalize());
- EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
-}
-
-// Tests RecordWatchTime() behavior when a ValueToKeyCB is provided.
-TEST_F(WatchTimeComponentTest, WithValueToKeyCB) {
- using DisplayType = blink::WebMediaPlayer::DisplayType;
-
- const std::vector<WatchTimeKey> finalize_keys = {
- WatchTimeKey::kAudioVideoDisplayInline,
- WatchTimeKey::kAudioVideoDisplayFullscreen,
- WatchTimeKey::kAudioVideoDisplayPictureInPicture};
- auto test_component = CreateComponent<DisplayType>(
- DisplayType::kFullscreen, finalize_keys,
- base::BindRepeating([](DisplayType display_type) {
- switch (display_type) {
- case DisplayType::kInline:
- return WatchTimeKey::kAudioVideoDisplayInline;
- case DisplayType::kFullscreen:
- return WatchTimeKey::kAudioVideoDisplayFullscreen;
- case DisplayType::kPictureInPicture:
- return WatchTimeKey::kAudioVideoDisplayPictureInPicture;
- }
- }));
- EXPECT_EQ(test_component->current_value_for_testing(),
- DisplayType::kFullscreen);
- EXPECT_FALSE(test_component->NeedsFinalize());
- EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
-
- // Notify the start of reporting to set the starting timestamp.
- const base::TimeDelta kStartTime = base::TimeDelta::FromSeconds(1);
- test_component->OnReportingStarted(kStartTime);
- EXPECT_EQ(test_component->current_value_for_testing(),
- DisplayType::kFullscreen);
- EXPECT_FALSE(test_component->NeedsFinalize());
- EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
-
- // Record and verify the key recorded too matches the callback provided.
- const base::TimeDelta kWatchTime1 = base::TimeDelta::FromSeconds(2);
- EXPECT_CALL(recorder_,
- RecordWatchTime(WatchTimeKey::kAudioVideoDisplayFullscreen,
- kWatchTime1 - kStartTime));
- test_component->RecordWatchTime(kWatchTime1);
- EXPECT_EQ(test_component->current_value_for_testing(),
- DisplayType::kFullscreen);
- EXPECT_FALSE(test_component->NeedsFinalize());
- EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
-
- // Change property while saying the timer isn't running to avoid finalize.
- const base::TimeDelta kWatchTime2 = base::TimeDelta::FromSeconds(3);
- test_component->SetCurrentValue(DisplayType::kInline);
- EXPECT_CALL(recorder_, RecordWatchTime(WatchTimeKey::kAudioVideoDisplayInline,
- kWatchTime2 - kStartTime));
- test_component->RecordWatchTime(kWatchTime2);
- EXPECT_EQ(test_component->current_value_for_testing(), DisplayType::kInline);
- EXPECT_FALSE(test_component->NeedsFinalize());
- EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
-
- // Cycle through all three properties...
- const base::TimeDelta kWatchTime3 = base::TimeDelta::FromSeconds(4);
- test_component->SetCurrentValue(DisplayType::kPictureInPicture);
- EXPECT_CALL(recorder_,
- RecordWatchTime(WatchTimeKey::kAudioVideoDisplayPictureInPicture,
- kWatchTime3 - kStartTime));
- test_component->RecordWatchTime(kWatchTime3);
- EXPECT_EQ(test_component->current_value_for_testing(),
- DisplayType::kPictureInPicture);
- EXPECT_FALSE(test_component->NeedsFinalize());
- EXPECT_EQ(test_component->end_timestamp(), kNoTimestamp);
-
- // Verify finalize sends all three keys.
- std::vector<WatchTimeKey> actual_finalize_keys;
- const base::TimeDelta kFinalWatchTime = base::TimeDelta::FromSeconds(5);
- EXPECT_CALL(*this, GetMediaTime()).WillOnce(testing::Return(kFinalWatchTime));
- test_component->SetPendingValue(DisplayType::kFullscreen);
- test_component->Finalize(&actual_finalize_keys);
- ASSERT_EQ(actual_finalize_keys.size(), finalize_keys.size());
- for (size_t i = 0; i < finalize_keys.size(); ++i)
- EXPECT_EQ(actual_finalize_keys[i], finalize_keys[i]);
-}
-
-// Unlike WatchTimeReporter, WatchTimeComponents have no automatic finalization
-// so creating and destroying one without calls, should do nothing.
-TEST_F(WatchTimeComponentTest, NoAutomaticFinalize) {
- auto test_component = CreateComponent<bool>(
- false, {kTestKey}, WatchTimeComponent<bool>::ValueToKeyCB());
-}
-
-} // namespace media
diff --git a/chromium/media/blink/watch_time_reporter.cc b/chromium/media/blink/watch_time_reporter.cc
deleted file mode 100644
index 6a32465f145..00000000000
--- a/chromium/media/blink/watch_time_reporter.cc
+++ /dev/null
@@ -1,685 +0,0 @@
-// Copyright 2016 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/blink/watch_time_reporter.h"
-
-#include <numeric>
-
-#include "base/bind.h"
-#include "base/power_monitor/power_monitor.h"
-#include "base/time/time.h"
-#include "media/base/pipeline_status.h"
-#include "media/base/timestamp_constants.h"
-#include "media/base/watch_time_keys.h"
-
-namespace media {
-
-// The minimum width and height of videos to report watch time metrics for.
-constexpr gfx::Size kMinimumVideoSize = gfx::Size(200, 140);
-
-static bool IsOnBatteryPower() {
- if (base::PowerMonitor::IsInitialized())
- return base::PowerMonitor::IsOnBatteryPower();
- return false;
-}
-
-// Helper function for managing property changes. If the watch time timer is
-// running it sets the pending value otherwise it sets the current value and
-// then returns true if the component needs finalize.
-enum class PropertyAction { kNoActionRequired, kFinalizeRequired };
-template <typename T>
-PropertyAction HandlePropertyChange(T new_value,
- bool is_timer_running,
- WatchTimeComponent<T>* component) {
- if (!component)
- return PropertyAction::kNoActionRequired;
-
- if (is_timer_running)
- component->SetPendingValue(new_value);
- else
- component->SetCurrentValue(new_value);
-
- return component->NeedsFinalize() ? PropertyAction::kFinalizeRequired
- : PropertyAction::kNoActionRequired;
-}
-
-WatchTimeReporter::WatchTimeReporter(
- mojom::PlaybackPropertiesPtr properties,
- const gfx::Size& natural_size,
- GetMediaTimeCB get_media_time_cb,
- GetPipelineStatsCB get_pipeline_stats_cb,
- mojom::MediaMetricsProvider* provider,
- scoped_refptr<base::SequencedTaskRunner> task_runner,
- const base::TickClock* tick_clock)
- : WatchTimeReporter(std::move(properties),
- false /* is_background */,
- false /* is_muted */,
- natural_size,
- std::move(get_media_time_cb),
- std::move(get_pipeline_stats_cb),
- provider,
- task_runner,
- tick_clock) {}
-
-WatchTimeReporter::WatchTimeReporter(
- mojom::PlaybackPropertiesPtr properties,
- bool is_background,
- bool is_muted,
- const gfx::Size& natural_size,
- GetMediaTimeCB get_media_time_cb,
- GetPipelineStatsCB get_pipeline_stats_cb,
- mojom::MediaMetricsProvider* provider,
- scoped_refptr<base::SequencedTaskRunner> task_runner,
- const base::TickClock* tick_clock)
- : properties_(std::move(properties)),
- is_background_(is_background),
- is_muted_(is_muted),
- get_media_time_cb_(std::move(get_media_time_cb)),
- get_pipeline_stats_cb_(std::move(get_pipeline_stats_cb)),
- reporting_timer_(tick_clock),
- natural_size_(natural_size) {
- DCHECK(get_media_time_cb_);
- DCHECK(get_pipeline_stats_cb_);
- DCHECK(properties_->has_audio || properties_->has_video);
- DCHECK_EQ(is_background, properties_->is_background);
-
- // The background reporter receives play/pause events instead of visibility
- // changes, so it must always be visible to function correctly.
- if (is_background_)
- DCHECK(is_visible_);
-
- // The muted reporter receives play/pause events instead of volume changes, so
- // its volume must always be audible to function correctly.
- if (is_muted_)
- DCHECK_EQ(volume_, 1.0);
-
- base::PowerMonitor::AddObserver(this);
-
- provider->AcquireWatchTimeRecorder(properties_->Clone(),
- recorder_.BindNewPipeAndPassReceiver());
-
- reporting_timer_.SetTaskRunner(task_runner);
-
- base_component_ = CreateBaseComponent();
- power_component_ = CreatePowerComponent();
- if (!is_background_) {
- controls_component_ = CreateControlsComponent();
- if (properties_->has_video)
- display_type_component_ = CreateDisplayTypeComponent();
- }
-
- // If this is a sub-reporter we're done.
- if (is_background_ || is_muted_)
- return;
-
- // Background watch time is reported by creating an background only watch time
- // reporter which receives play when hidden and pause when shown. This avoids
- // unnecessary complexity inside the UpdateWatchTime() for handling this case.
- auto prop_copy = properties_.Clone();
- prop_copy->is_background = true;
- background_reporter_.reset(new WatchTimeReporter(
- std::move(prop_copy), true /* is_background */, false /* is_muted */,
- natural_size_, get_media_time_cb_, get_pipeline_stats_cb_, provider,
- task_runner, tick_clock));
-
- // Muted watch time is only reported for audio+video playback.
- if (!properties_->has_video || !properties_->has_audio)
- return;
-
- // Similar to the above, muted watch time is reported by creating a muted only
- // watch time reporter which receives play when muted and pause when audible.
- prop_copy = properties_.Clone();
- prop_copy->is_muted = true;
- muted_reporter_.reset(new WatchTimeReporter(
- std::move(prop_copy), false /* is_background */, true /* is_muted */,
- natural_size_, get_media_time_cb_, get_pipeline_stats_cb_, provider,
- task_runner, tick_clock));
-}
-
-WatchTimeReporter::~WatchTimeReporter() {
- background_reporter_.reset();
- muted_reporter_.reset();
-
- // This is our last chance, so finalize now if there's anything remaining.
- in_shutdown_ = true;
- MaybeFinalizeWatchTime(FinalizeTime::IMMEDIATELY);
- base::PowerMonitor::RemoveObserver(this);
-}
-
-void WatchTimeReporter::OnPlaying() {
- if (background_reporter_ && !is_visible_)
- background_reporter_->OnPlaying();
- if (muted_reporter_ && !volume_)
- muted_reporter_->OnPlaying();
-
- is_playing_ = true;
- is_seeking_ = false;
- MaybeStartReportingTimer(get_media_time_cb_.Run());
-}
-
-void WatchTimeReporter::OnPaused() {
- if (background_reporter_)
- background_reporter_->OnPaused();
- if (muted_reporter_)
- muted_reporter_->OnPaused();
-
- is_playing_ = false;
- MaybeFinalizeWatchTime(FinalizeTime::ON_NEXT_UPDATE);
-}
-
-void WatchTimeReporter::OnSeeking() {
- if (background_reporter_)
- background_reporter_->OnSeeking();
- if (muted_reporter_)
- muted_reporter_->OnSeeking();
-
- // Seek is a special case that does not have hysteresis, when this is called
- // the seek is imminent, so finalize the previous playback immediately.
- is_seeking_ = true;
- MaybeFinalizeWatchTime(FinalizeTime::IMMEDIATELY);
-}
-
-void WatchTimeReporter::OnVolumeChange(double volume) {
- if (background_reporter_)
- background_reporter_->OnVolumeChange(volume);
-
- // The muted reporter should never receive volume changes.
- DCHECK(!is_muted_);
-
- const double old_volume = volume_;
- volume_ = volume;
-
- // We're only interesting in transitions in and out of the muted state.
- if (!old_volume && volume) {
- if (muted_reporter_)
- muted_reporter_->OnPaused();
- MaybeStartReportingTimer(get_media_time_cb_.Run());
- } else if (old_volume && !volume_) {
- if (muted_reporter_ && is_playing_)
- muted_reporter_->OnPlaying();
- MaybeFinalizeWatchTime(FinalizeTime::ON_NEXT_UPDATE);
- }
-}
-
-void WatchTimeReporter::OnShown() {
- // The background reporter should never receive visibility changes.
- DCHECK(!is_background_);
-
- if (background_reporter_)
- background_reporter_->OnPaused();
- if (muted_reporter_)
- muted_reporter_->OnShown();
-
- is_visible_ = true;
- MaybeStartReportingTimer(get_media_time_cb_.Run());
-}
-
-void WatchTimeReporter::OnHidden() {
- // The background reporter should never receive visibility changes.
- DCHECK(!is_background_);
-
- if (background_reporter_ && is_playing_)
- background_reporter_->OnPlaying();
- if (muted_reporter_)
- muted_reporter_->OnHidden();
-
- is_visible_ = false;
- MaybeFinalizeWatchTime(FinalizeTime::ON_NEXT_UPDATE);
-}
-
-void WatchTimeReporter::OnError(PipelineStatus status) {
- // Since playback should have stopped by this point, go ahead and send the
- // error directly instead of on the next timer tick. It won't be recorded
- // until finalization anyways.
- recorder_->OnError(status);
- if (background_reporter_)
- background_reporter_->OnError(status);
- if (muted_reporter_)
- muted_reporter_->OnError(status);
-}
-
-void WatchTimeReporter::OnUnderflow() {
- if (background_reporter_)
- background_reporter_->OnUnderflow();
- if (muted_reporter_)
- muted_reporter_->OnUnderflow();
-
- if (!reporting_timer_.IsRunning())
- return;
-
- if (!pending_underflow_events_.empty())
- DCHECK_NE(pending_underflow_events_.back().duration, kNoTimestamp);
-
- // In the event of a pending finalize, we don't want to count underflow events
- // that occurred after the finalize time. Yet if the finalize is canceled we
- // want to ensure they are all recorded.
- pending_underflow_events_.push_back(
- {false, get_media_time_cb_.Run(), kNoTimestamp});
-}
-
-void WatchTimeReporter::OnUnderflowComplete(base::TimeDelta elapsed) {
- if (background_reporter_)
- background_reporter_->OnUnderflowComplete(elapsed);
- if (muted_reporter_)
- muted_reporter_->OnUnderflowComplete(elapsed);
-
- if (!reporting_timer_.IsRunning())
- return;
-
- // Drop this underflow completion if we don't have a corresponding underflow
- // start event; this can happen if a finalize occurs between the underflow and
- // the completion.
- if (pending_underflow_events_.empty())
- return;
-
- // There should only ever be one outstanding underflow, so stick the duration
- // in the last underflow event.
- DCHECK_EQ(pending_underflow_events_.back().duration, kNoTimestamp);
- pending_underflow_events_.back().duration = elapsed;
-}
-
-void WatchTimeReporter::OnNativeControlsEnabled() {
- OnNativeControlsChanged(true);
-}
-
-void WatchTimeReporter::OnNativeControlsDisabled() {
- OnNativeControlsChanged(false);
-}
-
-void WatchTimeReporter::OnDisplayTypeInline() {
- OnDisplayTypeChanged(DisplayType::kInline);
-}
-
-void WatchTimeReporter::OnDisplayTypeFullscreen() {
- OnDisplayTypeChanged(DisplayType::kFullscreen);
-}
-
-void WatchTimeReporter::OnDisplayTypePictureInPicture() {
- OnDisplayTypeChanged(DisplayType::kPictureInPicture);
-}
-
-void WatchTimeReporter::UpdateSecondaryProperties(
- mojom::SecondaryPlaybackPropertiesPtr secondary_properties) {
- // Flush any unrecorded watch time before updating the secondary properties to
- // ensure the UKM record is finalized with up-to-date watch time information.
- if (reporting_timer_.IsRunning())
- RecordWatchTime();
-
- recorder_->UpdateSecondaryProperties(secondary_properties.Clone());
- if (background_reporter_) {
- background_reporter_->UpdateSecondaryProperties(
- secondary_properties.Clone());
- }
- if (muted_reporter_)
- muted_reporter_->UpdateSecondaryProperties(secondary_properties.Clone());
-
- // A change in resolution may affect ShouldReportingTimerRun().
- bool original_should_run = ShouldReportingTimerRun();
- natural_size_ = secondary_properties->natural_size;
- bool should_run = ShouldReportingTimerRun();
- if (original_should_run != should_run) {
- if (should_run) {
- MaybeStartReportingTimer(get_media_time_cb_.Run());
- } else {
- MaybeFinalizeWatchTime(FinalizeTime::ON_NEXT_UPDATE);
- }
- }
-}
-
-void WatchTimeReporter::SetAutoplayInitiated(bool autoplay_initiated) {
- recorder_->SetAutoplayInitiated(autoplay_initiated);
- if (background_reporter_)
- background_reporter_->SetAutoplayInitiated(autoplay_initiated);
- if (muted_reporter_)
- muted_reporter_->SetAutoplayInitiated(autoplay_initiated);
-}
-
-void WatchTimeReporter::OnDurationChanged(base::TimeDelta duration) {
- recorder_->OnDurationChanged(duration);
- if (background_reporter_)
- background_reporter_->OnDurationChanged(duration);
- if (muted_reporter_)
- muted_reporter_->OnDurationChanged(duration);
-}
-
-void WatchTimeReporter::OnPowerStateChange(bool on_battery_power) {
- if (HandlePropertyChange<bool>(on_battery_power, reporting_timer_.IsRunning(),
- power_component_.get()) ==
- PropertyAction::kFinalizeRequired) {
- RestartTimerForHysteresis();
- }
-}
-
-void WatchTimeReporter::OnNativeControlsChanged(bool has_native_controls) {
- if (muted_reporter_)
- muted_reporter_->OnNativeControlsChanged(has_native_controls);
-
- if (HandlePropertyChange<bool>(
- has_native_controls, reporting_timer_.IsRunning(),
- controls_component_.get()) == PropertyAction::kFinalizeRequired) {
- RestartTimerForHysteresis();
- }
-}
-
-void WatchTimeReporter::OnDisplayTypeChanged(DisplayType display_type) {
- if (muted_reporter_)
- muted_reporter_->OnDisplayTypeChanged(display_type);
-
- if (HandlePropertyChange<DisplayType>(
- display_type, reporting_timer_.IsRunning(),
- display_type_component_.get()) == PropertyAction::kFinalizeRequired) {
- RestartTimerForHysteresis();
- }
-}
-
-bool WatchTimeReporter::ShouldReportWatchTime() const {
- // Report listen time or watch time for videos of sufficient size.
- return properties_->has_video
- ? (natural_size_.height() >= kMinimumVideoSize.height() &&
- natural_size_.width() >= kMinimumVideoSize.width())
- : properties_->has_audio;
-}
-
-bool WatchTimeReporter::ShouldReportingTimerRun() const {
- // TODO(dalecurtis): We should only consider |volume_| when there is actually
- // an audio track; requires updating lots of tests to fix.
- return ShouldReportWatchTime() && is_playing_ && volume_ && is_visible_ &&
- !in_shutdown_ && !is_seeking_ && has_valid_start_timestamp_;
-}
-
-void WatchTimeReporter::MaybeStartReportingTimer(
- base::TimeDelta start_timestamp) {
- DCHECK_GE(start_timestamp, base::TimeDelta());
-
- // It's possible for |current_time| to be kInfiniteDuration here if the page
- // seeks to kInfiniteDuration (2**64 - 1) when Duration() is infinite. There
- // is no possible elapsed watch time when this occurs, so don't start the
- // WatchTimeReporter at this time. If a later seek puts us earlier in the
- // stream this method will be called again after OnSeeking().
- has_valid_start_timestamp_ = start_timestamp != kInfiniteDuration;
-
- // Don't start the timer if our state indicates we shouldn't; this check is
- // important since the various event handlers do not have to care about the
- // state of other events.
- const bool should_start = ShouldReportingTimerRun();
- if (reporting_timer_.IsRunning()) {
- base_component_->SetPendingValue(should_start);
- return;
- }
-
- base_component_->SetCurrentValue(should_start);
- if (!should_start)
- return;
-
- if (properties_->has_video) {
- initial_stats_ = get_pipeline_stats_cb_.Run();
- last_stats_ = PipelineStatistics();
- }
-
- ResetUnderflowState();
- base_component_->OnReportingStarted(start_timestamp);
- power_component_->OnReportingStarted(start_timestamp);
-
- if (controls_component_)
- controls_component_->OnReportingStarted(start_timestamp);
- if (display_type_component_)
- display_type_component_->OnReportingStarted(start_timestamp);
-
- reporting_timer_.Start(FROM_HERE, reporting_interval_, this,
- &WatchTimeReporter::UpdateWatchTime);
-}
-
-void WatchTimeReporter::MaybeFinalizeWatchTime(FinalizeTime finalize_time) {
- if (HandlePropertyChange<bool>(
- ShouldReportingTimerRun(), reporting_timer_.IsRunning(),
- base_component_.get()) == PropertyAction::kNoActionRequired) {
- return;
- }
-
- if (finalize_time == FinalizeTime::IMMEDIATELY) {
- UpdateWatchTime();
- return;
- }
-
- // Always restart the timer when finalizing, so that we allow for the full
- // length of |kReportingInterval| to elapse for hysteresis purposes.
- DCHECK_EQ(finalize_time, FinalizeTime::ON_NEXT_UPDATE);
- RestartTimerForHysteresis();
-}
-
-void WatchTimeReporter::RestartTimerForHysteresis() {
- // Restart the reporting timer so the full hysteresis is afforded.
- DCHECK(reporting_timer_.IsRunning());
- reporting_timer_.Start(FROM_HERE, reporting_interval_, this,
- &WatchTimeReporter::UpdateWatchTime);
-}
-
-void WatchTimeReporter::RecordWatchTime() {
- // If we're finalizing, use the media time at time of finalization.
- const base::TimeDelta current_timestamp =
- base_component_->NeedsFinalize() ? base_component_->end_timestamp()
- : get_media_time_cb_.Run();
-
- // Pass along any underflow events which have occurred since the last report.
- if (!pending_underflow_events_.empty()) {
- const int last_underflow_count = total_underflow_count_;
- const int last_completed_underflow_count = total_completed_underflow_count_;
-
- for (auto& ufe : pending_underflow_events_) {
- // Since the underflow occurred after finalize, ignore the event and mark
- // it for deletion.
- if (ufe.timestamp > current_timestamp) {
- ufe.reported = true;
- ufe.duration = base::TimeDelta();
- continue;
- }
-
- if (!ufe.reported) {
- ufe.reported = true;
- ++total_underflow_count_;
- }
-
- // Drop any rebuffer completions that took more than a minute. For our
- // purposes these are considered as timeouts. We want a maximum since
- // rebuffer duration is in real time and not media time, which means if
- // the rebuffer spans a suspend/resume the time can be arbitrarily long.
- constexpr base::TimeDelta kMaximumRebufferDuration =
- base::TimeDelta::FromMinutes(1);
- if (ufe.duration != kNoTimestamp &&
- ufe.duration <= kMaximumRebufferDuration) {
- ++total_completed_underflow_count_;
- total_underflow_duration_ += ufe.duration;
- }
- }
-
- base::EraseIf(pending_underflow_events_, [](const UnderflowEvent& ufe) {
- return ufe.reported && ufe.duration != kNoTimestamp;
- });
-
- if (last_underflow_count != total_underflow_count_)
- recorder_->UpdateUnderflowCount(total_underflow_count_);
- if (last_completed_underflow_count != total_completed_underflow_count_) {
- recorder_->UpdateUnderflowDuration(total_completed_underflow_count_,
- total_underflow_duration_);
- }
- }
-
- if (properties_->has_video) {
- auto stats = get_pipeline_stats_cb_.Run();
- DCHECK_GE(stats.video_frames_decoded, initial_stats_.video_frames_decoded);
- DCHECK_GE(stats.video_frames_dropped, initial_stats_.video_frames_dropped);
-
- // Offset the stats based on where they were when we started reporting.
- stats.video_frames_decoded -= initial_stats_.video_frames_decoded;
- stats.video_frames_dropped -= initial_stats_.video_frames_dropped;
-
- // Only send updates.
- if (last_stats_.video_frames_decoded != stats.video_frames_decoded ||
- last_stats_.video_frames_dropped != stats.video_frames_dropped) {
- recorder_->UpdateVideoDecodeStats(stats.video_frames_decoded,
- stats.video_frames_dropped);
- last_stats_ = stats;
- }
- }
-
- // Record watch time for all components.
- base_component_->RecordWatchTime(current_timestamp);
- power_component_->RecordWatchTime(current_timestamp);
- if (display_type_component_)
- display_type_component_->RecordWatchTime(current_timestamp);
- if (controls_component_)
- controls_component_->RecordWatchTime(current_timestamp);
-
- // Update the last timestamp with the current timestamp.
- recorder_->OnCurrentTimestampChanged(current_timestamp);
-}
-
-void WatchTimeReporter::UpdateWatchTime() {
- // First record watch time.
- RecordWatchTime();
-
- // Second, process any pending finalize events.
- std::vector<WatchTimeKey> keys_to_finalize;
- if (power_component_->NeedsFinalize())
- power_component_->Finalize(&keys_to_finalize);
- if (display_type_component_ && display_type_component_->NeedsFinalize())
- display_type_component_->Finalize(&keys_to_finalize);
- if (controls_component_ && controls_component_->NeedsFinalize())
- controls_component_->Finalize(&keys_to_finalize);
-
- // Then finalize the base component.
- if (!base_component_->NeedsFinalize()) {
- if (!keys_to_finalize.empty())
- recorder_->FinalizeWatchTime(keys_to_finalize);
- return;
- }
-
- // Always send finalize, even if we don't currently have any data, it's
- // harmless to send since nothing will be logged if we've already finalized.
- base_component_->Finalize(&keys_to_finalize);
- recorder_->FinalizeWatchTime({});
-
- // Stop the timer if this is supposed to be our last tick.
- ResetUnderflowState();
- reporting_timer_.Stop();
-}
-
-void WatchTimeReporter::ResetUnderflowState() {
- total_underflow_count_ = total_completed_underflow_count_ = 0;
- total_underflow_duration_ = base::TimeDelta();
- pending_underflow_events_.clear();
-}
-
-#define NORMAL_KEY(key) \
- ((properties_->has_video && properties_->has_audio) \
- ? (is_background_ ? WatchTimeKey::kAudioVideoBackground##key \
- : (is_muted_ ? WatchTimeKey::kAudioVideoMuted##key \
- : WatchTimeKey::kAudioVideo##key)) \
- : properties_->has_video \
- ? (is_background_ ? WatchTimeKey::kVideoBackground##key \
- : WatchTimeKey::kVideo##key) \
- : (is_background_ ? WatchTimeKey::kAudioBackground##key \
- : WatchTimeKey::kAudio##key))
-
-std::unique_ptr<WatchTimeComponent<bool>>
-WatchTimeReporter::CreateBaseComponent() {
- std::vector<WatchTimeKey> keys_to_finalize;
- keys_to_finalize.emplace_back(NORMAL_KEY(All));
- if (properties_->is_mse)
- keys_to_finalize.emplace_back(NORMAL_KEY(Mse));
- else
- keys_to_finalize.emplace_back(NORMAL_KEY(Src));
-
- if (properties_->is_eme)
- keys_to_finalize.emplace_back(NORMAL_KEY(Eme));
-
- if (properties_->is_embedded_media_experience)
- keys_to_finalize.emplace_back(NORMAL_KEY(EmbeddedExperience));
-
- return std::make_unique<WatchTimeComponent<bool>>(
- false, std::move(keys_to_finalize),
- WatchTimeComponent<bool>::ValueToKeyCB(), get_media_time_cb_,
- recorder_.get());
-}
-
-std::unique_ptr<WatchTimeComponent<bool>>
-WatchTimeReporter::CreatePowerComponent() {
- std::vector<WatchTimeKey> keys_to_finalize{NORMAL_KEY(Battery),
- NORMAL_KEY(Ac)};
-
- return std::make_unique<WatchTimeComponent<bool>>(
- IsOnBatteryPower(), std::move(keys_to_finalize),
- base::BindRepeating(&WatchTimeReporter::GetPowerKey,
- base::Unretained(this)),
- get_media_time_cb_, recorder_.get());
-}
-
-WatchTimeKey WatchTimeReporter::GetPowerKey(bool is_on_battery_power) {
- return is_on_battery_power ? NORMAL_KEY(Battery) : NORMAL_KEY(Ac);
-}
-#undef NORMAL_KEY
-
-#define FOREGROUND_KEY(key) \
- ((properties_->has_video && properties_->has_audio) \
- ? (is_muted_ ? WatchTimeKey::kAudioVideoMuted##key \
- : WatchTimeKey::kAudioVideo##key) \
- : properties_->has_audio ? WatchTimeKey::kAudio##key \
- : WatchTimeKey::kVideo##key)
-
-std::unique_ptr<WatchTimeComponent<bool>>
-WatchTimeReporter::CreateControlsComponent() {
- DCHECK(!is_background_);
-
- std::vector<WatchTimeKey> keys_to_finalize{FOREGROUND_KEY(NativeControlsOn),
- FOREGROUND_KEY(NativeControlsOff)};
-
- return std::make_unique<WatchTimeComponent<bool>>(
- false, std::move(keys_to_finalize),
- base::BindRepeating(&WatchTimeReporter::GetControlsKey,
- base::Unretained(this)),
- get_media_time_cb_, recorder_.get());
-}
-
-WatchTimeKey WatchTimeReporter::GetControlsKey(bool has_native_controls) {
- return has_native_controls ? FOREGROUND_KEY(NativeControlsOn)
- : FOREGROUND_KEY(NativeControlsOff);
-}
-
-#undef FOREGROUND_KEY
-
-#define DISPLAY_TYPE_KEY(key) \
- (properties_->has_audio ? (is_muted_ ? WatchTimeKey::kAudioVideoMuted##key \
- : WatchTimeKey::kAudioVideo##key) \
- : WatchTimeKey::kVideo##key)
-
-std::unique_ptr<WatchTimeComponent<WatchTimeReporter::DisplayType>>
-WatchTimeReporter::CreateDisplayTypeComponent() {
- DCHECK(properties_->has_video);
- DCHECK(!is_background_);
-
- std::vector<WatchTimeKey> keys_to_finalize{
- DISPLAY_TYPE_KEY(DisplayInline), DISPLAY_TYPE_KEY(DisplayFullscreen),
- DISPLAY_TYPE_KEY(DisplayPictureInPicture)};
-
- return std::make_unique<WatchTimeComponent<DisplayType>>(
- DisplayType::kInline, std::move(keys_to_finalize),
- base::BindRepeating(&WatchTimeReporter::GetDisplayTypeKey,
- base::Unretained(this)),
- get_media_time_cb_, recorder_.get());
-}
-
-WatchTimeKey WatchTimeReporter::GetDisplayTypeKey(DisplayType display_type) {
- switch (display_type) {
- case DisplayType::kInline:
- return DISPLAY_TYPE_KEY(DisplayInline);
- case DisplayType::kFullscreen:
- return DISPLAY_TYPE_KEY(DisplayFullscreen);
- case DisplayType::kPictureInPicture:
- return DISPLAY_TYPE_KEY(DisplayPictureInPicture);
- }
-}
-
-#undef DISPLAY_TYPE_KEY
-
-} // namespace media
diff --git a/chromium/media/blink/watch_time_reporter.h b/chromium/media/blink/watch_time_reporter.h
deleted file mode 100644
index c467629d629..00000000000
--- a/chromium/media/blink/watch_time_reporter.h
+++ /dev/null
@@ -1,269 +0,0 @@
-// Copyright 2016 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_BLINK_WATCH_TIME_REPORTER_H_
-#define MEDIA_BLINK_WATCH_TIME_REPORTER_H_
-
-#include <vector>
-
-#include "base/callback.h"
-#include "base/power_monitor/power_observer.h"
-#include "base/sequenced_task_runner.h"
-#include "base/time/time.h"
-#include "base/timer/timer.h"
-#include "media/base/audio_codecs.h"
-#include "media/base/media_log.h"
-#include "media/base/timestamp_constants.h"
-#include "media/base/video_codecs.h"
-#include "media/blink/media_blink_export.h"
-#include "media/blink/watch_time_component.h"
-#include "media/mojo/mojom/media_metrics_provider.mojom.h"
-#include "media/mojo/mojom/watch_time_recorder.mojom.h"
-#include "mojo/public/cpp/bindings/remote.h"
-#include "third_party/blink/public/platform/web_media_player.h"
-#include "ui/gfx/geometry/size.h"
-#include "url/origin.h"
-
-namespace media {
-
-// Class for monitoring and reporting watch time in response to various state
-// changes during the playback of media. We record metrics for audio only
-// playbacks as well as video only or audio+video playbacks of sufficient size.
-//
-// Watch time for our purposes is defined as the amount of elapsed media time.
-// Any amount of elapsed time is reported to the WatchTimeRecorder, but only
-// amounts above limits::kMinimumElapsedWatchTimeSecs are reported to UMA. Watch
-// time is checked every 5 seconds from then on and reported to multiple
-// buckets: All, MSE, SRC, EME, AC, and battery.
-//
-// Either of paused or muted is sufficient to stop watch time metric reports.
-// Each of these has a hysteresis where if the state change is undone within 5
-// seconds, the watch time will be counted as uninterrupted.
-//
-// There are both foreground and background buckets for watch time. E.g., when
-// media goes into the background foreground collection stops and background
-// collection starts. As with other events, there is hysteresis on change
-// between the foreground and background.
-//
-// Similarly, there are both muted and unmuted buckets for watch time. E.g., if
-// a playback is muted the unmuted collection stops and muted collection starts.
-// As with other events, there is hysteresis between mute and unmute.
-//
-// Power events (on/off battery power), native controls changes, or display type
-// changes have a similar hysteresis, but unlike the aforementioned properties,
-// will not stop metric collection.
-//
-// Each seek event will result in a new watch time metric being started and the
-// old metric finalized as accurately as possible.
-class MEDIA_BLINK_EXPORT WatchTimeReporter : base::PowerObserver {
- public:
- using DisplayType = blink::WebMediaPlayer::DisplayType;
- using GetMediaTimeCB = base::RepeatingCallback<base::TimeDelta(void)>;
- using GetPipelineStatsCB = base::RepeatingCallback<PipelineStatistics(void)>;
-
- // Constructor for the reporter; all requested metadata should be fully known
- // before attempting construction as incorrect values will result in the wrong
- // watch time metrics being reported.
- //
- // |properties| Properties describing the playback; these are considered
- // immutable over the lifetime of the reporter. If any of them change a new
- // WatchTimeReporter should be created with updated properties.
- //
- // |get_media_time_cb| must return the current playback time in terms of media
- // time, not wall clock time! Using media time instead of wall clock time
- // allows us to avoid a whole class of issues around clock changes during
- // suspend and resume.
- //
- // |provider| A provider of mojom::WatchTimeRecorder instances which will be
- // created and used to handle caching of metrics outside of the current
- // process.
- //
- // TODO(dalecurtis): Should we only report when rate == 1.0? Should we scale
- // the elapsed media time instead?
- WatchTimeReporter(mojom::PlaybackPropertiesPtr properties,
- const gfx::Size& natural_size,
- GetMediaTimeCB get_media_time_cb,
- GetPipelineStatsCB get_pipeline_stats_cb,
- mojom::MediaMetricsProvider* provider,
- scoped_refptr<base::SequencedTaskRunner> task_runner,
- const base::TickClock* tick_clock = nullptr);
- ~WatchTimeReporter() override;
-
- // These methods are used to ensure that watch time is only reported for media
- // that is actually playing. They should be called whenever the media starts
- // or stops playing for any reason. If the media is currently hidden,
- // OnPlaying() will start background watch time reporting.
- void OnPlaying();
- void OnPaused();
-
- // This will immediately finalize any outstanding watch time reports and stop
- // the reporting timer. Clients should call OnPlaying() upon seek completion
- // to restart the reporting timer.
- void OnSeeking();
-
- // This method is used to ensure that watch time is only reported for media
- // that is actually audible to the user. It should be called whenever the
- // volume changes.
- //
- // Note: This does not catch all cases. E.g., headphones that are not being
- // listened too, or even OS level volume state.
- void OnVolumeChange(double volume);
-
- // These methods are used to ensure that watch time is only reported for media
- // that is actually visible to the user. They should be called when the media
- // is shown or hidden respectively. OnHidden() will start background watch
- // time reporting.
- void OnShown();
- void OnHidden();
-
- // Called when a playback ends in error.
- void OnError(PipelineStatus status);
-
- // Indicates a rebuffering event occurred during playback. When watch time is
- // finalized the total watch time for a given category will be divided by the
- // number of rebuffering events. Reset to zero after a finalize event.
- void OnUnderflow();
- void OnUnderflowComplete(base::TimeDelta elapsed);
-
- // These methods are used to ensure that the watch time is reported relative
- // to whether the media is using native controls.
- void OnNativeControlsEnabled();
- void OnNativeControlsDisabled();
-
- // These methods are used to ensure that the watch time is reported relative
- // to the display type of the media.
- void OnDisplayTypeInline();
- void OnDisplayTypeFullscreen();
- void OnDisplayTypePictureInPicture();
-
- // Mutates various properties that may change over the lifetime of a playback
- // but for which we don't want to interrupt reporting for. UMA watch time will
- // not be interrupted by changes to these properties, while UKM will.
- //
- // Note: Both UMA and UMK watch time will be interrupted if the natural size
- // transitions above/below kMinimumVideoSize.
- void UpdateSecondaryProperties(
- mojom::SecondaryPlaybackPropertiesPtr secondary_properties);
-
- // Notifies the autoplay status of the playback. Must not be called multiple
- // times with different values.
- void SetAutoplayInitiated(bool autoplay_initiated);
-
- // Updates the duration maintained by the recorder. May be called any number
- // of times during playback.
- void OnDurationChanged(base::TimeDelta duration);
-
- private:
- friend class WatchTimeReporterTest;
-
- // Internal constructor for marking background status.
- WatchTimeReporter(mojom::PlaybackPropertiesPtr properties,
- bool is_background,
- bool is_muted,
- const gfx::Size& natural_size,
- GetMediaTimeCB get_media_time_cb,
- GetPipelineStatsCB get_pipeline_stats_cb,
- mojom::MediaMetricsProvider* provider,
- scoped_refptr<base::SequencedTaskRunner> task_runner,
- const base::TickClock* tick_clock);
-
- // base::PowerObserver implementation.
- //
- // We only observe power source changes. We don't need to observe suspend and
- // resume events because we report watch time in terms of elapsed media time
- // and not in terms of elapsed real time.
- void OnPowerStateChange(bool on_battery_power) override;
- void OnNativeControlsChanged(bool has_native_controls);
- void OnDisplayTypeChanged(blink::WebMediaPlayer::DisplayType display_type);
-
- bool ShouldReportWatchTime() const;
- bool ShouldReportingTimerRun() const;
- void MaybeStartReportingTimer(base::TimeDelta start_timestamp);
- enum class FinalizeTime { IMMEDIATELY, ON_NEXT_UPDATE };
- void MaybeFinalizeWatchTime(FinalizeTime finalize_time);
- void RestartTimerForHysteresis();
-
- // UpdateWatchTime() both records watch time and processes any finalize event.
- void RecordWatchTime();
- void UpdateWatchTime();
-
- void ResetUnderflowState();
-
- // Helper methods for creating the components that make up the watch time
- // report. All components except the base component require a creation method
- // and a conversion method to get the correct WatchTimeKey.
- std::unique_ptr<WatchTimeComponent<bool>> CreateBaseComponent();
- std::unique_ptr<WatchTimeComponent<bool>> CreatePowerComponent();
- WatchTimeKey GetPowerKey(bool is_on_battery_power);
- std::unique_ptr<WatchTimeComponent<bool>> CreateControlsComponent();
- WatchTimeKey GetControlsKey(bool has_native_controls);
- std::unique_ptr<WatchTimeComponent<DisplayType>> CreateDisplayTypeComponent();
- WatchTimeKey GetDisplayTypeKey(DisplayType display_type);
-
- // Initialized during construction.
- const mojom::PlaybackPropertiesPtr properties_;
- const bool is_background_;
- const bool is_muted_;
- const GetMediaTimeCB get_media_time_cb_;
- const GetPipelineStatsCB get_pipeline_stats_cb_;
- mojo::Remote<mojom::WatchTimeRecorder> recorder_;
-
- // The amount of time between each UpdateWatchTime(); this is the frequency by
- // which the watch times are updated. In the event of a process crash or kill
- // this is also the most amount of watch time that we might lose.
- base::TimeDelta reporting_interval_ = base::TimeDelta::FromSeconds(5);
-
- base::RepeatingTimer reporting_timer_;
-
- // Updated by the OnXXX() methods above; controls timer state.
- bool is_playing_ = false;
- bool is_visible_ = true;
- bool is_seeking_ = false;
- bool in_shutdown_ = false;
- bool has_valid_start_timestamp_ = false;
- double volume_ = 1.0;
-
- // Updated by UpdateSecondaryProperties(); controls timer state when
- // transitioning above/below kMinimumVideoSize.
- gfx::Size natural_size_;
-
- int total_underflow_count_ = 0;
- int total_completed_underflow_count_ = 0;
- base::TimeDelta total_underflow_duration_;
- struct UnderflowEvent {
- bool reported = false;
- base::TimeDelta timestamp = kNoTimestamp;
- base::TimeDelta duration = kNoTimestamp;
- };
- std::vector<UnderflowEvent> pending_underflow_events_;
-
- PipelineStatistics initial_stats_;
- PipelineStatistics last_stats_;
-
- // The various components making up WatchTime. If the |base_component_| is
- // finalized, all reporting will be stopped and finalized using its ending
- // timestamp.
- //
- // Note: If you are adding a new type of component (i.e., one that is not
- // bool, etc) you must also update the end of the WatchTimeComponent .cc file
- // to add a new template class definition or you will get linking errors.
- std::unique_ptr<WatchTimeComponent<bool>> base_component_;
- std::unique_ptr<WatchTimeComponent<bool>> power_component_;
- std::unique_ptr<WatchTimeComponent<DisplayType>> display_type_component_;
- std::unique_ptr<WatchTimeComponent<bool>> controls_component_;
-
- // Special case reporter for handling background video watch time. Configured
- // as an audio only WatchTimeReporter with |is_background_| set to true.
- std::unique_ptr<WatchTimeReporter> background_reporter_;
-
- // Similar to the above, but for muted audio+video watch time. Configured as
- // an audio+video WatchTimeReporter with |is_muted_| set to true.
- std::unique_ptr<WatchTimeReporter> muted_reporter_;
-
- DISALLOW_COPY_AND_ASSIGN(WatchTimeReporter);
-};
-
-} // namespace media
-
-#endif // MEDIA_BLINK_WATCH_TIME_REPORTER_H_
diff --git a/chromium/media/blink/watch_time_reporter_unittest.cc b/chromium/media/blink/watch_time_reporter_unittest.cc
index dbeab26b783..7626b30c7fd 100644
--- a/chromium/media/blink/watch_time_reporter_unittest.cc
+++ b/chromium/media/blink/watch_time_reporter_unittest.cc
@@ -5,7 +5,7 @@
#include <memory>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/run_loop.h"
#include "base/single_thread_task_runner.h"
#include "base/task/current_thread.h"
@@ -15,13 +15,15 @@
#include "media/base/mock_media_log.h"
#include "media/base/pipeline_status.h"
#include "media/base/watch_time_keys.h"
-#include "media/blink/watch_time_reporter.h"
#include "media/mojo/mojom/media_metrics_provider.mojom.h"
#include "media/mojo/mojom/watch_time_recorder.mojom.h"
#include "mojo/public/cpp/bindings/pending_receiver.h"
#include "mojo/public/cpp/bindings/self_owned_receiver.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
+#include "third_party/blink/public/common/media/display_type.h"
+#include "third_party/blink/public/common/media/watch_time_component.h"
+#include "third_party/blink/public/common/media/watch_time_reporter.h"
#include "third_party/blink/public/platform/scheduler/test/renderer_scheduler_test_support.h"
namespace media {
@@ -29,7 +31,6 @@ namespace media {
constexpr gfx::Size kSizeTooSmall = gfx::Size(101, 101);
constexpr gfx::Size kSizeJustRight = gfx::Size(201, 201);
-using blink::WebMediaPlayer;
using testing::_;
#define EXPECT_WATCH_TIME(key, value) \
@@ -307,7 +308,7 @@ class WatchTimeReporterTest
if (wtr_ && IsMonitoring())
EXPECT_WATCH_TIME_FINALIZED();
- wtr_.reset(new WatchTimeReporter(
+ wtr_ = std::make_unique<blink::WatchTimeReporter>(
mojom::PlaybackProperties::New(has_audio_, has_video_, false, false,
is_mse, is_encrypted, false),
initial_video_size,
@@ -317,7 +318,7 @@ class WatchTimeReporterTest
base::Unretained(this)),
&fake_metrics_provider_,
blink::scheduler::GetSequencedTaskRunnerForTesting(),
- task_runner_->GetMockTickClock()));
+ task_runner_->GetMockTickClock());
reporting_interval_ = wtr_->reporting_interval_;
// Most tests don't care about this.
@@ -368,7 +369,7 @@ class WatchTimeReporterTest
: wtr_->OnNativeControlsDisabled();
}
- void OnDisplayTypeChanged(WebMediaPlayer::DisplayType display_type) {
+ void OnDisplayTypeChanged(blink::DisplayType display_type) {
wtr_->OnDisplayTypeChanged(display_type);
}
@@ -439,7 +440,7 @@ class WatchTimeReporterTest
if (TestFlags & kStartWithNativeControls)
OnNativeControlsEnabled(true);
if (TestFlags & kStartWithDisplayFullscreen)
- OnDisplayTypeChanged(WebMediaPlayer::DisplayType::kFullscreen);
+ OnDisplayTypeChanged(blink::DisplayType::kFullscreen);
// Setup all current time expectations first since they need to use the
// InSequence macro for ease of use, but we don't want the watch time
@@ -603,11 +604,12 @@ class WatchTimeReporterTest
EXPECT_WATCH_TIME(Src, kWatchTime4);
EXPECT_WATCH_TIME(Ac, kWatchTime4);
EXPECT_WATCH_TIME(NativeControlsOff, kWatchTime4);
- if (TestFlags & kStartWithDisplayFullscreen)
+ if (TestFlags & kStartWithDisplayFullscreen) {
EXPECT_WATCH_TIME_IF_VIDEO(DisplayInline, kWatchTime4 - kWatchTime2);
- else
+ } else {
EXPECT_WATCH_TIME_IF_VIDEO(DisplayFullscreen,
kWatchTime4 - kWatchTime2);
+ }
}
EXPECT_WATCH_TIME_FINALIZED();
@@ -642,7 +644,7 @@ class WatchTimeReporterTest
scoped_refptr<base::SingleThreadTaskRunner> original_task_runner_;
FakeMediaMetricsProvider fake_metrics_provider_;
- std::unique_ptr<WatchTimeReporter> wtr_;
+ std::unique_ptr<blink::WatchTimeReporter> wtr_;
base::TimeDelta reporting_interval_;
private:
@@ -1433,7 +1435,7 @@ TEST_P(DisplayTypeWatchTimeReporterTest,
EXPECT_TRUE(IsBackgroundMonitoring());
EXPECT_FALSE(IsMonitoring());
- OnDisplayTypeChanged(WebMediaPlayer::DisplayType::kFullscreen);
+ OnDisplayTypeChanged(blink::DisplayType::kFullscreen);
EXPECT_BACKGROUND_WATCH_TIME(Ac, kWatchTime1);
EXPECT_BACKGROUND_WATCH_TIME(All, kWatchTime1);
@@ -1567,7 +1569,7 @@ TEST_P(WatchTimeReporterTest, WatchTimeReporterMultiplePartialFinalize) {
wtr_->OnPlaying();
EXPECT_TRUE(IsMonitoring());
- OnDisplayTypeChanged(WebMediaPlayer::DisplayType::kFullscreen);
+ OnDisplayTypeChanged(blink::DisplayType::kFullscreen);
OnPowerStateChange(true);
EXPECT_WATCH_TIME(Ac, kWatchTime1);
@@ -1609,7 +1611,7 @@ TEST_P(WatchTimeReporterTest, WatchTimeReporterMultiplePartialFinalize) {
OnNativeControlsEnabled(true);
OnPowerStateChange(true);
- OnDisplayTypeChanged(WebMediaPlayer::DisplayType::kPictureInPicture);
+ OnDisplayTypeChanged(blink::DisplayType::kPictureInPicture);
EXPECT_WATCH_TIME(Ac, kWatchTime1);
EXPECT_WATCH_TIME(All, kWatchTime1);
@@ -1850,7 +1852,7 @@ TEST_P(WatchTimeReporterTest, WatchTimeCategoryMapping) {
.WillOnce(testing::Return(base::TimeDelta()))
.WillOnce(testing::Return(kWatchTime));
Initialize(false, false, kSizeJustRight);
- OnDisplayTypeChanged(WebMediaPlayer::DisplayType::kFullscreen);
+ OnDisplayTypeChanged(blink::DisplayType::kFullscreen);
wtr_->OnPlaying();
SetOnBatteryPower(true);
EXPECT_TRUE(IsMonitoring());
@@ -1868,7 +1870,7 @@ TEST_P(WatchTimeReporterTest, WatchTimeCategoryMapping) {
.WillOnce(testing::Return(kWatchTime));
Initialize(false, false, kSizeJustRight);
OnNativeControlsEnabled(true);
- OnDisplayTypeChanged(WebMediaPlayer::DisplayType::kPictureInPicture);
+ OnDisplayTypeChanged(blink::DisplayType::kPictureInPicture);
wtr_->OnPlaying();
EXPECT_TRUE(IsMonitoring());
EXPECT_WATCH_TIME(Ac, kWatchTime);
@@ -1999,8 +2001,8 @@ TEST_P(DisplayTypeWatchTimeReporterTest,
RunHysteresisTest<kAccumulationContinuesAfterTest |
kFinalizeExitDoesNotRequireCurrentTime |
kStartWithDisplayFullscreen>([this]() {
- OnDisplayTypeChanged(WebMediaPlayer::DisplayType::kInline);
- OnDisplayTypeChanged(WebMediaPlayer::DisplayType::kFullscreen);
+ OnDisplayTypeChanged(blink::DisplayType::kInline);
+ OnDisplayTypeChanged(blink::DisplayType::kFullscreen);
});
}
@@ -2008,24 +2010,23 @@ TEST_P(DisplayTypeWatchTimeReporterTest,
OnDisplayTypeChangeHysteresisNativeFinalized) {
RunHysteresisTest<kAccumulationContinuesAfterTest |
kFinalizeDisplayWatchTime | kStartWithDisplayFullscreen>(
- [this]() { OnDisplayTypeChanged(WebMediaPlayer::DisplayType::kInline); });
+ [this]() { OnDisplayTypeChanged(blink::DisplayType::kInline); });
}
TEST_P(DisplayTypeWatchTimeReporterTest,
OnDisplayTypeChangeHysteresisInlineContinuation) {
RunHysteresisTest<kAccumulationContinuesAfterTest |
kFinalizeExitDoesNotRequireCurrentTime>([this]() {
- OnDisplayTypeChanged(WebMediaPlayer::DisplayType::kFullscreen);
- OnDisplayTypeChanged(WebMediaPlayer::DisplayType::kInline);
+ OnDisplayTypeChanged(blink::DisplayType::kFullscreen);
+ OnDisplayTypeChanged(blink::DisplayType::kInline);
});
}
TEST_P(DisplayTypeWatchTimeReporterTest,
OnDisplayTypeChangeHysteresisNativeOffFinalized) {
RunHysteresisTest<kAccumulationContinuesAfterTest |
- kFinalizeDisplayWatchTime>([this]() {
- OnDisplayTypeChanged(WebMediaPlayer::DisplayType::kFullscreen);
- });
+ kFinalizeDisplayWatchTime>(
+ [this]() { OnDisplayTypeChanged(blink::DisplayType::kFullscreen); });
}
TEST_P(DisplayTypeWatchTimeReporterTest,
@@ -2033,16 +2034,14 @@ TEST_P(DisplayTypeWatchTimeReporterTest,
RunHysteresisTest<kAccumulationContinuesAfterTest |
kFinalizeDisplayWatchTime | kStartWithDisplayFullscreen |
kTransitionDisplayWatchTime>(
- [this]() { OnDisplayTypeChanged(WebMediaPlayer::DisplayType::kInline); });
+ [this]() { OnDisplayTypeChanged(blink::DisplayType::kInline); });
}
TEST_P(DisplayTypeWatchTimeReporterTest,
OnDisplayTypeChangeFullscreenToInline) {
RunHysteresisTest<kAccumulationContinuesAfterTest |
kFinalizeDisplayWatchTime | kTransitionDisplayWatchTime>(
- [this]() {
- OnDisplayTypeChanged(WebMediaPlayer::DisplayType::kFullscreen);
- });
+ [this]() { OnDisplayTypeChanged(blink::DisplayType::kFullscreen); });
}
// Tests that the first finalize is the only one that matters.
@@ -2337,7 +2336,7 @@ TEST_P(MutedWatchTimeReporterTest, MutedDisplayType) {
EXPECT_TRUE(IsMutedMonitoring());
EXPECT_FALSE(IsMonitoring());
- OnDisplayTypeChanged(WebMediaPlayer::DisplayType::kFullscreen);
+ OnDisplayTypeChanged(blink::DisplayType::kFullscreen);
EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Ac, kWatchTime1);
EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(All, kWatchTime1);
EXPECT_MUTED_WATCH_TIME_IF_AUDIO_VIDEO(Eme, kWatchTime1);
diff --git a/chromium/media/blink/webmediaplayer_impl.cc b/chromium/media/blink/webmediaplayer_impl.cc
index 28ada62395b..a08e0f048e2 100644
--- a/chromium/media/blink/webmediaplayer_impl.cc
+++ b/chromium/media/blink/webmediaplayer_impl.cc
@@ -12,7 +12,6 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback.h"
#include "base/callback_helpers.h"
#include "base/command_line.h"
@@ -48,7 +47,6 @@
#include "media/blink/texttrack_impl.h"
#include "media/blink/url_index.h"
#include "media/blink/video_decode_stats_reporter.h"
-#include "media/blink/watch_time_reporter.h"
#include "media/blink/webcontentdecryptionmodule_impl.h"
#include "media/blink/webinbandtexttrack_impl.h"
#include "media/blink/webmediasource_impl.h"
@@ -60,7 +58,7 @@
#include "media/remoting/remoting_constants.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
#include "net/base/data_url.h"
-#include "third_party/blink/public/platform/media/webmediaplayer_delegate.h"
+#include "third_party/blink/public/common/media/watch_time_reporter.h"
#include "third_party/blink/public/platform/web_encrypted_media_types.h"
#include "third_party/blink/public/platform/web_fullscreen_video_status.h"
#include "third_party/blink/public/platform/web_media_player_client.h"
@@ -485,6 +483,7 @@ WebMediaPlayerImpl::~WebMediaPlayerImpl() {
delegate_->PlayerGone(delegate_id_);
delegate_->RemoveObserver(delegate_id_);
+ delegate_ = nullptr;
// Finalize any watch time metrics before destroying the pipeline.
watch_time_reporter_.reset();
@@ -690,28 +689,26 @@ void WebMediaPlayerImpl::OnHasNativeControlsChanged(bool has_native_controls) {
watch_time_reporter_->OnNativeControlsDisabled();
}
-void WebMediaPlayerImpl::OnDisplayTypeChanged(
- WebMediaPlayer::DisplayType display_type) {
+void WebMediaPlayerImpl::OnDisplayTypeChanged(blink::DisplayType display_type) {
if (surface_layer_for_video_enabled_) {
vfc_task_runner_->PostTask(
FROM_HERE,
- base::BindOnce(
- &VideoFrameCompositor::SetForceSubmit,
- base::Unretained(compositor_.get()),
- display_type == WebMediaPlayer::DisplayType::kPictureInPicture));
+ base::BindOnce(&VideoFrameCompositor::SetForceSubmit,
+ base::Unretained(compositor_.get()),
+ display_type == blink::DisplayType::kPictureInPicture));
}
if (!watch_time_reporter_)
return;
switch (display_type) {
- case WebMediaPlayer::DisplayType::kInline:
+ case blink::DisplayType::kInline:
watch_time_reporter_->OnDisplayTypeInline();
break;
- case WebMediaPlayer::DisplayType::kFullscreen:
+ case blink::DisplayType::kFullscreen:
watch_time_reporter_->OnDisplayTypeFullscreen();
break;
- case WebMediaPlayer::DisplayType::kPictureInPicture:
+ case blink::DisplayType::kPictureInPicture:
watch_time_reporter_->OnDisplayTypePictureInPicture();
// Resumes playback if it was paused when hidden.
@@ -975,6 +972,11 @@ void WebMediaPlayerImpl::DoSeek(base::TimeDelta time, bool time_updated) {
if (watch_time_reporter_)
watch_time_reporter_->OnSeeking();
+ // Send the seek updates only when the seek pipeline hasn't started,
+ // OnPipelineSeeked is not called yet.
+ if (!seeking_)
+ delegate_->DidSeek(delegate_id_);
+
// TODO(sandersd): Move |seeking_| to PipelineController.
// TODO(sandersd): Do we want to reset the idle timer here?
delegate_->SetIdle(delegate_id_, false);
@@ -1339,14 +1341,10 @@ void WebMediaPlayerImpl::Paint(cc::PaintCanvas* canvas,
DCHECK(main_task_runner_->BelongsToCurrentThread());
TRACE_EVENT0("media", "WebMediaPlayerImpl:paint");
- // We can't copy from protected frames.
- if (cdm_context_ref_)
- return;
-
scoped_refptr<VideoFrame> video_frame = GetCurrentFrameFromCompositor();
gfx::Rect gfx_rect(rect);
- if (video_frame.get() && video_frame->HasTextures()) {
+ if (video_frame && video_frame->HasTextures()) {
if (!raster_context_provider_)
return; // Unable to get/create a shared main thread context.
if (!raster_context_provider_->GrContext())
@@ -1367,6 +1365,10 @@ void WebMediaPlayerImpl::Paint(cc::PaintCanvas* canvas,
raster_context_provider_.get());
}
+scoped_refptr<VideoFrame> WebMediaPlayerImpl::GetCurrentFrame() {
+ return GetCurrentFrameFromCompositor();
+}
+
bool WebMediaPlayerImpl::WouldTaintOrigin() const {
if (demuxer_found_hls_) {
// HLS manifests might pull segments from a different origin. We can't know
@@ -1431,14 +1433,11 @@ bool WebMediaPlayerImpl::CopyVideoTextureToPlatformTexture(
DCHECK(main_task_runner_->BelongsToCurrentThread());
TRACE_EVENT0("media", "WebMediaPlayerImpl:copyVideoTextureToPlatformTexture");
- // We can't copy from protected frames.
- if (cdm_context_ref_)
- return false;
-
scoped_refptr<VideoFrame> video_frame = GetCurrentFrameFromCompositor();
- if (!video_frame.get() || !video_frame->HasTextures()) {
+ if (!video_frame || !video_frame->HasTextures()) {
return false;
}
+
if (out_metadata) {
// WebGL last-uploaded-frame-metadata API is enabled.
// https://crbug.com/639174
@@ -2018,6 +2017,22 @@ void WebMediaPlayerImpl::OnMetadata(const PipelineMetadata& metadata) {
delegate_id_, delegate_has_audio_, HasVideo(),
DurationToMediaContentType(GetPipelineMediaDuration()));
+ // It could happen that the demuxer successfully completed initialization
+ // (implying it had determined media metadata), but then removed all audio and
+ // video streams and the ability to demux any A/V before |metadata| was
+ // constructed and passed to us. One example is, with MSE-in-Workers, the
+ // worker owning the MediaSource could have been terminated, or the app could
+ // have explicitly removed all A/V SourceBuffers. That termination/removal
+ // could race the construction of |metadata|. Regardless of load-type, we
+ // shouldn't allow playback of a resource that has neither audio nor video.
+ // We treat lack of A/V as if there were an error in the demuxer before
+ // reaching HAVE_METADATA.
+ if (!HasVideo() && !HasAudio()) {
+ DVLOG(1) << __func__ << ": no audio and no video -> error";
+ OnError(PipelineStatus::DEMUXER_ERROR_COULD_NOT_OPEN);
+ return; // Do not transition to HAVE_METADATA.
+ }
+
// TODO(dalecurtis): Don't create these until kReadyStateHaveFutureData; when
// we create them early we just increase the chances of needing to throw them
// away unnecessarily.
@@ -2962,6 +2977,10 @@ scoped_refptr<VideoFrame> WebMediaPlayerImpl::GetCurrentFrameFromCompositor()
DCHECK(main_task_runner_->BelongsToCurrentThread());
TRACE_EVENT0("media", "WebMediaPlayerImpl::GetCurrentFrameFromCompositor");
+ // We can't copy from protected frames.
+ if (cdm_context_ref_)
+ return nullptr;
+
// Can be null.
scoped_refptr<VideoFrame> video_frame =
compositor_->GetCurrentFrameOnAnyThread();
@@ -3381,7 +3400,7 @@ void WebMediaPlayerImpl::CreateWatchTimeReporter() {
}
// Create the watch time reporter and synchronize its initial state.
- watch_time_reporter_ = std::make_unique<WatchTimeReporter>(
+ watch_time_reporter_ = std::make_unique<blink::WatchTimeReporter>(
mojom::PlaybackProperties::New(
pipeline_metadata_.has_audio, has_video, false, false,
!!chunk_demuxer_, is_encrypted_, embedded_media_experience_enabled_),
@@ -3405,14 +3424,14 @@ void WebMediaPlayerImpl::CreateWatchTimeReporter() {
else
watch_time_reporter_->OnNativeControlsDisabled();
- switch (client_->DisplayType()) {
- case WebMediaPlayer::DisplayType::kInline:
+ switch (client_->GetDisplayType()) {
+ case blink::DisplayType::kInline:
watch_time_reporter_->OnDisplayTypeInline();
break;
- case WebMediaPlayer::DisplayType::kFullscreen:
+ case blink::DisplayType::kFullscreen:
watch_time_reporter_->OnDisplayTypeFullscreen();
break;
- case WebMediaPlayer::DisplayType::kPictureInPicture:
+ case blink::DisplayType::kPictureInPicture:
watch_time_reporter_->OnDisplayTypePictureInPicture();
break;
}
@@ -3832,8 +3851,7 @@ void WebMediaPlayerImpl::RecordEncryptionScheme(
bool WebMediaPlayerImpl::IsInPictureInPicture() const {
DCHECK(client_);
- return client_->DisplayType() ==
- WebMediaPlayer::DisplayType::kPictureInPicture;
+ return client_->GetDisplayType() == blink::DisplayType::kPictureInPicture;
}
void WebMediaPlayerImpl::OnPictureInPictureAvailabilityChanged(bool available) {
diff --git a/chromium/media/blink/webmediaplayer_impl.h b/chromium/media/blink/webmediaplayer_impl.h
index 0c1918a588f..995d07a7ea2 100644
--- a/chromium/media/blink/webmediaplayer_impl.h
+++ b/chromium/media/blink/webmediaplayer_impl.h
@@ -49,6 +49,7 @@
#include "media/renderers/paint_canvas_video_renderer.h"
#include "mojo/public/cpp/bindings/remote.h"
#include "services/media_session/public/cpp/media_position.h"
+#include "third_party/blink/public/common/media/display_type.h"
#include "third_party/blink/public/platform/media/webmediaplayer_delegate.h"
#include "third_party/blink/public/platform/web_audio_source_provider.h"
#include "third_party/blink/public/platform/web_content_decryption_module_result.h"
@@ -62,6 +63,7 @@ class WebAudioSourceProviderImpl;
class WebLocalFrame;
class WebMediaPlayerClient;
class WebMediaPlayerEncryptedMediaClient;
+class WatchTimeReporter;
} // namespace blink
namespace base {
@@ -87,7 +89,6 @@ class MediaLog;
class MemoryDumpProviderProxy;
class UrlIndex;
class VideoFrameCompositor;
-class WatchTimeReporter;
// The canonical implementation of blink::WebMediaPlayer that's backed by
// Pipeline. Handles normal resource loading, Media Source, and
@@ -149,6 +150,7 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
cc::PaintFlags& flags,
int already_uploaded_id,
VideoFrameUploadMetadata* out_metadata) override;
+ scoped_refptr<VideoFrame> GetCurrentFrame() override;
// True if the loaded media has a playable video/audio track.
bool HasVideo() const override;
@@ -238,7 +240,7 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
void SetIsEffectivelyFullscreen(
blink::WebFullscreenVideoStatus fullscreen_video_status) override;
void OnHasNativeControlsChanged(bool) override;
- void OnDisplayTypeChanged(WebMediaPlayer::DisplayType display_type) override;
+ void OnDisplayTypeChanged(blink::DisplayType display_type) override;
// blink::WebMediaPlayerDelegate::Observer implementation.
void OnFrameHidden() override;
@@ -748,7 +750,7 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
// Document::shutdown() is called before the frame detaches (and before the
// frame is destroyed). RenderFrameImpl owns |delegate_| and is guaranteed
// to outlive |this|; thus it is safe to store |delegate_| as a raw pointer.
- blink::WebMediaPlayerDelegate* const delegate_;
+ blink::WebMediaPlayerDelegate* delegate_;
int delegate_id_ = 0;
// The playback state last reported to |delegate_|, to avoid setting duplicate
@@ -895,7 +897,7 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
base::OneShotTimer background_pause_timer_;
// Monitors the watch time of the played content.
- std::unique_ptr<WatchTimeReporter> watch_time_reporter_;
+ std::unique_ptr<blink::WatchTimeReporter> watch_time_reporter_;
std::string audio_decoder_name_;
std::string video_decoder_name_;
@@ -925,8 +927,7 @@ class MEDIA_BLINK_EXPORT WebMediaPlayerImpl
// Monitors the player events.
base::WeakPtr<MediaObserver> observer_;
- // Owns the weblayer and obtains/maintains SurfaceIds for
- // kUseSurfaceLayerForVideo feature.
+ // Owns the weblayer and obtains/maintains SurfaceIds.
std::unique_ptr<blink::WebSurfaceLayerBridge> bridge_;
// The maximum video keyframe distance that allows triggering background
diff --git a/chromium/media/blink/webmediaplayer_impl_unittest.cc b/chromium/media/blink/webmediaplayer_impl_unittest.cc
index 3fb0d5e3607..6aca18bc32b 100644
--- a/chromium/media/blink/webmediaplayer_impl_unittest.cc
+++ b/chromium/media/blink/webmediaplayer_impl_unittest.cc
@@ -9,7 +9,6 @@
#include <memory>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback_helpers.h"
#include "base/command_line.h"
#include "base/memory/ref_counted.h"
@@ -18,7 +17,7 @@
#include "base/strings/string_number_conversions.h"
#include "base/strings/utf_string_conversions.h"
#include "base/task_runner_util.h"
-#include "base/test/bind_test_util.h"
+#include "base/test/bind.h"
#include "base/test/gmock_callback_support.h"
#include "base/test/mock_callback.h"
#include "base/test/scoped_feature_list.h"
@@ -55,6 +54,7 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/blink/public/platform/media/webmediaplayer_delegate.h"
+#include "third_party/blink/public/platform/scheduler/web_thread_scheduler.h"
#include "third_party/blink/public/platform/web_fullscreen_video_status.h"
#include "third_party/blink/public/platform/web_media_player.h"
#include "third_party/blink/public/platform/web_media_player_client.h"
@@ -147,7 +147,7 @@ class MockWebMediaPlayerClient : public blink::WebMediaPlayerClient {
MOCK_METHOD0(GetSelectedVideoTrackId, blink::WebMediaPlayer::TrackId());
MOCK_METHOD0(HasNativeControls, bool());
MOCK_METHOD0(IsAudioElement, bool());
- MOCK_CONST_METHOD0(DisplayType, blink::WebMediaPlayer::DisplayType());
+ MOCK_CONST_METHOD0(GetDisplayType, blink::DisplayType());
MOCK_CONST_METHOD0(IsInAutoPIP, bool());
MOCK_METHOD1(MediaRemotingStarted, void(const blink::WebString&));
MOCK_METHOD1(MediaRemotingStopped, void(int));
@@ -249,6 +249,8 @@ class MockWebMediaPlayerDelegate : public blink::WebMediaPlayerDelegate {
DCHECK_EQ(player_id_, player_id);
}
+ void DidSeek(int player_id) override { DCHECK_EQ(player_id_, player_id); }
+
bool IsFrameHidden() override { return is_hidden_; }
bool IsFrameClosed() override { return is_closed_; }
@@ -330,13 +332,20 @@ class WebMediaPlayerImplTest
private blink::WebTestingSupport::WebScopedMockScrollbars {
public:
WebMediaPlayerImplTest()
+ : WebMediaPlayerImplTest(
+ blink::scheduler::WebThreadScheduler::MainThreadScheduler()
+ ->CreateAgentGroupScheduler()) {}
+ explicit WebMediaPlayerImplTest(
+ std::unique_ptr<blink::scheduler::WebAgentGroupScheduler>
+ agent_group_scheduler)
: media_thread_("MediaThreadForTest"),
web_view_(blink::WebView::Create(/*client=*/nullptr,
/*is_hidden=*/false,
/*is_inside_portal=*/false,
/*compositing_enabled=*/false,
- nullptr,
- mojo::NullAssociatedReceiver())),
+ /*opener=*/nullptr,
+ mojo::NullAssociatedReceiver(),
+ *agent_group_scheduler)),
web_local_frame_(blink::WebLocalFrame::CreateMainFrame(
web_view_,
&web_frame_client_,
@@ -346,7 +355,8 @@ class WebMediaPlayerImplTest
context_provider_(viz::TestContextProvider::Create()),
audio_parameters_(TestAudioParameters::Normal()),
memory_dump_manager_(
- base::trace_event::MemoryDumpManager::CreateInstanceForTesting()) {
+ base::trace_event::MemoryDumpManager::CreateInstanceForTesting()),
+ agent_group_scheduler_(std::move(agent_group_scheduler)) {
media_thread_.StartAndWaitForTesting();
}
@@ -380,6 +390,8 @@ class WebMediaPlayerImplTest
CycleThreads();
web_view_->Close();
+
+ agent_group_scheduler_ = nullptr;
}
protected:
@@ -886,6 +898,9 @@ class WebMediaPlayerImplTest
std::unique_ptr<base::trace_event::MemoryDumpManager> memory_dump_manager_;
+ std::unique_ptr<blink::scheduler::WebAgentGroupScheduler>
+ agent_group_scheduler_;
+
private:
DISALLOW_COPY_AND_ASSIGN(WebMediaPlayerImplTest);
};
@@ -1652,8 +1667,19 @@ TEST_F(WebMediaPlayerImplTest, NoStreams) {
EXPECT_CALL(*surface_layer_bridge_ptr_, GetSurfaceId()).Times(0);
EXPECT_CALL(*compositor_, EnableSubmission(_, _, _)).Times(0);
- // Nothing should happen. In particular, no assertions should fail.
+ // Since there is no audio nor video to play, OnError should occur with
+ // resulting network state error update, and transition to HAVE_METADATA
+ // should not occur.
+ EXPECT_CALL(client_, NetworkStateChanged()).Times(1);
+ EXPECT_CALL(client_, ReadyStateChanged()).Times(0);
+
+ // No assertions in the production code should fail.
OnMetadata(metadata);
+
+ EXPECT_EQ(wmpi_->GetNetworkState(),
+ blink::WebMediaPlayer::kNetworkStateFormatError);
+ EXPECT_EQ(wmpi_->GetReadyState(),
+ blink::WebMediaPlayer::kReadyStateHaveNothing);
}
TEST_F(WebMediaPlayerImplTest, Encrypted) {
@@ -2008,9 +2034,8 @@ TEST_F(WebMediaPlayerImplTest, PictureInPictureStateChange) {
metadata.has_video = true;
OnMetadata(metadata);
- EXPECT_CALL(client_, DisplayType())
- .WillRepeatedly(
- Return(blink::WebMediaPlayer::DisplayType::kPictureInPicture));
+ EXPECT_CALL(client_, GetDisplayType())
+ .WillRepeatedly(Return(blink::DisplayType::kPictureInPicture));
EXPECT_CALL(client_, OnPictureInPictureStateChange()).Times(1);
wmpi_->OnSurfaceIdUpdated(surface_id_);
@@ -2208,9 +2233,8 @@ class WebMediaPlayerImplBackgroundBehaviorTest
SetDuration(base::TimeDelta::FromSeconds(GetDurationSec()));
if (IsPictureInPictureOn()) {
- EXPECT_CALL(client_, DisplayType())
- .WillRepeatedly(
- Return(blink::WebMediaPlayer::DisplayType::kPictureInPicture));
+ EXPECT_CALL(client_, GetDisplayType())
+ .WillRepeatedly(Return(blink::DisplayType::kPictureInPicture));
wmpi_->OnSurfaceIdUpdated(surface_id_);
}
diff --git a/chromium/media/capabilities/in_memory_video_decode_stats_db_unittest.cc b/chromium/media/capabilities/in_memory_video_decode_stats_db_unittest.cc
index 2d69342f7f9..6c39f4fa286 100644
--- a/chromium/media/capabilities/in_memory_video_decode_stats_db_unittest.cc
+++ b/chromium/media/capabilities/in_memory_video_decode_stats_db_unittest.cc
@@ -5,7 +5,7 @@
#include <memory>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/check_op.h"
#include "base/memory/ptr_util.h"
#include "base/test/gtest_util.h"
diff --git a/chromium/media/capabilities/video_decode_stats_db_impl_unittest.cc b/chromium/media/capabilities/video_decode_stats_db_impl_unittest.cc
index 38b457a8463..0bce05ab1d4 100644
--- a/chromium/media/capabilities/video_decode_stats_db_impl_unittest.cc
+++ b/chromium/media/capabilities/video_decode_stats_db_impl_unittest.cc
@@ -6,8 +6,8 @@
#include <memory>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback.h"
+#include "base/callback_helpers.h"
#include "base/check.h"
#include "base/files/file_path.h"
#include "base/memory/ptr_util.h"
diff --git a/chromium/media/capture/BUILD.gn b/chromium/media/capture/BUILD.gn
index 0a27e9d3a09..03b6d3ec832 100644
--- a/chromium/media/capture/BUILD.gn
+++ b/chromium/media/capture/BUILD.gn
@@ -2,18 +2,12 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import("//build/config/chromeos/ui_mode.gni")
import("//build/config/features.gni")
import("//build/config/ui.gni")
import("//media/media_options.gni")
import("//testing/test.gni")
-# This file depends on the legacy global sources assignment filter. It should
-# be converted to check target platform before assigning source files to the
-# sources variable. Remove this import and set_sources_assignment_filter call
-# when the file has been converted. See https://crbug.com/1018739 for details.
-import("//build/config/deprecated_default_sources_assignment_filter.gni")
-set_sources_assignment_filter(deprecated_default_sources_assignment_filter)
-
group("capture") {
public_deps = [
":capture_lib",
@@ -98,6 +92,7 @@ source_set("capture_device_specific") {
deps = [
"//base",
"//base:i18n",
+ "//build:chromeos_buildflags",
"//gpu/command_buffer/client",
"//gpu/ipc/common:common",
"//media",
@@ -105,6 +100,7 @@ source_set("capture_device_specific") {
"//media/capture/mojom:image_capture_types",
"//media/mojo/mojom",
"//media/parsers",
+ "//skia",
"//third_party/libyuv",
"//ui/gfx",
]
@@ -121,6 +117,7 @@ component("capture_lib") {
"video/video_capture_buffer_pool.h",
"video/video_capture_buffer_pool_impl.cc",
"video/video_capture_buffer_pool_impl.h",
+ "video/video_capture_buffer_tracker.cc",
"video/video_capture_buffer_tracker.h",
"video/video_capture_buffer_tracker_factory.h",
"video/video_capture_buffer_tracker_factory_impl.cc",
@@ -145,6 +142,7 @@ component("capture_lib") {
deps = [
"//base",
"//base:i18n",
+ "//build:chromeos_buildflags",
"//media",
"//media/capture/mojom:image_capture",
"//media/capture/mojom:image_capture_types",
@@ -169,8 +167,14 @@ component("capture_lib") {
if (is_mac) {
sources += [
+ "video/mac/gpu_memory_buffer_tracker_mac.cc",
+ "video/mac/gpu_memory_buffer_tracker_mac.h",
"video/mac/pixel_buffer_pool_mac.cc",
"video/mac/pixel_buffer_pool_mac.h",
+ "video/mac/pixel_buffer_transferer_mac.cc",
+ "video/mac/pixel_buffer_transferer_mac.h",
+ "video/mac/sample_buffer_transformer_mac.cc",
+ "video/mac/sample_buffer_transformer_mac.h",
"video/mac/video_capture_device_avfoundation_legacy_mac.h",
"video/mac/video_capture_device_avfoundation_legacy_mac.mm",
"video/mac/video_capture_device_avfoundation_mac.h",
@@ -251,8 +255,6 @@ component("capture_lib") {
# these sources into linux, chromeos, and common.
if (is_linux || is_chromeos) {
sources += [
- "video/linux/camera_config_chromeos.cc",
- "video/linux/camera_config_chromeos.h",
"video/linux/scoped_v4l2_device_fd.cc",
"video/linux/scoped_v4l2_device_fd.h",
"video/linux/v4l2_capture_delegate.cc",
@@ -260,8 +262,6 @@ component("capture_lib") {
"video/linux/v4l2_capture_device.h",
"video/linux/v4l2_capture_device_impl.cc",
"video/linux/v4l2_capture_device_impl.h",
- "video/linux/video_capture_device_chromeos.cc",
- "video/linux/video_capture_device_chromeos.h",
"video/linux/video_capture_device_factory_linux.cc",
"video/linux/video_capture_device_factory_linux.h",
"video/linux/video_capture_device_linux.cc",
@@ -269,7 +269,7 @@ component("capture_lib") {
]
}
- if (is_chromeos) {
+ if (is_ash) {
sources += [
"video/chromeos/camera_3a_controller.cc",
"video/chromeos/camera_3a_controller.h",
@@ -317,6 +317,10 @@ component("capture_lib") {
"video/chromeos/video_capture_jpeg_decoder.h",
"video/chromeos/video_capture_jpeg_decoder_impl.cc",
"video/chromeos/video_capture_jpeg_decoder_impl.h",
+ "video/linux/camera_config_chromeos.cc",
+ "video/linux/camera_config_chromeos.h",
+ "video/linux/video_capture_device_chromeos.cc",
+ "video/linux/video_capture_device_chromeos.h",
]
public_deps += [ "//media/capture/video/chromeos/public" ]
deps += [
@@ -350,10 +354,6 @@ source_set("test_support") {
testonly = true
sources = [
- "video/linux/fake_device_provider.cc",
- "video/linux/fake_device_provider.h",
- "video/linux/fake_v4l2_impl.cc",
- "video/linux/fake_v4l2_impl.h",
"video/mock_device.cc",
"video/mock_device.h",
"video/mock_device_factory.cc",
@@ -369,6 +369,7 @@ source_set("test_support") {
deps = [
":capture_lib",
"//base/test:test_support",
+ "//build:chromeos_buildflags",
"//gpu/command_buffer/client",
"//gpu/command_buffer/common:common",
"//media:test_support",
@@ -378,7 +379,16 @@ source_set("test_support") {
"//ui/gfx:test_support",
]
- if (is_chromeos) {
+ if (is_linux || is_chromeos) {
+ sources += [
+ "video/linux/fake_device_provider.cc",
+ "video/linux/fake_device_provider.h",
+ "video/linux/fake_v4l2_impl.cc",
+ "video/linux/fake_v4l2_impl.h",
+ ]
+ }
+
+ if (is_ash) {
sources += [
"video/chromeos/mock_camera_module.cc",
"video/chromeos/mock_camera_module.h",
@@ -407,17 +417,6 @@ test("capture_unittests") {
"run_all_unittests.cc",
"video/fake_video_capture_device_unittest.cc",
"video/file_video_capture_device_unittest.cc",
- "video/linux/camera_config_chromeos_unittest.cc",
- "video/linux/v4l2_capture_delegate_unittest.cc",
- "video/linux/video_capture_device_factory_linux_unittest.cc",
- "video/mac/pixel_buffer_pool_mac_unittest.mm",
- "video/mac/test/mock_video_capture_device_avfoundation_frame_receiver_mac.h",
- "video/mac/test/mock_video_capture_device_avfoundation_frame_receiver_mac.mm",
- "video/mac/test/video_capture_test_utils_mac.h",
- "video/mac/test/video_capture_test_utils_mac.mm",
- "video/mac/video_capture_device_avfoundation_mac_unittest.mm",
- "video/mac/video_capture_device_factory_mac_unittest.mm",
- "video/mac/video_capture_device_mac_unittest.mm",
"video/video_capture_device_client_unittest.cc",
"video/video_capture_device_unittest.cc",
"video_capture_types_unittest.cc",
@@ -429,6 +428,7 @@ test("capture_unittests") {
":capture",
":test_support",
"//base/test:test_support",
+ "//build:chromeos_buildflags",
"//gpu/command_buffer/client",
"//media:test_support",
"//media/capture/mojom:image_capture",
@@ -436,9 +436,21 @@ test("capture_unittests") {
"//mojo/core/embedder",
"//testing/gmock",
"//testing/gtest",
+ "//third_party/libyuv:libyuv",
"//ui/gfx:test_support",
]
+ if (is_linux || is_chromeos) {
+ sources += [
+ "video/linux/v4l2_capture_delegate_unittest.cc",
+ "video/linux/video_capture_device_factory_linux_unittest.cc",
+ ]
+ }
+
+ if (is_ash) {
+ sources += [ "video/linux/camera_config_chromeos_unittest.cc" ]
+ }
+
if (is_android) {
deps += [
"//media/capture/video/android",
@@ -460,10 +472,25 @@ test("capture_unittests") {
}
if (is_mac) {
+ sources += [
+ "video/mac/pixel_buffer_pool_mac_unittest.mm",
+ "video/mac/pixel_buffer_transferer_mac_unittest.mm",
+ "video/mac/sample_buffer_transformer_mac_unittest.mm",
+ "video/mac/test/mock_video_capture_device_avfoundation_frame_receiver_mac.h",
+ "video/mac/test/mock_video_capture_device_avfoundation_frame_receiver_mac.mm",
+ "video/mac/test/pixel_buffer_test_utils_mac.cc",
+ "video/mac/test/pixel_buffer_test_utils_mac.h",
+ "video/mac/test/video_capture_test_utils_mac.h",
+ "video/mac/test/video_capture_test_utils_mac.mm",
+ "video/mac/video_capture_device_avfoundation_mac_unittest.mm",
+ "video/mac/video_capture_device_factory_mac_unittest.mm",
+ "video/mac/video_capture_device_mac_unittest.mm",
+ ]
frameworks = [
"AVFoundation.framework",
"CoreMedia.framework",
"CoreVideo.framework",
+ "IOSurface.framework",
]
}
@@ -486,14 +513,16 @@ test("capture_unittests") {
# TODO(jschuh): https://crbug.com/167187 fix size_t to int truncations.
configs += [ "//build/config/compiler:no_size_t_to_int_warning" ]
+
+ deps += [ "//media/base/win:media_foundation_util" ]
}
# TODO(https://crbug.com/1043007): use is_linux.
- if (is_chromeos) {
+ if (is_ash) {
deps += [ "//media/gpu/test:local_gpu_memory_buffer_manager" ]
}
- if (is_chromeos) {
+ if (is_ash) {
sources += [
"video/chromeos/camera_3a_controller_unittest.cc",
"video/chromeos/camera_device_delegate_unittest.cc",
diff --git a/chromium/media/capture/OWNERS b/chromium/media/capture/OWNERS
index 000874b24d3..4f59853ed82 100644
--- a/chromium/media/capture/OWNERS
+++ b/chromium/media/capture/OWNERS
@@ -4,3 +4,5 @@
file://media/capture/content/OWNERS
# For video/
file://media/capture/video/OWNERS
+# For video/mac/
+file://media/capture/video/mac/OWNERS
diff --git a/chromium/media/capture/content/DIR_METADATA b/chromium/media/capture/content/DIR_METADATA
new file mode 100644
index 00000000000..f07592d70e5
--- /dev/null
+++ b/chromium/media/capture/content/DIR_METADATA
@@ -0,0 +1,11 @@
+# Metadata information for this directory.
+#
+# For more information on DIR_METADATA files, see:
+# https://source.chromium.org/chromium/infra/infra/+/master:go/src/infra/tools/dirmd/README.md
+#
+# For the schema of this file, see Metadata message:
+# https://source.chromium.org/chromium/infra/infra/+/master:go/src/infra/tools/dirmd/proto/dir_metadata.proto
+
+monorail {
+ component: "Internals>Media>ScreenCapture"
+} \ No newline at end of file
diff --git a/chromium/media/capture/content/OWNERS b/chromium/media/capture/content/OWNERS
index 12fd84abbff..90320c9bd1b 100644
--- a/chromium/media/capture/content/OWNERS
+++ b/chromium/media/capture/content/OWNERS
@@ -1,4 +1,2 @@
miu@chromium.org
mfoltz@chromium.org
-
-# COMPONENT: Internals>Media>ScreenCapture
diff --git a/chromium/media/capture/content/android/screen_capture_machine_android.cc b/chromium/media/capture/content/android/screen_capture_machine_android.cc
index c4a7189681d..18f0ec6e4af 100644
--- a/chromium/media/capture/content/android/screen_capture_machine_android.cc
+++ b/chromium/media/capture/content/android/screen_capture_machine_android.cc
@@ -251,8 +251,6 @@ bool ScreenCaptureMachineAndroid::Start(
}
DCHECK(params.requested_format.frame_size.GetArea());
- DCHECK(!(params.requested_format.frame_size.width() % 2));
- DCHECK(!(params.requested_format.frame_size.height() % 2));
jboolean ret =
Java_ScreenCapture_allocate(AttachCurrentThread(), j_capture_,
diff --git a/chromium/media/capture/mojom/video_capture_types.mojom b/chromium/media/capture/mojom/video_capture_types.mojom
index 672078673c0..8d4f4c68d38 100644
--- a/chromium/media/capture/mojom/video_capture_types.mojom
+++ b/chromium/media/capture/mojom/video_capture_types.mojom
@@ -229,6 +229,7 @@ enum VideoCaptureError {
kFuchsiaSysmemInvalidBufferSize,
kFuchsiaUnsupportedPixelFormat,
kFuchsiaFailedToMapSysmemBuffer,
+ kCrosHalV3DeviceContextDuplicatedClient,
};
enum VideoCaptureFrameDropReason {
diff --git a/chromium/media/capture/mojom/video_capture_types_mojom_traits.cc b/chromium/media/capture/mojom/video_capture_types_mojom_traits.cc
index 03398d0e0f4..d94dd8abe7d 100644
--- a/chromium/media/capture/mojom/video_capture_types_mojom_traits.cc
+++ b/chromium/media/capture/mojom/video_capture_types_mojom_traits.cc
@@ -701,6 +701,9 @@ EnumTraits<media::mojom::VideoCaptureError, media::VideoCaptureError>::ToMojom(
return media::mojom::VideoCaptureError::kFuchsiaUnsupportedPixelFormat;
case media::VideoCaptureError::kFuchsiaFailedToMapSysmemBuffer:
return media::mojom::VideoCaptureError::kFuchsiaFailedToMapSysmemBuffer;
+ case media::VideoCaptureError::kCrosHalV3DeviceContextDuplicatedClient:
+ return media::mojom::VideoCaptureError::
+ kCrosHalV3DeviceContextDuplicatedClient;
}
NOTREACHED();
return media::mojom::VideoCaptureError::kNone;
@@ -1243,6 +1246,11 @@ bool EnumTraits<media::mojom::VideoCaptureError, media::VideoCaptureError>::
case media::mojom::VideoCaptureError::kFuchsiaFailedToMapSysmemBuffer:
*output = media::VideoCaptureError::kFuchsiaFailedToMapSysmemBuffer;
return true;
+ case media::mojom::VideoCaptureError::
+ kCrosHalV3DeviceContextDuplicatedClient:
+ *output =
+ media::VideoCaptureError::kCrosHalV3DeviceContextDuplicatedClient;
+ return true;
}
NOTREACHED();
return false;
diff --git a/chromium/media/capture/video/DIR_METADATA b/chromium/media/capture/video/DIR_METADATA
new file mode 100644
index 00000000000..c6b71a837dd
--- /dev/null
+++ b/chromium/media/capture/video/DIR_METADATA
@@ -0,0 +1,12 @@
+# Metadata information for this directory.
+#
+# For more information on DIR_METADATA files, see:
+# https://source.chromium.org/chromium/infra/infra/+/master:go/src/infra/tools/dirmd/README.md
+#
+# For the schema of this file, see Metadata message:
+# https://source.chromium.org/chromium/infra/infra/+/master:go/src/infra/tools/dirmd/proto/dir_metadata.proto
+
+monorail {
+ component: "Blink>GetUserMedia>WebCam"
+}
+team_email: "webrtc-dev@chromium.org" \ No newline at end of file
diff --git a/chromium/media/capture/video/OWNERS b/chromium/media/capture/video/OWNERS
index 832f5a46e43..1197fa5da7e 100644
--- a/chromium/media/capture/video/OWNERS
+++ b/chromium/media/capture/video/OWNERS
@@ -5,6 +5,3 @@ guidou@chromium.org
chfremer@chromium.org
emircan@chromium.org
mcasas@chromium.org
-
-# TEAM: webrtc-dev@chromium.org
-# COMPONENT: Blink>GetUserMedia>WebCam
diff --git a/chromium/media/capture/video/android/video_capture_device_factory_android.cc b/chromium/media/capture/video/android/video_capture_device_factory_android.cc
index fbe2f899afc..ff95a22a29d 100644
--- a/chromium/media/capture/video/android/video_capture_device_factory_android.cc
+++ b/chromium/media/capture/video/android/video_capture_device_factory_android.cc
@@ -8,7 +8,6 @@
#include "base/android/jni_string.h"
#include "base/android/scoped_java_ref.h"
-#include "base/ranges/algorithm.h"
#include "base/single_thread_task_runner.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/stringprintf.h"
diff --git a/chromium/media/capture/video/chromeos/camera_3a_controller.cc b/chromium/media/capture/video/chromeos/camera_3a_controller.cc
index dc93a18ee5b..16079425ae8 100644
--- a/chromium/media/capture/video/chromeos/camera_3a_controller.cc
+++ b/chromium/media/capture/video/chromeos/camera_3a_controller.cc
@@ -397,6 +397,36 @@ void Camera3AController::SetAutoWhiteBalanceMode(
DVLOG(1) << "Setting AWB mode to: " << awb_mode_;
}
+void Camera3AController::SetExposureTime(bool enable_auto,
+ int64_t exposure_time_nanoseconds) {
+ DCHECK(task_runner_->BelongsToCurrentThread());
+
+ if (enable_auto) {
+ if (!available_ae_modes_.count(
+ cros::mojom::AndroidControlAeMode::ANDROID_CONTROL_AE_MODE_ON)) {
+ LOG(WARNING) << "Don't support ANDROID_CONTROL_AE_MODE_ON";
+ return;
+ }
+ ae_mode_ = cros::mojom::AndroidControlAeMode::ANDROID_CONTROL_AE_MODE_ON;
+ capture_metadata_dispatcher_->UnsetRepeatingCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_SENSOR_EXPOSURE_TIME);
+ } else {
+ if (!available_ae_modes_.count(
+ cros::mojom::AndroidControlAeMode::ANDROID_CONTROL_AE_MODE_OFF)) {
+ LOG(WARNING) << "Don't support ANDROID_CONTROL_AE_MODE_OFF";
+ return;
+ }
+ ae_mode_ = cros::mojom::AndroidControlAeMode::ANDROID_CONTROL_AE_MODE_OFF;
+ SetRepeatingCaptureMetadata(
+ cros::mojom::CameraMetadataTag::ANDROID_SENSOR_EXPOSURE_TIME,
+ exposure_time_nanoseconds);
+ }
+
+ Set3AMode(cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_MODE,
+ base::checked_cast<uint8_t>(ae_mode_));
+ DVLOG(1) << "Setting AE mode to: " << ae_mode_;
+}
+
bool Camera3AController::IsPointOfInterestSupported() {
return point_of_interest_supported_;
}
diff --git a/chromium/media/capture/video/chromeos/camera_3a_controller.h b/chromium/media/capture/video/chromeos/camera_3a_controller.h
index 3a62006b555..03ff69669f1 100644
--- a/chromium/media/capture/video/chromeos/camera_3a_controller.h
+++ b/chromium/media/capture/video/chromeos/camera_3a_controller.h
@@ -18,7 +18,7 @@ namespace media {
// operations and modes of the camera. For the detailed state transitions for
// auto-exposure, auto-focus, and auto-white-balancing, see
// https://source.android.com/devices/camera/camera3_3Amodes
-class CAPTURE_EXPORT Camera3AController
+class CAPTURE_EXPORT Camera3AController final
: public CaptureMetadataDispatcher::ResultMetadataObserver {
public:
Camera3AController(const cros::mojom::CameraMetadataPtr& static_metadata,
@@ -45,6 +45,11 @@ class CAPTURE_EXPORT Camera3AController
// Set auto white balance mode.
void SetAutoWhiteBalanceMode(cros::mojom::AndroidControlAwbMode mode);
+ // Set exposure time.
+ // |enable_auto| enables auto exposure mode. |exposure_time_nanoseconds| is
+ // only effective if |enable_auto| is set to false
+ void SetExposureTime(bool enable_auto, int64_t exposure_time_nanoseconds);
+
bool IsPointOfInterestSupported();
// Set point of interest. The coordinate system is based on the active
diff --git a/chromium/media/capture/video/chromeos/camera_device_context.cc b/chromium/media/capture/video/chromeos/camera_device_context.cc
index 0de1b5c1255..2de1625b056 100644
--- a/chromium/media/capture/video/chromeos/camera_device_context.cc
+++ b/chromium/media/capture/video/chromeos/camera_device_context.cc
@@ -4,25 +4,50 @@
#include "media/capture/video/chromeos/camera_device_context.h"
+#include "base/strings/string_number_conversions.h"
+
namespace media {
-CameraDeviceContext::CameraDeviceContext(
- std::unique_ptr<VideoCaptureDevice::Client> client)
- : state_(State::kStopped),
- sensor_orientation_(0),
- screen_rotation_(0),
- client_(std::move(client)) {
- DCHECK(client_);
+CameraDeviceContext::CameraDeviceContext()
+ : state_(State::kStopped), sensor_orientation_(0), screen_rotation_(0) {
DETACH_FROM_SEQUENCE(sequence_checker_);
}
CameraDeviceContext::~CameraDeviceContext() = default;
+bool CameraDeviceContext::AddClient(
+ ClientType client_type,
+ std::unique_ptr<VideoCaptureDevice::Client> client) {
+ DCHECK(client);
+ base::AutoLock lock(client_lock_);
+ if (!clients_.insert({client_type, std::move(client)}).second) {
+ SetErrorState(
+ media::VideoCaptureError::kCrosHalV3DeviceContextDuplicatedClient,
+ FROM_HERE,
+ std::string("Duplicated client in camera device context: ") +
+ base::NumberToString(static_cast<uint32_t>(client_type)));
+ return false;
+ }
+ return true;
+}
+
+void CameraDeviceContext::RemoveClient(ClientType client_type) {
+ base::AutoLock lock(client_lock_);
+ auto client = clients_.find(client_type);
+ if (client == clients_.end()) {
+ return;
+ }
+ clients_.erase(client);
+}
+
void CameraDeviceContext::SetState(State state) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
state_ = state;
if (state_ == State::kCapturing) {
- client_->OnStarted();
+ base::AutoLock lock(client_lock_);
+ for (const auto& client : clients_) {
+ client.second->OnStarted();
+ }
}
}
@@ -36,32 +61,52 @@ void CameraDeviceContext::SetErrorState(media::VideoCaptureError error,
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
state_ = State::kError;
LOG(ERROR) << reason;
- client_->OnError(error, from_here, reason);
+ base::AutoLock lock(client_lock_);
+ for (const auto& client : clients_) {
+ client.second->OnError(error, from_here, reason);
+ }
}
void CameraDeviceContext::LogToClient(std::string message) {
- client_->OnLog(message);
+ base::AutoLock lock(client_lock_);
+ for (const auto& client : clients_) {
+ client.second->OnLog(message);
+ }
}
void CameraDeviceContext::SubmitCapturedVideoCaptureBuffer(
+ ClientType client_type,
VideoCaptureDevice::Client::Buffer buffer,
const VideoCaptureFormat& frame_format,
base::TimeTicks reference_time,
base::TimeDelta timestamp,
const VideoFrameMetadata& metadata) {
- client_->OnIncomingCapturedBufferExt(
+ base::AutoLock lock(client_lock_);
+ auto client = clients_.find(client_type);
+ if (client == clients_.end()) {
+ return;
+ }
+
+ client->second->OnIncomingCapturedBufferExt(
std::move(buffer), frame_format, gfx::ColorSpace(), reference_time,
timestamp, gfx::Rect(frame_format.frame_size), std::move(metadata));
}
void CameraDeviceContext::SubmitCapturedGpuMemoryBuffer(
+ ClientType client_type,
gfx::GpuMemoryBuffer* buffer,
const VideoCaptureFormat& frame_format,
base::TimeTicks reference_time,
base::TimeDelta timestamp) {
- client_->OnIncomingCapturedGfxBuffer(buffer, frame_format,
- GetCameraFrameRotation(), reference_time,
- timestamp);
+ base::AutoLock lock(client_lock_);
+ auto client = clients_.find(client_type);
+ if (client == clients_.end()) {
+ return;
+ }
+
+ client->second->OnIncomingCapturedGfxBuffer(buffer, frame_format,
+ GetCameraFrameRotation(),
+ reference_time, timestamp);
}
void CameraDeviceContext::SetSensorOrientation(int sensor_orientation) {
@@ -91,13 +136,20 @@ int CameraDeviceContext::GetRotationFromSensorOrientation() {
}
bool CameraDeviceContext::ReserveVideoCaptureBufferFromPool(
+ ClientType client_type,
gfx::Size size,
VideoPixelFormat format,
VideoCaptureDevice::Client::Buffer* buffer) {
+ base::AutoLock lock(client_lock_);
+ auto client = clients_.find(client_type);
+ if (client == clients_.end()) {
+ return false;
+ }
+
// Use a dummy frame feedback id as we don't need it.
constexpr int kDummyFrameFeedbackId = 0;
- auto result =
- client_->ReserveOutputBuffer(size, format, kDummyFrameFeedbackId, buffer);
+ auto result = client->second->ReserveOutputBuffer(
+ size, format, kDummyFrameFeedbackId, buffer);
return result == VideoCaptureDevice::Client::ReserveResult::kSucceeded;
}
diff --git a/chromium/media/capture/video/chromeos/camera_device_context.h b/chromium/media/capture/video/chromeos/camera_device_context.h
index b139b0b55ab..b7c64d5d02d 100644
--- a/chromium/media/capture/video/chromeos/camera_device_context.h
+++ b/chromium/media/capture/video/chromeos/camera_device_context.h
@@ -8,15 +8,23 @@
#include <memory>
#include <string>
+#include "base/containers/flat_map.h"
#include "base/sequence_checker.h"
+#include "base/synchronization/lock.h"
#include "media/capture/video/video_capture_device.h"
namespace media {
+enum class ClientType : uint32_t {
+ kPreviewClient = 0,
+ kVideoClient = 1,
+};
+
// A class storing the context of a running CameraDeviceDelegate.
//
// The class is also used to forward/translate events and method calls to a
-// given VideoCaptureDevice::Client.
+// given VideoCaptureDevice::Client. This class supposes to have two clients
+// at most. One is for preview and another is for video.
class CAPTURE_EXPORT CameraDeviceContext {
public:
// The internal state of the running CameraDeviceDelegate. The state
@@ -91,11 +99,13 @@ class CAPTURE_EXPORT CameraDeviceContext {
kError,
};
- explicit CameraDeviceContext(
- std::unique_ptr<VideoCaptureDevice::Client> client);
+ CameraDeviceContext();
~CameraDeviceContext();
+ bool AddClient(ClientType client_type,
+ std::unique_ptr<VideoCaptureDevice::Client> client);
+ void RemoveClient(ClientType client_type);
void SetState(State state);
State GetState();
@@ -114,6 +124,7 @@ class CAPTURE_EXPORT CameraDeviceContext {
// The buffer would be passed to the renderer process directly through
// |client_->OnIncomingCapturedBufferExt|.
void SubmitCapturedVideoCaptureBuffer(
+ ClientType client_type,
VideoCaptureDevice::Client::Buffer buffer,
const VideoCaptureFormat& frame_format,
base::TimeTicks reference_time,
@@ -125,7 +136,8 @@ class CAPTURE_EXPORT CameraDeviceContext {
// |client_->OnIncomingCapturedGfxBuffer|, which would perform buffer copy
// and/or format conversion to an I420 SharedMemory-based video capture buffer
// for client consumption.
- void SubmitCapturedGpuMemoryBuffer(gfx::GpuMemoryBuffer* buffer,
+ void SubmitCapturedGpuMemoryBuffer(ClientType client_type,
+ gfx::GpuMemoryBuffer* buffer,
const VideoCaptureFormat& frame_format,
base::TimeTicks reference_time,
base::TimeDelta timestamp);
@@ -149,10 +161,14 @@ class CAPTURE_EXPORT CameraDeviceContext {
// Reserves a video capture buffer from the buffer pool provided by the video
// |client_|. Returns true if the operation succeeds; false otherwise.
bool ReserveVideoCaptureBufferFromPool(
+ ClientType client_type,
gfx::Size size,
VideoPixelFormat format,
VideoCaptureDevice::Client::Buffer* buffer);
+ // Returns true if there is a client.
+ bool HasClient();
+
private:
friend class RequestManagerTest;
@@ -170,9 +186,10 @@ class CAPTURE_EXPORT CameraDeviceContext {
// 270.
int screen_rotation_;
- std::unique_ptr<VideoCaptureDevice::Client> client_;
-
- DISALLOW_IMPLICIT_CONSTRUCTORS(CameraDeviceContext);
+ base::Lock client_lock_;
+ // A map for client type and client instance.
+ base::flat_map<ClientType, std::unique_ptr<VideoCaptureDevice::Client>>
+ clients_ GUARDED_BY(client_lock_);
};
} // namespace media
diff --git a/chromium/media/capture/video/chromeos/camera_device_delegate.cc b/chromium/media/capture/video/chromeos/camera_device_delegate.cc
index 4664908380b..ab213906718 100644
--- a/chromium/media/capture/video/chromeos/camera_device_delegate.cc
+++ b/chromium/media/capture/video/chromeos/camera_device_delegate.cc
@@ -12,7 +12,7 @@
#include <vector>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/no_destructor.h"
#include "base/numerics/ranges.h"
#include "base/posix/safe_strerror.h"
@@ -23,7 +23,6 @@
#include "media/capture/video/blob_utils.h"
#include "media/capture/video/chromeos/camera_3a_controller.h"
#include "media/capture/video/chromeos/camera_buffer_factory.h"
-#include "media/capture/video/chromeos/camera_device_context.h"
#include "media/capture/video/chromeos/camera_hal_delegate.h"
#include "media/capture/video/chromeos/camera_metadata_utils.h"
#include "media/capture/video/chromeos/request_manager.h"
@@ -48,6 +47,7 @@ constexpr char kTiltRange[] = "com.google.control.tiltRange";
constexpr char kZoom[] = "com.google.control.zoom";
constexpr char kZoomRange[] = "com.google.control.zoomRange";
constexpr int32_t kColorTemperatureStep = 100;
+constexpr int32_t kMicroToNano = 1000;
using AwbModeTemperatureMap = std::map<uint8_t, int32_t>;
@@ -271,11 +271,13 @@ CameraDeviceDelegate::CameraDeviceDelegate(
VideoCaptureDeviceDescriptor device_descriptor,
scoped_refptr<CameraHalDelegate> camera_hal_delegate,
scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner,
- CameraAppDeviceImpl* camera_app_device)
+ CameraAppDeviceImpl* camera_app_device,
+ ClientType client_type)
: device_descriptor_(device_descriptor),
camera_hal_delegate_(std::move(camera_hal_delegate)),
ipc_task_runner_(std::move(ipc_task_runner)),
- camera_app_device_(camera_app_device) {}
+ camera_app_device_(camera_app_device),
+ client_type_(client_type) {}
CameraDeviceDelegate::~CameraDeviceDelegate() = default;
@@ -289,6 +291,7 @@ void CameraDeviceDelegate::AllocateAndStart(
is_set_awb_mode_ = false;
is_set_brightness_ = false;
is_set_contrast_ = false;
+ is_set_exposure_time_ = false;
is_set_pan_ = false;
is_set_saturation_ = false;
is_set_sharpness_ = false;
@@ -509,29 +512,33 @@ void CameraDeviceDelegate::SetPhotoOptions(
is_set_awb_mode_ = false;
}
- bool is_resolution_specified = settings->has_width && settings->has_height;
- bool should_reconfigure_streams =
- is_resolution_specified && (current_blob_resolution_.IsEmpty() ||
- current_blob_resolution_.width() !=
- static_cast<int32_t>(settings->width) ||
- current_blob_resolution_.height() !=
- static_cast<int32_t>(settings->height));
- if (!request_manager_->HasStreamsConfiguredForTakePhoto() ||
- should_reconfigure_streams) {
- if (is_resolution_specified) {
- gfx::Size new_blob_resolution(static_cast<int32_t>(settings->width),
- static_cast<int32_t>(settings->height));
- request_manager_->StopPreview(
- base::BindOnce(&CameraDeviceDelegate::OnFlushed, GetWeakPtr(),
- std::move(new_blob_resolution)));
+ if (settings->has_exposure_mode &&
+ settings->exposure_mode == mojom::MeteringMode::MANUAL &&
+ settings->has_exposure_time) {
+ int64_t exposure_time_nanoseconds_ =
+ settings->exposure_time * 100 * kMicroToNano;
+ camera_3a_controller_->SetExposureTime(false, exposure_time_nanoseconds_);
+ is_set_exposure_time_ = true;
+ } else if (is_set_exposure_time_) {
+ camera_3a_controller_->SetExposureTime(true, 0);
+ is_set_exposure_time_ = false;
+ }
+
+ // If there is a callback of SetPhotoOptions(), the streams might be
+ // reconfigured and we should notify it once the reconfiguration is done.
+ auto on_reconfigured_callback = base::BindOnce(
+ [](VideoCaptureDevice::SetPhotoOptionsCallback callback) {
+ std::move(callback).Run(true);
+ },
+ std::move(callback));
+ if (!on_reconfigured_callbacks_.empty()) {
+ on_reconfigured_callbacks_.push(std::move(on_reconfigured_callback));
+ } else {
+ if (MaybeReconfigureForPhotoStream(std::move(settings))) {
+ on_reconfigured_callbacks_.push(std::move(on_reconfigured_callback));
} else {
- request_manager_->StopPreview(base::BindOnce(
- &CameraDeviceDelegate::OnFlushed, GetWeakPtr(), base::nullopt));
+ std::move(on_reconfigured_callback).Run();
}
- set_photo_option_callback_ = std::move(callback);
- } else {
- set_photo_option_callback_.Reset();
- std::move(callback).Run(true);
}
result_metadata_frame_number_for_photo_state_ = current_request_frame_number_;
}
@@ -546,6 +553,32 @@ base::WeakPtr<CameraDeviceDelegate> CameraDeviceDelegate::GetWeakPtr() {
return weak_ptr_factory_.GetWeakPtr();
}
+bool CameraDeviceDelegate::MaybeReconfigureForPhotoStream(
+ mojom::PhotoSettingsPtr settings) {
+ bool is_resolution_specified = settings->has_width && settings->has_height;
+ bool should_reconfigure_streams =
+ (is_resolution_specified && (current_blob_resolution_.IsEmpty() ||
+ current_blob_resolution_.width() !=
+ static_cast<int32_t>(settings->width) ||
+ current_blob_resolution_.height() !=
+ static_cast<int32_t>(settings->height)));
+ if (!should_reconfigure_streams) {
+ return false;
+ }
+
+ if (is_resolution_specified) {
+ gfx::Size new_blob_resolution(static_cast<int32_t>(settings->width),
+ static_cast<int32_t>(settings->height));
+ request_manager_->StopPreview(
+ base::BindOnce(&CameraDeviceDelegate::OnFlushed, GetWeakPtr(),
+ std::move(new_blob_resolution)));
+ } else {
+ request_manager_->StopPreview(base::BindOnce(
+ &CameraDeviceDelegate::OnFlushed, GetWeakPtr(), base::nullopt));
+ }
+ return true;
+}
+
void CameraDeviceDelegate::TakePhotoImpl() {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
@@ -559,15 +592,18 @@ void CameraDeviceDelegate::TakePhotoImpl() {
return;
}
- SetPhotoOptions(
- mojom::PhotoSettings::New(),
- base::BindOnce(
- [](base::WeakPtr<Camera3AController> controller,
- base::OnceClosure callback, bool result) {
- controller->Stabilize3AForStillCapture(std::move(callback));
- },
- camera_3a_controller_->GetWeakPtr(),
- std::move(construct_request_cb)));
+ // Trigger the reconfigure process if it is not yet triggered.
+ if (on_reconfigured_callbacks_.empty()) {
+ request_manager_->StopPreview(base::BindOnce(
+ &CameraDeviceDelegate::OnFlushed, GetWeakPtr(), base::nullopt));
+ }
+ auto on_reconfigured_callback = base::BindOnce(
+ [](base::WeakPtr<Camera3AController> controller,
+ base::OnceClosure callback) {
+ controller->Stabilize3AForStillCapture(std::move(callback));
+ },
+ camera_3a_controller_->GetWeakPtr(), std::move(construct_request_cb));
+ on_reconfigured_callbacks_.push(std::move(on_reconfigured_callback));
}
void CameraDeviceDelegate::OnMojoConnectionError() {
@@ -680,7 +716,7 @@ void CameraDeviceDelegate::Initialize() {
device_context_, chrome_capture_params_.buffer_type,
std::make_unique<CameraBufferFactory>(),
base::BindRepeating(&RotateAndBlobify), ipc_task_runner_,
- camera_app_device_);
+ camera_app_device_, client_type_);
camera_3a_controller_ = std::make_unique<Camera3AController>(
static_metadata_, request_manager_.get(), ipc_task_runner_);
device_ops_->Initialize(
@@ -1068,8 +1104,9 @@ void CameraDeviceDelegate::OnGotFpsRange(
TakePhotoImpl();
}
- if (set_photo_option_callback_) {
- std::move(set_photo_option_callback_).Run(true);
+ while (!on_reconfigured_callbacks_.empty()) {
+ std::move(on_reconfigured_callbacks_.front()).Run();
+ on_reconfigured_callbacks_.pop();
}
}
@@ -1242,13 +1279,25 @@ void CameraDeviceDelegate::OnResultMetadataAvailable(
gfx::Rect(rect[0], rect[1], rect[2], rect[3]);
}
+ result_metadata_.ae_mode.reset();
+ auto ae_mode = GetMetadataEntryAsSpan<uint8_t>(
+ result_metadata, cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_MODE);
+ if (ae_mode.size() == 1)
+ result_metadata_.ae_mode = ae_mode[0];
+
+ result_metadata_.exposure_time.reset();
+ auto exposure_time = GetMetadataEntryAsSpan<int64_t>(
+ result_metadata,
+ cros::mojom::CameraMetadataTag::ANDROID_SENSOR_EXPOSURE_TIME);
+ if (exposure_time.size() == 1)
+ result_metadata_.exposure_time = exposure_time[0];
+
result_metadata_.awb_mode.reset();
auto awb_mode = GetMetadataEntryAsSpan<uint8_t>(
result_metadata,
cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AWB_MODE);
- if (awb_mode.size() == 1) {
+ if (awb_mode.size() == 1)
result_metadata_.awb_mode = awb_mode[0];
- }
result_metadata_frame_number_ = frame_number;
// We need to wait the new result metadata for new settings.
@@ -1382,6 +1431,45 @@ void CameraDeviceDelegate::DoGetPhotoState(
photo_state->color_temperature->current = current_temperature;
}
+ auto ae_available_modes = GetMetadataEntryAsSpan<uint8_t>(
+ static_metadata_,
+ cros::mojom::CameraMetadataTag::ANDROID_CONTROL_AE_AVAILABLE_MODES);
+ bool support_manual_exposure_time = false;
+ if (ae_available_modes.size() > 1 && result_metadata_.ae_mode) {
+ support_manual_exposure_time = base::Contains(
+ ae_available_modes,
+ static_cast<uint8_t>(
+ cros::mojom::AndroidControlAeMode::ANDROID_CONTROL_AE_MODE_OFF));
+ }
+
+ auto exposure_time_range = GetMetadataEntryAsSpan<int64_t>(
+ static_metadata_,
+ cros::mojom::CameraMetadataTag::ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE);
+
+ if (support_manual_exposure_time && exposure_time_range.size() == 2 &&
+ result_metadata_.exposure_time) {
+ photo_state->supported_exposure_modes.push_back(
+ mojom::MeteringMode::MANUAL);
+ photo_state->supported_exposure_modes.push_back(
+ mojom::MeteringMode::CONTINUOUS);
+ if (result_metadata_.ae_mode ==
+ static_cast<uint8_t>(
+ cros::mojom::AndroidControlAeMode::ANDROID_CONTROL_AE_MODE_OFF))
+ photo_state->current_exposure_mode = mojom::MeteringMode::MANUAL;
+ else
+ photo_state->current_exposure_mode = mojom::MeteringMode::CONTINUOUS;
+
+ // The unit of photo_state->exposure_time is 100 microseconds, while the
+ // unit of the value from the metadata is nanoseconds.
+ photo_state->exposure_time->min = std::ceil(
+ static_cast<float>(exposure_time_range[0]) / (100 * kMicroToNano));
+ photo_state->exposure_time->max =
+ exposure_time_range[1] / (100 * kMicroToNano);
+ photo_state->exposure_time->step = 1; // 100 microseconds
+ photo_state->exposure_time->current =
+ result_metadata_.exposure_time.value() / (100 * kMicroToNano);
+ }
+
std::move(callback).Run(std::move(photo_state));
}
diff --git a/chromium/media/capture/video/chromeos/camera_device_delegate.h b/chromium/media/capture/video/chromeos/camera_device_delegate.h
index 78868a5a090..01a37cfd67e 100644
--- a/chromium/media/capture/video/chromeos/camera_device_delegate.h
+++ b/chromium/media/capture/video/chromeos/camera_device_delegate.h
@@ -6,11 +6,13 @@
#define MEDIA_CAPTURE_VIDEO_CHROMEOS_CAMERA_DEVICE_DELEGATE_H_
#include <memory>
+#include <queue>
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
#include "base/optional.h"
#include "base/single_thread_task_runner.h"
+#include "media/capture/video/chromeos/camera_device_context.h"
#include "media/capture/video/chromeos/capture_metadata_dispatcher.h"
#include "media/capture/video/chromeos/mojom/camera3.mojom.h"
#include "media/capture/video/chromeos/mojom/camera_common.mojom.h"
@@ -24,7 +26,6 @@ namespace media {
class Camera3AController;
class CameraAppDeviceImpl;
-class CameraDeviceContext;
class CameraHalDelegate;
class RequestManager;
@@ -42,9 +43,11 @@ struct ResultMetadata {
ResultMetadata();
~ResultMetadata();
+ base::Optional<uint8_t> ae_mode;
base::Optional<uint8_t> awb_mode;
base::Optional<int32_t> brightness;
base::Optional<int32_t> contrast;
+ base::Optional<int64_t> exposure_time;
base::Optional<int32_t> pan;
base::Optional<int32_t> saturation;
base::Optional<int32_t> sharpness;
@@ -96,7 +99,8 @@ class CAPTURE_EXPORT CameraDeviceDelegate final
VideoCaptureDeviceDescriptor device_descriptor,
scoped_refptr<CameraHalDelegate> camera_hal_delegate,
scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner,
- CameraAppDeviceImpl* camera_app_device);
+ CameraAppDeviceImpl* camera_app_device,
+ ClientType client_type);
~CameraDeviceDelegate() final;
@@ -121,6 +125,10 @@ class CAPTURE_EXPORT CameraDeviceDelegate final
friend class CameraDeviceDelegateTest;
+ // Reconfigures the streams to include photo stream according to |settings|.
+ // Returns true if the reconfigure process is triggered.
+ bool MaybeReconfigureForPhotoStream(mojom::PhotoSettingsPtr settings);
+
void TakePhotoImpl();
// Mojo connection error handler.
@@ -236,7 +244,7 @@ class CAPTURE_EXPORT CameraDeviceDelegate final
base::OnceClosure device_close_callback_;
- VideoCaptureDevice::SetPhotoOptionsCallback set_photo_option_callback_;
+ std::queue<base::OnceClosure> on_reconfigured_callbacks_;
CameraAppDeviceImpl* camera_app_device_; // Weak.
@@ -244,6 +252,7 @@ class CAPTURE_EXPORT CameraDeviceDelegate final
bool is_set_awb_mode_;
bool is_set_brightness_;
bool is_set_contrast_;
+ bool is_set_exposure_time_;
bool is_set_pan_;
bool is_set_saturation_;
bool is_set_sharpness_;
@@ -263,6 +272,8 @@ class CAPTURE_EXPORT CameraDeviceDelegate final
ResultMetadata result_metadata_;
gfx::Rect active_array_size_;
+ ClientType client_type_;
+
base::WeakPtrFactory<CameraDeviceDelegate> weak_ptr_factory_{this};
DISALLOW_IMPLICIT_CONSTRUCTORS(CameraDeviceDelegate);
diff --git a/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc b/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc
index 8c569077b7b..01ed4b806da 100644
--- a/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc
+++ b/chromium/media/capture/video/chromeos/camera_device_delegate_unittest.cc
@@ -11,9 +11,9 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/run_loop.h"
-#include "base/test/bind_test_util.h"
+#include "base/test/bind.h"
#include "base/test/task_environment.h"
#include "media/base/bind_to_current_loop.h"
#include "media/capture/video/chromeos/camera_buffer_factory.h"
@@ -162,7 +162,7 @@ class CameraDeviceDelegateTest : public ::testing::Test {
camera_device_delegate_ = std::make_unique<CameraDeviceDelegate>(
devices_info[0].descriptor, camera_hal_delegate_,
- device_delegate_thread_.task_runner(), nullptr);
+ device_delegate_thread_.task_runner(), nullptr, client_type_);
}
void GetNumberOfFakeCameras(
@@ -462,15 +462,17 @@ class CameraDeviceDelegateTest : public ::testing::Test {
}
unittest_internal::NiceMockVideoCaptureClient* ResetDeviceContext() {
+ client_type_ = ClientType::kPreviewClient;
auto mock_client =
std::make_unique<unittest_internal::NiceMockVideoCaptureClient>();
auto* client_ptr = mock_client.get();
- device_context_ =
- std::make_unique<CameraDeviceContext>(std::move(mock_client));
+ device_context_ = std::make_unique<CameraDeviceContext>();
+ device_context_->AddClient(client_type_, std::move(mock_client));
return client_ptr;
}
void ResetDevice() {
+ device_context_->RemoveClient(client_type_);
ASSERT_TRUE(device_delegate_thread_.IsRunning());
ASSERT_TRUE(camera_device_delegate_);
ASSERT_TRUE(device_delegate_thread_.task_runner()->DeleteSoon(
@@ -516,6 +518,7 @@ class CameraDeviceDelegateTest : public ::testing::Test {
base::Thread device_delegate_thread_;
std::unique_ptr<CameraDeviceContext> device_context_;
+ ClientType client_type_;
private:
base::Thread hal_delegate_thread_;
diff --git a/chromium/media/capture/video/chromeos/camera_hal_delegate.cc b/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
index f61c64da379..a0bee569637 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
+++ b/chromium/media/capture/video/chromeos/camera_hal_delegate.cc
@@ -11,7 +11,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/containers/flat_set.h"
#include "base/posix/safe_strerror.h"
#include "base/process/launch.h"
diff --git a/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc b/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc
index c7469a70d0b..89208e5610f 100644
--- a/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc
+++ b/chromium/media/capture/video/chromeos/camera_hal_delegate_unittest.cc
@@ -11,7 +11,7 @@
#include <utility>
#include "base/run_loop.h"
-#include "base/test/bind_test_util.h"
+#include "base/test/bind.h"
#include "base/test/task_environment.h"
#include "media/capture/video/chromeos/mock_camera_module.h"
#include "media/capture/video/chromeos/mock_vendor_tag_ops.h"
diff --git a/chromium/media/capture/video/chromeos/camera_metadata_utils.cc b/chromium/media/capture/video/chromeos/camera_metadata_utils.cc
index 4b2fc0c910c..c3394d55b45 100644
--- a/chromium/media/capture/video/chromeos/camera_metadata_utils.cc
+++ b/chromium/media/capture/video/chromeos/camera_metadata_utils.cc
@@ -52,7 +52,15 @@ cros::mojom::CameraMetadataEntryPtr* GetMetadataEntry(
if (iter == camera_metadata->entries.value().end()) {
return nullptr;
}
- return &(camera_metadata->entries.value()[(*iter)->index]);
+
+ auto* entry_ptr = &(camera_metadata->entries.value()[(*iter)->index]);
+ if (!(*entry_ptr)->data.data()) {
+ // Metadata tag found with no valid data.
+ LOG(WARNING) << "Found tag " << static_cast<int>(tag)
+ << " but with invalid data";
+ return nullptr;
+ }
+ return entry_ptr;
}
void AddOrUpdateMetadataEntry(cros::mojom::CameraMetadataPtr* to,
diff --git a/chromium/media/capture/video/chromeos/mock_video_capture_client.cc b/chromium/media/capture/video/chromeos/mock_video_capture_client.cc
index 734f8f7914e..9151ac99363 100644
--- a/chromium/media/capture/video/chromeos/mock_video_capture_client.cc
+++ b/chromium/media/capture/video/chromeos/mock_video_capture_client.cc
@@ -69,7 +69,6 @@ void MockVideoCaptureClient::OnIncomingCapturedGfxBuffer(
void MockVideoCaptureClient::OnIncomingCapturedExternalBuffer(
gfx::GpuMemoryBufferHandle handle,
- std::unique_ptr<Buffer::ScopedAccessPermission> read_access_permission,
const VideoCaptureFormat& format,
const gfx::ColorSpace& color_space,
base::TimeTicks reference_time,
diff --git a/chromium/media/capture/video/chromeos/mock_video_capture_client.h b/chromium/media/capture/video/chromeos/mock_video_capture_client.h
index 10270b8b940..ddb9380aece 100644
--- a/chromium/media/capture/video/chromeos/mock_video_capture_client.h
+++ b/chromium/media/capture/video/chromeos/mock_video_capture_client.h
@@ -56,7 +56,6 @@ class MockVideoCaptureClient : public VideoCaptureDevice::Client {
int frame_feedback_id = 0) override;
void OnIncomingCapturedExternalBuffer(
gfx::GpuMemoryBufferHandle handle,
- std::unique_ptr<Buffer::ScopedAccessPermission> read_access_permission,
const VideoCaptureFormat& format,
const gfx::ColorSpace& color_space,
base::TimeTicks reference_time,
diff --git a/chromium/media/capture/video/chromeos/request_manager.cc b/chromium/media/capture/video/chromeos/request_manager.cc
index cb64da2283b..7dedadf09b3 100644
--- a/chromium/media/capture/video/chromeos/request_manager.cc
+++ b/chromium/media/capture/video/chromeos/request_manager.cc
@@ -13,12 +13,11 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/posix/safe_strerror.h"
#include "base/strings/string_number_conversions.h"
#include "base/trace_event/trace_event.h"
#include "media/capture/video/chromeos/camera_buffer_factory.h"
-#include "media/capture/video/chromeos/camera_device_context.h"
#include "media/capture/video/chromeos/camera_metadata_utils.h"
#include "media/capture/video/chromeos/video_capture_features_chromeos.h"
#include "mojo/public/cpp/platform/platform_handle.h"
@@ -43,7 +42,8 @@ RequestManager::RequestManager(
std::unique_ptr<CameraBufferFactory> camera_buffer_factory,
BlobifyCallback blobify_callback,
scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner,
- CameraAppDeviceImpl* camera_app_device)
+ CameraAppDeviceImpl* camera_app_device,
+ ClientType client_type)
: callback_ops_(this, std::move(callback_ops_receiver)),
capture_interface_(std::move(capture_interface)),
device_context_(device_context),
@@ -52,13 +52,15 @@ RequestManager::RequestManager(
stream_buffer_manager_(
new StreamBufferManager(device_context_,
video_capture_use_gmb_,
- std::move(camera_buffer_factory))),
+ std::move(camera_buffer_factory),
+ client_type)),
blobify_callback_(std::move(blobify_callback)),
ipc_task_runner_(std::move(ipc_task_runner)),
capturing_(false),
partial_result_count_(1),
first_frame_shutter_time_(base::TimeTicks()),
- camera_app_device_(std::move(camera_app_device)) {
+ camera_app_device_(std::move(camera_app_device)),
+ client_type_(client_type) {
DCHECK(ipc_task_runner_->BelongsToCurrentThread());
DCHECK(callback_ops_.is_bound());
DCHECK(device_context_);
@@ -864,7 +866,7 @@ void RequestManager::SubmitCapturedPreviewBuffer(uint32_t frame_number,
metadata.rotation = VideoRotation::VIDEO_ROTATION_0;
}
device_context_->SubmitCapturedVideoCaptureBuffer(
- std::move(*buffer), format, pending_result.reference_time,
+ client_type_, std::move(*buffer), format, pending_result.reference_time,
pending_result.timestamp, metadata);
// |buffer| ownership is transferred to client, so we need to reserve a
// new video buffer.
@@ -874,7 +876,7 @@ void RequestManager::SubmitCapturedPreviewBuffer(uint32_t frame_number,
StreamType::kPreviewOutput, buffer_ipc_id);
CHECK(gmb);
device_context_->SubmitCapturedGpuMemoryBuffer(
- gmb,
+ client_type_, gmb,
stream_buffer_manager_->GetStreamCaptureFormat(
StreamType::kPreviewOutput),
pending_result.reference_time, pending_result.timestamp);
diff --git a/chromium/media/capture/video/chromeos/request_manager.h b/chromium/media/capture/video/chromeos/request_manager.h
index f71699d7a0b..f5d4fdad821 100644
--- a/chromium/media/capture/video/chromeos/request_manager.h
+++ b/chromium/media/capture/video/chromeos/request_manager.h
@@ -16,6 +16,7 @@
#include "base/optional.h"
#include "media/capture/mojom/image_capture.mojom.h"
#include "media/capture/video/chromeos/camera_app_device_impl.h"
+#include "media/capture/video/chromeos/camera_device_context.h"
#include "media/capture/video/chromeos/camera_device_delegate.h"
#include "media/capture/video/chromeos/capture_metadata_dispatcher.h"
#include "media/capture/video/chromeos/mojom/camera3.mojom.h"
@@ -29,7 +30,6 @@
namespace media {
class CameraBufferFactory;
-class CameraDeviceContext;
// The JPEG transport header as defined by Android camera HAL v3 API. The JPEG
// transport header is at the end of the blob buffer filled by the HAL.
@@ -107,7 +107,8 @@ class CAPTURE_EXPORT RequestManager final
std::unique_ptr<CameraBufferFactory> camera_buffer_factory,
BlobifyCallback blobify_callback,
scoped_refptr<base::SingleThreadTaskRunner> ipc_task_runner,
- CameraAppDeviceImpl* camera_app_device);
+ CameraAppDeviceImpl* camera_app_device,
+ ClientType client_type);
~RequestManager() override;
// Sets up the stream context and allocate buffers according to the
@@ -363,6 +364,8 @@ class CAPTURE_EXPORT RequestManager final
CameraAppDeviceImpl* camera_app_device_; // Weak.
+ ClientType client_type_;
+
base::WeakPtrFactory<RequestManager> weak_ptr_factory_{this};
DISALLOW_IMPLICIT_CONSTRUCTORS(RequestManager);
diff --git a/chromium/media/capture/video/chromeos/request_manager_unittest.cc b/chromium/media/capture/video/chromeos/request_manager_unittest.cc
index 74fad9dca4d..6227b6f3e43 100644
--- a/chromium/media/capture/video/chromeos/request_manager_unittest.cc
+++ b/chromium/media/capture/video/chromeos/request_manager_unittest.cc
@@ -86,22 +86,28 @@ class RequestManagerTest : public ::testing::Test {
public:
void SetUp() override {
quit_ = false;
- device_context_ = std::make_unique<CameraDeviceContext>(
- std::make_unique<unittest_internal::MockVideoCaptureClient>());
-
- request_manager_ = std::make_unique<RequestManager>(
- mock_callback_ops_.BindNewPipeAndPassReceiver(),
- std::make_unique<MockStreamCaptureInterface>(), device_context_.get(),
- VideoCaptureBufferType::kSharedMemory,
- std::make_unique<FakeCameraBufferFactory>(),
- base::BindRepeating(
- [](const uint8_t* buffer, const uint32_t bytesused,
- const VideoCaptureFormat& capture_format,
- const int rotation) { return mojom::Blob::New(); }),
- base::ThreadTaskRunnerHandle::Get(), nullptr);
+ client_type_ = ClientType::kPreviewClient;
+ device_context_ = std::make_unique<CameraDeviceContext>();
+ if (device_context_->AddClient(
+ client_type_,
+ std::make_unique<unittest_internal::MockVideoCaptureClient>())) {
+ request_manager_ = std::make_unique<RequestManager>(
+ mock_callback_ops_.BindNewPipeAndPassReceiver(),
+ std::make_unique<MockStreamCaptureInterface>(), device_context_.get(),
+ VideoCaptureBufferType::kSharedMemory,
+ std::make_unique<FakeCameraBufferFactory>(),
+ base::BindRepeating(
+ [](const uint8_t* buffer, const uint32_t bytesused,
+ const VideoCaptureFormat& capture_format,
+ const int rotation) { return mojom::Blob::New(); }),
+ base::ThreadTaskRunnerHandle::Get(), nullptr, client_type_);
+ }
}
- void TearDown() override { request_manager_.reset(); }
+ void TearDown() override {
+ device_context_->RemoveClient(client_type_);
+ request_manager_.reset();
+ }
void DoLoop() {
run_loop_.reset(new base::RunLoop());
@@ -181,8 +187,10 @@ class RequestManagerTest : public ::testing::Test {
unittest_internal::MockVideoCaptureClient* GetMockVideoCaptureClient() {
EXPECT_NE(nullptr, device_context_);
+ base::AutoLock lock(device_context_->client_lock_);
+ EXPECT_TRUE(!device_context_->clients_.empty());
return reinterpret_cast<unittest_internal::MockVideoCaptureClient*>(
- device_context_->client_.get());
+ device_context_->clients_[client_type_].get());
}
std::map<uint32_t, RequestManager::CaptureResult>& GetPendingResults() {
@@ -275,7 +283,7 @@ class RequestManagerTest : public ::testing::Test {
std::unique_ptr<RequestManager> request_manager_;
mojo::Remote<cros::mojom::Camera3CallbackOps> mock_callback_ops_;
std::unique_ptr<CameraDeviceContext> device_context_;
- cros::mojom::Camera3StreamPtr stream;
+ ClientType client_type_;
private:
std::unique_ptr<base::RunLoop> run_loop_;
diff --git a/chromium/media/capture/video/chromeos/stream_buffer_manager.cc b/chromium/media/capture/video/chromeos/stream_buffer_manager.cc
index ce40c8efd4a..75bd224507a 100644
--- a/chromium/media/capture/video/chromeos/stream_buffer_manager.cc
+++ b/chromium/media/capture/video/chromeos/stream_buffer_manager.cc
@@ -8,13 +8,12 @@
#include <string>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/posix/safe_strerror.h"
#include "base/strings/string_number_conversions.h"
#include "base/trace_event/trace_event.h"
#include "gpu/ipc/common/gpu_memory_buffer_support.h"
#include "media/capture/video/chromeos/camera_buffer_factory.h"
-#include "media/capture/video/chromeos/camera_device_context.h"
#include "media/capture/video/chromeos/camera_metadata_utils.h"
#include "media/capture/video/chromeos/pixel_format_utils.h"
#include "media/capture/video/chromeos/request_builder.h"
@@ -28,10 +27,12 @@ namespace media {
StreamBufferManager::StreamBufferManager(
CameraDeviceContext* device_context,
bool video_capture_use_gmb,
- std::unique_ptr<CameraBufferFactory> camera_buffer_factory)
+ std::unique_ptr<CameraBufferFactory> camera_buffer_factory,
+ ClientType client_type)
: device_context_(device_context),
video_capture_use_gmb_(video_capture_use_gmb),
- camera_buffer_factory_(std::move(camera_buffer_factory)) {
+ camera_buffer_factory_(std::move(camera_buffer_factory)),
+ client_type_(client_type) {
if (video_capture_use_gmb_) {
gmb_support_ = std::make_unique<gpu::GpuMemoryBufferSupport>();
}
@@ -155,7 +156,8 @@ StreamBufferManager::AcquireBufferForClientById(StreamType stream_type,
// We have to reserve a new buffer because the size is different.
Buffer rotated_buffer;
if (!device_context_->ReserveVideoCaptureBufferFromPool(
- format->frame_size, format->pixel_format, &rotated_buffer)) {
+ client_type_, format->frame_size, format->pixel_format,
+ &rotated_buffer)) {
DLOG(WARNING) << "Failed to reserve video capture buffer";
original_gmb->Unmap();
return std::move(buffer_pair.vcd_buffer);
@@ -440,7 +442,7 @@ void StreamBufferManager::ReserveBufferFromPool(StreamType stream_type) {
}
Buffer vcd_buffer;
if (!device_context_->ReserveVideoCaptureBufferFromPool(
- stream_context->buffer_dimension,
+ client_type_, stream_context->buffer_dimension,
stream_context->capture_format.pixel_format, &vcd_buffer)) {
DLOG(WARNING) << "Failed to reserve video capture buffer";
return;
diff --git a/chromium/media/capture/video/chromeos/stream_buffer_manager.h b/chromium/media/capture/video/chromeos/stream_buffer_manager.h
index e6e0d56b489..50ed6a352b1 100644
--- a/chromium/media/capture/video/chromeos/stream_buffer_manager.h
+++ b/chromium/media/capture/video/chromeos/stream_buffer_manager.h
@@ -18,6 +18,7 @@
#include "base/memory/weak_ptr.h"
#include "base/optional.h"
#include "base/single_thread_task_runner.h"
+#include "media/capture/video/chromeos/camera_device_context.h"
#include "media/capture/video/chromeos/camera_device_delegate.h"
#include "media/capture/video/chromeos/mojom/camera3.mojom.h"
#include "media/capture/video_capture_types.h"
@@ -37,7 +38,6 @@ class GpuMemoryBufferSupport;
namespace media {
class CameraBufferFactory;
-class CameraDeviceContext;
struct BufferInfo;
@@ -51,7 +51,8 @@ class CAPTURE_EXPORT StreamBufferManager final {
StreamBufferManager(
CameraDeviceContext* device_context,
bool video_capture_use_gmb,
- std::unique_ptr<CameraBufferFactory> camera_buffer_factory);
+ std::unique_ptr<CameraBufferFactory> camera_buffer_factory,
+ ClientType client_type);
~StreamBufferManager();
void ReserveBuffer(StreamType stream_type);
@@ -163,6 +164,8 @@ class CAPTURE_EXPORT StreamBufferManager final {
std::unique_ptr<CameraBufferFactory> camera_buffer_factory_;
+ ClientType client_type_;
+
base::WeakPtrFactory<StreamBufferManager> weak_ptr_factory_{this};
DISALLOW_IMPLICIT_CONSTRUCTORS(StreamBufferManager);
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc
index 6a210da4173..07f2996154d 100644
--- a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc
+++ b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.cc
@@ -9,7 +9,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/location.h"
#include "base/synchronization/waitable_event.h"
#include "base/threading/platform_thread.h"
@@ -122,7 +122,8 @@ VideoCaptureDeviceChromeOSHalv3::VideoCaptureDeviceChromeOSHalv3(
camera_app_device_(camera_app_device),
cleanup_callback_(std::move(cleanup_callback)),
power_manager_client_proxy_(
- base::MakeRefCounted<PowerManagerClientProxy>()) {
+ base::MakeRefCounted<PowerManagerClientProxy>()),
+ client_type_(ClientType::kPreviewClient) {
power_manager_client_proxy_->Init(weak_ptr_factory_.GetWeakPtr(),
capture_task_runner_,
std::move(ui_task_runner));
@@ -152,12 +153,14 @@ void VideoCaptureDeviceChromeOSHalv3::AllocateAndStart(
return;
}
capture_params_ = params;
- device_context_ = std::make_unique<CameraDeviceContext>(std::move(client));
-
- camera_device_delegate_ = std::make_unique<CameraDeviceDelegate>(
- device_descriptor_, camera_hal_delegate_,
- camera_device_ipc_thread_.task_runner(), camera_app_device_);
- OpenDevice();
+ device_context_ = std::make_unique<CameraDeviceContext>();
+ if (device_context_->AddClient(client_type_, std::move(client))) {
+ camera_device_delegate_ = std::make_unique<CameraDeviceDelegate>(
+ device_descriptor_, camera_hal_delegate_,
+ camera_device_ipc_thread_.task_runner(), camera_app_device_,
+ client_type_);
+ OpenDevice();
+ }
}
void VideoCaptureDeviceChromeOSHalv3::StopAndDeAllocate() {
@@ -169,6 +172,7 @@ void VideoCaptureDeviceChromeOSHalv3::StopAndDeAllocate() {
CloseDevice(base::UnguessableToken());
camera_device_ipc_thread_.Stop();
camera_device_delegate_.reset();
+ device_context_->RemoveClient(client_type_);
device_context_.reset();
}
diff --git a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h
index 2e9fea397ac..9f5a03ef70a 100644
--- a/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h
+++ b/chromium/media/capture/video/chromeos/video_capture_device_chromeos_halv3.h
@@ -11,6 +11,7 @@
#include "base/memory/weak_ptr.h"
#include "base/single_thread_task_runner.h"
#include "base/threading/thread.h"
+#include "media/capture/video/chromeos/camera_device_context.h"
#include "media/capture/video/chromeos/display_rotation_observer.h"
#include "media/capture/video/video_capture_device.h"
#include "media/capture/video/video_capture_device_descriptor.h"
@@ -26,7 +27,6 @@ namespace media {
class CameraAppDeviceImpl;
class CameraHalDelegate;
-class CameraDeviceContext;
class CameraDeviceDelegate;
// Implementation of VideoCaptureDevice for ChromeOS with CrOS camera HALv3.
@@ -102,6 +102,9 @@ class CAPTURE_EXPORT VideoCaptureDeviceChromeOSHalv3 final
scoped_refptr<PowerManagerClientProxy> power_manager_client_proxy_;
+ // The client type in CameraDeviceContext.
+ ClientType client_type_;
+
base::WeakPtrFactory<VideoCaptureDeviceChromeOSHalv3> weak_ptr_factory_{this};
DISALLOW_IMPLICIT_CONSTRUCTORS(VideoCaptureDeviceChromeOSHalv3);
diff --git a/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.cc b/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.cc
index a8056fb047f..c272d15e92b 100644
--- a/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.cc
+++ b/chromium/media/capture/video/chromeos/video_capture_jpeg_decoder_impl.cc
@@ -7,7 +7,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/metrics/histogram_macros.h"
#include "components/chromeos_camera/mojo_mjpeg_decode_accelerator.h"
#include "media/base/media_switches.h"
diff --git a/chromium/media/capture/video/create_video_capture_device_factory.cc b/chromium/media/capture/video/create_video_capture_device_factory.cc
index 8d0f977e18a..bab72603630 100644
--- a/chromium/media/capture/video/create_video_capture_device_factory.cc
+++ b/chromium/media/capture/video/create_video_capture_device_factory.cc
@@ -6,13 +6,14 @@
#include "base/command_line.h"
#include "build/build_config.h"
+#include "build/chromeos_buildflags.h"
#include "media/base/media_switches.h"
#include "media/capture/video/fake_video_capture_device_factory.h"
#include "media/capture/video/file_video_capture_device_factory.h"
-#if defined(OS_LINUX) && !defined(OS_CHROMEOS)
+#if defined(OS_LINUX) || BUILDFLAG(IS_LACROS)
#include "media/capture/video/linux/video_capture_device_factory_linux.h"
-#elif defined(OS_CHROMEOS)
+#elif BUILDFLAG(IS_ASH)
#include "media/capture/video/chromeos/camera_app_device_bridge_impl.h"
#include "media/capture/video/chromeos/public/cros_features.h"
#include "media/capture/video/chromeos/video_capture_device_factory_chromeos.h"
@@ -56,7 +57,7 @@ CreateFakeVideoCaptureDeviceFactory() {
}
}
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
std::unique_ptr<VideoCaptureDeviceFactory>
CreateChromeOSVideoCaptureDeviceFactory(
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
@@ -77,14 +78,14 @@ CreateChromeOSVideoCaptureDeviceFactory(
return std::make_unique<VideoCaptureDeviceFactoryLinux>(ui_task_runner);
}
}
-#endif // defined(OS_CHROMEOS)
+#endif // BUILDFLAG(IS_ASH)
std::unique_ptr<VideoCaptureDeviceFactory>
CreatePlatformSpecificVideoCaptureDeviceFactory(
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner) {
-#if defined(OS_LINUX) && !defined(OS_CHROMEOS)
+#if defined(OS_LINUX) || BUILDFLAG(IS_LACROS)
return std::make_unique<VideoCaptureDeviceFactoryLinux>(ui_task_runner);
-#elif defined(OS_CHROMEOS)
+#elif BUILDFLAG(IS_ASH)
return CreateChromeOSVideoCaptureDeviceFactory(ui_task_runner, {});
#elif defined(OS_WIN)
return std::make_unique<VideoCaptureDeviceFactoryWin>();
@@ -114,7 +115,7 @@ std::unique_ptr<VideoCaptureDeviceFactory> CreateVideoCaptureDeviceFactory(
}
}
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
std::unique_ptr<VideoCaptureDeviceFactory> CreateVideoCaptureDeviceFactory(
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
media::CameraAppDeviceBridgeImpl* camera_app_device_bridge) {
@@ -128,6 +129,6 @@ std::unique_ptr<VideoCaptureDeviceFactory> CreateVideoCaptureDeviceFactory(
camera_app_device_bridge);
}
}
-#endif // defined(OS_CHROMEOS)
+#endif // BUILDFLAG(IS_ASH)
} // namespace media
diff --git a/chromium/media/capture/video/create_video_capture_device_factory.h b/chromium/media/capture/video/create_video_capture_device_factory.h
index b2450facdf5..9a9c85d756c 100644
--- a/chromium/media/capture/video/create_video_capture_device_factory.h
+++ b/chromium/media/capture/video/create_video_capture_device_factory.h
@@ -8,6 +8,7 @@
#include <memory>
#include "base/single_thread_task_runner.h"
+#include "build/chromeos_buildflags.h"
#include "media/capture/capture_export.h"
#include "media/capture/video/video_capture_device_factory.h"
@@ -19,12 +20,12 @@ std::unique_ptr<VideoCaptureDeviceFactory> CAPTURE_EXPORT
CreateVideoCaptureDeviceFactory(
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner);
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
std::unique_ptr<VideoCaptureDeviceFactory> CAPTURE_EXPORT
CreateVideoCaptureDeviceFactory(
scoped_refptr<base::SingleThreadTaskRunner> ui_task_runner,
media::CameraAppDeviceBridgeImpl* camera_app_device_bridge);
-#endif // defined(OS_CHROMEOS)
+#endif // BUILDFLAG(IS_ASH)
} // namespace media
diff --git a/chromium/media/capture/video/fake_video_capture_device.cc b/chromium/media/capture/video/fake_video_capture_device.cc
index 1984bfba2d9..aef12478e12 100644
--- a/chromium/media/capture/video/fake_video_capture_device.cc
+++ b/chromium/media/capture/video/fake_video_capture_device.cc
@@ -23,6 +23,7 @@
#include "media/base/video_frame.h"
#include "media/capture/mojom/image_capture_types.h"
#include "media/capture/video/gpu_memory_buffer_utils.h"
+#include "skia/ext/legacy_display_globals.h"
#include "third_party/skia/include/core/SkBitmap.h"
#include "third_party/skia/include/core/SkCanvas.h"
#include "third_party/skia/include/core/SkFont.h"
@@ -429,7 +430,7 @@ void PacmanFramePainter::DrawPacman(base::TimeDelta elapsed_time,
paint.setStyle(SkPaint::kFill_Style);
SkFont font;
font.setEdging(SkFont::Edging::kAlias);
- SkCanvas canvas(bitmap);
+ SkCanvas canvas(bitmap, skia::LegacyDisplayGlobals::GetSkSurfaceProps());
const SkScalar unscaled_zoom = fake_device_state_->zoom / 100.f;
const SkScalar translate_x =
diff --git a/chromium/media/capture/video/fake_video_capture_device_unittest.cc b/chromium/media/capture/video/fake_video_capture_device_unittest.cc
index aa043fabb6f..b87657666dd 100644
--- a/chromium/media/capture/video/fake_video_capture_device_unittest.cc
+++ b/chromium/media/capture/video/fake_video_capture_device_unittest.cc
@@ -13,7 +13,7 @@
#include "base/bind.h"
#include "base/command_line.h"
#include "base/run_loop.h"
-#include "base/test/bind_test_util.h"
+#include "base/test/bind.h"
#include "base/test/task_environment.h"
#include "base/test/test_timeouts.h"
#include "base/threading/thread.h"
diff --git a/chromium/media/capture/video/file_video_capture_device.cc b/chromium/media/capture/video/file_video_capture_device.cc
index 4b5af3b00ad..41daa799bbb 100644
--- a/chromium/media/capture/video/file_video_capture_device.cc
+++ b/chromium/media/capture/video/file_video_capture_device.cc
@@ -9,6 +9,7 @@
#include "base/bind.h"
#include "base/location.h"
+#include "base/logging.h"
#include "base/macros.h"
#include "base/single_thread_task_runner.h"
#include "base/strings/string_number_conversions.h"
@@ -453,6 +454,7 @@ void FileVideoCaptureDevice::OnCaptureTask() {
VideoCaptureDevice::Client::ReserveResult::kSucceeded) {
client_->OnFrameDropped(
ConvertReservationFailureToFrameDropReason(reserve_result));
+ DVLOG(2) << __func__ << " frame was dropped.";
return;
}
ScopedNV12GpuMemoryBufferMapping scoped_mapping(std::move(gmb));
@@ -469,7 +471,6 @@ void FileVideoCaptureDevice::OnCaptureTask() {
src_v_plane, buffer_size.width() / 2, scoped_mapping.y_plane(),
scoped_mapping.y_stride(), scoped_mapping.uv_plane(),
scoped_mapping.uv_stride(), buffer_size.width(), buffer_size.height());
-
VideoCaptureFormat modified_format = capture_format_;
// When GpuMemoryBuffer is used, the frame data is opaque to the CPU for
// most of the time. Currently the only supported underlying format is
diff --git a/chromium/media/capture/video/fuchsia/video_capture_device_factory_fuchsia_test.cc b/chromium/media/capture/video/fuchsia/video_capture_device_factory_fuchsia_test.cc
index 9b660dc8e5d..644d25d8d8e 100644
--- a/chromium/media/capture/video/fuchsia/video_capture_device_factory_fuchsia_test.cc
+++ b/chromium/media/capture/video/fuchsia/video_capture_device_factory_fuchsia_test.cc
@@ -6,7 +6,7 @@
#include "base/fuchsia/test_component_context_for_process.h"
#include "base/run_loop.h"
-#include "base/test/bind_test_util.h"
+#include "base/test/bind.h"
#include "base/test/task_environment.h"
#include "media/fuchsia/camera/fake_fuchsia_camera.h"
#include "testing/gtest/include/gtest/gtest.h"
diff --git a/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia.cc b/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia.cc
index b3098cf7f7b..2f892b7fdbc 100644
--- a/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia.cc
+++ b/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia.cc
@@ -256,8 +256,13 @@ void VideoCaptureDeviceFuchsia::InitializeBufferCollection(
// Request just one buffer in collection constraints: each frame is copied as
// soon as it's received.
const size_t kMaxUsedOutputFrames = 1;
+
+ // Sysmem calculates buffer size based on image constraints, so it doesn't
+  // need to be specified explicitly.
fuchsia::sysmem::BufferCollectionConstraints constraints =
- SysmemBufferReader::GetRecommendedConstraints(kMaxUsedOutputFrames);
+ SysmemBufferReader::GetRecommendedConstraints(
+ kMaxUsedOutputFrames,
+ /*min_buffer_size=*/base::nullopt);
buffer_collection_creator_->Create(
std::move(constraints),
base::BindOnce(&VideoCaptureDeviceFuchsia::OnBufferCollectionCreated,
diff --git a/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia_test.cc b/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia_test.cc
index fa3790fc6dd..849fa5f8086 100644
--- a/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia_test.cc
+++ b/chromium/media/capture/video/fuchsia/video_capture_device_fuchsia_test.cc
@@ -5,7 +5,7 @@
#include "media/capture/video/fuchsia/video_capture_device_fuchsia.h"
#include "base/fuchsia/test_component_context_for_process.h"
-#include "base/test/bind_test_util.h"
+#include "base/test/bind.h"
#include "base/test/task_environment.h"
#include "media/capture/video/fuchsia/video_capture_device_factory_fuchsia.h"
#include "media/fuchsia/camera/fake_fuchsia_camera.h"
@@ -164,7 +164,6 @@ class TestVideoCaptureClient : public VideoCaptureDevice::Client {
}
void OnIncomingCapturedExternalBuffer(
gfx::GpuMemoryBufferHandle handle,
- std::unique_ptr<Buffer::ScopedAccessPermission> read_access_permission,
const VideoCaptureFormat& format,
const gfx::ColorSpace& color_space,
base::TimeTicks reference_time,
diff --git a/chromium/media/capture/video/gpu_memory_buffer_utils.cc b/chromium/media/capture/video/gpu_memory_buffer_utils.cc
index 6001654d2b0..2d1117a8501 100644
--- a/chromium/media/capture/video/gpu_memory_buffer_utils.cc
+++ b/chromium/media/capture/video/gpu_memory_buffer_utils.cc
@@ -4,7 +4,7 @@
#include "media/capture/video/gpu_memory_buffer_utils.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "gpu/ipc/common/gpu_memory_buffer_support.h"
#include "ui/gfx/gpu_memory_buffer.h"
diff --git a/chromium/media/capture/video/linux/video_capture_device_factory_linux.cc b/chromium/media/capture/video/linux/video_capture_device_factory_linux.cc
index 4137f962f55..48bcb6e168f 100644
--- a/chromium/media/capture/video/linux/video_capture_device_factory_linux.cc
+++ b/chromium/media/capture/video/linux/video_capture_device_factory_linux.cc
@@ -18,6 +18,7 @@
#include "base/strings/string_util.h"
#include "base/strings/stringprintf.h"
#include "build/build_config.h"
+#include "build/chromeos_buildflags.h"
#include "media/capture/video/linux/scoped_v4l2_device_fd.h"
#include "media/capture/video/linux/video_capture_device_linux.h"
@@ -27,7 +28,7 @@
#include <linux/videodev2.h>
#endif
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
#include "media/capture/video/linux/camera_config_chromeos.h"
#include "media/capture/video/linux/video_capture_device_chromeos.h"
#endif
@@ -36,6 +37,11 @@ namespace media {
namespace {
+bool CompareCaptureDevices(const VideoCaptureDeviceInfo& a,
+ const VideoCaptureDeviceInfo& b) {
+ return a.descriptor < b.descriptor;
+}
+
// USB VID and PID are both 4 bytes long.
const size_t kVidPidSize = 4;
const size_t kMaxInterfaceNameSize = 256;
@@ -47,7 +53,7 @@ const char kPidPathTemplate[] = "/sys/class/video4linux/%s/device/../idProduct";
const char kInterfacePathTemplate[] =
"/sys/class/video4linux/%s/device/interface";
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
static CameraConfigChromeOS* GetCameraConfig() {
static CameraConfigChromeOS* config = new CameraConfigChromeOS();
return config;
@@ -119,7 +125,7 @@ class DevVideoFilePathsDeviceProvider
VideoFacingMode GetCameraFacing(const std::string& device_id,
const std::string& model_id) override {
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
return GetCameraConfig()->GetCameraFacing(device_id, model_id);
#else
NOTREACHED();
@@ -129,7 +135,7 @@ class DevVideoFilePathsDeviceProvider
int GetOrientation(const std::string& device_id,
const std::string& model_id) override {
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
return GetCameraConfig()->GetOrientation(device_id, model_id);
#else
NOTREACHED();
@@ -160,7 +166,7 @@ std::unique_ptr<VideoCaptureDevice>
VideoCaptureDeviceFactoryLinux::CreateDevice(
const VideoCaptureDeviceDescriptor& device_descriptor) {
DCHECK(thread_checker_.CalledOnValidThread());
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
ChromeOSDeviceCameraConfig camera_config(
device_provider_->GetCameraFacing(device_descriptor.device_id,
device_descriptor.model_id),
@@ -217,7 +223,7 @@ void VideoCaptureDeviceFactoryLinux::GetDevicesInfo(
display_name = reinterpret_cast<char*>(cap.card);
VideoFacingMode facing_mode =
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
device_provider_->GetCameraFacing(unique_id, model_id);
#else
VideoFacingMode::MEDIA_VIDEO_FACING_NONE;
@@ -233,6 +239,10 @@ void VideoCaptureDeviceFactoryLinux::GetDevicesInfo(
}
}
+ // This is required for some applications that rely on the stable ordering of
+ // devices.
+ std::sort(devices_info.begin(), devices_info.end(), CompareCaptureDevices);
+
std::move(callback).Run(std::move(devices_info));
}
diff --git a/chromium/media/capture/video/linux/video_capture_device_factory_linux_unittest.cc b/chromium/media/capture/video/linux/video_capture_device_factory_linux_unittest.cc
index 39f14546f5e..a98b8c0eb78 100644
--- a/chromium/media/capture/video/linux/video_capture_device_factory_linux_unittest.cc
+++ b/chromium/media/capture/video/linux/video_capture_device_factory_linux_unittest.cc
@@ -5,7 +5,7 @@
#include "media/capture/video/linux/video_capture_device_factory_linux.h"
#include "base/run_loop.h"
-#include "base/test/bind_test_util.h"
+#include "base/test/bind.h"
#include "base/test/task_environment.h"
#include "media/capture/video/linux/fake_device_provider.h"
#include "media/capture/video/linux/fake_v4l2_impl.h"
diff --git a/chromium/media/capture/video/mac/gpu_memory_buffer_tracker_mac.cc b/chromium/media/capture/video/mac/gpu_memory_buffer_tracker_mac.cc
new file mode 100644
index 00000000000..dcb988d6913
--- /dev/null
+++ b/chromium/media/capture/video/mac/gpu_memory_buffer_tracker_mac.cc
@@ -0,0 +1,104 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/mac/gpu_memory_buffer_tracker_mac.h"
+
+#include "base/logging.h"
+
+namespace media {
+
+GpuMemoryBufferTrackerMac::GpuMemoryBufferTrackerMac() {}
+
+GpuMemoryBufferTrackerMac::GpuMemoryBufferTrackerMac(
+ base::ScopedCFTypeRef<IOSurfaceRef> io_surface)
+ : is_external_io_surface_(true), io_surface_(std::move(io_surface)) {}
+
+GpuMemoryBufferTrackerMac::~GpuMemoryBufferTrackerMac() {}
+
+bool GpuMemoryBufferTrackerMac::Init(const gfx::Size& dimensions,
+ VideoPixelFormat format,
+ const mojom::PlaneStridesPtr& strides) {
+ DCHECK(!io_surface_);
+ if (format != PIXEL_FORMAT_NV12) {
+ NOTREACHED() << "Unsupported VideoPixelFormat "
+ << VideoPixelFormatToString(format);
+ return false;
+ }
+ if (IOSurfaceRef io_surface =
+ CreateIOSurface(dimensions, gfx::BufferFormat::YUV_420_BIPLANAR,
+ /*should_clear=*/false)) {
+ io_surface_.reset(io_surface, base::scoped_policy::ASSUME);
+ DVLOG(2) << __func__ << " id " << IOSurfaceGetID(io_surface_);
+ return true;
+ } else {
+ LOG(ERROR) << "Unable to create IOSurface!";
+ return false;
+ }
+}
+
+bool GpuMemoryBufferTrackerMac::IsSameGpuMemoryBuffer(
+ const gfx::GpuMemoryBufferHandle& handle) const {
+ if (!is_external_io_surface_)
+ return false;
+ return IOSurfaceGetID(io_surface_) == IOSurfaceGetID(handle.io_surface);
+}
+
+bool GpuMemoryBufferTrackerMac::IsReusableForFormat(
+ const gfx::Size& dimensions,
+ VideoPixelFormat format,
+ const mojom::PlaneStridesPtr& strides) {
+ if (is_external_io_surface_)
+ return false;
+ gfx::Size surface_size(IOSurfaceGetWidth(io_surface_),
+ IOSurfaceGetHeight(io_surface_));
+ return format == PIXEL_FORMAT_NV12 && dimensions == surface_size;
+}
+
+uint32_t GpuMemoryBufferTrackerMac::GetMemorySizeInBytes() {
+ return IOSurfaceGetAllocSize(io_surface_);
+}
+
+std::unique_ptr<VideoCaptureBufferHandle>
+GpuMemoryBufferTrackerMac::GetMemoryMappedAccess() {
+ NOTREACHED() << "Unsupported operation";
+ return std::make_unique<NullHandle>();
+}
+
+base::UnsafeSharedMemoryRegion
+GpuMemoryBufferTrackerMac::DuplicateAsUnsafeRegion() {
+ NOTREACHED() << "Unsupported operation";
+ return base::UnsafeSharedMemoryRegion();
+}
+
+mojo::ScopedSharedBufferHandle
+GpuMemoryBufferTrackerMac::DuplicateAsMojoBuffer() {
+ NOTREACHED() << "Unsupported operation";
+ return mojo::ScopedSharedBufferHandle();
+}
+
+gfx::GpuMemoryBufferHandle
+GpuMemoryBufferTrackerMac::GetGpuMemoryBufferHandle() {
+ DVLOG(2) << __func__ << " id " << IOSurfaceGetID(io_surface_);
+ gfx::GpuMemoryBufferHandle gmb_handle;
+ gmb_handle.type = gfx::GpuMemoryBufferType::IO_SURFACE_BUFFER;
+ gmb_handle.id.id = -1;
+ gmb_handle.io_surface = io_surface_;
+ return gmb_handle;
+}
+
+void GpuMemoryBufferTrackerMac::OnHeldByConsumersChanged(
+ bool is_held_by_consumers) {
+ if (!is_external_io_surface_)
+ return;
+
+ if (is_held_by_consumers) {
+ DCHECK(!in_use_for_consumers_);
+ in_use_for_consumers_.reset(io_surface_.get(), base::scoped_policy::RETAIN);
+ } else {
+ DCHECK(in_use_for_consumers_);
+ in_use_for_consumers_.reset();
+ }
+}
+
+} // namespace media
diff --git a/chromium/media/capture/video/mac/gpu_memory_buffer_tracker_mac.h b/chromium/media/capture/video/mac/gpu_memory_buffer_tracker_mac.h
new file mode 100644
index 00000000000..f344a90d6d0
--- /dev/null
+++ b/chromium/media/capture/video/mac/gpu_memory_buffer_tracker_mac.h
@@ -0,0 +1,53 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_MAC_GPU_MEMORY_BUFFER_TRACKER_MAC_H_
+#define MEDIA_CAPTURE_VIDEO_MAC_GPU_MEMORY_BUFFER_TRACKER_MAC_H_
+
+#include "media/capture/video/video_capture_buffer_tracker.h"
+#include "ui/gfx/geometry/size.h"
+#include "ui/gfx/mac/io_surface.h"
+
+namespace media {
+
+class CAPTURE_EXPORT GpuMemoryBufferTrackerMac final
+ : public VideoCaptureBufferTracker {
+ public:
+ GpuMemoryBufferTrackerMac();
+ explicit GpuMemoryBufferTrackerMac(
+ base::ScopedCFTypeRef<IOSurfaceRef> io_surface);
+ ~GpuMemoryBufferTrackerMac() override;
+
+ // VideoCaptureBufferTracker
+ bool Init(const gfx::Size& dimensions,
+ VideoPixelFormat format,
+ const mojom::PlaneStridesPtr& strides) override;
+ bool IsSameGpuMemoryBuffer(
+ const gfx::GpuMemoryBufferHandle& handle) const override;
+ bool IsReusableForFormat(const gfx::Size& dimensions,
+ VideoPixelFormat format,
+ const mojom::PlaneStridesPtr& strides) override;
+ uint32_t GetMemorySizeInBytes() override;
+ std::unique_ptr<VideoCaptureBufferHandle> GetMemoryMappedAccess() override;
+ base::UnsafeSharedMemoryRegion DuplicateAsUnsafeRegion() override;
+ mojo::ScopedSharedBufferHandle DuplicateAsMojoBuffer() override;
+ gfx::GpuMemoryBufferHandle GetGpuMemoryBufferHandle() override;
+ void OnHeldByConsumersChanged(bool is_held_by_consumers) override;
+
+ private:
+ bool is_external_io_surface_ = false;
+ gfx::ScopedIOSurface io_surface_;
+
+ // External IOSurfaces come from a CVPixelBufferPool. An IOSurface in a
+  // CVPixelBufferPool will be reused by the pool as soon as IOSurfaceIsInUse is
+ // false. To prevent reuse while consumers are accessing the IOSurface, use
+ // |in_use_for_consumers_| to maintain IOSurfaceIsInUse as true.
+ gfx::ScopedInUseIOSurface in_use_for_consumers_;
+
+ DISALLOW_COPY_AND_ASSIGN(GpuMemoryBufferTrackerMac);
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_MAC_GPU_MEMORY_BUFFER_TRACKER_MAC_H_
diff --git a/chromium/media/capture/video/mac/pixel_buffer_pool_mac.h b/chromium/media/capture/video/mac/pixel_buffer_pool_mac.h
index 3e9c7cfdda1..162204b6706 100644
--- a/chromium/media/capture/video/mac/pixel_buffer_pool_mac.h
+++ b/chromium/media/capture/video/mac/pixel_buffer_pool_mac.h
@@ -32,11 +32,18 @@ class CAPTURE_EXPORT PixelBufferPool {
// Creates a new buffer from the pool, or returns null if |max_buffers_| would
// be exceeded. The underlying buffers may be recycled.
//
- // The caller owns the returned buffer and is responsible for calling
- // CFRelease() after they are done using it. This returns the underlying
- // buffer to the pool. In order to free memory, you must both release all
- // buffers and call Flush() or delete the pool. It is safe for a buffer to
- // outlive its pool.
+ // Freeing all buffer references returns the underlying buffer to the pool. In
+ // order to free memory, you must both release all buffers and call Flush() or
+ // delete the pool. It is safe for a buffer to outlive its pool.
+ //
+ // Retaining a pixel buffer and preventing it from returning to the pool can
+ // be done either by keeping a reference directly to the CVPixelBuffer, e.g.
+ // with a base::ScopedCFTypeRef<CVPixelBufferRef>, or by incrementing the use
+ // count of the IOSurface, i.e. with IOSurfaceIncrementUseCount().
+ //
+ // WARNING: Retaining references to the pixel buffer's IOSurface (e.g. with
+ // base::ScopedCFTypeRef<IOSurfaceRef>) without incrementing its use count
+ // does NOT prevent it from being recycled!
base::ScopedCFTypeRef<CVPixelBufferRef> CreateBuffer();
// Frees the memory of any released buffers returned to the pool.
diff --git a/chromium/media/capture/video/mac/pixel_buffer_pool_mac_unittest.mm b/chromium/media/capture/video/mac/pixel_buffer_pool_mac_unittest.mm
index b366cdb5e62..55befbd1574 100644
--- a/chromium/media/capture/video/mac/pixel_buffer_pool_mac_unittest.mm
+++ b/chromium/media/capture/video/mac/pixel_buffer_pool_mac_unittest.mm
@@ -43,9 +43,10 @@ TEST(PixelBufferPoolTest, CreatedBufferHasIOSurface) {
EXPECT_TRUE(CVPixelBufferGetIOSurface(buffer));
}
-TEST(PixelBufferPoolTest, CannotExceedMaxBuffers) {
- std::unique_ptr<PixelBufferPool> pool =
- PixelBufferPool::Create(kPixelFormatNv12, kVgaWidth, kVgaHeight, 2);
+TEST(PixelBufferPoolTest, CannotExceedMaxBuffersWhenHoldingOnToPixelBuffer) {
+ constexpr size_t kPoolMaxBuffers = 2;
+ std::unique_ptr<PixelBufferPool> pool = PixelBufferPool::Create(
+ kPixelFormatNv12, kVgaWidth, kVgaHeight, kPoolMaxBuffers);
base::ScopedCFTypeRef<CVPixelBufferRef> first_buffer = pool->CreateBuffer();
EXPECT_TRUE(first_buffer);
base::ScopedCFTypeRef<CVPixelBufferRef> second_buffer = pool->CreateBuffer();
@@ -54,6 +55,25 @@ TEST(PixelBufferPoolTest, CannotExceedMaxBuffers) {
EXPECT_FALSE(third_buffer);
}
+TEST(PixelBufferPoolTest, CannotExceedMaxBuffersWhenIOSurfaceIsInUse) {
+ constexpr size_t kPoolMaxBuffers = 1;
+ std::unique_ptr<PixelBufferPool> pool = PixelBufferPool::Create(
+ kPixelFormatNv12, kVgaWidth, kVgaHeight, kPoolMaxBuffers);
+ base::ScopedCFTypeRef<CVPixelBufferRef> first_buffer = pool->CreateBuffer();
+ EXPECT_TRUE(first_buffer);
+ IOSurfaceRef io_surface = CVPixelBufferGetIOSurface(first_buffer);
+ EXPECT_TRUE(io_surface);
+  // Increment use count of raw ptr IOSurface reference while releasing the pixel
+ // buffer's only reference.
+ IOSurfaceIncrementUseCount(io_surface);
+ first_buffer.reset();
+ // The pixel buffer has not been returned to the pool.
+ base::ScopedCFTypeRef<CVPixelBufferRef> second_buffer = pool->CreateBuffer();
+ EXPECT_FALSE(second_buffer);
+ // Cleanup.
+ IOSurfaceDecrementUseCount(io_surface);
+}
+
TEST(PixelBufferPoolTest, CanCreateBuffersIfMaxIsNull) {
std::unique_ptr<PixelBufferPool> pool = PixelBufferPool::Create(
kPixelFormatNv12, kVgaWidth, kVgaHeight, base::nullopt);
@@ -66,14 +86,57 @@ TEST(PixelBufferPoolTest, CanCreateBuffersIfMaxIsNull) {
}
TEST(PixelBufferPoolTest, CanCreateBufferAfterPreviousBufferIsReleased) {
- std::unique_ptr<PixelBufferPool> pool =
- PixelBufferPool::Create(kPixelFormatNv12, kVgaWidth, kVgaHeight, 1);
+ constexpr size_t kPoolMaxBuffers = 1;
+ std::unique_ptr<PixelBufferPool> pool = PixelBufferPool::Create(
+ kPixelFormatNv12, kVgaWidth, kVgaHeight, kPoolMaxBuffers);
base::ScopedCFTypeRef<CVPixelBufferRef> buffer = pool->CreateBuffer();
buffer.reset();
buffer = pool->CreateBuffer();
EXPECT_TRUE(buffer);
}
+TEST(PixelBufferPoolTest, CanCreateBufferAfterPreviousIOSurfaceIsNoLongerUsed) {
+ constexpr size_t kPoolMaxBuffers = 1;
+ std::unique_ptr<PixelBufferPool> pool = PixelBufferPool::Create(
+ kPixelFormatNv12, kVgaWidth, kVgaHeight, kPoolMaxBuffers);
+ base::ScopedCFTypeRef<CVPixelBufferRef> first_buffer = pool->CreateBuffer();
+ EXPECT_TRUE(first_buffer);
+ IOSurfaceRef io_surface = CVPixelBufferGetIOSurface(first_buffer);
+ EXPECT_TRUE(io_surface);
+ IOSurfaceIncrementUseCount(io_surface);
+ first_buffer.reset();
+ // Decrementing the use count when there are no pixel buffer references
+ // returns it to the pool.
+ IOSurfaceDecrementUseCount(io_surface);
+ base::ScopedCFTypeRef<CVPixelBufferRef> second_buffer = pool->CreateBuffer();
+ EXPECT_TRUE(second_buffer);
+}
+
+TEST(PixelBufferPoolTest,
+ SimplyReferencingAnIOSurfaceDoesNotPreventItReturningToThePool) {
+ constexpr size_t kPoolMaxBuffers = 1;
+ std::unique_ptr<PixelBufferPool> pool = PixelBufferPool::Create(
+ kPixelFormatNv12, kVgaWidth, kVgaHeight, kPoolMaxBuffers);
+ base::ScopedCFTypeRef<CVPixelBufferRef> first_buffer = pool->CreateBuffer();
+ EXPECT_TRUE(first_buffer);
+ base::ScopedCFTypeRef<IOSurfaceRef> first_buffer_io_surface(
+ CVPixelBufferGetIOSurface(first_buffer), base::scoped_policy::RETAIN);
+ EXPECT_TRUE(first_buffer_io_surface);
+ // Releasing the first buffer returns it to the pool, despite the IOSurface
+ // still being referenced by |first_buffer_io_surface|.
+ first_buffer.reset();
+
+ base::ScopedCFTypeRef<CVPixelBufferRef> second_buffer = pool->CreateBuffer();
+ EXPECT_TRUE(second_buffer);
+ base::ScopedCFTypeRef<IOSurfaceRef> second_buffer_io_surface(
+ CVPixelBufferGetIOSurface(second_buffer), base::scoped_policy::RETAIN);
+ EXPECT_TRUE(second_buffer_io_surface);
+
+ // Because this is a recycled pixel buffer, the IOSurface is also recycled.
+ EXPECT_EQ(IOSurfaceGetID(first_buffer_io_surface),
+ IOSurfaceGetID(second_buffer_io_surface));
+}
+
TEST(PixelBufferPoolTest, BuffersCanOutliveThePool) {
std::unique_ptr<PixelBufferPool> pool =
PixelBufferPool::Create(kPixelFormatNv12, kVgaWidth, kVgaHeight, 1);
diff --git a/chromium/media/capture/video/mac/pixel_buffer_transferer_mac.cc b/chromium/media/capture/video/mac/pixel_buffer_transferer_mac.cc
new file mode 100644
index 00000000000..e6509a930ec
--- /dev/null
+++ b/chromium/media/capture/video/mac/pixel_buffer_transferer_mac.cc
@@ -0,0 +1,40 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/mac/pixel_buffer_transferer_mac.h"
+
+#include "base/check.h"
+#include "base/logging.h"
+
+namespace media {
+
+PixelBufferTransferer::PixelBufferTransferer() {
+ OSStatus error =
+ VTPixelTransferSessionCreate(nil, transfer_session_.InitializeInto());
+ // There is no known way to make session creation fail, so we do not deal with
+ // failures gracefully.
+ CHECK(error == noErr) << "Creating a VTPixelTransferSession failed: "
+ << error;
+}
+
+bool PixelBufferTransferer::TransferImage(CVPixelBufferRef source,
+ CVPixelBufferRef destination) {
+ DCHECK(source);
+ DCHECK(destination);
+ OSStatus error = VTPixelTransferSessionTransferImage(transfer_session_,
+ source, destination);
+ if (error == kVTPixelTransferNotSupportedErr) {
+ // This source/destination transfer operation is not supported.
+ return false;
+ }
+ CHECK(error == noErr)
+ << "Unexpected VTPixelTransferSessionTransferImage error: " << error;
+ return true;
+}
+
+PixelBufferTransferer::~PixelBufferTransferer() {
+ VTPixelTransferSessionInvalidate(transfer_session_);
+}
+
+} // namespace media
diff --git a/chromium/media/capture/video/mac/pixel_buffer_transferer_mac.h b/chromium/media/capture/video/mac/pixel_buffer_transferer_mac.h
new file mode 100644
index 00000000000..e201deb73be
--- /dev/null
+++ b/chromium/media/capture/video/mac/pixel_buffer_transferer_mac.h
@@ -0,0 +1,39 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_MAC_PIXEL_BUFFER_TRANSFERER_MAC_H_
+#define MEDIA_CAPTURE_VIDEO_MAC_PIXEL_BUFFER_TRANSFERER_MAC_H_
+
+#import <VideoToolbox/VideoToolbox.h>
+
+#include "base/mac/scoped_cftyperef.h"
+#include "media/capture/capture_export.h"
+
+namespace media {
+
+// The PixelBufferTransferer copies and/or converts image buffers from a source
+// buffer to a destination buffer. The desired resolution and pixel format is
+// configured on the destination pixel buffer, not the transferer. See also
+// PixelBufferPool for creating recyclable pixel buffers.
+//
+// If the destination pixel buffer is set up with a different resolution than
+// the source, scaling happens.
+// If the destination pixel buffer is set up with a different pixel format than
+// the source, conversion happens.
+class CAPTURE_EXPORT PixelBufferTransferer {
+ public:
+ PixelBufferTransferer();
+ ~PixelBufferTransferer();
+
+ // Copies and/or converts from source to destination. If the transfer is not
+ // supported, false is returned.
+ bool TransferImage(CVPixelBufferRef source, CVPixelBufferRef destination);
+
+ private:
+ base::ScopedCFTypeRef<VTPixelTransferSessionRef> transfer_session_;
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_MAC_PIXEL_BUFFER_TRANSFERER_MAC_H_
diff --git a/chromium/media/capture/video/mac/pixel_buffer_transferer_mac_unittest.mm b/chromium/media/capture/video/mac/pixel_buffer_transferer_mac_unittest.mm
new file mode 100644
index 00000000000..fa00dfca33d
--- /dev/null
+++ b/chromium/media/capture/video/mac/pixel_buffer_transferer_mac_unittest.mm
@@ -0,0 +1,268 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/mac/pixel_buffer_transferer_mac.h"
+
+#include <cmath>
+#include <vector>
+
+#include "base/logging.h"
+#include "media/capture/video/mac/pixel_buffer_pool_mac.h"
+#include "media/capture/video/mac/test/pixel_buffer_test_utils_mac.h"
+#include "media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+
+namespace {
+
+constexpr uint8_t kColorR = 255u;
+constexpr uint8_t kColorG = 127u;
+constexpr uint8_t kColorB = 63u;
+
+// Common pixel formats that we want to test. This is partially based on
+// VideoCaptureDeviceAVFoundation::FourCCToChromiumPixelFormat but we do not
+// include MJPEG because compressed formats are not supported by the
+// PixelBufferPool. In addition to the formats supported for capturing, we also
+// test I420, which all captured formats are normally converted to in software
+// making it a sensible destination format.
+
+// media::PIXEL_FORMAT_NV12 a.k.a. "420v"
+constexpr OSType kPixelFormatNv12 =
+ kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
+// media::PIXEL_FORMAT_UYVY a.k.a. "2vuy"
+constexpr OSType kPixelFormatUyvy = kCVPixelFormatType_422YpCbCr8;
+// media::PIXEL_FORMAT_YUY2 a.k.a. "yuvs"
+constexpr OSType kPixelFormatYuvs = kCVPixelFormatType_422YpCbCr8_yuvs;
+// media::PIXEL_FORMAT_I420 a.k.a. "y420"
+constexpr OSType kPixelFormatI420 = kCVPixelFormatType_420YpCbCr8Planar;
+
+} // namespace
+
+TEST(PixelBufferTransfererTest, CanCopyYuvsAndVerifyColor) {
+ constexpr OSType kPixelFormat = kPixelFormatYuvs;
+ constexpr int kWidth = 32;
+ constexpr int kHeight = 32;
+ PixelBufferTransferer transferer;
+ // Source: A single colored buffer.
+ std::unique_ptr<ByteArrayPixelBuffer> source =
+ CreateYuvsPixelBufferFromSingleRgbColor(kWidth, kHeight, kColorR, kColorG,
+ kColorB);
+ // Destination buffer: A same-sized YUVS buffer.
+ base::ScopedCFTypeRef<CVPixelBufferRef> destination =
+ PixelBufferPool::Create(kPixelFormat, kWidth, kHeight, 1)->CreateBuffer();
+ EXPECT_TRUE(transferer.TransferImage(source->pixel_buffer, destination));
+ // Verify the result is the same color.
+ EXPECT_TRUE(YuvsIOSurfaceIsSingleColor(CVPixelBufferGetIOSurface(destination),
+ kColorR, kColorG, kColorB));
+}
+
+TEST(PixelBufferTransfererTest, CanScaleYuvsAndVerifyColor) {
+ constexpr OSType kPixelFormat = kPixelFormatYuvs;
+ constexpr int kSourceWidth = 32;
+ constexpr int kSourceHeight = 32;
+ constexpr int kDestinationWidth = 16;
+ constexpr int kDestinationHeight = 16;
+ PixelBufferTransferer transferer;
+ // Source: A single colored buffer.
+ std::unique_ptr<ByteArrayPixelBuffer> source =
+ CreateYuvsPixelBufferFromSingleRgbColor(kSourceWidth, kSourceHeight,
+ kColorR, kColorG, kColorB);
+ // Destination buffer: A downscaled YUVS buffer.
+ base::ScopedCFTypeRef<CVPixelBufferRef> destination =
+ PixelBufferPool::Create(kPixelFormat, kDestinationWidth,
+ kDestinationHeight, 1)
+ ->CreateBuffer();
+ EXPECT_TRUE(transferer.TransferImage(source->pixel_buffer, destination));
+ // Verify the result is the same color.
+ EXPECT_TRUE(YuvsIOSurfaceIsSingleColor(CVPixelBufferGetIOSurface(destination),
+ kColorR, kColorG, kColorB));
+}
+
+TEST(PixelBufferTransfererTest, CanScaleYuvsAndVerifyCheckerPattern) {
+ // Note: The ARGB -> YUVS -> ARGB conversions results in a small loss of
+ // information, so for the checker pattern to be intact the buffer can't be
+ // tiny (e.g. 4x4).
+ constexpr int kSourceWidth = 64;
+ constexpr int kSourceHeight = 64;
+ constexpr int kSourceNumTilesAcross = 4;
+ constexpr int kDestinationWidth = 32;
+ constexpr int kDestinationHeight = 32;
+ PixelBufferTransferer transferer;
+  // Source: A checker-pattern buffer.
+ std::unique_ptr<ByteArrayPixelBuffer> source =
+ CreateYuvsPixelBufferFromArgbBuffer(
+ kSourceWidth, kSourceHeight,
+ CreateArgbCheckerPatternBuffer(kSourceWidth, kSourceHeight,
+ kSourceNumTilesAcross));
+ // Destination buffer: A downscaled YUVS buffer.
+ base::ScopedCFTypeRef<CVPixelBufferRef> destination =
+ PixelBufferPool::Create(kPixelFormatYuvs, kDestinationWidth,
+ kDestinationHeight, 1)
+ ->CreateBuffer();
+ EXPECT_TRUE(transferer.TransferImage(source->pixel_buffer, destination));
+ // Verify the result has the same number of checker tiles.
+ int num_tiles_across_x;
+ int num_tiles_across_y;
+ std::tie(num_tiles_across_x, num_tiles_across_y) =
+ GetCheckerPatternNumTilesAccross(
+ CreateArgbBufferFromYuvsIOSurface(
+ CVPixelBufferGetIOSurface(destination)),
+ kDestinationWidth, kDestinationHeight);
+ EXPECT_EQ(num_tiles_across_x, kSourceNumTilesAcross);
+ EXPECT_EQ(num_tiles_across_y, kSourceNumTilesAcross);
+}
+
+TEST(PixelBufferTransfererTest, CanStretchYuvsAndVerifyCheckerPattern) {
+ // Note: The ARGB -> YUVS -> ARGB conversions results in a small loss of
+ // information, so for the checker pattern to be intact the buffer can't be
+ // tiny (e.g. 4x4).
+ constexpr int kSourceWidth = 64;
+ constexpr int kSourceHeight = 64;
+ constexpr int kSourceNumTilesAcross = 4;
+ constexpr int kDestinationWidth = 48;
+ constexpr int kDestinationHeight = 32;
+ PixelBufferTransferer transferer;
+  // Source: A checker-pattern buffer.
+ std::unique_ptr<ByteArrayPixelBuffer> source =
+ CreateYuvsPixelBufferFromArgbBuffer(
+ kSourceWidth, kSourceHeight,
+ CreateArgbCheckerPatternBuffer(kSourceWidth, kSourceHeight,
+ kSourceNumTilesAcross));
+  // Destination buffer: A stretched YUVS buffer.
+ base::ScopedCFTypeRef<CVPixelBufferRef> destination =
+ PixelBufferPool::Create(kPixelFormatYuvs, kDestinationWidth,
+ kDestinationHeight, 1)
+ ->CreateBuffer();
+ EXPECT_TRUE(transferer.TransferImage(source->pixel_buffer, destination));
+ // Verify the result has the same number of checker tiles.
+ int num_tiles_across_x;
+ int num_tiles_across_y;
+ std::tie(num_tiles_across_x, num_tiles_across_y) =
+ GetCheckerPatternNumTilesAccross(
+ CreateArgbBufferFromYuvsIOSurface(
+ CVPixelBufferGetIOSurface(destination)),
+ kDestinationWidth, kDestinationHeight);
+ EXPECT_EQ(num_tiles_across_x, kSourceNumTilesAcross);
+ EXPECT_EQ(num_tiles_across_y, kSourceNumTilesAcross);
+}
+
+TEST(PixelBufferTransfererTest, CanStretchYuvsAndVerifyColor) {
+ constexpr OSType kPixelFormat = kPixelFormatYuvs;
+ constexpr int kSourceWidth = 32;
+ constexpr int kSourceHeight = 32;
+ constexpr int kDestinationWidth = 48; // Aspect ratio does not match source.
+ constexpr int kDestinationHeight = 16;
+ PixelBufferTransferer transferer;
+ // Source: A single colored buffer.
+ std::unique_ptr<ByteArrayPixelBuffer> source =
+ CreateYuvsPixelBufferFromSingleRgbColor(kSourceWidth, kSourceHeight,
+ kColorR, kColorG, kColorB);
+  // Destination buffer: A stretched YUVS buffer.
+ base::ScopedCFTypeRef<CVPixelBufferRef> destination =
+ PixelBufferPool::Create(kPixelFormat, kDestinationWidth,
+ kDestinationHeight, 1)
+ ->CreateBuffer();
+ EXPECT_TRUE(transferer.TransferImage(source->pixel_buffer, destination));
+ // Verify the result is the same color.
+ EXPECT_TRUE(YuvsIOSurfaceIsSingleColor(CVPixelBufferGetIOSurface(destination),
+ kColorR, kColorG, kColorB));
+}
+
+TEST(PixelBufferTransfererTest, CanConvertAndStretchSimultaneouslyYuvsToNv12) {
+ // Source pixel format: YUVS
+ constexpr int kSourceWidth = 32;
+ constexpr int kSourceHeight = 32;
+ constexpr OSType kDestinationPixelFormat = kPixelFormatNv12;
+ constexpr int kDestinationWidth = 48; // Aspect ratio does not match source.
+ constexpr int kDestinationHeight = 16;
+ PixelBufferTransferer transferer;
+ // Source: A single colored buffer.
+ std::unique_ptr<ByteArrayPixelBuffer> source =
+ CreateYuvsPixelBufferFromSingleRgbColor(kSourceWidth, kSourceHeight,
+ kColorR, kColorG, kColorB);
+  // Destination buffer: A stretched NV12 buffer.
+ base::ScopedCFTypeRef<CVPixelBufferRef> destination =
+ PixelBufferPool::Create(kDestinationPixelFormat, kDestinationWidth,
+ kDestinationHeight, 1)
+ ->CreateBuffer();
+ EXPECT_TRUE(transferer.TransferImage(source->pixel_buffer, destination));
+}
+
+class PixelBufferTransfererParameterizedTest
+ : public ::testing::Test,
+ public ::testing::WithParamInterface<std::tuple<OSType, OSType>> {};
+
+// We do not have the testing utils necessary to create and verify pixel buffers
+// in other formats than YUVS, so in order to test the full conversion matrix of
+// all supported formats X -> Y, this parameterized test performs:
+// YUVS -> X -> Y -> YUVS
+TEST_P(PixelBufferTransfererParameterizedTest,
+ CanConvertFromXToYAndVerifyColor) {
+ OSType pixel_format_from;
+ OSType pixel_format_to;
+ std::tie(pixel_format_from, pixel_format_to) = GetParam();
+ LOG(INFO) << "Running Test: " << MacFourCCToString(pixel_format_from)
+ << " -> " << MacFourCCToString(pixel_format_to);
+
+ constexpr int kWidth = 32;
+ constexpr int kHeight = 32;
+ PixelBufferTransferer transferer;
+ // We always start with YUVS because this is the format that the testing
+ // utilities can convert to/from RGB.
+ std::unique_ptr<ByteArrayPixelBuffer> original_yuvs_buffer =
+ CreateYuvsPixelBufferFromSingleRgbColor(kWidth, kHeight, kColorR, kColorG,
+ kColorB);
+ // YUVS -> pixel_format_from
+ base::ScopedCFTypeRef<CVPixelBufferRef> pixel_buffer_from;
+ if (pixel_format_from == kPixelFormatYuvs) {
+ pixel_buffer_from = original_yuvs_buffer->pixel_buffer;
+ } else {
+ pixel_buffer_from =
+ PixelBufferPool::Create(pixel_format_from, kWidth, kHeight, 1)
+ ->CreateBuffer();
+ transferer.TransferImage(original_yuvs_buffer->pixel_buffer,
+ pixel_buffer_from);
+ }
+ ASSERT_TRUE(pixel_buffer_from);
+
+ // pixel_format_from -> pixel_format_to
+ base::ScopedCFTypeRef<CVPixelBufferRef> pixel_buffer_to =
+ PixelBufferPool::Create(pixel_format_to, kWidth, kHeight, 1)
+ ->CreateBuffer();
+ EXPECT_TRUE(transferer.TransferImage(pixel_buffer_from, pixel_buffer_to));
+
+ // We always convert back to YUVS because this is the only format that the
+ // testing utilities can convert to/from RGB.
+ base::ScopedCFTypeRef<CVPixelBufferRef> final_yuvs_buffer;
+ // pixel_format_to -> YUVS
+ if (pixel_format_to == kPixelFormatYuvs) {
+ final_yuvs_buffer = pixel_buffer_to;
+ } else {
+ final_yuvs_buffer =
+ PixelBufferPool::Create(kPixelFormatYuvs, kWidth, kHeight, 1)
+ ->CreateBuffer();
+ transferer.TransferImage(pixel_buffer_to, final_yuvs_buffer);
+ }
+ ASSERT_TRUE(final_yuvs_buffer);
+ // Verify that after our "conversion dance" we end up with the same color that
+ // we started with.
+ EXPECT_TRUE(YuvsIOSurfaceIsSingleColor(
+ CVPixelBufferGetIOSurface(final_yuvs_buffer), kColorR, kColorG, kColorB));
+}
+
+INSTANTIATE_TEST_SUITE_P(
+ PixelBufferTransfererTest,
+ PixelBufferTransfererParameterizedTest,
+ ::testing::Combine(::testing::Values(kPixelFormatNv12,
+ kPixelFormatUyvy,
+ kPixelFormatYuvs,
+ kPixelFormatI420),
+ ::testing::Values(kPixelFormatNv12,
+ kPixelFormatUyvy,
+ kPixelFormatYuvs,
+ kPixelFormatI420)));
+
+} // namespace media
diff --git a/chromium/media/capture/video/mac/sample_buffer_transformer_mac.cc b/chromium/media/capture/video/mac/sample_buffer_transformer_mac.cc
new file mode 100644
index 00000000000..daa96cb01b0
--- /dev/null
+++ b/chromium/media/capture/video/mac/sample_buffer_transformer_mac.cc
@@ -0,0 +1,822 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/mac/sample_buffer_transformer_mac.h"
+
+#include <utility>
+
+#include "base/logging.h"
+#include "base/notreached.h"
+#include "media/base/video_frame.h"
+#include "media/base/video_types.h"
+#include "third_party/libyuv/include/libyuv.h"
+#include "third_party/libyuv/include/libyuv/scale.h"
+
+namespace media {
+
+const base::Feature kInCaptureConvertToNv12{"InCaptureConvertToNv12",
+ base::FEATURE_ENABLED_BY_DEFAULT};
+
+const base::Feature kInCaptureConvertToNv12WithPixelTransfer{
+ "InCaptureConvertToNv12WithPixelTransfer",
+ base::FEATURE_DISABLED_BY_DEFAULT};
+
+const base::Feature kInCaptureConvertToNv12WithLibyuv{
+ "InCaptureConvertToNv12WithLibyuv", base::FEATURE_DISABLED_BY_DEFAULT};
+
+namespace {
+
+constexpr size_t kDefaultBufferPoolSize = 10;
+
+// NV12 a.k.a. 420v
+constexpr OSType kPixelFormatNv12 =
+ kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
+// I420 a.k.a. y420
+constexpr OSType kPixelFormatI420 = kCVPixelFormatType_420YpCbCr8Planar;
+// MJPEG a.k.a. dmb1
+constexpr OSType kPixelFormatMjpeg = kCMVideoCodecType_JPEG_OpenDML;
+
+std::pair<uint8_t*, size_t> GetSampleBufferBaseAddressAndSize(
+ CMSampleBufferRef sample_buffer) {
+ // Access source sample buffer bytes.
+ CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(sample_buffer);
+ DCHECK(block_buffer);
+ char* data_base_address;
+ size_t data_size;
+ size_t length_at_offset;
+ OSStatus status = CMBlockBufferGetDataPointer(
+ block_buffer, 0, &length_at_offset, &data_size, &data_base_address);
+ DCHECK_EQ(status, noErr);
+ DCHECK(data_base_address);
+ DCHECK(data_size);
+ DCHECK_EQ(length_at_offset, data_size); // Buffer must be contiguous.
+ return std::make_pair(reinterpret_cast<uint8_t*>(data_base_address),
+ data_size);
+}
+
+struct I420Planes {
+ size_t width;
+ size_t height;
+ uint8_t* y_plane_data;
+ uint8_t* u_plane_data;
+ uint8_t* v_plane_data;
+ size_t y_plane_stride;
+ size_t u_plane_stride;
+ size_t v_plane_stride;
+};
+
+size_t GetContiguousI420BufferSize(size_t width, size_t height) {
+ gfx::Size dimensions(width, height);
+ return VideoFrame::PlaneSize(PIXEL_FORMAT_I420, VideoFrame::kYPlane,
+ dimensions)
+ .GetArea() +
+ VideoFrame::PlaneSize(PIXEL_FORMAT_I420, VideoFrame::kUPlane,
+ dimensions)
+ .GetArea() +
+ VideoFrame::PlaneSize(PIXEL_FORMAT_I420, VideoFrame::kVPlane,
+ dimensions)
+ .GetArea();
+}
+
+I420Planes GetI420PlanesFromContiguousBuffer(uint8_t* data_base_address,
+ size_t width,
+ size_t height) {
+ gfx::Size dimensions(width, height);
+ gfx::Size y_plane_size =
+ VideoFrame::PlaneSize(PIXEL_FORMAT_I420, VideoFrame::kYPlane, dimensions);
+ gfx::Size u_plane_size =
+ VideoFrame::PlaneSize(PIXEL_FORMAT_I420, VideoFrame::kUPlane, dimensions);
+ gfx::Size v_plane_size =
+ VideoFrame::PlaneSize(PIXEL_FORMAT_I420, VideoFrame::kUPlane, dimensions);
+ I420Planes i420_planes;
+ i420_planes.width = width;
+ i420_planes.height = height;
+ i420_planes.y_plane_data = data_base_address;
+ i420_planes.u_plane_data = i420_planes.y_plane_data + y_plane_size.GetArea();
+ i420_planes.v_plane_data = i420_planes.u_plane_data + u_plane_size.GetArea();
+ i420_planes.y_plane_stride = y_plane_size.width();
+ i420_planes.u_plane_stride = u_plane_size.width();
+ i420_planes.v_plane_stride = v_plane_size.width();
+ return i420_planes;
+}
+
+I420Planes EnsureI420BufferSizeAndGetPlanes(size_t width,
+ size_t height,
+ std::vector<uint8_t>* i420_buffer) {
+ size_t required_size = GetContiguousI420BufferSize(width, height);
+ if (i420_buffer->size() < required_size)
+ i420_buffer->resize(required_size);
+ return GetI420PlanesFromContiguousBuffer(&(*i420_buffer)[0], width, height);
+}
+
+I420Planes GetI420PlanesFromPixelBuffer(CVPixelBufferRef pixel_buffer) {
+ DCHECK_EQ(CVPixelBufferGetPlaneCount(pixel_buffer), 3u);
+ I420Planes i420_planes;
+ i420_planes.width = CVPixelBufferGetWidth(pixel_buffer);
+ i420_planes.height = CVPixelBufferGetHeight(pixel_buffer);
+ i420_planes.y_plane_data = static_cast<uint8_t*>(
+ CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, 0));
+ i420_planes.u_plane_data = static_cast<uint8_t*>(
+ CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, 1));
+ i420_planes.v_plane_data = static_cast<uint8_t*>(
+ CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, 2));
+ i420_planes.y_plane_stride =
+ CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 0);
+ i420_planes.u_plane_stride =
+ CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 1);
+ i420_planes.v_plane_stride =
+ CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 2);
+ return i420_planes;
+}
+
+struct NV12Planes {
+ size_t width;
+ size_t height;
+ uint8_t* y_plane_data;
+ uint8_t* uv_plane_data;
+ size_t y_plane_stride;
+ size_t uv_plane_stride;
+};
+
+size_t GetContiguousNV12BufferSize(size_t width, size_t height) {
+ gfx::Size dimensions(width, height);
+ return VideoFrame::PlaneSize(PIXEL_FORMAT_NV12, VideoFrame::kYPlane,
+ dimensions)
+ .GetArea() +
+ VideoFrame::PlaneSize(PIXEL_FORMAT_NV12, VideoFrame::kUVPlane,
+ dimensions)
+ .GetArea();
+}
+
+NV12Planes GetNV12PlanesFromContiguousBuffer(uint8_t* data_base_address,
+ size_t width,
+ size_t height) {
+ gfx::Size dimensions(width, height);
+ gfx::Size y_plane_size =
+ VideoFrame::PlaneSize(PIXEL_FORMAT_NV12, VideoFrame::kYPlane, dimensions);
+ gfx::Size uv_plane_size = VideoFrame::PlaneSize(
+ PIXEL_FORMAT_NV12, VideoFrame::kUVPlane, dimensions);
+ NV12Planes nv12_planes;
+ nv12_planes.width = width;
+ nv12_planes.height = height;
+ nv12_planes.y_plane_data = data_base_address;
+ nv12_planes.uv_plane_data = nv12_planes.y_plane_data + y_plane_size.GetArea();
+ nv12_planes.y_plane_stride = y_plane_size.width();
+ nv12_planes.uv_plane_stride = uv_plane_size.width();
+ return nv12_planes;
+}
+
+NV12Planes EnsureNV12BufferSizeAndGetPlanes(size_t width,
+ size_t height,
+ std::vector<uint8_t>* nv12_buffer) {
+ size_t required_size = GetContiguousNV12BufferSize(width, height);
+ if (nv12_buffer->size() < required_size)
+ nv12_buffer->resize(required_size);
+ return GetNV12PlanesFromContiguousBuffer(&(*nv12_buffer)[0], width, height);
+}
+
+NV12Planes GetNV12PlanesFromPixelBuffer(CVPixelBufferRef pixel_buffer) {
+ DCHECK_EQ(CVPixelBufferGetPlaneCount(pixel_buffer), 2u);
+ NV12Planes nv12_planes;
+ nv12_planes.width = CVPixelBufferGetWidth(pixel_buffer);
+ nv12_planes.height = CVPixelBufferGetHeight(pixel_buffer);
+ nv12_planes.y_plane_data = static_cast<uint8_t*>(
+ CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, 0));
+ nv12_planes.uv_plane_data = static_cast<uint8_t*>(
+ CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, 1));
+ nv12_planes.y_plane_stride =
+ CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 0);
+ nv12_planes.uv_plane_stride =
+ CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 1);
+ return nv12_planes;
+}
+
+bool ConvertFromMjpegToI420(uint8_t* source_buffer_base_address,
+ size_t source_buffer_size,
+ const I420Planes& destination) {
+ int result = libyuv::MJPGToI420(
+ source_buffer_base_address, source_buffer_size, destination.y_plane_data,
+ destination.y_plane_stride, destination.u_plane_data,
+ destination.u_plane_stride, destination.v_plane_data,
+ destination.v_plane_stride, destination.width, destination.height,
+ destination.width, destination.height);
+ return result == 0;
+}
+
+// Returns true on success. Converting uncompressed pixel formats should never
+// fail; however, MJPEG frames produced by some webcams have been observed to be
+// invalid in special circumstances (see https://crbug.com/1147867). To support
+// a graceful failure path in this case, this function may return false.
+bool ConvertFromAnyToI420(CVPixelBufferRef source_pixel_buffer,
+ const I420Planes& destination) {
+ auto pixel_format = CVPixelBufferGetPixelFormatType(source_pixel_buffer);
+ switch (pixel_format) {
+ // UYVY a.k.a. 2vuy
+ case kCVPixelFormatType_422YpCbCr8: {
+ const uint8_t* src_uyvy = static_cast<const uint8_t*>(
+ CVPixelBufferGetBaseAddress(source_pixel_buffer));
+ size_t src_stride_uyvy = CVPixelBufferGetBytesPerRow(source_pixel_buffer);
+ return libyuv::UYVYToI420(
+ src_uyvy, src_stride_uyvy, destination.y_plane_data,
+ destination.y_plane_stride, destination.u_plane_data,
+ destination.u_plane_stride, destination.v_plane_data,
+ destination.v_plane_stride, destination.width,
+ destination.height) == 0;
+ }
+ // YUY2 a.k.a. yuvs
+ case kCMPixelFormat_422YpCbCr8_yuvs: {
+ const uint8_t* src_yuy2 = static_cast<const uint8_t*>(
+ CVPixelBufferGetBaseAddress(source_pixel_buffer));
+ size_t src_stride_yuy2 = CVPixelBufferGetBytesPerRow(source_pixel_buffer);
+ return libyuv::YUY2ToI420(
+ src_yuy2, src_stride_yuy2, destination.y_plane_data,
+ destination.y_plane_stride, destination.u_plane_data,
+ destination.u_plane_stride, destination.v_plane_data,
+ destination.v_plane_stride, destination.width,
+ destination.height) == 0;
+ }
+ // MJPEG a.k.a. dmb1
+ case kCMVideoCodecType_JPEG_OpenDML: {
+ uint8_t* src_jpg = static_cast<uint8_t*>(
+ CVPixelBufferGetBaseAddress(source_pixel_buffer));
+ size_t src_jpg_size = CVPixelBufferGetDataSize(source_pixel_buffer);
+ return ConvertFromMjpegToI420(src_jpg, src_jpg_size, destination);
+ }
+ // NV12 a.k.a. 420v
+ case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: {
+ DCHECK(CVPixelBufferIsPlanar(source_pixel_buffer));
+ DCHECK_EQ(2u, CVPixelBufferGetPlaneCount(source_pixel_buffer));
+ const uint8_t* src_y = static_cast<const uint8_t*>(
+ CVPixelBufferGetBaseAddressOfPlane(source_pixel_buffer, 0));
+ size_t src_stride_y =
+ CVPixelBufferGetBytesPerRowOfPlane(source_pixel_buffer, 0);
+ const uint8_t* src_uv = static_cast<const uint8_t*>(
+ CVPixelBufferGetBaseAddressOfPlane(source_pixel_buffer, 1));
+ size_t src_stride_uv =
+ CVPixelBufferGetBytesPerRowOfPlane(source_pixel_buffer, 1);
+ return libyuv::NV12ToI420(
+ src_y, src_stride_y, src_uv, src_stride_uv,
+ destination.y_plane_data, destination.y_plane_stride,
+ destination.u_plane_data, destination.u_plane_stride,
+ destination.v_plane_data, destination.v_plane_stride,
+ destination.width, destination.height) == 0;
+ }
+ // I420 a.k.a. y420
+ case kCVPixelFormatType_420YpCbCr8Planar: {
+ DCHECK(CVPixelBufferIsPlanar(source_pixel_buffer));
+ DCHECK_EQ(3u, CVPixelBufferGetPlaneCount(source_pixel_buffer));
+ const uint8_t* src_y = static_cast<const uint8_t*>(
+ CVPixelBufferGetBaseAddressOfPlane(source_pixel_buffer, 0));
+ size_t src_stride_y =
+ CVPixelBufferGetBytesPerRowOfPlane(source_pixel_buffer, 0);
+ const uint8_t* src_u = static_cast<const uint8_t*>(
+ CVPixelBufferGetBaseAddressOfPlane(source_pixel_buffer, 1));
+ size_t src_stride_u =
+ CVPixelBufferGetBytesPerRowOfPlane(source_pixel_buffer, 1);
+ const uint8_t* src_v = static_cast<const uint8_t*>(
+ CVPixelBufferGetBaseAddressOfPlane(source_pixel_buffer, 2));
+ size_t src_stride_v =
+ CVPixelBufferGetBytesPerRowOfPlane(source_pixel_buffer, 2);
+ return libyuv::I420Copy(
+ src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
+ destination.y_plane_data, destination.y_plane_stride,
+ destination.u_plane_data, destination.u_plane_stride,
+ destination.v_plane_data, destination.v_plane_stride,
+ destination.width, destination.height) == 0;
+ }
+ default:
+ NOTREACHED() << "Pixel format " << pixel_format << " not supported.";
+ }
+ return false;
+}
+
+void ConvertFromI420ToNV12(const I420Planes& source,
+ const NV12Planes& destination) {
+ DCHECK_EQ(source.width, destination.width);
+ DCHECK_EQ(source.height, destination.height);
+ int result = libyuv::I420ToNV12(
+ source.y_plane_data, source.y_plane_stride, source.u_plane_data,
+ source.u_plane_stride, source.v_plane_data, source.v_plane_stride,
+ destination.y_plane_data, destination.y_plane_stride,
+ destination.uv_plane_data, destination.uv_plane_stride, source.width,
+ source.height);
+ // A webcam has never been observed to produce invalid uncompressed pixel
+  // buffers, so we do not support a graceful failure path in this case.
+ DCHECK_EQ(result, 0);
+}
+
+// Returns true on success. MJPEG frames produced by some webcams have been
+// observed to be invalid in special circumstances (see
+// https://crbug.com/1147867). To support a graceful failure path in this case,
+// this function may return false.
+bool ConvertFromMjpegToNV12(uint8_t* source_buffer_data_base_address,
+ size_t source_buffer_data_size,
+ const NV12Planes& destination) {
+ // Despite libyuv::MJPGToNV12() taking both source and destination sizes as
+ // arguments, this function is only successful if the sizes match. So here we
+ // require the destination buffer's size to match the source's.
+ int result = libyuv::MJPGToNV12(
+ source_buffer_data_base_address, source_buffer_data_size,
+ destination.y_plane_data, destination.y_plane_stride,
+ destination.uv_plane_data, destination.uv_plane_stride, destination.width,
+ destination.height, destination.width, destination.height);
+ return result == 0;
+}
+
+void ScaleI420(const I420Planes& source, const I420Planes& destination) {
+ int result = libyuv::I420Scale(
+ source.y_plane_data, source.y_plane_stride, source.u_plane_data,
+ source.u_plane_stride, source.v_plane_data, source.v_plane_stride,
+ source.width, source.height, destination.y_plane_data,
+ destination.y_plane_stride, destination.u_plane_data,
+ destination.u_plane_stride, destination.v_plane_data,
+ destination.v_plane_stride, destination.width, destination.height,
+ libyuv::kFilterBilinear);
+ DCHECK_EQ(result, 0);
+}
+
+void CopyI420(const I420Planes& source, const I420Planes& destination) {
+ DCHECK_EQ(source.width, destination.width);
+ DCHECK_EQ(source.height, destination.height);
+ libyuv::I420Copy(source.y_plane_data, source.y_plane_stride,
+ source.u_plane_data, source.u_plane_stride,
+ source.v_plane_data, source.v_plane_stride,
+ destination.y_plane_data, destination.y_plane_stride,
+ destination.u_plane_data, destination.u_plane_stride,
+ destination.v_plane_data, destination.v_plane_stride,
+ source.width, source.height);
+}
+
+void ScaleNV12(const NV12Planes& source, const NV12Planes& destination) {
+ int result = libyuv::NV12Scale(
+ source.y_plane_data, source.y_plane_stride, source.uv_plane_data,
+ source.uv_plane_stride, source.width, source.height,
+ destination.y_plane_data, destination.y_plane_stride,
+ destination.uv_plane_data, destination.uv_plane_stride, destination.width,
+ destination.height, libyuv::kFilterBilinear);
+ DCHECK_EQ(result, 0);
+}
+
+void CopyNV12(const NV12Planes& source, const NV12Planes& destination) {
+ DCHECK_EQ(source.width, destination.width);
+ DCHECK_EQ(source.height, destination.height);
+ libyuv::CopyPlane(source.y_plane_data, source.y_plane_stride,
+ destination.y_plane_data, destination.y_plane_stride,
+ destination.width, destination.height);
+ size_t half_width = (destination.width + 1) >> 1;
+ size_t half_height = (destination.height + 1) >> 1;
+ libyuv::CopyPlane(source.uv_plane_data, source.uv_plane_stride,
+ destination.uv_plane_data, destination.uv_plane_stride,
+ half_width * 2, half_height);
+}
+
+} // namespace
+
+// static
+std::unique_ptr<SampleBufferTransformer>
+SampleBufferTransformer::CreateIfAutoReconfigureEnabled() {
+ return IsAutoReconfigureEnabled()
+ ? std::make_unique<SampleBufferTransformer>()
+ : nullptr;
+}
+
+// static
+std::unique_ptr<SampleBufferTransformer> SampleBufferTransformer::Create() {
+ return std::make_unique<SampleBufferTransformer>();
+}
+
+// static
+bool SampleBufferTransformer::IsAutoReconfigureEnabled() {
+ return base::FeatureList::IsEnabled(kInCaptureConvertToNv12) ||
+ base::FeatureList::IsEnabled(
+ kInCaptureConvertToNv12WithPixelTransfer) ||
+ base::FeatureList::IsEnabled(kInCaptureConvertToNv12WithLibyuv);
+}
+
+SampleBufferTransformer::SampleBufferTransformer()
+ : transformer_(Transformer::kNotConfigured),
+ destination_pixel_format_(0x0),
+ destination_width_(0),
+ destination_height_(0) {}
+
+SampleBufferTransformer::~SampleBufferTransformer() {}
+
+SampleBufferTransformer::Transformer SampleBufferTransformer::transformer()
+ const {
+ return transformer_;
+}
+
+OSType SampleBufferTransformer::destination_pixel_format() const {
+ return destination_pixel_format_;
+}
+
+size_t SampleBufferTransformer::destination_width() const {
+ return destination_width_;
+}
+
+size_t SampleBufferTransformer::destination_height() const {
+ return destination_height_;
+}
+
+base::ScopedCFTypeRef<CVPixelBufferRef>
+SampleBufferTransformer::AutoReconfigureAndTransform(
+ CMSampleBufferRef sample_buffer) {
+ AutoReconfigureBasedOnInputAndFeatureFlags(sample_buffer);
+ return Transform(sample_buffer);
+}
+
+void SampleBufferTransformer::Reconfigure(
+ Transformer transformer,
+ OSType destination_pixel_format,
+ size_t destination_width,
+ size_t destination_height,
+ base::Optional<size_t> buffer_pool_size) {
+ DCHECK(transformer != Transformer::kLibyuv ||
+ destination_pixel_format == kPixelFormatI420 ||
+ destination_pixel_format == kPixelFormatNv12)
+ << "Destination format is unsupported when running libyuv";
+ if (transformer_ == transformer &&
+ destination_pixel_format_ == destination_pixel_format &&
+ destination_width_ == destination_width &&
+ destination_height_ == destination_height) {
+ // Already configured as desired, abort.
+ return;
+ }
+ transformer_ = transformer;
+ destination_pixel_format_ = destination_pixel_format;
+ destination_width_ = destination_width;
+ destination_height_ = destination_height;
+ destination_pixel_buffer_pool_ =
+ PixelBufferPool::Create(destination_pixel_format_, destination_width_,
+ destination_height_, buffer_pool_size);
+ if (transformer == Transformer::kPixelBufferTransfer) {
+ pixel_buffer_transferer_ = std::make_unique<PixelBufferTransferer>();
+ } else {
+ pixel_buffer_transferer_.reset();
+ }
+ intermediate_i420_buffer_.resize(0);
+ intermediate_nv12_buffer_.resize(0);
+}
+
+void SampleBufferTransformer::AutoReconfigureBasedOnInputAndFeatureFlags(
+ CMSampleBufferRef sample_buffer) {
+ DCHECK(IsAutoReconfigureEnabled());
+ Transformer desired_transformer = Transformer::kNotConfigured;
+ size_t desired_width;
+ size_t desired_height;
+ if (CVPixelBufferRef pixel_buffer =
+ CMSampleBufferGetImageBuffer(sample_buffer)) {
+ // We have a pixel buffer.
+ if (base::FeatureList::IsEnabled(kInCaptureConvertToNv12)) {
+ // Pixel transfers are believed to be more efficient for X -> NV12.
+ desired_transformer = Transformer::kPixelBufferTransfer;
+ }
+ desired_width = CVPixelBufferGetWidth(pixel_buffer);
+ desired_height = CVPixelBufferGetHeight(pixel_buffer);
+ } else {
+ // We don't have a pixel buffer. Reconfigure to be prepared for MJPEG.
+ if (base::FeatureList::IsEnabled(kInCaptureConvertToNv12)) {
+ // Only libyuv supports MJPEG -> NV12.
+ desired_transformer = Transformer::kLibyuv;
+ }
+ CMFormatDescriptionRef format_description =
+ CMSampleBufferGetFormatDescription(sample_buffer);
+ CMVideoDimensions dimensions =
+ CMVideoFormatDescriptionGetDimensions(format_description);
+ desired_width = dimensions.width;
+ desired_height = dimensions.height;
+ }
+ if (base::FeatureList::IsEnabled(kInCaptureConvertToNv12WithPixelTransfer)) {
+ desired_transformer = Transformer::kPixelBufferTransfer;
+ } else if (base::FeatureList::IsEnabled(kInCaptureConvertToNv12WithLibyuv)) {
+ desired_transformer = Transformer::kLibyuv;
+ }
+ Reconfigure(desired_transformer, kPixelFormatNv12, desired_width,
+ desired_height, kDefaultBufferPoolSize);
+}
+
+base::ScopedCFTypeRef<CVPixelBufferRef> SampleBufferTransformer::Transform(
+ CMSampleBufferRef sample_buffer) {
+ DCHECK(transformer_ != Transformer::kNotConfigured);
+ CVPixelBufferRef source_pixel_buffer =
+ CMSampleBufferGetImageBuffer(sample_buffer);
+ // Fast path: If source and destination formats are identical, return the
+ // source pixel buffer.
+ if (source_pixel_buffer &&
+ destination_width_ == CVPixelBufferGetWidth(source_pixel_buffer) &&
+ destination_height_ == CVPixelBufferGetHeight(source_pixel_buffer) &&
+ destination_pixel_format_ ==
+ CVPixelBufferGetPixelFormatType(source_pixel_buffer) &&
+ CVPixelBufferGetIOSurface(source_pixel_buffer)) {
+ return base::ScopedCFTypeRef<CVPixelBufferRef>(source_pixel_buffer,
+ base::scoped_policy::RETAIN);
+ }
+ // Create destination buffer from pool.
+ base::ScopedCFTypeRef<CVPixelBufferRef> destination_pixel_buffer =
+ destination_pixel_buffer_pool_->CreateBuffer();
+ if (!destination_pixel_buffer) {
+ // Maximum destination buffers exceeded. Old buffers are not being released
+ // (and thus not returned to the pool) in time.
+ LOG(ERROR) << "Maximum destination buffers exceeded";
+ return base::ScopedCFTypeRef<CVPixelBufferRef>();
+ }
+ if (source_pixel_buffer) {
+ // Pixel buffer path. Do pixel transfer or libyuv conversion + rescale.
+ TransformPixelBuffer(source_pixel_buffer, destination_pixel_buffer);
+ return destination_pixel_buffer;
+ }
+ // Sample buffer path - it's MJPEG. Do libyuv conversion + rescale.
+ if (!TransformSampleBuffer(sample_buffer, destination_pixel_buffer)) {
+ LOG(ERROR) << "Failed to transform sample buffer.";
+ return base::ScopedCFTypeRef<CVPixelBufferRef>();
+ }
+ return destination_pixel_buffer;
+}
+
+void SampleBufferTransformer::TransformPixelBuffer(
+ CVPixelBufferRef source_pixel_buffer,
+ CVPixelBufferRef destination_pixel_buffer) {
+ switch (transformer_) {
+ case Transformer::kPixelBufferTransfer:
+ return TransformPixelBufferWithPixelTransfer(source_pixel_buffer,
+ destination_pixel_buffer);
+ case Transformer::kLibyuv:
+ return TransformPixelBufferWithLibyuv(source_pixel_buffer,
+ destination_pixel_buffer);
+ case Transformer::kNotConfigured:
+ NOTREACHED();
+ }
+}
+
+void SampleBufferTransformer::TransformPixelBufferWithPixelTransfer(
+ CVPixelBufferRef source_pixel_buffer,
+ CVPixelBufferRef destination_pixel_buffer) {
+ DCHECK(transformer_ == Transformer::kPixelBufferTransfer);
+ DCHECK(pixel_buffer_transferer_);
+ bool success = pixel_buffer_transferer_->TransferImage(
+ source_pixel_buffer, destination_pixel_buffer);
+ DCHECK(success);
+}
+
+void SampleBufferTransformer::TransformPixelBufferWithLibyuv(
+ CVPixelBufferRef source_pixel_buffer,
+ CVPixelBufferRef destination_pixel_buffer) {
+ DCHECK(transformer_ == Transformer::kLibyuv);
+ // Lock source and destination pixel buffers.
+ CVReturn lock_status = CVPixelBufferLockBaseAddress(
+ source_pixel_buffer, kCVPixelBufferLock_ReadOnly);
+ DCHECK_EQ(lock_status, kCVReturnSuccess);
+ lock_status = CVPixelBufferLockBaseAddress(destination_pixel_buffer, 0);
+ DCHECK_EQ(lock_status, kCVReturnSuccess);
+
+ // Perform transform with libyuv.
+ switch (destination_pixel_format_) {
+ case kPixelFormatI420:
+ TransformPixelBufferWithLibyuvFromAnyToI420(source_pixel_buffer,
+ destination_pixel_buffer);
+ break;
+ case kPixelFormatNv12:
+ TransformPixelBufferWithLibyuvFromAnyToNV12(source_pixel_buffer,
+ destination_pixel_buffer);
+ break;
+ default:
+ NOTREACHED();
+ }
+
+ // Unlock source and destination pixel buffers.
+ lock_status = CVPixelBufferUnlockBaseAddress(destination_pixel_buffer, 0);
+ DCHECK_EQ(lock_status, kCVReturnSuccess);
+ lock_status = CVPixelBufferUnlockBaseAddress(source_pixel_buffer,
+ kCVPixelBufferLock_ReadOnly);
+ DCHECK_EQ(lock_status, kCVReturnSuccess);
+}
+
+void SampleBufferTransformer::TransformPixelBufferWithLibyuvFromAnyToI420(
+ CVPixelBufferRef source_pixel_buffer,
+ CVPixelBufferRef destination_pixel_buffer) {
+ // Get source pixel format and bytes.
+ size_t source_width = CVPixelBufferGetWidth(source_pixel_buffer);
+ size_t source_height = CVPixelBufferGetHeight(source_pixel_buffer);
+ OSType source_pixel_format =
+ CVPixelBufferGetPixelFormatType(source_pixel_buffer);
+
+ // Rescaling has to be done in a separate step.
+ const bool rescale_needed = destination_width_ != source_width ||
+ destination_height_ != source_height;
+
+ // Step 1: Convert to I420.
+ I420Planes i420_fullscale_buffer;
+ if (source_pixel_format == kPixelFormatI420) {
+ // We are already at I420.
+ i420_fullscale_buffer = GetI420PlanesFromPixelBuffer(source_pixel_buffer);
+ // Fast path should have been taken if no resize needed and the buffer is on
+ // an IOSurface already.
+ DCHECK(rescale_needed || !CVPixelBufferGetIOSurface(source_pixel_buffer));
+ if (!rescale_needed) {
+ I420Planes i420_destination_buffer =
+ GetI420PlanesFromPixelBuffer(destination_pixel_buffer);
+ CopyI420(i420_fullscale_buffer, i420_destination_buffer);
+ return;
+ }
+ } else {
+ // Convert X -> I420.
+ if (!rescale_needed) {
+ i420_fullscale_buffer =
+ GetI420PlanesFromPixelBuffer(destination_pixel_buffer);
+ } else {
+ i420_fullscale_buffer = EnsureI420BufferSizeAndGetPlanes(
+ source_width, source_height, &intermediate_i420_buffer_);
+ }
+ if (!ConvertFromAnyToI420(source_pixel_buffer, i420_fullscale_buffer)) {
+ // Only MJPEG conversions are known to be able to fail. Because X is an
+ // uncompressed pixel format, this conversion should never fail.
+ NOTREACHED();
+ }
+ }
+
+ // Step 2: Rescale I420.
+ if (rescale_needed) {
+ I420Planes i420_destination_buffer =
+ GetI420PlanesFromPixelBuffer(destination_pixel_buffer);
+ ScaleI420(i420_fullscale_buffer, i420_destination_buffer);
+ }
+}
+
+void SampleBufferTransformer::TransformPixelBufferWithLibyuvFromAnyToNV12(
+ CVPixelBufferRef source_pixel_buffer,
+ CVPixelBufferRef destination_pixel_buffer) {
+ // Get source pixel format and bytes.
+ size_t source_width = CVPixelBufferGetWidth(source_pixel_buffer);
+ size_t source_height = CVPixelBufferGetHeight(source_pixel_buffer);
+ OSType source_pixel_format =
+ CVPixelBufferGetPixelFormatType(source_pixel_buffer);
+
+ // Rescaling has to be done in a separate step.
+ const bool rescale_needed = destination_width_ != source_width ||
+ destination_height_ != source_height;
+
+ // Step 1: Convert to NV12.
+ NV12Planes nv12_fullscale_buffer;
+ if (source_pixel_format == kPixelFormatNv12) {
+ // We are already at NV12.
+ nv12_fullscale_buffer = GetNV12PlanesFromPixelBuffer(source_pixel_buffer);
+ // Fast path should have been taken if no resize needed and the buffer is on
+ // an IOSurface already.
+ DCHECK(rescale_needed || !CVPixelBufferGetIOSurface(source_pixel_buffer));
+ if (!rescale_needed) {
+ NV12Planes nv12_destination_buffer =
+ GetNV12PlanesFromPixelBuffer(destination_pixel_buffer);
+ CopyNV12(nv12_fullscale_buffer, nv12_destination_buffer);
+ return;
+ }
+ } else {
+ // Convert X -> I420 -> NV12. (We don't know how to do X -> NV12.)
+ // TODO(https://crbug.com/1154273): Convert to NV12 directly.
+ I420Planes i420_fullscale_buffer;
+ if (source_pixel_format == kPixelFormatI420) {
+ // We are already at I420.
+ i420_fullscale_buffer = GetI420PlanesFromPixelBuffer(source_pixel_buffer);
+ } else {
+ // Convert X -> I420.
+ i420_fullscale_buffer = EnsureI420BufferSizeAndGetPlanes(
+ source_width, source_height, &intermediate_i420_buffer_);
+ if (!ConvertFromAnyToI420(source_pixel_buffer, i420_fullscale_buffer)) {
+ NOTREACHED();
+ }
+ }
+ // Convert I420 -> NV12.
+ if (!rescale_needed) {
+ nv12_fullscale_buffer =
+ GetNV12PlanesFromPixelBuffer(destination_pixel_buffer);
+ } else {
+ nv12_fullscale_buffer = EnsureNV12BufferSizeAndGetPlanes(
+ source_width, source_height, &intermediate_nv12_buffer_);
+ }
+ ConvertFromI420ToNV12(i420_fullscale_buffer, nv12_fullscale_buffer);
+ }
+
+ // Step 2: Rescale NV12.
+ if (rescale_needed) {
+ NV12Planes nv12_destination_buffer =
+ GetNV12PlanesFromPixelBuffer(destination_pixel_buffer);
+ ScaleNV12(nv12_fullscale_buffer, nv12_destination_buffer);
+ }
+}
+
+bool SampleBufferTransformer::TransformSampleBuffer(
+ CMSampleBufferRef source_sample_buffer,
+ CVPixelBufferRef destination_pixel_buffer) {
+ DCHECK(transformer_ == Transformer::kLibyuv);
+ // Ensure source pixel format is MJPEG and get width and height.
+ CMFormatDescriptionRef source_format_description =
+ CMSampleBufferGetFormatDescription(source_sample_buffer);
+ FourCharCode source_pixel_format =
+ CMFormatDescriptionGetMediaSubType(source_format_description);
+ DCHECK(source_pixel_format == kPixelFormatMjpeg);
+ CMVideoDimensions source_dimensions =
+ CMVideoFormatDescriptionGetDimensions(source_format_description);
+
+ // Access source pixel buffer bytes.
+ uint8_t* source_buffer_data_base_address;
+ size_t source_buffer_data_size;
+ std::tie(source_buffer_data_base_address, source_buffer_data_size) =
+ GetSampleBufferBaseAddressAndSize(source_sample_buffer);
+
+ // Lock destination pixel buffer.
+ CVReturn lock_status =
+ CVPixelBufferLockBaseAddress(destination_pixel_buffer, 0);
+ DCHECK_EQ(lock_status, kCVReturnSuccess);
+ // Convert to I420 or NV12.
+ bool success = false;
+ switch (destination_pixel_format_) {
+ case kPixelFormatI420:
+ success = TransformSampleBufferFromMjpegToI420(
+ source_buffer_data_base_address, source_buffer_data_size,
+ source_dimensions.width, source_dimensions.height,
+ destination_pixel_buffer);
+ break;
+ case kPixelFormatNv12:
+ success = TransformSampleBufferFromMjpegToNV12(
+ source_buffer_data_base_address, source_buffer_data_size,
+ source_dimensions.width, source_dimensions.height,
+ destination_pixel_buffer);
+ break;
+ default:
+ NOTREACHED();
+ }
+ // Unlock destination pixel buffer.
+ lock_status = CVPixelBufferUnlockBaseAddress(destination_pixel_buffer, 0);
+ DCHECK_EQ(lock_status, kCVReturnSuccess);
+ return success;
+}
+
+bool SampleBufferTransformer::TransformSampleBufferFromMjpegToI420(
+ uint8_t* source_buffer_data_base_address,
+ size_t source_buffer_data_size,
+ size_t source_width,
+ size_t source_height,
+ CVPixelBufferRef destination_pixel_buffer) {
+ DCHECK(destination_pixel_format_ == kPixelFormatI420);
+ // Rescaling has to be done in a separate step.
+ const bool rescale_needed = destination_width_ != source_width ||
+ destination_height_ != source_height;
+
+ // Step 1: Convert MJPEG -> I420.
+ I420Planes i420_fullscale_buffer;
+ if (!rescale_needed) {
+ i420_fullscale_buffer =
+ GetI420PlanesFromPixelBuffer(destination_pixel_buffer);
+ } else {
+ i420_fullscale_buffer = EnsureI420BufferSizeAndGetPlanes(
+ source_width, source_height, &intermediate_i420_buffer_);
+ }
+ if (!ConvertFromMjpegToI420(source_buffer_data_base_address,
+ source_buffer_data_size, i420_fullscale_buffer)) {
+ return false;
+ }
+
+ // Step 2: Rescale I420.
+ if (rescale_needed) {
+ I420Planes i420_destination_buffer =
+ GetI420PlanesFromPixelBuffer(destination_pixel_buffer);
+ ScaleI420(i420_fullscale_buffer, i420_destination_buffer);
+ }
+ return true;
+}
+
+bool SampleBufferTransformer::TransformSampleBufferFromMjpegToNV12(
+ uint8_t* source_buffer_data_base_address,
+ size_t source_buffer_data_size,
+ size_t source_width,
+ size_t source_height,
+ CVPixelBufferRef destination_pixel_buffer) {
+ DCHECK(destination_pixel_format_ == kPixelFormatNv12);
+ // Rescaling has to be done in a separate step.
+ const bool rescale_needed = destination_width_ != source_width ||
+ destination_height_ != source_height;
+
+ // Step 1: Convert MJPEG -> NV12.
+ NV12Planes nv12_fullscale_buffer;
+ if (!rescale_needed) {
+ nv12_fullscale_buffer =
+ GetNV12PlanesFromPixelBuffer(destination_pixel_buffer);
+ } else {
+ nv12_fullscale_buffer = EnsureNV12BufferSizeAndGetPlanes(
+ source_width, source_height, &intermediate_nv12_buffer_);
+ }
+ if (!ConvertFromMjpegToNV12(source_buffer_data_base_address,
+ source_buffer_data_size, nv12_fullscale_buffer)) {
+ return false;
+ }
+
+ // Step 2: Rescale NV12.
+ if (rescale_needed) {
+ NV12Planes nv12_destination_buffer =
+ GetNV12PlanesFromPixelBuffer(destination_pixel_buffer);
+ ScaleNV12(nv12_fullscale_buffer, nv12_destination_buffer);
+ }
+ return true;
+}
+
+} // namespace media
diff --git a/chromium/media/capture/video/mac/sample_buffer_transformer_mac.h b/chromium/media/capture/video/mac/sample_buffer_transformer_mac.h
new file mode 100644
index 00000000000..4f9dc45bbe3
--- /dev/null
+++ b/chromium/media/capture/video/mac/sample_buffer_transformer_mac.h
@@ -0,0 +1,147 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_CAPTURE_VIDEO_MAC_SAMPLE_BUFFER_TRANSFORMER_MAC_H_
+#define MEDIA_CAPTURE_VIDEO_MAC_SAMPLE_BUFFER_TRANSFORMER_MAC_H_
+
+#import <CoreMedia/CoreMedia.h>
+#import <CoreVideo/CoreVideo.h>
+#include <vector>
+
+#include "base/feature_list.h"
+#include "base/mac/scoped_cftyperef.h"
+#include "media/capture/capture_export.h"
+#include "media/capture/video/mac/pixel_buffer_pool_mac.h"
+#include "media/capture/video/mac/pixel_buffer_transferer_mac.h"
+
+namespace media {
+
+// When enabled, AutoReconfigureAndTransform() configures the
+// SampleBufferTransformer to use the conversion path (pixel transfer or libyuv)
+// that is believed to be most efficient for the input sample buffer.
+CAPTURE_EXPORT extern const base::Feature kInCaptureConvertToNv12;
+
+// Feature flag used for performance measurements. This will not be shipped.
+//
+// When enabled, AutoReconfigureAndTransform() configures the
+// SampleBufferTransformer to use the pixel transfer path. Transforming an MJPEG
+// sample buffer with this configuration will DCHECK crash.
+CAPTURE_EXPORT extern const base::Feature
+ kInCaptureConvertToNv12WithPixelTransfer;
+
+// Feature flag used for performance measurements. This will not be shipped.
+//
+// When enabled, AutoReconfigureAndTransform() configures the
+// SampleBufferTransformer to use the libyuv path.
+CAPTURE_EXPORT extern const base::Feature kInCaptureConvertToNv12WithLibyuv;
+
+// Capable of converting from any supported capture format (NV12, YUY2, UYVY and
+// MJPEG) to NV12 or I420 and doing rescaling. This class can be configured to
+// use VTPixelTransferSession (sometimes HW-accelerated) or third_party/libyuv
+// (SW-only). The output is always an IOSurface-backed pixel buffer that comes
+// from an internal pixel buffer pool.
+class CAPTURE_EXPORT SampleBufferTransformer {
+ public:
+ enum class Transformer {
+ kNotConfigured,
+ // Supports (Any except MJPEG) -> (NV12, I420, ...)
+ kPixelBufferTransfer,
+ // Supports (Any) -> (NV12 or I420)
+ kLibyuv,
+ };
+
+ // Only construct a sample transformer if one of the "InCaptureConvertToNv12"
+ // flags are enabled and AutoReconfigureAndTransform() is supported. See
+ // IsAutoReconfigureEnabled().
+ static std::unique_ptr<SampleBufferTransformer>
+ CreateIfAutoReconfigureEnabled();
+ static std::unique_ptr<SampleBufferTransformer> Create();
+
+ ~SampleBufferTransformer();
+
+ Transformer transformer() const;
+ OSType destination_pixel_format() const;
+ size_t destination_width() const;
+ size_t destination_height() const;
+
+ // Automatically reconfigures based on |sample_buffer| and base::Feature flags
+ // if needed before performing a Transform().
+ base::ScopedCFTypeRef<CVPixelBufferRef> AutoReconfigureAndTransform(
+ CMSampleBufferRef sample_buffer);
+
+ // Future calls to Transform() will output pixel buffers according to this
+ // configuration.
+ void Reconfigure(Transformer transformer,
+ OSType destination_pixel_format,
+ size_t destination_width,
+ size_t destination_height,
+ base::Optional<size_t> buffer_pool_size);
+
+ // Converts the sample buffer to an IOSurface-backed pixel buffer according to
+ // current configurations. If no transformation is needed (input format is the
+ // same as the configured output format), the sample buffer's pixel buffer is
+ // returned.
+ base::ScopedCFTypeRef<CVPixelBufferRef> Transform(
+ CMSampleBufferRef sample_buffer);
+
+ private:
+ friend std::unique_ptr<SampleBufferTransformer>
+ std::make_unique<SampleBufferTransformer>();
+
+ static bool IsAutoReconfigureEnabled();
+
+ SampleBufferTransformer();
+
+ void AutoReconfigureBasedOnInputAndFeatureFlags(
+ CMSampleBufferRef sample_buffer);
+
+ // Sample buffers from the camera contain pixel buffers when an uncompressed
+ // pixel format is used (i.e. it's not MJPEG).
+ void TransformPixelBuffer(CVPixelBufferRef source_pixel_buffer,
+ CVPixelBufferRef destination_pixel_buffer);
+ // (Any uncompressed -> Any uncompressed)
+ void TransformPixelBufferWithPixelTransfer(
+ CVPixelBufferRef source_pixel_buffer,
+ CVPixelBufferRef destination_pixel_buffer);
+ // (Any uncompressed -> NV12 or I420)
+ void TransformPixelBufferWithLibyuv(
+ CVPixelBufferRef source_pixel_buffer,
+ CVPixelBufferRef destination_pixel_buffer);
+ void TransformPixelBufferWithLibyuvFromAnyToI420(
+ CVPixelBufferRef source_pixel_buffer,
+ CVPixelBufferRef destination_pixel_buffer);
+ void TransformPixelBufferWithLibyuvFromAnyToNV12(
+ CVPixelBufferRef source_pixel_buffer,
+ CVPixelBufferRef destination_pixel_buffer);
+ // Sample buffers from the camera contain byte buffers when MJPEG is used.
+ bool TransformSampleBuffer(CMSampleBufferRef source_sample_buffer,
+ CVPixelBufferRef destination_pixel_buffer);
+ bool TransformSampleBufferFromMjpegToI420(
+ uint8_t* source_buffer_data_base_address,
+ size_t source_buffer_data_size,
+ size_t source_width,
+ size_t source_height,
+ CVPixelBufferRef destination_pixel_buffer);
+ bool TransformSampleBufferFromMjpegToNV12(
+ uint8_t* source_buffer_data_base_address,
+ size_t source_buffer_data_size,
+ size_t source_width,
+ size_t source_height,
+ CVPixelBufferRef destination_pixel_buffer);
+
+ Transformer transformer_;
+ OSType destination_pixel_format_;
+ size_t destination_width_;
+ size_t destination_height_;
+ std::unique_ptr<PixelBufferPool> destination_pixel_buffer_pool_;
+ // For kPixelBufferTransfer.
+ std::unique_ptr<PixelBufferTransferer> pixel_buffer_transferer_;
+ // For kLibyuv in cases where an intermediate buffer is needed.
+ std::vector<uint8_t> intermediate_i420_buffer_;
+ std::vector<uint8_t> intermediate_nv12_buffer_;
+};
+
+} // namespace media
+
+#endif // MEDIA_CAPTURE_VIDEO_MAC_SAMPLE_BUFFER_TRANSFORMER_MAC_H_
diff --git a/chromium/media/capture/video/mac/sample_buffer_transformer_mac_unittest.mm b/chromium/media/capture/video/mac/sample_buffer_transformer_mac_unittest.mm
new file mode 100644
index 00000000000..dde2a940139
--- /dev/null
+++ b/chromium/media/capture/video/mac/sample_buffer_transformer_mac_unittest.mm
@@ -0,0 +1,548 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/mac/sample_buffer_transformer_mac.h"
+
+#include <tuple>
+
+#include "base/logging.h"
+#include "base/test/scoped_feature_list.h"
+#include "media/capture/video/mac/test/pixel_buffer_test_utils_mac.h"
+#include "media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
+
+namespace media {
+
+namespace {
+
+// Example single colored .jpg file (created with MSPaint). It is of RGB color
+// (255, 127, 63).
+const uint8_t kExampleJpegData[] = {
+ 0xff, 0xd8, 0xff, 0xe0, 0x00, 0x10, 0x4a, 0x46, 0x49, 0x46, 0x00, 0x01,
+ 0x01, 0x01, 0x00, 0x60, 0x00, 0x60, 0x00, 0x00, 0xff, 0xdb, 0x00, 0x43,
+ 0x00, 0x02, 0x01, 0x01, 0x02, 0x01, 0x01, 0x02, 0x02, 0x02, 0x02, 0x02,
+ 0x02, 0x02, 0x02, 0x03, 0x05, 0x03, 0x03, 0x03, 0x03, 0x03, 0x06, 0x04,
+ 0x04, 0x03, 0x05, 0x07, 0x06, 0x07, 0x07, 0x07, 0x06, 0x07, 0x07, 0x08,
+ 0x09, 0x0b, 0x09, 0x08, 0x08, 0x0a, 0x08, 0x07, 0x07, 0x0a, 0x0d, 0x0a,
+ 0x0a, 0x0b, 0x0c, 0x0c, 0x0c, 0x0c, 0x07, 0x09, 0x0e, 0x0f, 0x0d, 0x0c,
+ 0x0e, 0x0b, 0x0c, 0x0c, 0x0c, 0xff, 0xdb, 0x00, 0x43, 0x01, 0x02, 0x02,
+ 0x02, 0x03, 0x03, 0x03, 0x06, 0x03, 0x03, 0x06, 0x0c, 0x08, 0x07, 0x08,
+ 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
+ 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
+ 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
+ 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c, 0x0c,
+ 0x0c, 0x0c, 0xff, 0xc0, 0x00, 0x11, 0x08, 0x00, 0x10, 0x00, 0x20, 0x03,
+ 0x01, 0x22, 0x00, 0x02, 0x11, 0x01, 0x03, 0x11, 0x01, 0xff, 0xc4, 0x00,
+ 0x1f, 0x00, 0x00, 0x01, 0x05, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05,
+ 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0xff, 0xc4, 0x00, 0xb5, 0x10, 0x00,
+ 0x02, 0x01, 0x03, 0x03, 0x02, 0x04, 0x03, 0x05, 0x05, 0x04, 0x04, 0x00,
+ 0x00, 0x01, 0x7d, 0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12, 0x21,
+ 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07, 0x22, 0x71, 0x14, 0x32, 0x81,
+ 0x91, 0xa1, 0x08, 0x23, 0x42, 0xb1, 0xc1, 0x15, 0x52, 0xd1, 0xf0, 0x24,
+ 0x33, 0x62, 0x72, 0x82, 0x09, 0x0a, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x25,
+ 0x26, 0x27, 0x28, 0x29, 0x2a, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a,
+ 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4a, 0x53, 0x54, 0x55, 0x56,
+ 0x57, 0x58, 0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x6a,
+ 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7a, 0x83, 0x84, 0x85, 0x86,
+ 0x87, 0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99,
+ 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xb2, 0xb3,
+ 0xb4, 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6,
+ 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9,
+ 0xda, 0xe1, 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea, 0xf1,
+ 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8, 0xf9, 0xfa, 0xff, 0xc4, 0x00,
+ 0x1f, 0x01, 0x00, 0x03, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
+ 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05,
+ 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0xff, 0xc4, 0x00, 0xb5, 0x11, 0x00,
+ 0x02, 0x01, 0x02, 0x04, 0x04, 0x03, 0x04, 0x07, 0x05, 0x04, 0x04, 0x00,
+ 0x01, 0x02, 0x77, 0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21, 0x31,
+ 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71, 0x13, 0x22, 0x32, 0x81, 0x08,
+ 0x14, 0x42, 0x91, 0xa1, 0xb1, 0xc1, 0x09, 0x23, 0x33, 0x52, 0xf0, 0x15,
+ 0x62, 0x72, 0xd1, 0x0a, 0x16, 0x24, 0x34, 0xe1, 0x25, 0xf1, 0x17, 0x18,
+ 0x19, 0x1a, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x35, 0x36, 0x37, 0x38, 0x39,
+ 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4a, 0x53, 0x54, 0x55,
+ 0x56, 0x57, 0x58, 0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69,
+ 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7a, 0x82, 0x83, 0x84,
+ 0x85, 0x86, 0x87, 0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97,
+ 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7, 0xa8, 0xa9, 0xaa,
+ 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4,
+ 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7,
+ 0xd8, 0xd9, 0xda, 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea,
+ 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8, 0xf9, 0xfa, 0xff, 0xda, 0x00,
+ 0x0c, 0x03, 0x01, 0x00, 0x02, 0x11, 0x03, 0x11, 0x00, 0x3f, 0x00, 0xf7,
+ 0x8a, 0x28, 0xa2, 0xbf, 0x89, 0xcf, 0xf4, 0x50, 0x28, 0xa2, 0x8a, 0x00,
+ 0xff, 0xd9};
+constexpr size_t kExampleJpegDataSize = 638;
+constexpr uint32_t kExampleJpegWidth = 32;
+constexpr uint32_t kExampleJpegHeight = 16;
+constexpr uint32_t kExampleJpegScaledDownWidth = 16;
+constexpr uint32_t kExampleJpegScaledDownHeight = 8;
+
+const uint8_t kInvalidJpegData[] = {
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00};
+constexpr size_t kInvalidJpegDataSize = 24;
+
+constexpr uint8_t kColorR = 255u;
+constexpr uint8_t kColorG = 127u;
+constexpr uint8_t kColorB = 63u;
+
+constexpr unsigned int kFullResolutionWidth = 128;
+constexpr unsigned int kFullResolutionHeight = 96;
+constexpr unsigned int kScaledDownResolutionWidth = 64;
+constexpr unsigned int kScaledDownResolutionHeight = 48;
+
+// NV12 a.k.a. 420v
+constexpr OSType kPixelFormatNv12 =
+ kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
+// UYVY a.k.a. 2vuy
+constexpr OSType kPixelFormatUyvy = kCVPixelFormatType_422YpCbCr8;
+// YUY2 a.k.a. yuvs
+constexpr OSType kPixelFormatYuy2 = kCVPixelFormatType_422YpCbCr8_yuvs;
+// I420 a.k.a. y420
+constexpr OSType kPixelFormatI420 = kCVPixelFormatType_420YpCbCr8Planar;
+
+auto SupportedCaptureFormats() {
+ return ::testing::Values(kPixelFormatNv12, kPixelFormatUyvy,
+ kPixelFormatYuy2);
+}
+
+auto SupportedOutputFormats() {
+ return ::testing::Values(kPixelFormatNv12, kPixelFormatI420);
+}
+
+// Gives parameterized tests a readable suffix.
+// E.g. ".../yuvsTo420v" instead of ".../4"
+std::string TestParametersOSTypeTupleToString(
+ testing::TestParamInfo<std::tuple<OSType, OSType>> info) {
+ OSType input_pixel_format;
+ OSType output_pixel_format;
+ std::tie(input_pixel_format, output_pixel_format) = info.param;
+ return MacFourCCToString(input_pixel_format) + std::string("To") +
+ MacFourCCToString(output_pixel_format);
+}
+std::string TestParametersOSTypeToString(testing::TestParamInfo<OSType> info) {
+ return MacFourCCToString(info.param);
+}
+
+base::ScopedCFTypeRef<CVPixelBufferRef> CreatePixelBuffer(OSType pixel_format,
+ int width,
+ int height,
+ uint8_t r,
+ uint8_t g,
+ uint8_t b) {
+ // Create a YUVS buffer in main memory.
+ std::unique_ptr<ByteArrayPixelBuffer> yuvs_buffer =
+ CreateYuvsPixelBufferFromSingleRgbColor(width, height, r, g, b);
+ // Convert and/or transfer to a pixel buffer that has an IOSurface.
+ base::ScopedCFTypeRef<CVPixelBufferRef> pixel_buffer =
+ PixelBufferPool::Create(pixel_format, width, height, 1)->CreateBuffer();
+ PixelBufferTransferer transferer;
+ bool success =
+ transferer.TransferImage(yuvs_buffer->pixel_buffer, pixel_buffer);
+ DCHECK(success);
+ return pixel_buffer;
+}
+
+base::ScopedCFTypeRef<CMSampleBufferRef> CreateSampleBuffer(
+ OSType pixel_format,
+ int width,
+ int height,
+ uint8_t r,
+ uint8_t g,
+ uint8_t b,
+ bool iosurface_backed = true) {
+ base::ScopedCFTypeRef<CVPixelBufferRef> pixel_buffer;
+ if (iosurface_backed) {
+ pixel_buffer = CreatePixelBuffer(pixel_format, width, height, r, g, b);
+ } else {
+ CVPixelBufferCreate(nullptr, width, height, pixel_format, nullptr,
+ pixel_buffer.InitializeInto());
+ }
+
+ // Wrap the pixel buffer in a sample buffer.
+ CMFormatDescriptionRef format_description;
+ OSStatus status = CMVideoFormatDescriptionCreateForImageBuffer(
+ nil, pixel_buffer, &format_description);
+ DCHECK(status == noErr);
+
+ // Dummy information to make CMSampleBufferCreateForImageBuffer() happy.
+ CMSampleTimingInfo timing_info;
+ timing_info.decodeTimeStamp = kCMTimeInvalid;
+ timing_info.presentationTimeStamp = CMTimeMake(0, CMTimeScale(NSEC_PER_SEC));
+ timing_info.duration =
+ CMTimeMake(33 * NSEC_PER_MSEC, CMTimeScale(NSEC_PER_SEC)); // 30 fps
+
+ base::ScopedCFTypeRef<CMSampleBufferRef> sample_buffer;
+ status = CMSampleBufferCreateForImageBuffer(
+ nil, pixel_buffer, YES, nil, nullptr, format_description, &timing_info,
+ sample_buffer.InitializeInto());
+ DCHECK(status == noErr);
+ return sample_buffer;
+}
+
+base::ScopedCFTypeRef<CMSampleBufferRef> CreateMjpegSampleBuffer(
+ const uint8_t* mjpeg_data,
+ size_t mjpeg_data_size,
+ size_t width,
+ size_t height) {
+ CMBlockBufferRef data_buffer;
+ OSStatus status = CMBlockBufferCreateWithMemoryBlock(
+ nil, const_cast<void*>(static_cast<const void*>(mjpeg_data)),
+ mjpeg_data_size, nil, nil, 0, mjpeg_data_size, 0, &data_buffer);
+ DCHECK(status == noErr);
+
+ CMFormatDescriptionRef format_description;
+ status =
+ CMVideoFormatDescriptionCreate(nil, kCMVideoCodecType_JPEG_OpenDML, width,
+ height, nil, &format_description);
+ DCHECK(status == noErr);
+
+ // Dummy information to make CMSampleBufferCreateReady() happy.
+ CMSampleTimingInfo timing_info;
+ timing_info.decodeTimeStamp = kCMTimeInvalid;
+ timing_info.presentationTimeStamp = CMTimeMake(0, CMTimeScale(NSEC_PER_SEC));
+ timing_info.duration =
+ CMTimeMake(33 * NSEC_PER_MSEC, CMTimeScale(NSEC_PER_SEC)); // 30 fps
+
+ base::ScopedCFTypeRef<CMSampleBufferRef> sample_buffer;
+ status = CMSampleBufferCreateReady(nil, data_buffer, format_description, 1, 1,
+ &timing_info, 1, &kExampleJpegDataSize,
+ sample_buffer.InitializeInto());
+ DCHECK(status == noErr);
+ return sample_buffer;
+}
+
+base::ScopedCFTypeRef<CMSampleBufferRef> CreateExampleMjpegSampleBuffer() {
+ // Sanity-check the example data.
+ int width;
+ int height;
+ int result =
+ libyuv::MJPGSize(kExampleJpegData, kExampleJpegDataSize, &width, &height);
+ DCHECK(result == 0);
+ DCHECK_EQ(width, static_cast<int>(kExampleJpegWidth));
+ DCHECK_EQ(height, static_cast<int>(kExampleJpegHeight));
+ return CreateMjpegSampleBuffer(kExampleJpegData, kExampleJpegDataSize,
+ kExampleJpegWidth, kExampleJpegHeight);
+}
+
+base::ScopedCFTypeRef<CMSampleBufferRef> CreateInvalidMjpegSampleBuffer() {
+ return CreateMjpegSampleBuffer(kInvalidJpegData, kInvalidJpegDataSize,
+ kExampleJpegWidth, kExampleJpegHeight);
+}
+
+} // namespace
+
+class SampleBufferTransformerPixelTransferTest
+ : public ::testing::Test,
+ public ::testing::WithParamInterface<std::tuple<OSType, OSType>> {};
+
+TEST_P(SampleBufferTransformerPixelTransferTest, CanConvertFullScale) {
+ OSType input_pixel_format;
+ OSType output_pixel_format;
+ std::tie(input_pixel_format, output_pixel_format) = GetParam();
+
+ base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
+ CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
+ kFullResolutionHeight, kColorR, kColorG, kColorB);
+ std::unique_ptr<SampleBufferTransformer> transformer =
+ SampleBufferTransformer::Create();
+ transformer->Reconfigure(
+ SampleBufferTransformer::Transformer::kPixelBufferTransfer,
+ output_pixel_format, kFullResolutionWidth, kFullResolutionHeight, 1);
+ base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
+ transformer->Transform(input_sample_buffer);
+
+ EXPECT_EQ(kFullResolutionWidth, CVPixelBufferGetWidth(output_pixel_buffer));
+ EXPECT_EQ(kFullResolutionHeight, CVPixelBufferGetHeight(output_pixel_buffer));
+ EXPECT_TRUE(
+ PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
+}
+
+TEST_P(SampleBufferTransformerPixelTransferTest, CanConvertAndScaleDown) {
+ OSType input_pixel_format;
+ OSType output_pixel_format;
+ std::tie(input_pixel_format, output_pixel_format) = GetParam();
+
+ base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
+ CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
+ kFullResolutionHeight, kColorR, kColorG, kColorB);
+ std::unique_ptr<SampleBufferTransformer> transformer =
+ SampleBufferTransformer::Create();
+ transformer->Reconfigure(
+ SampleBufferTransformer::Transformer::kPixelBufferTransfer,
+ output_pixel_format, kScaledDownResolutionWidth,
+ kScaledDownResolutionHeight, 1);
+ base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
+ transformer->Transform(input_sample_buffer);
+
+ EXPECT_EQ(kScaledDownResolutionWidth,
+ CVPixelBufferGetWidth(output_pixel_buffer));
+ EXPECT_EQ(kScaledDownResolutionHeight,
+ CVPixelBufferGetHeight(output_pixel_buffer));
+ EXPECT_TRUE(
+ PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
+}
+
+INSTANTIATE_TEST_SUITE_P(SampleBufferTransformerTest,
+ SampleBufferTransformerPixelTransferTest,
+ ::testing::Combine(SupportedCaptureFormats(),
+ SupportedOutputFormats()),
+ TestParametersOSTypeTupleToString);
+
+class SampleBufferTransformerLibyuvTest
+ : public ::testing::Test,
+ public ::testing::WithParamInterface<std::tuple<OSType, OSType>> {};
+
+TEST_P(SampleBufferTransformerLibyuvTest, CanConvertFullScale) {
+ OSType input_pixel_format;
+ OSType output_pixel_format;
+ std::tie(input_pixel_format, output_pixel_format) = GetParam();
+
+ base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
+ CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
+ kFullResolutionHeight, kColorR, kColorG, kColorB);
+ std::unique_ptr<SampleBufferTransformer> transformer =
+ SampleBufferTransformer::Create();
+ transformer->Reconfigure(SampleBufferTransformer::Transformer::kLibyuv,
+ output_pixel_format, kFullResolutionWidth,
+ kFullResolutionHeight, 1);
+ base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
+ transformer->Transform(input_sample_buffer);
+
+ EXPECT_EQ(kFullResolutionWidth, CVPixelBufferGetWidth(output_pixel_buffer));
+ EXPECT_EQ(kFullResolutionHeight, CVPixelBufferGetHeight(output_pixel_buffer));
+ EXPECT_TRUE(
+ PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
+}
+
+TEST_P(SampleBufferTransformerLibyuvTest, CanConvertAndScaleDown) {
+ OSType input_pixel_format;
+ OSType output_pixel_format;
+ std::tie(input_pixel_format, output_pixel_format) = GetParam();
+
+ base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
+ CreateSampleBuffer(input_pixel_format, kFullResolutionWidth,
+ kFullResolutionHeight, kColorR, kColorG, kColorB);
+ std::unique_ptr<SampleBufferTransformer> transformer =
+ SampleBufferTransformer::Create();
+ transformer->Reconfigure(SampleBufferTransformer::Transformer::kLibyuv,
+ output_pixel_format, kScaledDownResolutionWidth,
+ kScaledDownResolutionHeight, 1);
+ base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
+ transformer->Transform(input_sample_buffer);
+
+ EXPECT_EQ(kScaledDownResolutionWidth,
+ CVPixelBufferGetWidth(output_pixel_buffer));
+ EXPECT_EQ(kScaledDownResolutionHeight,
+ CVPixelBufferGetHeight(output_pixel_buffer));
+ EXPECT_TRUE(
+ PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
+}
+
+INSTANTIATE_TEST_SUITE_P(SampleBufferTransformerTest,
+ SampleBufferTransformerLibyuvTest,
+ ::testing::Combine(SupportedCaptureFormats(),
+ SupportedOutputFormats()),
+ TestParametersOSTypeTupleToString);
+
+class SampleBufferTransformerMjpegTest
+ : public ::testing::Test,
+ public ::testing::WithParamInterface<OSType> {};
+
+TEST_P(SampleBufferTransformerMjpegTest, CanConvertFullScale) {
+ OSType output_pixel_format = GetParam();
+
+ base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
+ CreateExampleMjpegSampleBuffer();
+ std::unique_ptr<SampleBufferTransformer> transformer =
+ SampleBufferTransformer::Create();
+ transformer->Reconfigure(SampleBufferTransformer::Transformer::kLibyuv,
+ output_pixel_format, kExampleJpegWidth,
+ kExampleJpegHeight, 1);
+ base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
+ transformer->Transform(input_sample_buffer);
+
+ EXPECT_EQ(kExampleJpegWidth, CVPixelBufferGetWidth(output_pixel_buffer));
+ EXPECT_EQ(kExampleJpegHeight, CVPixelBufferGetHeight(output_pixel_buffer));
+ EXPECT_TRUE(
+ PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
+}
+
+TEST_P(SampleBufferTransformerMjpegTest, CanConvertAndScaleDown) {
+ OSType output_pixel_format = GetParam();
+
+ base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
+ CreateExampleMjpegSampleBuffer();
+ std::unique_ptr<SampleBufferTransformer> transformer =
+ SampleBufferTransformer::Create();
+ transformer->Reconfigure(SampleBufferTransformer::Transformer::kLibyuv,
+ output_pixel_format, kExampleJpegScaledDownWidth,
+ kExampleJpegScaledDownHeight, 1);
+ base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
+ transformer->Transform(input_sample_buffer);
+
+ EXPECT_EQ(kExampleJpegScaledDownWidth,
+ CVPixelBufferGetWidth(output_pixel_buffer));
+ EXPECT_EQ(kExampleJpegScaledDownHeight,
+ CVPixelBufferGetHeight(output_pixel_buffer));
+ EXPECT_TRUE(
+ PixelBufferIsSingleColor(output_pixel_buffer, kColorR, kColorG, kColorB));
+}
+
+TEST_P(SampleBufferTransformerMjpegTest,
+ AttemptingToTransformInvalidMjpegFailsGracefully) {
+ OSType output_pixel_format = GetParam();
+
+ base::ScopedCFTypeRef<CMSampleBufferRef> input_sample_buffer =
+ CreateInvalidMjpegSampleBuffer();
+ std::unique_ptr<SampleBufferTransformer> transformer =
+ SampleBufferTransformer::Create();
+ transformer->Reconfigure(SampleBufferTransformer::Transformer::kLibyuv,
+ output_pixel_format, kExampleJpegWidth,
+ kExampleJpegHeight, 1);
+ base::ScopedCFTypeRef<CVPixelBufferRef> output_pixel_buffer =
+ transformer->Transform(input_sample_buffer);
+ EXPECT_FALSE(output_pixel_buffer);
+}
+
+INSTANTIATE_TEST_SUITE_P(SampleBufferTransformerTest,
+ SampleBufferTransformerMjpegTest,
+ SupportedOutputFormats(),
+ TestParametersOSTypeToString);
+
+TEST(SampleBufferTransformerAutoReconfigureTest,
+ AutoReconfigureIsEnabledByDefault) {
+ EXPECT_TRUE(SampleBufferTransformer::CreateIfAutoReconfigureEnabled());
+}
+
+TEST(SampleBufferTransformerAutoReconfigureTest,
+ SourceAndDestinationResolutionMatches) {
+ base::test::ScopedFeatureList scoped_feature_list;
+ scoped_feature_list.InitAndEnableFeature(kInCaptureConvertToNv12);
+ std::unique_ptr<SampleBufferTransformer> transformer =
+ SampleBufferTransformer::CreateIfAutoReconfigureEnabled();
+ ASSERT_TRUE(transformer);
+
+ base::ScopedCFTypeRef<CMSampleBufferRef> sample0 = CreateSampleBuffer(
+ kPixelFormatNv12, kFullResolutionWidth, kFullResolutionHeight, kColorR,
+ kColorG, kColorB, /*iosurface_backed=*/false);
+
+ base::ScopedCFTypeRef<CVPixelBufferRef> output_buffer =
+ transformer->AutoReconfigureAndTransform(sample0);
+
+ EXPECT_EQ(kFullResolutionWidth, transformer->destination_width());
+ EXPECT_EQ(kFullResolutionHeight, transformer->destination_height());
+ EXPECT_EQ(kFullResolutionWidth, CVPixelBufferGetWidth(output_buffer));
+ EXPECT_EQ(kFullResolutionHeight, CVPixelBufferGetHeight(output_buffer));
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_buffer));
+ // Because sample0 has no underlying IOSurface, it should not be returned from
+ // the transformer.
+ EXPECT_NE(output_buffer.get(), CMSampleBufferGetImageBuffer(sample0.get()));
+
+ base::ScopedCFTypeRef<CMSampleBufferRef> sample1 = CreateSampleBuffer(
+ kPixelFormatNv12, kScaledDownResolutionWidth, kScaledDownResolutionHeight,
+ kColorR, kColorG, kColorB);
+
+ output_buffer = transformer->AutoReconfigureAndTransform(sample1);
+
+ EXPECT_EQ(kScaledDownResolutionWidth, transformer->destination_width());
+ EXPECT_EQ(kScaledDownResolutionHeight, transformer->destination_height());
+ EXPECT_EQ(kScaledDownResolutionWidth, CVPixelBufferGetWidth(output_buffer));
+ EXPECT_EQ(kScaledDownResolutionHeight, CVPixelBufferGetHeight(output_buffer));
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_buffer));
+ // Because sample1 does have an IOSurface, it can be returned directly.
+ EXPECT_EQ(output_buffer.get(), CMSampleBufferGetImageBuffer(sample1.get()));
+}
+
+TEST(SampleBufferTransformerAutoReconfigureTest,
+ DestinationPixelFormatIsAlwaysNv12) {
+ base::test::ScopedFeatureList scoped_feature_list;
+ scoped_feature_list.InitAndEnableFeature(kInCaptureConvertToNv12);
+ std::unique_ptr<SampleBufferTransformer> transformer =
+ SampleBufferTransformer::CreateIfAutoReconfigureEnabled();
+ ASSERT_TRUE(transformer);
+
+ base::ScopedCFTypeRef<CVPixelBufferRef> output_buffer =
+ transformer->AutoReconfigureAndTransform(CreateSampleBuffer(
+ kPixelFormatNv12, kScaledDownResolutionWidth,
+ kScaledDownResolutionHeight, kColorR, kColorG, kColorB));
+ EXPECT_EQ(kPixelFormatNv12, transformer->destination_pixel_format());
+ EXPECT_EQ(kPixelFormatNv12,
+ IOSurfaceGetPixelFormat(CVPixelBufferGetIOSurface(output_buffer)));
+
+ output_buffer = transformer->AutoReconfigureAndTransform(CreateSampleBuffer(
+ kPixelFormatUyvy, kScaledDownResolutionWidth, kScaledDownResolutionHeight,
+ kColorR, kColorG, kColorB));
+ EXPECT_EQ(kPixelFormatNv12, transformer->destination_pixel_format());
+ EXPECT_EQ(kPixelFormatNv12,
+ IOSurfaceGetPixelFormat(CVPixelBufferGetIOSurface(output_buffer)));
+
+ output_buffer = transformer->AutoReconfigureAndTransform(CreateSampleBuffer(
+ kPixelFormatYuy2, kScaledDownResolutionWidth, kScaledDownResolutionHeight,
+ kColorR, kColorG, kColorB));
+ EXPECT_EQ(kPixelFormatNv12, transformer->destination_pixel_format());
+ EXPECT_EQ(kPixelFormatNv12,
+ IOSurfaceGetPixelFormat(CVPixelBufferGetIOSurface(output_buffer)));
+
+ output_buffer = transformer->AutoReconfigureAndTransform(CreateSampleBuffer(
+ kPixelFormatI420, kScaledDownResolutionWidth, kScaledDownResolutionHeight,
+ kColorR, kColorG, kColorB));
+ EXPECT_EQ(kPixelFormatNv12, transformer->destination_pixel_format());
+ EXPECT_EQ(kPixelFormatNv12,
+ IOSurfaceGetPixelFormat(CVPixelBufferGetIOSurface(output_buffer)));
+
+ output_buffer = transformer->AutoReconfigureAndTransform(
+ CreateExampleMjpegSampleBuffer());
+ EXPECT_EQ(kPixelFormatNv12, transformer->destination_pixel_format());
+ EXPECT_EQ(kPixelFormatNv12,
+ IOSurfaceGetPixelFormat(CVPixelBufferGetIOSurface(output_buffer)));
+}
+
+TEST(SampleBufferTransformerAutoReconfigureTest, UsesBestTransformerPaths) {
+ base::test::ScopedFeatureList scoped_feature_list;
+ scoped_feature_list.InitAndEnableFeature(kInCaptureConvertToNv12);
+ std::unique_ptr<SampleBufferTransformer> transformer =
+ SampleBufferTransformer::CreateIfAutoReconfigureEnabled();
+ ASSERT_TRUE(transformer);
+
+ base::ScopedCFTypeRef<CVPixelBufferRef> output_buffer =
+ transformer->AutoReconfigureAndTransform(CreateSampleBuffer(
+ kPixelFormatNv12, kScaledDownResolutionWidth,
+ kScaledDownResolutionHeight, kColorR, kColorG, kColorB));
+ EXPECT_EQ(SampleBufferTransformer::Transformer::kPixelBufferTransfer,
+ transformer->transformer());
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_buffer));
+
+ output_buffer = transformer->AutoReconfigureAndTransform(CreateSampleBuffer(
+ kPixelFormatUyvy, kScaledDownResolutionWidth, kScaledDownResolutionHeight,
+ kColorR, kColorG, kColorB));
+ EXPECT_EQ(SampleBufferTransformer::Transformer::kPixelBufferTransfer,
+ transformer->transformer());
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_buffer));
+
+ output_buffer = transformer->AutoReconfigureAndTransform(CreateSampleBuffer(
+ kPixelFormatYuy2, kScaledDownResolutionWidth, kScaledDownResolutionHeight,
+ kColorR, kColorG, kColorB));
+ EXPECT_EQ(SampleBufferTransformer::Transformer::kPixelBufferTransfer,
+ transformer->transformer());
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_buffer));
+
+ output_buffer = transformer->AutoReconfigureAndTransform(CreateSampleBuffer(
+ kPixelFormatI420, kScaledDownResolutionWidth, kScaledDownResolutionHeight,
+ kColorR, kColorG, kColorB));
+ EXPECT_EQ(SampleBufferTransformer::Transformer::kPixelBufferTransfer,
+ transformer->transformer());
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_buffer));
+
+ output_buffer = transformer->AutoReconfigureAndTransform(
+ CreateExampleMjpegSampleBuffer());
+ EXPECT_EQ(SampleBufferTransformer::Transformer::kLibyuv,
+ transformer->transformer());
+ EXPECT_TRUE(CVPixelBufferGetIOSurface(output_buffer));
+}
+
+} // namespace media
diff --git a/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.h b/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.h
index a4e431556a3..120f8c656ed 100644
--- a/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.h
+++ b/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.h
@@ -8,9 +8,11 @@
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
-#import "base/mac/scoped_nsobject.h"
+#include "base/mac/scoped_dispatch_object.h"
+#include "base/mac/scoped_nsobject.h"
#include "base/synchronization/lock.h"
#include "base/threading/thread_checker.h"
+#include "media/capture/video/mac/sample_buffer_transformer_mac.h"
#import "media/capture/video/mac/video_capture_device_avfoundation_protocol_mac.h"
#include "media/capture/video/video_capture_device.h"
#include "media/capture/video_capture_types.h"
@@ -42,6 +44,10 @@ CAPTURE_EXPORT
// The capture format that best matches the above attributes.
base::scoped_nsobject<AVCaptureDeviceFormat> _bestCaptureFormat;
+ // A serial queue to deliver frames on, ensuring frames are delivered in
+ // order.
+ base::ScopedDispatchObject<dispatch_queue_t> _sampleQueue;
+
// Protects concurrent setting and using |frameReceiver_|. Note that the
// GUARDED_BY decoration below does not have any effect.
base::Lock _lock;
@@ -56,6 +62,9 @@ CAPTURE_EXPORT
base::scoped_nsobject<AVCaptureDeviceInput> _captureDeviceInput;
base::scoped_nsobject<AVCaptureVideoDataOutput> _captureVideoDataOutput;
+ // When enabled, converts captured frames to NV12.
+ std::unique_ptr<media::SampleBufferTransformer> _sampleBufferTransformer;
+
// An AVDataOutput specialized for taking pictures out of |captureSession_|.
base::scoped_nsobject<AVCaptureStillImageOutput> _stillImageOutput;
size_t _takePhotoStartedCount;
diff --git a/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm b/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm
index 69a1a99166a..31d5516bf2e 100644
--- a/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm
+++ b/chromium/media/capture/video/mac/video_capture_device_avfoundation_mac.mm
@@ -17,6 +17,7 @@
#include "base/sequenced_task_runner.h"
#include "base/strings/string_util.h"
#include "base/strings/sys_string_conversions.h"
+#include "media/base/mac/color_space_util_mac.h"
#include "media/base/media_switches.h"
#include "media/base/timestamp_constants.h"
#include "media/base/video_types.h"
@@ -33,6 +34,12 @@ namespace {
constexpr NSString* kModelIdLogitech4KPro =
@"UVC Camera VendorID_1133 ProductID_2175";
+constexpr gfx::ColorSpace kColorSpaceRec709Apple(
+ gfx::ColorSpace::PrimaryID::BT709,
+ gfx::ColorSpace::TransferID::BT709_APPLE,
+ gfx::ColorSpace::MatrixID::SMPTE170M,
+ gfx::ColorSpace::RangeID::LIMITED);
+
constexpr int kTimeToWaitBeforeStoppingStillImageCaptureInSeconds = 60;
constexpr FourCharCode kDefaultFourCCPixelFormat =
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange; // NV12 (a.k.a. 420v)
@@ -47,25 +54,6 @@ base::TimeDelta GetCMSampleBufferTimestamp(CMSampleBufferRef sampleBuffer) {
return timestamp;
}
-std::string MacFourCCToString(OSType fourcc) {
- char arr[] = {fourcc >> 24, (fourcc >> 16) & 255, (fourcc >> 8) & 255,
- fourcc & 255, 0};
- return arr;
-}
-
-class CMSampleBufferScopedAccessPermission
- : public media::VideoCaptureDevice::Client::Buffer::ScopedAccessPermission {
- public:
- CMSampleBufferScopedAccessPermission(CMSampleBufferRef buffer)
- : buffer_(buffer, base::scoped_policy::RETAIN) {
- buffer_.reset();
- }
- ~CMSampleBufferScopedAccessPermission() override {}
-
- private:
- base::ScopedCFTypeRef<CMSampleBufferRef> buffer_;
-};
-
} // anonymous namespace
namespace media {
@@ -169,12 +157,22 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
(media::VideoCaptureDeviceAVFoundationFrameReceiver*)frameReceiver {
if ((self = [super init])) {
_mainThreadTaskRunner = base::ThreadTaskRunnerHandle::Get();
+ _sampleQueue.reset(
+ dispatch_queue_create("org.chromium.VideoCaptureDeviceAVFoundation."
+ "SampleDeliveryDispatchQueue",
+ DISPATCH_QUEUE_SERIAL),
+ base::scoped_policy::ASSUME);
DCHECK(frameReceiver);
_weakPtrFactoryForTakePhoto =
std::make_unique<base::WeakPtrFactory<VideoCaptureDeviceAVFoundation>>(
self);
[self setFrameReceiver:frameReceiver];
_captureSession.reset([[AVCaptureSession alloc] init]);
+ _sampleBufferTransformer =
+ media::SampleBufferTransformer::CreateIfAutoReconfigureEnabled();
+ if (_sampleBufferTransformer) {
+ VLOG(1) << "Capturing with SampleBufferTransformer enabled";
+ }
}
return self;
}
@@ -182,8 +180,10 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
- (void)dealloc {
[self stopStillImageOutput];
[self stopCapture];
+ _sampleBufferTransformer.reset();
_weakPtrFactoryForTakePhoto = nullptr;
_mainThreadTaskRunner = nullptr;
+ _sampleQueue.reset();
[super dealloc];
}
@@ -249,10 +249,7 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
}
[_captureVideoDataOutput setAlwaysDiscardsLateVideoFrames:true];
- [_captureVideoDataOutput
- setSampleBufferDelegate:self
- queue:dispatch_get_global_queue(
- DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)];
+ [_captureVideoDataOutput setSampleBufferDelegate:self queue:_sampleQueue];
[_captureSession addOutput:_captureVideoDataOutput];
return YES;
@@ -289,8 +286,9 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
}
}
- VLOG(2) << __func__ << ": configuring '" << MacFourCCToString(best_fourcc)
- << "' " << width << "x" << height << "@" << frameRate;
+ VLOG(2) << __func__ << ": configuring '"
+ << media::MacFourCCToString(best_fourcc) << "' " << width << "x"
+ << height << "@" << frameRate;
// The capture output has to be configured, despite Mac documentation
// detailing that setting the sessionPreset would be enough. The reason for
@@ -651,8 +649,7 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
return YES;
}
-- (BOOL)processNV12IOSurface:(IOSurfaceRef)ioSurface
- sampleBuffer:(CMSampleBufferRef)sampleBuffer
+- (void)processNV12IOSurface:(IOSurfaceRef)ioSurface
captureFormat:(const media::VideoCaptureFormat&)captureFormat
colorSpace:(const gfx::ColorSpace&)colorSpace
timestamp:(const base::TimeDelta)timestamp {
@@ -661,15 +658,24 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
gfx::GpuMemoryBufferHandle handle;
handle.id.id = -1;
handle.type = gfx::GpuMemoryBufferType::IO_SURFACE_BUFFER;
- handle.mach_port.reset(IOSurfaceCreateMachPort(ioSurface));
- if (!handle.mach_port)
- return NO;
+ handle.io_surface.reset(ioSurface, base::scoped_policy::RETAIN);
+
+ // The BT709_APPLE color space is stored as an ICC profile, which is parsed
+ // every frame in the GPU process. For this particularly common case, go back
+ // to ignoring the color profile, because doing so avoids doing an ICC profile
+ // parse.
+ // https://crbug.com/1143477 (CPU usage parsing ICC profile)
+ // https://crbug.com/959962 (ignoring color space)
+ gfx::ColorSpace overriddenColorSpace = colorSpace;
+ if (colorSpace == kColorSpaceRec709Apple) {
+ overriddenColorSpace = gfx::ColorSpace::CreateSRGB();
+ IOSurfaceSetValue(ioSurface, CFSTR("IOSurfaceColorSpace"),
+ kCGColorSpaceSRGB);
+ }
+
_lock.AssertAcquired();
_frameReceiver->ReceiveExternalGpuMemoryBufferFrame(
- std::move(handle),
- std::make_unique<CMSampleBufferScopedAccessPermission>(sampleBuffer),
- captureFormat, colorSpace, timestamp);
- return YES;
+ std::move(handle), captureFormat, overriddenColorSpace, timestamp);
}
// |captureOutput| is called by the capture device to deliver a new frame.
@@ -686,6 +692,55 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
if (!_frameReceiver)
return;
+ const base::TimeDelta timestamp = GetCMSampleBufferTimestamp(sampleBuffer);
+
+ // The SampleBufferTransformer CHECK-crashes if the sample buffer is not MJPEG
+ // and does not have a pixel buffer (https://crbug.com/1160647) so we fall
+ // back on the M87 code path if this is the case.
+ // TODO(https://crbug.com/1160315): When the SampleBufferTransformer is
+ // patched to support non-MJPEG-and-non-pixel-buffer sample buffers, remove
+ // this workaround.
+ bool sampleBufferLacksPixelBufferAndIsNotMjpeg =
+ !CMSampleBufferGetImageBuffer(sampleBuffer) &&
+ CMFormatDescriptionGetMediaSubType(CMSampleBufferGetFormatDescription(
+ sampleBuffer)) != kCMVideoCodecType_JPEG_OpenDML;
+
+ // If the SampleBufferTransformer is enabled, convert all possible capture
+ // formats to an IOSurface-backed NV12 pixel buffer.
+ // TODO(hbos): If |_sampleBufferTransformer| gets shipped 100%, delete the
+ // other code paths.
+ if (_sampleBufferTransformer && !sampleBufferLacksPixelBufferAndIsNotMjpeg) {
+ base::ScopedCFTypeRef<CVPixelBufferRef> pixelBuffer =
+ _sampleBufferTransformer->AutoReconfigureAndTransform(sampleBuffer);
+ if (!pixelBuffer) {
+ LOG(ERROR) << "Failed to transform captured frame. Dropping frame.";
+ return;
+ }
+ IOSurfaceRef ioSurface = CVPixelBufferGetIOSurface(pixelBuffer);
+ CHECK(ioSurface);
+ CHECK_EQ(CVPixelBufferGetPixelFormatType(pixelBuffer),
+ kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange); // NV12
+ const media::VideoCaptureFormat captureFormat(
+ gfx::Size(CVPixelBufferGetWidth(pixelBuffer),
+ CVPixelBufferGetHeight(pixelBuffer)),
+ _frameRate, media::PIXEL_FORMAT_NV12);
+ // When the |pixelBuffer| is the result of a conversion (not camera
+ // pass-through) then it originates from a CVPixelBufferPool and the color
+ // space is not recognized by media::GetImageBufferColorSpace(). This
+ // results in log spam and a default color space format is returned. To
+ // avoid this, we pretend the color space is kColorSpaceRec709Apple which
+ // triggers a path that avoids color space parsing inside of
+ // processNV12IOSurface.
+ // TODO(hbos): Investigate how to successfully parse and/or configure the
+ // color space correctly. The implications of this hack is not fully
+ // understood.
+ [self processNV12IOSurface:ioSurface
+ captureFormat:captureFormat
+ colorSpace:kColorSpaceRec709Apple
+ timestamp:timestamp];
+ return;
+ }
+
// We have certain format expectation for capture output:
// For MJPEG, |sampleBuffer| is expected to always be a CVBlockBuffer.
// For other formats, |sampleBuffer| may be either CVBlockBuffer or
@@ -702,18 +757,14 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
media::VideoPixelFormat videoPixelFormat = [VideoCaptureDeviceAVFoundation
FourCCToChromiumPixelFormat:sampleBufferPixelFormat];
- // TODO(julien.isorce): move GetImageBufferColorSpace(CVImageBufferRef)
- // from media::VTVideoDecodeAccelerator to media/base/mac and call it
- // here to get the color space. See https://crbug.com/959962.
- // colorSpace = media::GetImageBufferColorSpace(videoFrame);
- gfx::ColorSpace colorSpace;
const media::VideoCaptureFormat captureFormat(
gfx::Size(dimensions.width, dimensions.height), _frameRate,
videoPixelFormat);
- const base::TimeDelta timestamp = GetCMSampleBufferTimestamp(sampleBuffer);
if (CVPixelBufferRef pixelBuffer =
CMSampleBufferGetImageBuffer(sampleBuffer)) {
+ const gfx::ColorSpace colorSpace =
+ media::GetImageBufferColorSpace(pixelBuffer);
OSType pixelBufferPixelFormat =
CVPixelBufferGetPixelFormatType(pixelBuffer);
DCHECK_EQ(pixelBufferPixelFormat, sampleBufferPixelFormat);
@@ -724,13 +775,11 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
if (kEnableGpuMemoryBuffers) {
IOSurfaceRef ioSurface = CVPixelBufferGetIOSurface(pixelBuffer);
if (ioSurface && videoPixelFormat == media::PIXEL_FORMAT_NV12) {
- if ([self processNV12IOSurface:ioSurface
- sampleBuffer:sampleBuffer
- captureFormat:captureFormat
- colorSpace:colorSpace
- timestamp:timestamp]) {
- return;
- }
+ [self processNV12IOSurface:ioSurface
+ captureFormat:captureFormat
+ colorSpace:colorSpace
+ timestamp:timestamp];
+ return;
}
}
@@ -742,7 +791,11 @@ AVCaptureDeviceFormat* FindBestCaptureFormat(
return;
}
}
+
// Last preference is to read the CMSampleBuffer.
+ gfx::ColorSpace colorSpace;
+ if (@available(macOS 10.11, *))
+ colorSpace = media::GetFormatDescriptionColorSpace(formatDescription);
[self processSample:sampleBuffer
captureFormat:captureFormat
colorSpace:colorSpace
diff --git a/chromium/media/capture/video/mac/video_capture_device_avfoundation_protocol_mac.h b/chromium/media/capture/video/mac/video_capture_device_avfoundation_protocol_mac.h
index 5ae6004c08e..f121dd8fe2e 100644
--- a/chromium/media/capture/video/mac/video_capture_device_avfoundation_protocol_mac.h
+++ b/chromium/media/capture/video/mac/video_capture_device_avfoundation_protocol_mac.h
@@ -36,9 +36,6 @@ class CAPTURE_EXPORT VideoCaptureDeviceAVFoundationFrameReceiver {
// AVFoundation.
virtual void ReceiveExternalGpuMemoryBufferFrame(
gfx::GpuMemoryBufferHandle handle,
- std::unique_ptr<
- VideoCaptureDevice::Client::Buffer::ScopedAccessPermission>
- read_access_permission,
const VideoCaptureFormat& frame_format,
const gfx::ColorSpace color_space,
base::TimeDelta timestamp) = 0;
diff --git a/chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h b/chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h
index 44e247e87ec..82956f85a20 100644
--- a/chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h
+++ b/chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.h
@@ -15,6 +15,8 @@
namespace media {
+std::string CAPTURE_EXPORT MacFourCCToString(OSType fourcc);
+
// Returns a dictionary of capture devices with friendly name and unique id.
// VideoCaptureDeviceMac should call this function to fetch the list of devices
// available in the system; this method returns the list of device names that
diff --git a/chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.mm b/chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.mm
index 38a509f7ca2..164e850a526 100644
--- a/chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.mm
+++ b/chromium/media/capture/video/mac/video_capture_device_avfoundation_utils_mac.mm
@@ -163,6 +163,12 @@ base::scoped_nsobject<NSDictionary> GetDeviceNames() {
}
} // namespace
+std::string MacFourCCToString(OSType fourcc) {
+ char arr[] = {fourcc >> 24, (fourcc >> 16) & 255, (fourcc >> 8) & 255,
+ fourcc & 255, 0};
+ return arr;
+}
+
void ExtractBaseAddressAndLength(char** base_address,
size_t* length,
CMSampleBufferRef sample_buffer) {
diff --git a/chromium/media/capture/video/mac/video_capture_device_factory_mac_unittest.mm b/chromium/media/capture/video/mac/video_capture_device_factory_mac_unittest.mm
index 4b6f6acea12..8c89e4ee2a2 100644
--- a/chromium/media/capture/video/mac/video_capture_device_factory_mac_unittest.mm
+++ b/chromium/media/capture/video/mac/video_capture_device_factory_mac_unittest.mm
@@ -5,7 +5,7 @@
#include "base/bind.h"
#include "base/run_loop.h"
-#include "base/test/bind_test_util.h"
+#include "base/test/bind.h"
#include "base/test/scoped_feature_list.h"
#include "base/test/task_environment.h"
#include "media/base/media_switches.h"
diff --git a/chromium/media/capture/video/mac/video_capture_device_mac.h b/chromium/media/capture/video/mac/video_capture_device_mac.h
index 5bed94fbbff..1090327d09b 100644
--- a/chromium/media/capture/video/mac/video_capture_device_mac.h
+++ b/chromium/media/capture/video/mac/video_capture_device_mac.h
@@ -82,9 +82,6 @@ class VideoCaptureDeviceMac
base::TimeDelta timestamp) override;
void ReceiveExternalGpuMemoryBufferFrame(
gfx::GpuMemoryBufferHandle handle,
- std::unique_ptr<
- VideoCaptureDevice::Client::Buffer::ScopedAccessPermission>
- read_access_permission,
const VideoCaptureFormat& frame_format,
const gfx::ColorSpace color_space,
base::TimeDelta timestamp) override;
diff --git a/chromium/media/capture/video/mac/video_capture_device_mac.mm b/chromium/media/capture/video/mac/video_capture_device_mac.mm
index 3480ff5dfc8..3a0c2e11974 100644
--- a/chromium/media/capture/video/mac/video_capture_device_mac.mm
+++ b/chromium/media/capture/video/mac/video_capture_device_mac.mm
@@ -804,8 +804,6 @@ void VideoCaptureDeviceMac::ReceiveFrame(const uint8_t* video_frame,
void VideoCaptureDeviceMac::ReceiveExternalGpuMemoryBufferFrame(
gfx::GpuMemoryBufferHandle handle,
- std::unique_ptr<VideoCaptureDevice::Client::Buffer::ScopedAccessPermission>
- read_access_permission,
const VideoCaptureFormat& format,
const gfx::ColorSpace color_space,
base::TimeDelta timestamp) {
@@ -816,9 +814,9 @@ void VideoCaptureDeviceMac::ReceiveExternalGpuMemoryBufferFrame(
", and expected " + capture_format_.frame_size.ToString());
return;
}
- client_->OnIncomingCapturedExternalBuffer(
- std::move(handle), std::move(read_access_permission), format, color_space,
- base::TimeTicks::Now(), timestamp);
+ client_->OnIncomingCapturedExternalBuffer(std::move(handle), format,
+ color_space, base::TimeTicks::Now(),
+ timestamp);
}
void VideoCaptureDeviceMac::OnPhotoTaken(const uint8_t* image_data,
diff --git a/chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc b/chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc
index df4f7e25ad2..adac8b74a2b 100644
--- a/chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc
+++ b/chromium/media/capture/video/mock_gpu_memory_buffer_manager.cc
@@ -3,10 +3,11 @@
// found in the LICENSE file.
#include "media/capture/video/mock_gpu_memory_buffer_manager.h"
+#include "build/chromeos_buildflags.h"
#include "media/video/fake_gpu_memory_buffer.h"
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
#include "media/capture/video/chromeos/request_manager.h"
#endif
@@ -25,7 +26,7 @@ MockGpuMemoryBufferManager::CreateFakeGpuMemoryBuffer(
gfx::BufferUsage usage,
gpu::SurfaceHandle surface_handle) {
auto gmb = std::make_unique<FakeGpuMemoryBuffer>(size, format);
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
// For faking a valid JPEG blob buffer.
if (base::checked_cast<size_t>(size.width()) >= sizeof(Camera3JpegBlob)) {
Camera3JpegBlob* header = reinterpret_cast<Camera3JpegBlob*>(
diff --git a/chromium/media/capture/video/mock_video_capture_device_client.h b/chromium/media/capture/video/mock_video_capture_device_client.h
index 2e5ba1ea3d5..1adfc805885 100644
--- a/chromium/media/capture/video/mock_video_capture_device_client.h
+++ b/chromium/media/capture/video/mock_video_capture_device_client.h
@@ -35,10 +35,8 @@ class MockVideoCaptureDeviceClient : public VideoCaptureDevice::Client {
base::TimeTicks reference_time,
base::TimeDelta timestamp,
int frame_feedback_id));
- MOCK_METHOD6(OnIncomingCapturedExternalBuffer,
+ MOCK_METHOD5(OnIncomingCapturedExternalBuffer,
void(gfx::GpuMemoryBufferHandle handle,
- std::unique_ptr<Buffer::ScopedAccessPermission>
- read_access_permission,
const VideoCaptureFormat& format,
const gfx::ColorSpace& color_space,
base::TimeTicks reference_time,
diff --git a/chromium/media/capture/video/video_capture_buffer_pool.h b/chromium/media/capture/video/video_capture_buffer_pool.h
index f879d6bae1f..8b40a4dd7c4 100644
--- a/chromium/media/capture/video/video_capture_buffer_pool.h
+++ b/chromium/media/capture/video/video_capture_buffer_pool.h
@@ -88,17 +88,14 @@ class CAPTURE_EXPORT VideoCaptureBufferPool
// of ReserveForProducer().
virtual void RelinquishProducerReservation(int buffer_id) = 0;
- // Reserve a buffer id to use for an external buffer (one that isn't in this
- // pool). This is needed to ensure that ids for external buffers don't
- // conflict with ids from the pool. This call cannot fail (no allocation is
- // done). The behavior of |buffer_id_to_drop| is the same as
- // ReserveForProducer.
+ // Reserve a buffer id to use for a buffer specified by |handle| (which was
+ // allocated by some external source). This call cannot fail (no allocation is
+ // done). It may return a new id, or may reuse an existing id, if the buffer
+ // represented by |handle| is already being tracked. The behavior of
+ // |buffer_id_to_drop| is the same as ReserveForProducer.
virtual int ReserveIdForExternalBuffer(
- std::vector<int>* buffer_ids_to_drop) = 0;
-
- // Notify the pool that a buffer id is no longer in use, and can be returned
- // via ReserveIdForExternalBuffer.
- virtual void RelinquishExternalBufferReservation(int buffer_id) = 0;
+ const gfx::GpuMemoryBufferHandle& handle,
+ int* buffer_id_to_drop) = 0;
// Returns a snapshot of the current number of buffers in-use divided by the
// maximum |count_|.
diff --git a/chromium/media/capture/video/video_capture_buffer_pool_impl.cc b/chromium/media/capture/video/video_capture_buffer_pool_impl.cc
index 64180e436d1..6913bfed465 100644
--- a/chromium/media/capture/video/video_capture_buffer_pool_impl.cc
+++ b/chromium/media/capture/video/video_capture_buffer_pool_impl.cc
@@ -133,33 +133,49 @@ void VideoCaptureBufferPoolImpl::RelinquishProducerReservation(int buffer_id) {
NOTREACHED() << "Invalid buffer_id.";
return;
}
- DCHECK(tracker->held_by_producer());
- tracker->set_held_by_producer(false);
+ tracker->SetHeldByProducer(false);
}
int VideoCaptureBufferPoolImpl::ReserveIdForExternalBuffer(
- std::vector<int>* buffer_ids_to_drop) {
+ const gfx::GpuMemoryBufferHandle& handle,
+ int* buffer_id_to_drop) {
base::AutoLock lock(lock_);
- int buffer_id = next_buffer_id_++;
- external_buffers_[buffer_id] = false;
-
- for (auto it = external_buffers_.begin(); it != external_buffers_.end();) {
- if (it->second) {
- buffer_ids_to_drop->push_back(it->first);
- it = external_buffers_.erase(it);
- } else {
- ++it;
+
+ // Look for a tracker that matches this buffer and is not in use. While
+ // iterating, find the least recently used tracker.
+ *buffer_id_to_drop = kInvalidId;
+ auto lru_tracker_it = trackers_.end();
+ for (auto it = trackers_.begin(); it != trackers_.end(); ++it) {
+ VideoCaptureBufferTracker* const tracker = it->second.get();
+ if (tracker->IsHeldByProducerOrConsumer())
+ continue;
+
+ if (tracker->IsSameGpuMemoryBuffer(handle)) {
+ tracker->SetHeldByProducer(true);
+ return it->first;
+ }
+
+ if (lru_tracker_it == trackers_.end() ||
+ lru_tracker_it->second->LastCustomerUseSequenceNumber() >
+ tracker->LastCustomerUseSequenceNumber()) {
+ lru_tracker_it = it;
}
}
- return buffer_id;
-}
-void VideoCaptureBufferPoolImpl::RelinquishExternalBufferReservation(
- int buffer_id) {
- base::AutoLock lock(lock_);
- auto found = external_buffers_.find(buffer_id);
- CHECK(found != external_buffers_.end());
- found->second = true;
+ // Free the least recently used tracker, if needed.
+ if (trackers_.size() >= static_cast<size_t>(count_) &&
+ lru_tracker_it != trackers_.end()) {
+ *buffer_id_to_drop = lru_tracker_it->first;
+ trackers_.erase(lru_tracker_it);
+ }
+
+ // Create the new tracker.
+ const int new_buffer_id = next_buffer_id_++;
+ auto tracker =
+ buffer_tracker_factory_->CreateTrackerForExternalGpuMemoryBuffer(handle);
+ tracker->SetHeldByProducer(true);
+ trackers_[new_buffer_id] = std::move(tracker);
+ return new_buffer_id;
}
void VideoCaptureBufferPoolImpl::HoldForConsumers(int buffer_id,
@@ -170,11 +186,8 @@ void VideoCaptureBufferPoolImpl::HoldForConsumers(int buffer_id,
NOTREACHED() << "Invalid buffer_id.";
return;
}
- DCHECK(tracker->held_by_producer());
- DCHECK(!tracker->consumer_hold_count());
-
- tracker->set_consumer_hold_count(num_clients);
- // Note: |held_by_producer()| will stay true until
+ tracker->AddConsumerHolds(num_clients);
+ // Note: The buffer will stay held by the producer until
// RelinquishProducerReservation() (usually called by destructor of the object
// wrapping this tracker, e.g. a VideoFrame).
}
@@ -187,10 +200,7 @@ void VideoCaptureBufferPoolImpl::RelinquishConsumerHold(int buffer_id,
NOTREACHED() << "Invalid buffer_id.";
return;
}
- DCHECK_GE(tracker->consumer_hold_count(), num_clients);
-
- tracker->set_consumer_hold_count(tracker->consumer_hold_count() -
- num_clients);
+ tracker->RemoveConsumerHolds(num_clients);
}
double VideoCaptureBufferPoolImpl::GetBufferPoolUtilization() const {
@@ -198,7 +208,7 @@ double VideoCaptureBufferPoolImpl::GetBufferPoolUtilization() const {
int num_buffers_held = 0;
for (const auto& entry : trackers_) {
VideoCaptureBufferTracker* const tracker = entry.second.get();
- if (tracker->held_by_producer() || tracker->consumer_hold_count() > 0)
+ if (tracker->IsHeldByProducerOrConsumer())
++num_buffers_held;
}
return static_cast<double>(num_buffers_held) / count_;
@@ -221,10 +231,10 @@ VideoCaptureBufferPoolImpl::ReserveForProducerInternal(
auto tracker_to_drop = trackers_.end();
for (auto it = trackers_.begin(); it != trackers_.end(); ++it) {
VideoCaptureBufferTracker* const tracker = it->second.get();
- if (!tracker->consumer_hold_count() && !tracker->held_by_producer()) {
+ if (!tracker->IsHeldByProducerOrConsumer()) {
if (tracker->IsReusableForFormat(dimensions, pixel_format, strides)) {
// Reuse this buffer
- tracker->set_held_by_producer(true);
+ tracker->SetHeldByProducer(true);
tracker->set_frame_feedback_id(frame_feedback_id);
*buffer_id = it->first;
return VideoCaptureDevice::Client::ReserveResult::kSucceeded;
@@ -242,6 +252,8 @@ VideoCaptureBufferPoolImpl::ReserveForProducerInternal(
if (tracker_to_drop == trackers_.end()) {
// We're out of space, and can't find an unused tracker to reallocate.
*buffer_id = kInvalidId;
+ DLOG(ERROR) << __func__
+ << " max buffer count exceeded count_ = " << count_;
return VideoCaptureDevice::Client::ReserveResult::kMaxBufferCountExceeded;
}
*buffer_id_to_drop = tracker_to_drop->first;
@@ -259,7 +271,7 @@ VideoCaptureBufferPoolImpl::ReserveForProducerInternal(
return VideoCaptureDevice::Client::ReserveResult::kAllocationFailed;
}
- tracker->set_held_by_producer(true);
+ tracker->SetHeldByProducer(true);
tracker->set_frame_feedback_id(frame_feedback_id);
trackers_[new_buffer_id] = std::move(tracker);
diff --git a/chromium/media/capture/video/video_capture_buffer_pool_impl.h b/chromium/media/capture/video/video_capture_buffer_pool_impl.h
index e778b56cc0c..9de75bcbe8a 100644
--- a/chromium/media/capture/video/video_capture_buffer_pool_impl.h
+++ b/chromium/media/capture/video/video_capture_buffer_pool_impl.h
@@ -50,8 +50,8 @@ class CAPTURE_EXPORT VideoCaptureBufferPoolImpl
int* buffer_id,
int* buffer_id_to_drop) override;
void RelinquishProducerReservation(int buffer_id) override;
- int ReserveIdForExternalBuffer(std::vector<int>* buffer_ids_to_drop) override;
- void RelinquishExternalBufferReservation(int buffer_id) override;
+ int ReserveIdForExternalBuffer(const gfx::GpuMemoryBufferHandle& handle,
+ int* buffer_id_to_drop) override;
double GetBufferPoolUtilization() const override;
void HoldForConsumers(int buffer_id, int num_clients) override;
void RelinquishConsumerHold(int buffer_id, int num_clients) override;
@@ -87,10 +87,6 @@ class CAPTURE_EXPORT VideoCaptureBufferPoolImpl
std::map<int, std::unique_ptr<VideoCaptureBufferTracker>> trackers_
GUARDED_BY(lock_);
- // The external buffers, indexed by buffer id. The second parameter is whether
- // or not the the buffer's reservation has been relinquished.
- std::map<int, bool> external_buffers_ GUARDED_BY(lock_);
-
const std::unique_ptr<VideoCaptureBufferTrackerFactory>
buffer_tracker_factory_ GUARDED_BY(lock_);
diff --git a/chromium/media/capture/video/video_capture_buffer_tracker.cc b/chromium/media/capture/video/video_capture_buffer_tracker.cc
new file mode 100644
index 00000000000..9e68a534c65
--- /dev/null
+++ b/chromium/media/capture/video/video_capture_buffer_tracker.cc
@@ -0,0 +1,45 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/capture/video/video_capture_buffer_tracker.h"
+
+namespace media {
+
+void VideoCaptureBufferTracker::SetHeldByProducer(bool new_held_by_producer) {
+ DCHECK_NE(held_by_producer_, new_held_by_producer);
+ // The producer can't take hold while a consumer still has hold.
+ if (new_held_by_producer)
+ DCHECK_EQ(consumer_hold_count_, 0);
+ held_by_producer_ = new_held_by_producer;
+}
+
+void VideoCaptureBufferTracker::AddConsumerHolds(int count) {
+ // New consumer holds may only be made while the producer hold is still on.
+ // This is because the buffer may disappear out from under us as soon as
+ // neither producer nor consumers have a hold on it.
+ DCHECK_EQ(consumer_hold_count_, 0);
+ DCHECK(held_by_producer_);
+ consumer_hold_count_ += count;
+ OnHeldByConsumersChanged(true);
+}
+
+void VideoCaptureBufferTracker::RemoveConsumerHolds(int count) {
+ DCHECK_GE(consumer_hold_count_, count);
+ consumer_hold_count_ -= count;
+ if (consumer_hold_count_ == 0) {
+ static uint64_t sequence_number = 0;
+ last_customer_use_sequence_number_ = ++sequence_number;
+ OnHeldByConsumersChanged(false);
+ }
+}
+
+bool VideoCaptureBufferTracker::IsSameGpuMemoryBuffer(
+ const gfx::GpuMemoryBufferHandle& handle) const {
+ return false;
+}
+
+void VideoCaptureBufferTracker::OnHeldByConsumersChanged(
+ bool is_held_by_consumers) {}
+
+} // namespace media
diff --git a/chromium/media/capture/video/video_capture_buffer_tracker.h b/chromium/media/capture/video/video_capture_buffer_tracker.h
index 67576418379..abad4aa5a1d 100644
--- a/chromium/media/capture/video/video_capture_buffer_tracker.h
+++ b/chromium/media/capture/video/video_capture_buffer_tracker.h
@@ -24,25 +24,33 @@ namespace media {
// for implementations using different kinds of storage.
class CAPTURE_EXPORT VideoCaptureBufferTracker {
public:
- VideoCaptureBufferTracker()
- : held_by_producer_(false),
- consumer_hold_count_(0),
- frame_feedback_id_(0) {}
+ VideoCaptureBufferTracker() = default;
virtual bool Init(const gfx::Size& dimensions,
VideoPixelFormat format,
const mojom::PlaneStridesPtr& strides) = 0;
virtual ~VideoCaptureBufferTracker() {}
- bool held_by_producer() const { return held_by_producer_; }
- void set_held_by_producer(bool value) { held_by_producer_ = value; }
- int consumer_hold_count() const { return consumer_hold_count_; }
- void set_consumer_hold_count(int value) { consumer_hold_count_ = value; }
+ bool IsHeldByProducerOrConsumer() const {
+ return held_by_producer_ || consumer_hold_count_ > 0;
+ }
+ void SetHeldByProducer(bool value);
+ void AddConsumerHolds(int count);
+ void RemoveConsumerHolds(int count);
+
void set_frame_feedback_id(int value) { frame_feedback_id_ = value; }
int frame_feedback_id() { return frame_feedback_id_; }
+ // Returns true if |handle| refers to the same buffer as |this|. This is used
+ // to reuse buffers that were externally allocated.
+ virtual bool IsSameGpuMemoryBuffer(
+ const gfx::GpuMemoryBufferHandle& handle) const;
+
+ // Returns true if |this| matches the specified parameters. This is used to
+ // reuse buffers that were internally allocated.
virtual bool IsReusableForFormat(const gfx::Size& dimensions,
VideoPixelFormat format,
const mojom::PlaneStridesPtr& strides) = 0;
+
virtual uint32_t GetMemorySizeInBytes() = 0;
virtual std::unique_ptr<VideoCaptureBufferHandle> GetMemoryMappedAccess() = 0;
@@ -51,15 +59,28 @@ class CAPTURE_EXPORT VideoCaptureBufferTracker {
virtual mojo::ScopedSharedBufferHandle DuplicateAsMojoBuffer() = 0;
virtual gfx::GpuMemoryBufferHandle GetGpuMemoryBufferHandle() = 0;
+ // This is called when the number of consumers goes from zero to non-zero (in
+ // which case |is_held_by_consumers| is true) or from non-zero to zero (in
+ // which case |is_held_by_consumers| is false).
+ virtual void OnHeldByConsumersChanged(bool is_held_by_consumers);
+
+ // External buffers are to be freed in least-recently-used order. This
+ // function returns a number which is greater for more recently used buffers.
+ uint64_t LastCustomerUseSequenceNumber() const {
+ return last_customer_use_sequence_number_;
+ }
+
private:
// Indicates whether this VideoCaptureBufferTracker is currently referenced by
// the producer.
- bool held_by_producer_;
+ bool held_by_producer_ = false;
// Number of consumer processes which hold this VideoCaptureBufferTracker.
- int consumer_hold_count_;
+ int consumer_hold_count_ = 0;
+
+ int frame_feedback_id_ = 0;
- int frame_feedback_id_;
+ uint64_t last_customer_use_sequence_number_ = 0;
};
} // namespace media
diff --git a/chromium/media/capture/video/video_capture_buffer_tracker_factory.h b/chromium/media/capture/video/video_capture_buffer_tracker_factory.h
index 4729b4715b6..92b1674b462 100644
--- a/chromium/media/capture/video/video_capture_buffer_tracker_factory.h
+++ b/chromium/media/capture/video/video_capture_buffer_tracker_factory.h
@@ -10,6 +10,10 @@
#include "media/capture/capture_export.h"
#include "media/capture/video_capture_types.h"
+namespace gfx {
+struct GpuMemoryBufferHandle;
+} // namespace gfx
+
namespace media {
class VideoCaptureBufferTracker;
@@ -19,6 +23,9 @@ class CAPTURE_EXPORT VideoCaptureBufferTrackerFactory {
virtual ~VideoCaptureBufferTrackerFactory() {}
virtual std::unique_ptr<VideoCaptureBufferTracker> CreateTracker(
VideoCaptureBufferType buffer_type) = 0;
+ virtual std::unique_ptr<VideoCaptureBufferTracker>
+ CreateTrackerForExternalGpuMemoryBuffer(
+ const gfx::GpuMemoryBufferHandle& handle) = 0;
};
} // namespace media
diff --git a/chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.cc b/chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.cc
index 496a316f17d..92c2a96d7b1 100644
--- a/chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.cc
+++ b/chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.cc
@@ -5,13 +5,19 @@
#include "media/capture/video/video_capture_buffer_tracker_factory_impl.h"
#include <memory>
+#include "build/chromeos_buildflags.h"
+#include "build/build_config.h"
#include "media/capture/video/shared_memory_buffer_tracker.h"
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
#include "media/capture/video/chromeos/gpu_memory_buffer_tracker.h"
#endif
+#if defined(OS_MAC)
+#include "media/capture/video/mac/gpu_memory_buffer_tracker_mac.h"
+#endif
+
namespace media {
std::unique_ptr<VideoCaptureBufferTracker>
@@ -19,8 +25,10 @@ VideoCaptureBufferTrackerFactoryImpl::CreateTracker(
VideoCaptureBufferType buffer_type) {
switch (buffer_type) {
case VideoCaptureBufferType::kGpuMemoryBuffer:
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
return std::make_unique<GpuMemoryBufferTracker>();
+#elif defined(OS_MAC)
+ return std::make_unique<GpuMemoryBufferTrackerMac>();
#else
return nullptr;
#endif
@@ -29,4 +37,14 @@ VideoCaptureBufferTrackerFactoryImpl::CreateTracker(
}
}
+std::unique_ptr<VideoCaptureBufferTracker>
+VideoCaptureBufferTrackerFactoryImpl::CreateTrackerForExternalGpuMemoryBuffer(
+ const gfx::GpuMemoryBufferHandle& handle) {
+#if defined(OS_MAC)
+ return std::make_unique<GpuMemoryBufferTrackerMac>(handle.io_surface);
+#else
+ return nullptr;
+#endif
+}
+
} // namespace media
diff --git a/chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.h b/chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.h
index 4354ce4a2d8..9702074feb1 100644
--- a/chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.h
+++ b/chromium/media/capture/video/video_capture_buffer_tracker_factory_impl.h
@@ -17,6 +17,9 @@ class CAPTURE_EXPORT VideoCaptureBufferTrackerFactoryImpl
public:
std::unique_ptr<VideoCaptureBufferTracker> CreateTracker(
VideoCaptureBufferType buffer_type) override;
+ std::unique_ptr<VideoCaptureBufferTracker>
+ CreateTrackerForExternalGpuMemoryBuffer(
+ const gfx::GpuMemoryBufferHandle& handle) override;
};
} // namespace media
diff --git a/chromium/media/capture/video/video_capture_device.h b/chromium/media/capture/video/video_capture_device.h
index e8efc2d8c23..8fee0be95e5 100644
--- a/chromium/media/capture/video/video_capture_device.h
+++ b/chromium/media/capture/video/video_capture_device.h
@@ -27,6 +27,7 @@
#include "base/time/time.h"
#include "build/build_config.h"
#include "media/base/video_frame.h"
+#include "media/base/video_frame_feedback.h"
#include "media/capture/capture_export.h"
#include "media/capture/mojom/image_capture.mojom.h"
#include "media/capture/video/video_capture_buffer_handle.h"
@@ -179,11 +180,14 @@ class CAPTURE_EXPORT VideoCaptureDevice
int frame_feedback_id = 0) = 0;
// Captured a new video frame. The data for this frame is in |handle|,
- // which is owned by the platform-specific capture device, and is kept valid
- // by |read_access_permission|.
+ // which is owned by the platform-specific capture device. It is the
+ // responsibility of the implementation to prevent the buffer in |handle|
+ // from being reused by the external capturer. In practice, this is used
+ // only on macOS, where the external capturer maintains a CVPixelBufferPool,
+ // and gfx::ScopedInUseIOSurface is used to prevent reuse of buffers until
+ // all consumers have consumed them.
virtual void OnIncomingCapturedExternalBuffer(
gfx::GpuMemoryBufferHandle handle,
- std::unique_ptr<Buffer::ScopedAccessPermission> read_access_permission,
const VideoCaptureFormat& format,
const gfx::ColorSpace& color_space,
base::TimeTicks reference_time,
diff --git a/chromium/media/capture/video/video_capture_device_client.cc b/chromium/media/capture/video/video_capture_device_client.cc
index 3e43da884b5..d11eece6e42 100644
--- a/chromium/media/capture/video/video_capture_device_client.cc
+++ b/chromium/media/capture/video/video_capture_device_client.cc
@@ -14,6 +14,7 @@
#include "base/strings/stringprintf.h"
#include "base/trace_event/trace_event.h"
#include "build/build_config.h"
+#include "build/chromeos_buildflags.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/video_frame.h"
#include "media/capture/video/scoped_buffer_pool_reservation.h"
@@ -23,9 +24,9 @@
#include "media/capture/video_capture_types.h"
#include "third_party/libyuv/include/libyuv.h"
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
#include "media/capture/video/chromeos/video_capture_jpeg_decoder.h"
-#endif // defined(OS_CHROMEOS)
+#endif // BUILDFLAG(IS_ASH)
namespace {
@@ -162,7 +163,7 @@ class BufferPoolBufferHandleProvider
const int buffer_id_;
};
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
VideoCaptureDeviceClient::VideoCaptureDeviceClient(
VideoCaptureBufferType target_buffer_type,
std::unique_ptr<VideoFrameReceiver> receiver,
@@ -187,7 +188,7 @@ VideoCaptureDeviceClient::VideoCaptureDeviceClient(
receiver_(std::move(receiver)),
buffer_pool_(std::move(buffer_pool)),
last_captured_pixel_format_(PIXEL_FORMAT_UNKNOWN) {}
-#endif // defined(OS_CHROMEOS)
+#endif // BUILDFLAG(IS_ASH)
VideoCaptureDeviceClient::~VideoCaptureDeviceClient() {
for (int buffer_id : buffer_ids_known_by_receiver_)
@@ -225,7 +226,7 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
OnLog("Pixel format: " + VideoPixelFormatToString(format.pixel_format));
last_captured_pixel_format_ = format.pixel_format;
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
if (format.pixel_format == PIXEL_FORMAT_MJPEG &&
optional_jpeg_decoder_factory_callback_) {
external_jpeg_decoder_ =
@@ -233,7 +234,7 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
DCHECK(external_jpeg_decoder_);
external_jpeg_decoder_->Initialize();
}
-#endif // defined(OS_CHROMEOS)
+#endif // BUILDFLAG(IS_ASH)
}
if (!format.IsValid()) {
@@ -352,7 +353,7 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
// paddings and/or alignments, but it cannot be smaller.
DCHECK_GE(static_cast<size_t>(length), format.ImageAllocationSize());
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
if (external_jpeg_decoder_) {
const VideoCaptureJpegDecoder::STATUS status =
external_jpeg_decoder_->GetStatus();
@@ -368,7 +369,7 @@ void VideoCaptureDeviceClient::OnIncomingCapturedData(
return;
}
}
-#endif // defined(OS_CHROMEOS)
+#endif // BUILDFLAG(IS_ASH)
// libyuv::ConvertToI420 use Rec601 to convert RGB to YUV.
if (libyuv::ConvertToI420(
@@ -465,19 +466,18 @@ void VideoCaptureDeviceClient::OnIncomingCapturedGfxBuffer(
void VideoCaptureDeviceClient::OnIncomingCapturedExternalBuffer(
gfx::GpuMemoryBufferHandle handle,
- std::unique_ptr<VideoCaptureDevice::Client::Buffer::ScopedAccessPermission>
- scoped_access_permission_to_wrap,
const VideoCaptureFormat& format,
const gfx::ColorSpace& color_space,
base::TimeTicks reference_time,
base::TimeDelta timestamp) {
// Reserve an ID for this buffer that will not conflict with any of the IDs
// used by |buffer_pool_|.
- std::vector<int> buffer_ids_to_drop;
- int buffer_id = buffer_pool_->ReserveIdForExternalBuffer(&buffer_ids_to_drop);
+ int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId;
+ int buffer_id =
+ buffer_pool_->ReserveIdForExternalBuffer(handle, &buffer_id_to_drop);
// If a buffer to retire was specified, retire one.
- for (int buffer_id_to_drop : buffer_ids_to_drop) {
+ if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) {
auto entry_iter =
std::find(buffer_ids_known_by_receiver_.begin(),
buffer_ids_known_by_receiver_.end(), buffer_id_to_drop);
@@ -487,8 +487,8 @@ void VideoCaptureDeviceClient::OnIncomingCapturedExternalBuffer(
}
}
- // Register the buffer with the receiver.
- {
+ // Register the buffer with the receiver if it is new.
+ if (!base::Contains(buffer_ids_known_by_receiver_, buffer_id)) {
media::mojom::VideoBufferHandlePtr buffer_handle =
media::mojom::VideoBufferHandle::New();
buffer_handle->set_gpu_memory_buffer_handle(std::move(handle));
@@ -496,25 +496,6 @@ void VideoCaptureDeviceClient::OnIncomingCapturedExternalBuffer(
buffer_ids_known_by_receiver_.push_back(buffer_id);
}
- // Wrap |scoped_access_permission_to_wrap| in a ScopedAccessPermission that
- // will retire |buffer_id| as soon as access ends.
- std::unique_ptr<VideoCaptureDevice::Client::Buffer::ScopedAccessPermission>
- scoped_access_permission;
- {
- auto callback_lambda =
- [](int buffer_id, scoped_refptr<VideoCaptureBufferPool> buffer_pool,
- std::unique_ptr<
- VideoCaptureDevice::Client::Buffer::ScopedAccessPermission>
- scoped_access_permission) {
- buffer_pool->RelinquishExternalBufferReservation(buffer_id);
- };
- auto closure = base::BindOnce(callback_lambda, buffer_id, buffer_pool_,
- std::move(scoped_access_permission_to_wrap));
- scoped_access_permission =
- std::make_unique<ScopedAccessPermissionEndWithCallback>(
- std::move(closure));
- }
-
// Tell |receiver_| that the frame has been received.
{
mojom::VideoFrameInfoPtr info = mojom::VideoFrameInfo::New();
@@ -525,9 +506,14 @@ void VideoCaptureDeviceClient::OnIncomingCapturedExternalBuffer(
info->visible_rect = gfx::Rect(format.frame_size);
info->metadata.frame_rate = format.frame_rate;
info->metadata.reference_time = reference_time;
- receiver_->OnFrameReadyInBuffer(buffer_id, 0 /* frame_feedback_id */,
- std::move(scoped_access_permission),
- std::move(info));
+
+ buffer_pool_->HoldForConsumers(buffer_id, 1);
+ buffer_pool_->RelinquishProducerReservation(buffer_id);
+ receiver_->OnFrameReadyInBuffer(
+ buffer_id, 0 /* frame_feedback_id */,
+ std::make_unique<ScopedBufferPoolReservation<ConsumerReleaseTraits>>(
+ buffer_pool_, buffer_id),
+ std::move(info));
}
}
@@ -557,8 +543,10 @@ VideoCaptureDeviceClient::ReserveOutputBuffer(const gfx::Size& frame_size,
receiver_->OnBufferRetired(buffer_id_to_drop);
}
}
- if (reservation_result_code != ReserveResult::kSucceeded)
+ if (reservation_result_code != ReserveResult::kSucceeded) {
+ DVLOG(2) << __func__ << " reservation failed";
return reservation_result_code;
+ }
DCHECK_NE(VideoCaptureBufferPool::kInvalidId, buffer_id);
diff --git a/chromium/media/capture/video/video_capture_device_client.h b/chromium/media/capture/video/video_capture_device_client.h
index 067f3f68ad3..7e9d6eb82ff 100644
--- a/chromium/media/capture/video/video_capture_device_client.h
+++ b/chromium/media/capture/video/video_capture_device_client.h
@@ -14,6 +14,7 @@
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "base/threading/thread_collision_warner.h"
+#include "build/chromeos_buildflags.h"
#include "media/capture/capture_export.h"
#include "media/capture/mojom/video_capture_types.mojom.h"
#include "media/capture/video/video_capture_device.h"
@@ -43,7 +44,7 @@ using VideoCaptureJpegDecoderFactoryCB =
class CAPTURE_EXPORT VideoCaptureDeviceClient
: public VideoCaptureDevice::Client {
public:
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
VideoCaptureDeviceClient(
VideoCaptureBufferType target_buffer_type,
std::unique_ptr<VideoFrameReceiver> receiver,
@@ -53,7 +54,7 @@ class CAPTURE_EXPORT VideoCaptureDeviceClient
VideoCaptureDeviceClient(VideoCaptureBufferType target_buffer_type,
std::unique_ptr<VideoFrameReceiver> receiver,
scoped_refptr<VideoCaptureBufferPool> buffer_pool);
-#endif // defined(OS_CHROMEOS)
+#endif // BUILDFLAG(IS_ASH)
~VideoCaptureDeviceClient() override;
static Buffer MakeBufferStruct(
@@ -81,7 +82,6 @@ class CAPTURE_EXPORT VideoCaptureDeviceClient
int frame_feedback_id = 0) override;
void OnIncomingCapturedExternalBuffer(
gfx::GpuMemoryBufferHandle handle,
- std::unique_ptr<Buffer::ScopedAccessPermission> read_access_permission,
const VideoCaptureFormat& format,
const gfx::ColorSpace& color_space,
base::TimeTicks reference_time,
@@ -125,11 +125,11 @@ class CAPTURE_EXPORT VideoCaptureDeviceClient
const std::unique_ptr<VideoFrameReceiver> receiver_;
std::vector<int> buffer_ids_known_by_receiver_;
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
VideoCaptureJpegDecoderFactoryCB optional_jpeg_decoder_factory_callback_;
std::unique_ptr<VideoCaptureJpegDecoder> external_jpeg_decoder_;
base::OnceClosure on_started_using_gpu_cb_;
-#endif // defined(OS_CHROMEOS)
+#endif // BUILDFLAG(IS_ASH)
// The pool of shared-memory buffers used for capturing.
const scoped_refptr<VideoCaptureBufferPool> buffer_pool_;
diff --git a/chromium/media/capture/video/video_capture_device_client_unittest.cc b/chromium/media/capture/video/video_capture_device_client_unittest.cc
index 5ccd550bee7..e23ddf03e51 100644
--- a/chromium/media/capture/video/video_capture_device_client_unittest.cc
+++ b/chromium/media/capture/video/video_capture_device_client_unittest.cc
@@ -12,6 +12,7 @@
#include "base/check.h"
#include "base/macros.h"
#include "build/build_config.h"
+#include "build/chromeos_buildflags.h"
#include "media/base/limits.h"
#include "media/capture/video/mock_gpu_memory_buffer_manager.h"
#include "media/capture/video/mock_video_frame_receiver.h"
@@ -21,9 +22,9 @@
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
#include "media/capture/video/chromeos/video_capture_jpeg_decoder.h"
-#endif // defined(OS_CHROMEOS)
+#endif // BUILDFLAG(IS_ASH)
using ::testing::_;
using ::testing::AtLeast;
@@ -37,11 +38,11 @@ namespace media {
namespace {
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
std::unique_ptr<VideoCaptureJpegDecoder> ReturnNullPtrAsJpecDecoder() {
return nullptr;
}
-#endif // defined(OS_CHROMEOS)
+#endif // BUILDFLAG(IS_ASH)
} // namespace
@@ -60,7 +61,7 @@ class VideoCaptureDeviceClientTest : public ::testing::Test {
receiver_ = controller.get();
gpu_memory_buffer_manager_ =
std::make_unique<unittest_internal::MockGpuMemoryBufferManager>();
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
device_client_ = std::make_unique<VideoCaptureDeviceClient>(
VideoCaptureBufferType::kSharedMemory, std::move(controller),
buffer_pool, base::BindRepeating(&ReturnNullPtrAsJpecDecoder));
@@ -68,7 +69,7 @@ class VideoCaptureDeviceClientTest : public ::testing::Test {
device_client_ = std::make_unique<VideoCaptureDeviceClient>(
VideoCaptureBufferType::kSharedMemory, std::move(controller),
buffer_pool);
-#endif // defined(OS_CHROMEOS)
+#endif // BUILDFLAG(IS_ASH)
}
~VideoCaptureDeviceClientTest() override = default;
diff --git a/chromium/media/capture/video/video_capture_device_factory.cc b/chromium/media/capture/video/video_capture_device_factory.cc
index caa2508ca94..7bcdbb65e3f 100644
--- a/chromium/media/capture/video/video_capture_device_factory.cc
+++ b/chromium/media/capture/video/video_capture_device_factory.cc
@@ -8,6 +8,7 @@
#include "base/command_line.h"
#include "build/build_config.h"
+#include "build/chromeos_buildflags.h"
#include "media/base/media_switches.h"
#include "media/capture/video/fake_video_capture_device_factory.h"
#include "media/capture/video/file_video_capture_device_factory.h"
@@ -20,10 +21,10 @@ VideoCaptureDeviceFactory::VideoCaptureDeviceFactory() {
VideoCaptureDeviceFactory::~VideoCaptureDeviceFactory() = default;
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
bool VideoCaptureDeviceFactory::IsSupportedCameraAppDeviceBridge() {
return false;
}
-#endif // defined(OS_CHROMEOS)
+#endif // BUILDFLAG(IS_ASH)
} // namespace media
diff --git a/chromium/media/capture/video/video_capture_device_factory.h b/chromium/media/capture/video/video_capture_device_factory.h
index f988bd937e6..8b653ef4750 100644
--- a/chromium/media/capture/video/video_capture_device_factory.h
+++ b/chromium/media/capture/video/video_capture_device_factory.h
@@ -8,6 +8,7 @@
#include "base/macros.h"
#include "base/single_thread_task_runner.h"
#include "base/threading/thread_checker.h"
+#include "build/chromeos_buildflags.h"
#include "gpu/command_buffer/client/gpu_memory_buffer_manager.h"
#include "media/capture/video/video_capture_device.h"
#include "media/capture/video/video_capture_device_info.h"
@@ -43,9 +44,9 @@ class CAPTURE_EXPORT VideoCaptureDeviceFactory {
std::vector<VideoCaptureDeviceInfo> devices_info)>;
virtual void GetDevicesInfo(GetDevicesInfoCallback callback) = 0;
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
virtual bool IsSupportedCameraAppDeviceBridge();
-#endif // defined(OS_CHROMEOS)
+#endif // BUILDFLAG(IS_ASH)
protected:
base::ThreadChecker thread_checker_;
diff --git a/chromium/media/capture/video/video_capture_device_unittest.cc b/chromium/media/capture/video/video_capture_device_unittest.cc
index a878a5530cc..563f687e126 100644
--- a/chromium/media/capture/video/video_capture_device_unittest.cc
+++ b/chromium/media/capture/video/video_capture_device_unittest.cc
@@ -11,17 +11,18 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/memory/ref_counted.h"
#include "base/run_loop.h"
#include "base/single_thread_task_runner.h"
-#include "base/test/bind_test_util.h"
+#include "base/test/bind.h"
#include "base/test/gmock_callback_support.h"
#include "base/test/task_environment.h"
#include "base/test/test_timeouts.h"
#include "base/threading/thread.h"
#include "base/threading/thread_task_runner_handle.h"
#include "build/build_config.h"
+#include "build/chromeos_buildflags.h"
#include "media/base/bind_to_current_loop.h"
#include "media/capture/video/create_video_capture_device_factory.h"
#include "media/capture/video/mock_video_capture_device_client.h"
@@ -48,14 +49,14 @@
#include "media/capture/video/android/video_capture_device_factory_android.h"
#endif
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
#include "chromeos/dbus/power/power_manager_client.h"
#include "media/capture/video/chromeos/camera_buffer_factory.h"
#include "media/capture/video/chromeos/camera_hal_dispatcher_impl.h"
#include "media/capture/video/chromeos/public/cros_features.h"
#include "media/capture/video/chromeos/video_capture_device_chromeos_halv3.h"
#include "media/capture/video/chromeos/video_capture_device_factory_chromeos.h"
-#include "media/gpu/test/local_gpu_memory_buffer_manager.h"
+#include "media/gpu/test/local_gpu_memory_buffer_manager.h" // nogncheck
#include "mojo/public/cpp/bindings/pending_receiver.h"
#endif
@@ -93,7 +94,7 @@
#define MAYBE_UsingRealWebcam_CaptureWithSize UsingRealWebcam_CaptureWithSize
#define MAYBE_UsingRealWebcam_CheckPhotoCallbackRelease \
UsingRealWebcam_CheckPhotoCallbackRelease
-#elif defined(OS_CHROMEOS)
+#elif BUILDFLAG(IS_ASH)
#define MAYBE_UsingRealWebcam_AllocateBadSize \
DISABLED_UsingRealWebcam_AllocateBadSize
#define MAYBE_UsingRealWebcam_CaptureMjpeg UsingRealWebcam_CaptureMjpeg
@@ -269,7 +270,7 @@ class VideoCaptureDeviceTest
main_thread_task_runner_(base::ThreadTaskRunnerHandle::Get()),
video_capture_client_(CreateDeviceClient()),
image_capture_client_(new MockImageCaptureClient()) {
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
local_gpu_memory_buffer_manager_ =
std::make_unique<LocalGpuMemoryBufferManager>();
VideoCaptureDeviceFactoryChromeOS::SetGpuBufferManager(
@@ -288,7 +289,7 @@ class VideoCaptureDeviceTest
}
void SetUp() override {
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
chromeos::PowerManagerClient::InitializeFake();
#endif
#if defined(OS_ANDROID)
@@ -303,7 +304,7 @@ class VideoCaptureDeviceTest
}
void TearDown() override {
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
chromeos::PowerManagerClient::Shutdown();
#endif
}
@@ -458,7 +459,7 @@ class VideoCaptureDeviceTest
std::unique_ptr<MockVideoCaptureDeviceClient> video_capture_client_;
const scoped_refptr<MockImageCaptureClient> image_capture_client_;
VideoCaptureFormat last_format_;
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
std::unique_ptr<LocalGpuMemoryBufferManager> local_gpu_memory_buffer_manager_;
#endif
std::unique_ptr<VideoCaptureDeviceFactory> video_capture_device_factory_;
@@ -466,7 +467,7 @@ class VideoCaptureDeviceTest
// Causes a flaky crash on Chrome OS. https://crbug.com/1069608
// Cause hangs on Windows Debug. http://crbug.com/417824
-#if defined(OS_CHROMEOS) || (defined(OS_WIN) && !defined(NDEBUG))
+#if BUILDFLAG(IS_ASH) || (defined(OS_WIN) && !defined(NDEBUG))
#define MAYBE_OpenInvalidDevice DISABLED_OpenInvalidDevice
#else
#define MAYBE_OpenInvalidDevice OpenInvalidDevice
@@ -653,7 +654,7 @@ WRAPPED_TEST_P(VideoCaptureDeviceTest, MAYBE_UsingRealWebcam_CaptureMjpeg) {
base::Unretained(this)));
}
void VideoCaptureDeviceTest::RunCaptureMjpegTestCase() {
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
if (media::ShouldUseCrosCameraService()) {
VLOG(1)
<< "Skipped on Chrome OS device where HAL v3 camera service is used";
@@ -694,7 +695,7 @@ void VideoCaptureDeviceTest::RunCaptureMjpegTestCase() {
}
// Flaky on ChromeOS. See https://crbug.com/1096082
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
#define MAYBE_NoCameraSupportsPixelFormatMax \
DISABLED_NoCameraSupportsPixelFormatMax
#else
diff --git a/chromium/media/capture/video/video_capture_system_impl.cc b/chromium/media/capture/video/video_capture_system_impl.cc
index 836692ada24..6f13af9ff0e 100644
--- a/chromium/media/capture/video/video_capture_system_impl.cc
+++ b/chromium/media/capture/video/video_capture_system_impl.cc
@@ -7,7 +7,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "build/build_config.h"
#include "media/base/bind_to_current_loop.h"
@@ -46,9 +46,10 @@ void ConsolidateCaptureFormats(media::VideoCaptureFormats* formats) {
return;
// Mark all formats as I420, since this is what the renderer side will get
// anyhow: the actual pixel format is decided at the device level.
- // Don't do this for Y16 format as it is handled separatelly.
+ // Don't do this for the Y16 or NV12 formats as they are handled separately.
for (auto& format : *formats) {
- if (format.pixel_format != media::PIXEL_FORMAT_Y16)
+ if (format.pixel_format != media::PIXEL_FORMAT_Y16 &&
+ format.pixel_format != media::PIXEL_FORMAT_NV12)
format.pixel_format = media::PIXEL_FORMAT_I420;
}
std::sort(formats->begin(), formats->end(), IsCaptureFormatSmaller);
diff --git a/chromium/media/capture/video/win/capability_list_win.cc b/chromium/media/capture/video/win/capability_list_win.cc
index 1e4a3425309..4ea9c690490 100644
--- a/chromium/media/capture/video/win/capability_list_win.cc
+++ b/chromium/media/capture/video/win/capability_list_win.cc
@@ -19,11 +19,12 @@ namespace {
bool CompareCapability(const VideoCaptureFormat& requested,
const VideoCaptureFormat& lhs,
const VideoCaptureFormat& rhs) {
- // When 16-bit format is requested and available, avoid other formats.
+ // When 16-bit format or NV12 is requested and available, avoid other formats.
// If both lhs and rhs are 16-bit, we still need to compare them based on
// height, width and frame rate.
const bool use_requested =
- (requested.pixel_format == media::PIXEL_FORMAT_Y16);
+ (requested.pixel_format == media::PIXEL_FORMAT_Y16) ||
+ (requested.pixel_format == media::PIXEL_FORMAT_NV12);
if (use_requested && lhs.pixel_format != rhs.pixel_format) {
if (lhs.pixel_format == requested.pixel_format)
return true;
diff --git a/chromium/media/capture/video/win/video_capture_device_factory_win.cc b/chromium/media/capture/video/win/video_capture_device_factory_win.cc
index 5f6c82c413f..fe412a57c28 100644
--- a/chromium/media/capture/video/win/video_capture_device_factory_win.cc
+++ b/chromium/media/capture/video/win/video_capture_device_factory_win.cc
@@ -896,9 +896,23 @@ VideoCaptureFormats
VideoCaptureDeviceFactoryWin::GetSupportedFormatsMediaFoundation(
ComPtr<IMFMediaSource> source,
const std::string& display_name) {
+ ComPtr<IMFAttributes> source_reader_attributes;
+ if (dxgi_device_manager_) {
+ dxgi_device_manager_->RegisterWithMediaSource(source);
+
+ HRESULT hr = MFCreateAttributes(&source_reader_attributes, 1);
+ if (SUCCEEDED(hr)) {
+ dxgi_device_manager_->RegisterInSourceReaderAttributes(
+ source_reader_attributes.Get());
+ } else {
+ DLOG(ERROR) << "MFCreateAttributes failed: "
+ << logging::SystemErrorCodeToString(hr);
+ }
+ }
+
ComPtr<IMFSourceReader> reader;
- HRESULT hr =
- MFCreateSourceReaderFromMediaSource(source.Get(), nullptr, &reader);
+ HRESULT hr = MFCreateSourceReaderFromMediaSource(
+ source.Get(), source_reader_attributes.Get(), &reader);
if (FAILED(hr)) {
DLOG(ERROR) << "MFCreateSourceReaderFromMediaSource failed: "
<< logging::SystemErrorCodeToString(hr);
@@ -909,6 +923,7 @@ VideoCaptureDeviceFactoryWin::GetSupportedFormatsMediaFoundation(
DWORD stream_index = 0;
ComPtr<IMFMediaType> type;
+ const bool dxgi_device_manager_available = dxgi_device_manager_ != nullptr;
while (SUCCEEDED(hr = reader->GetNativeMediaType(
static_cast<DWORD>(MF_SOURCE_READER_FIRST_VIDEO_STREAM),
stream_index, &type))) {
@@ -940,11 +955,17 @@ VideoCaptureDeviceFactoryWin::GetSupportedFormatsMediaFoundation(
return {};
}
VideoCaptureDeviceMFWin::GetPixelFormatFromMFSourceMediaSubtype(
- type_guid, &capture_format.pixel_format);
+ type_guid, /*use_hardware_format=*/dxgi_device_manager_available,
+ &capture_format.pixel_format);
type.Reset();
++stream_index;
if (capture_format.pixel_format == PIXEL_FORMAT_UNKNOWN)
continue;
+ // If we're using the hardware capture path, ignore non-NV12 pixel formats
+ // to prevent copies
+ if (dxgi_device_manager_available &&
+ capture_format.pixel_format != PIXEL_FORMAT_NV12)
+ continue;
formats.push_back(capture_format);
DVLOG(1) << display_name << " "
diff --git a/chromium/media/capture/video/win/video_capture_device_factory_win_unittest.cc b/chromium/media/capture/video/win/video_capture_device_factory_win_unittest.cc
index a7937470157..7cd97c083b3 100644
--- a/chromium/media/capture/video/win/video_capture_device_factory_win_unittest.cc
+++ b/chromium/media/capture/video/win/video_capture_device_factory_win_unittest.cc
@@ -17,7 +17,7 @@
#include "base/memory/scoped_refptr.h"
#include "base/run_loop.h"
#include "base/strings/sys_string_conversions.h"
-#include "base/test/bind_test_util.h"
+#include "base/test/bind.h"
#include "base/test/task_environment.h"
#include "base/win/windows_version.h"
#include "media/capture/video/win/video_capture_device_factory_win.h"
@@ -352,10 +352,20 @@ class StubMFActivate final : public StubInterface<IMFActivate> {
const bool kscategory_sensor_camera_;
};
-// Stub IMFMediaSource with IAMCameraControl and IAMVideoProcAmp interfaces for
-// all devices except from Device 0.
-class StubMFMediaSource final : public StubDeviceInterface<IMFMediaSource> {
+// Stub IMFMediaSourceEx with IAMCameraControl and IAMVideoProcAmp interfaces
+// for all devices except from Device 0.
+class StubMFMediaSource final : public StubDeviceInterface<IMFMediaSourceEx> {
public:
+ StubMFMediaSource(std::string device_id,
+ std::vector<VideoPixelFormat> native_formats)
+ : StubDeviceInterface(device_id),
+ native_formats_(std::move(native_formats)) {
+ // If no native formats were specified, default to I420
+ if (native_formats_.size() == 0) {
+ native_formats_.push_back(PIXEL_FORMAT_I420);
+ }
+ }
+
using StubDeviceInterface::StubDeviceInterface;
// IUnknown
IFACEMETHODIMP QueryInterface(REFIID riid, void** object) override {
@@ -369,45 +379,102 @@ class StubMFMediaSource final : public StubDeviceInterface<IMFMediaSource> {
return S_OK;
}
}
+ if (riid == __uuidof(IMFMediaSource)) {
+ *object = AddReference(static_cast<IMFMediaSource*>(this));
+ return S_OK;
+ }
return StubDeviceInterface::QueryInterface(riid, object);
}
// IMFMediaEventGenerator
IFACEMETHODIMP BeginGetEvent(IMFAsyncCallback* callback,
IUnknown* state) override {
- return E_NOTIMPL;
+ return S_OK;
}
IFACEMETHODIMP EndGetEvent(IMFAsyncResult* result,
IMFMediaEvent** event) override {
- return E_NOTIMPL;
+ return S_OK;
}
IFACEMETHODIMP GetEvent(DWORD flags, IMFMediaEvent** event) override {
- return E_NOTIMPL;
+ return S_OK;
}
IFACEMETHODIMP QueueEvent(MediaEventType met,
REFGUID extended_type,
HRESULT status,
const PROPVARIANT* value) override {
- return E_NOTIMPL;
+ return S_OK;
}
// IMFMediaSource
IFACEMETHODIMP CreatePresentationDescriptor(
IMFPresentationDescriptor** presentation_descriptor) override {
- return E_NOTIMPL;
+ HRESULT hr = S_OK;
+ std::vector<Microsoft::WRL::ComPtr<IMFMediaType>> media_types;
+ std::vector<IMFMediaType*> media_type_list;
+ for (const VideoPixelFormat& pixel_format : native_formats_) {
+ Microsoft::WRL::ComPtr<IMFMediaType> media_type;
+ hr = MFCreateMediaType(&media_type);
+ if (FAILED(hr)) {
+ return hr;
+ }
+ hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
+ if (FAILED(hr)) {
+ return hr;
+ }
+ GUID subType = GUID_NULL;
+ switch (pixel_format) {
+ case PIXEL_FORMAT_I420:
+ subType = MFVideoFormat_I420;
+ break;
+ case PIXEL_FORMAT_NV12:
+ subType = MFVideoFormat_NV12;
+ break;
+ default:
+ break;
+ }
+ hr = media_type->SetGUID(MF_MT_SUBTYPE, subType);
+ if (FAILED(hr)) {
+ return hr;
+ }
+ media_types.push_back(media_type);
+ media_type_list.push_back(media_type.Get());
+ }
+ if (media_type_list.empty()) {
+ ADD_FAILURE() << "media_type_list empty";
+ return MF_E_UNEXPECTED;
+ }
+ Microsoft::WRL::ComPtr<IMFStreamDescriptor> stream_descriptor;
+ hr = MFCreateStreamDescriptor(0, media_type_list.size(),
+ &media_type_list[0], &stream_descriptor);
+ if (FAILED(hr)) {
+ return hr;
+ }
+ IMFStreamDescriptor* stream_descriptors = stream_descriptor.Get();
+ return MFCreatePresentationDescriptor(1, &stream_descriptors,
+ presentation_descriptor);
}
IFACEMETHODIMP GetCharacteristics(DWORD* characteristics) override {
- return E_NOTIMPL;
+ return S_OK;
}
- IFACEMETHODIMP Pause() override { return E_NOTIMPL; }
- IFACEMETHODIMP Shutdown() override { return E_NOTIMPL; }
+ IFACEMETHODIMP Pause() override { return S_OK; }
+ IFACEMETHODIMP Shutdown() override { return S_OK; }
IFACEMETHODIMP Start(IMFPresentationDescriptor* presentation_descriptor,
const GUID* time_format,
const PROPVARIANT* start_position) override {
+ return S_OK;
+ }
+ IFACEMETHODIMP Stop() override { return S_OK; }
+ // IMFMediaSourceEx
+ IFACEMETHODIMP GetSourceAttributes(IMFAttributes** attributes) {
return E_NOTIMPL;
}
- IFACEMETHODIMP Stop() override { return E_NOTIMPL; }
+ IFACEMETHODIMP GetStreamAttributes(DWORD stream_id,
+ IMFAttributes** attributes) {
+ return E_NOTIMPL;
+ }
+ IFACEMETHODIMP SetD3DManager(IUnknown* manager) { return S_OK; }
private:
~StubMFMediaSource() override = default;
+ std::vector<VideoPixelFormat> native_formats_;
};
// Stub ICameraControl with pan, tilt and zoom range for all devices except
@@ -1105,6 +1172,18 @@ class StubEnumMoniker : public StubInterface<IEnumMoniker> {
};
class FakeVideoCaptureDeviceFactoryWin : public VideoCaptureDeviceFactoryWin {
+ public:
+ void set_disable_get_supported_formats_mf_mocking(
+ bool disable_get_supported_formats_mf_mocking) {
+ disable_get_supported_formats_mf_mocking_ =
+ disable_get_supported_formats_mf_mocking;
+ }
+
+ void AddNativeFormatForMfDevice(std::wstring device_id,
+ VideoPixelFormat format) {
+ device_source_native_formats_[device_id].push_back(format);
+ }
+
protected:
bool CreateDeviceEnumMonikerDirectShow(IEnumMoniker** enum_moniker) override {
*enum_moniker = AddReference(new StubEnumMoniker(
@@ -1143,8 +1222,9 @@ class FakeVideoCaptureDeviceFactoryWin : public VideoCaptureDeviceFactoryWin {
has_dxgi_device_manager) {
return false;
}
- *source =
- AddReference(new StubMFMediaSource(base::SysWideToUTF8(symbolic_link)));
+ *source = AddReference(
+ new StubMFMediaSource(base::SysWideToUTF8(symbolic_link),
+ device_source_native_formats_[symbolic_link]));
return true;
}
bool EnumerateDeviceSourcesMediaFoundation(
@@ -1198,6 +1278,10 @@ class FakeVideoCaptureDeviceFactoryWin : public VideoCaptureDeviceFactoryWin {
VideoCaptureFormats GetSupportedFormatsMediaFoundation(
Microsoft::WRL::ComPtr<IMFMediaSource> source,
const std::string& display_name) override {
+ if (disable_get_supported_formats_mf_mocking_) {
+ return VideoCaptureDeviceFactoryWin::GetSupportedFormatsMediaFoundation(
+ source, display_name);
+ }
VideoCaptureFormats supported_formats;
if (display_name == base::SysWideToUTF8(kMFDeviceName6)) {
VideoCaptureFormat arbitrary_format;
@@ -1205,6 +1289,10 @@ class FakeVideoCaptureDeviceFactoryWin : public VideoCaptureDeviceFactoryWin {
}
return supported_formats;
}
+
+ bool disable_get_supported_formats_mf_mocking_ = false;
+ std::map<std::wstring, std::vector<VideoPixelFormat>>
+ device_source_native_formats_;
};
} // namespace
@@ -1352,6 +1440,61 @@ TEST_P(VideoCaptureDeviceFactoryMFWinTest, GetDevicesInfo) {
EXPECT_TRUE(it->descriptor.control_support().zoom);
}
+TEST_P(VideoCaptureDeviceFactoryMFWinTest,
+ DeviceSupportedFormatNV12Passthrough) {
+ if (ShouldSkipMFTest())
+ return;
+
+ if (ShouldSkipD3D11Test())
+ return;
+
+ // Test whether the VideoCaptureDeviceFactory passes through NV12 as the
+ // output pixel format when D3D11 support is enabled
+
+ const bool use_d3d11 = GetParam();
+ factory_.set_use_d3d11_with_media_foundation_for_testing(use_d3d11);
+ factory_.set_disable_get_supported_formats_mf_mocking(true);
+
+ // Specify native NV12 format for first device and I420 for others
+ factory_.AddNativeFormatForMfDevice(kMFDeviceId0, PIXEL_FORMAT_NV12);
+ factory_.AddNativeFormatForMfDevice(kMFDeviceId1, PIXEL_FORMAT_I420);
+
+ const VideoPixelFormat expected_pixel_format_for_nv12 =
+ use_d3d11 ? PIXEL_FORMAT_NV12 : PIXEL_FORMAT_I420;
+
+ std::vector<VideoCaptureDeviceInfo> devices_info;
+ base::RunLoop run_loop;
+ factory_.GetDevicesInfo(base::BindLambdaForTesting(
+ [&devices_info, &run_loop](std::vector<VideoCaptureDeviceInfo> result) {
+ devices_info = std::move(result);
+ run_loop.Quit();
+ }));
+ run_loop.Run();
+
+ // Verify that the pixel formats advertised in supported_formats for each
+ // device match the expected format (NV12 when D3D11 support is enabled and
+ // the native source type for the device is NV12 or I420 in all other cases)
+
+ iterator it = FindDeviceInRange(devices_info.begin(), devices_info.end(),
+ base::SysWideToUTF8(kMFDeviceId0));
+ ASSERT_NE(it, devices_info.end());
+ EXPECT_EQ(it->descriptor.display_name(), base::SysWideToUTF8(kMFDeviceName0));
+ for (size_t i = 0; i < it->supported_formats.size(); i++) {
+ SCOPED_TRACE(i);
+ EXPECT_EQ(it->supported_formats[i].pixel_format,
+ expected_pixel_format_for_nv12);
+ }
+
+ it = FindDeviceInRange(devices_info.begin(), devices_info.end(),
+ base::SysWideToUTF8(kMFDeviceId1));
+ ASSERT_NE(it, devices_info.end());
+ EXPECT_EQ(it->descriptor.display_name(), base::SysWideToUTF8(kMFDeviceName1));
+ for (size_t i = 0; i < it->supported_formats.size(); i++) {
+ SCOPED_TRACE(i);
+ EXPECT_EQ(it->supported_formats[i].pixel_format, PIXEL_FORMAT_I420);
+ }
+}
+
INSTANTIATE_TEST_SUITE_P(VideoCaptureDeviceFactoryMFWinTests,
VideoCaptureDeviceFactoryMFWinTest,
testing::Bool());
diff --git a/chromium/media/capture/video/win/video_capture_device_mf_win.cc b/chromium/media/capture/video/win/video_capture_device_mf_win.cc
index 12f13215190..18c9acd16ee 100644
--- a/chromium/media/capture/video/win/video_capture_device_mf_win.cc
+++ b/chromium/media/capture/video/win/video_capture_device_mf_win.cc
@@ -227,6 +227,7 @@ bool GetFrameRateFromMediaType(IMFMediaType* type, float* frame_rate) {
bool GetFormatFromSourceMediaType(IMFMediaType* source_media_type,
bool photo,
+ bool use_hardware_format,
VideoCaptureFormat* format) {
GUID major_type_guid;
if (FAILED(source_media_type->GetGUID(MF_MT_MAJOR_TYPE, &major_type_guid)) ||
@@ -240,7 +241,7 @@ bool GetFormatFromSourceMediaType(IMFMediaType* source_media_type,
if (FAILED(source_media_type->GetGUID(MF_MT_SUBTYPE, &sub_type_guid)) ||
!GetFrameSizeFromMediaType(source_media_type, &format->frame_size) ||
!VideoCaptureDeviceMFWin::GetPixelFormatFromMFSourceMediaSubtype(
- sub_type_guid, &format->pixel_format)) {
+ sub_type_guid, use_hardware_format, &format->pixel_format)) {
return false;
}
@@ -269,7 +270,15 @@ struct MediaFormatConfiguration {
bool GetMediaFormatConfigurationFromMFSourceMediaSubtype(
const GUID& mf_source_media_subtype,
+ bool use_hardware_format,
MediaFormatConfiguration* media_format_configuration) {
+ // Special case handling of the NV12 format when using hardware capture
+ // to ensure that captured buffers are passed through without copies
+ if (use_hardware_format && mf_source_media_subtype == MFVideoFormat_NV12) {
+ *media_format_configuration = {MFVideoFormat_NV12, MFVideoFormat_NV12,
+ PIXEL_FORMAT_NV12};
+ return true;
+ }
static const MediaFormatConfiguration kMediaFormatConfigurationMap[] = {
// IMFCaptureEngine inevitably performs the video frame decoding itself.
// This means that the sink must always be set to an uncompressed video
@@ -313,6 +322,7 @@ bool GetMediaFormatConfigurationFromMFSourceMediaSubtype(
// sink and source are the same and means that there should be no transcoding
// done by IMFCaptureEngine.
HRESULT GetMFSinkMediaSubtype(IMFMediaType* source_media_type,
+ bool use_hardware_format,
GUID* mf_sink_media_subtype,
bool* passthrough) {
GUID source_subtype;
@@ -321,7 +331,7 @@ HRESULT GetMFSinkMediaSubtype(IMFMediaType* source_media_type,
return hr;
MediaFormatConfiguration media_format_configuration;
if (!GetMediaFormatConfigurationFromMFSourceMediaSubtype(
- source_subtype, &media_format_configuration))
+ source_subtype, use_hardware_format, &media_format_configuration))
return E_FAIL;
*mf_sink_media_subtype = media_format_configuration.mf_sink_media_subtype;
*passthrough =
@@ -338,8 +348,8 @@ HRESULT ConvertToPhotoSinkMediaType(IMFMediaType* source_media_type,
bool passthrough = false;
GUID mf_sink_media_subtype;
- hr = GetMFSinkMediaSubtype(source_media_type, &mf_sink_media_subtype,
- &passthrough);
+ hr = GetMFSinkMediaSubtype(source_media_type, /*use_hardware_format=*/false,
+ &mf_sink_media_subtype, &passthrough);
if (FAILED(hr))
return hr;
@@ -352,6 +362,7 @@ HRESULT ConvertToPhotoSinkMediaType(IMFMediaType* source_media_type,
}
HRESULT ConvertToVideoSinkMediaType(IMFMediaType* source_media_type,
+ bool use_hardware_format,
IMFMediaType* sink_media_type) {
HRESULT hr = sink_media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
if (FAILED(hr))
@@ -359,8 +370,8 @@ HRESULT ConvertToVideoSinkMediaType(IMFMediaType* source_media_type,
bool passthrough = false;
GUID mf_sink_media_subtype;
- hr = GetMFSinkMediaSubtype(source_media_type, &mf_sink_media_subtype,
- &passthrough);
+ hr = GetMFSinkMediaSubtype(source_media_type, use_hardware_format,
+ &mf_sink_media_subtype, &passthrough);
if (FAILED(hr))
return hr;
@@ -559,10 +570,12 @@ class MFVideoCallback final
// static
bool VideoCaptureDeviceMFWin::GetPixelFormatFromMFSourceMediaSubtype(
const GUID& mf_source_media_subtype,
+ bool use_hardware_format,
VideoPixelFormat* pixel_format) {
MediaFormatConfiguration media_format_configuration;
if (!GetMediaFormatConfigurationFromMFSourceMediaSubtype(
- mf_source_media_subtype, &media_format_configuration))
+ mf_source_media_subtype, use_hardware_format,
+ &media_format_configuration))
return false;
*pixel_format = media_format_configuration.pixel_format;
@@ -702,7 +715,11 @@ HRESULT VideoCaptureDeviceMFWin::FillCapabilities(
while (SUCCEEDED(hr = GetAvailableDeviceMediaType(
source, stream_index, media_type_index, &type))) {
VideoCaptureFormat format;
- if (GetFormatFromSourceMediaType(type.Get(), photo, &format))
+ if (GetFormatFromSourceMediaType(
+ type.Get(), photo,
+ /*use_hardware_format=*/!photo &&
+ static_cast<bool>(dxgi_device_manager_),
+ &format))
capabilities->emplace_back(media_type_index, format, stream_index);
type.Reset();
++media_type_index;
@@ -929,8 +946,10 @@ void VideoCaptureDeviceMFWin::AllocateAndStart(
return;
}
- hr = ConvertToVideoSinkMediaType(source_video_media_type.Get(),
- sink_video_media_type.Get());
+ hr = ConvertToVideoSinkMediaType(
+ source_video_media_type.Get(),
+ /*use_hardware_format=*/static_cast<bool>(dxgi_device_manager_),
+ sink_video_media_type.Get());
if (FAILED(hr)) {
OnError(
VideoCaptureError::kWinMediaFoundationConvertToVideoSinkMediaTypeFailed,
@@ -1039,7 +1058,8 @@ void VideoCaptureDeviceMFWin::TakePhoto(TakePhotoCallback callback) {
}
VideoCaptureFormat format;
- hr = GetFormatFromSourceMediaType(sink_media_type.Get(), true, &format)
+ hr = GetFormatFromSourceMediaType(sink_media_type.Get(), true,
+ /*use_hardware_format=*/false, &format)
? S_OK
: E_FAIL;
if (FAILED(hr)) {
@@ -1362,12 +1382,17 @@ void VideoCaptureDeviceMFWin::OnIncomingCapturedData(
client_->OnStarted();
}
+ // We always calculate camera rotation for the first frame. We also cache
+ // the latest value to use when AutoRotation is turned off.
+ if (!camera_rotation_.has_value() || IsAutoRotationEnabled())
+ camera_rotation_ = GetCameraRotation(facing_mode_);
+
// TODO(julien.isorce): retrieve the color space information using Media
// Foundation api, MFGetAttributeSize/MF_MT_VIDEO_PRIMARIES,in order to
// build a gfx::ColorSpace. See http://crbug.com/959988.
client_->OnIncomingCapturedData(
data, length, selected_video_capability_->supported_format,
- gfx::ColorSpace(), GetCameraRotation(facing_mode_), false /* flip_y */,
+ gfx::ColorSpace(), camera_rotation_.value(), false /* flip_y */,
reference_time, timestamp);
}
diff --git a/chromium/media/capture/video/win/video_capture_device_mf_win.h b/chromium/media/capture/video/win/video_capture_device_mf_win.h
index 609220b3dad..116b6d964b2 100644
--- a/chromium/media/capture/video/win/video_capture_device_mf_win.h
+++ b/chromium/media/capture/video/win/video_capture_device_mf_win.h
@@ -20,6 +20,7 @@
#include "base/callback_forward.h"
#include "base/macros.h"
+#include "base/optional.h"
#include "base/sequence_checker.h"
#include "media/capture/capture_export.h"
#include "media/capture/video/video_capture_device.h"
@@ -40,6 +41,7 @@ class MFVideoCallback;
class CAPTURE_EXPORT VideoCaptureDeviceMFWin : public VideoCaptureDevice {
public:
static bool GetPixelFormatFromMFSourceMediaSubtype(const GUID& guid,
+ bool use_hardware_format,
VideoPixelFormat* format);
static VideoCaptureControlSupport GetControlSupport(
Microsoft::WRL::ComPtr<IMFMediaSource> source);
@@ -103,6 +105,8 @@ class CAPTURE_EXPORT VideoCaptureDeviceMFWin : public VideoCaptureDevice {
dxgi_device_manager_ = std::move(dxgi_device_manager);
}
+ base::Optional<int> camera_rotation() const { return camera_rotation_; }
+
private:
HRESULT ExecuteHresultCallbackWithRetries(
base::RepeatingCallback<HRESULT()> callback,
@@ -159,6 +163,7 @@ class CAPTURE_EXPORT VideoCaptureDeviceMFWin : public VideoCaptureDevice {
base::WaitableEvent capture_initialize_;
base::WaitableEvent capture_error_;
scoped_refptr<VideoCaptureDXGIDeviceManager> dxgi_device_manager_;
+ base::Optional<int> camera_rotation_;
SEQUENCE_CHECKER(sequence_checker_);
diff --git a/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc b/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc
index a360201a3a8..d38980b0924 100644
--- a/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc
+++ b/chromium/media/capture/video/win/video_capture_device_mf_win_unittest.cc
@@ -11,9 +11,10 @@
#include <cmath>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/test/task_environment.h"
#include "base/win/windows_version.h"
+#include "media/base/win/mf_helpers.h"
#include "media/capture/video/win/sink_filter_win.h"
#include "media/capture/video/win/video_capture_device_factory_win.h"
#include "media/capture/video/win/video_capture_device_mf_win.h"
@@ -44,6 +45,8 @@ constexpr long kVideoProcAmpMinBase = -50;
constexpr long kVideoProcAmpMaxBase = 50;
constexpr long kVideoProcAmpStep = 1;
+constexpr uint32_t kMFSampleBufferLength = 1;
+
class MockClient : public VideoCaptureDevice::Client {
public:
void OnIncomingCapturedData(const uint8_t* data,
@@ -65,7 +68,6 @@ class MockClient : public VideoCaptureDevice::Client {
void OnIncomingCapturedExternalBuffer(
gfx::GpuMemoryBufferHandle handle,
- std::unique_ptr<Buffer::ScopedAccessPermission> read_access_permission,
const VideoCaptureFormat& format,
const gfx::ColorSpace& color_space,
base::TimeTicks reference_time,
@@ -268,7 +270,7 @@ class MockAMVideoProcAmp final : public MockInterface<IAMVideoProcAmp> {
~MockAMVideoProcAmp() override = default;
};
-class MockMFMediaSource : public MockInterface<IMFMediaSource> {
+class MockMFMediaSource : public MockInterface<IMFMediaSourceEx> {
public:
// IUnknown
IFACEMETHODIMP QueryInterface(REFIID riid, void** object) override {
@@ -280,6 +282,10 @@ class MockMFMediaSource : public MockInterface<IMFMediaSource> {
*object = AddReference(new MockAMVideoProcAmp);
return S_OK;
}
+ if (riid == __uuidof(IMFMediaSource)) {
+ *object = AddReference(static_cast<IMFMediaSource*>(this));
+ return S_OK;
+ }
return MockInterface::QueryInterface(riid, object);
}
// IMFMediaEventGenerator
@@ -316,6 +322,15 @@ class MockMFMediaSource : public MockInterface<IMFMediaSource> {
IFACEMETHODIMP Stop(void) override { return E_NOTIMPL; }
IFACEMETHODIMP Pause(void) override { return E_NOTIMPL; }
IFACEMETHODIMP Shutdown(void) override { return E_NOTIMPL; }
+ // IMFMediaSourceEx
+ IFACEMETHODIMP GetSourceAttributes(IMFAttributes** attributes) {
+ return E_NOTIMPL;
+ }
+ IFACEMETHODIMP GetStreamAttributes(DWORD stream_id,
+ IMFAttributes** attributes) {
+ return E_NOTIMPL;
+ }
+ IFACEMETHODIMP SetD3DManager(IUnknown* manager) { return S_OK; }
private:
~MockMFMediaSource() override = default;
@@ -1229,6 +1244,24 @@ TEST_F(VideoCaptureDeviceMFWinTest, StartPreviewOnAllocateAndStart) {
device_->StopAndDeAllocate();
}
+// Expects device's |camera_rotation_| to be populated after first OnSample().
+TEST_F(VideoCaptureDeviceMFWinTest, PopulateCameraRotationOnSample) {
+ if (ShouldSkipTest())
+ return;
+
+ PrepareMFDeviceWithOneVideoStream(MFVideoFormat_MJPG);
+
+ EXPECT_CALL(*(engine_.Get()), OnStartPreview());
+ EXPECT_CALL(*client_, OnStarted());
+
+ device_->AllocateAndStart(VideoCaptureParams(), std::move(client_));
+ // Create a valid IMFSample to use with the callback.
+ Microsoft::WRL::ComPtr<IMFSample> test_sample =
+ CreateEmptySampleWithBuffer(kMFSampleBufferLength, 0);
+ capture_preview_sink_->sample_callback->OnSample(test_sample.Get());
+ EXPECT_TRUE(device_->camera_rotation().has_value());
+}
+
// Expects OnError() to be called on an errored IMFMediaEvent
TEST_F(VideoCaptureDeviceMFWinTest, CallClientOnErrorMediaEvent) {
if (ShouldSkipTest())
@@ -1712,4 +1745,43 @@ TEST_F(VideoCaptureDeviceMFWinTestWithDXGI, SimpleInit) {
// All required logic for this test is in SetUp().
}
+TEST_F(VideoCaptureDeviceMFWinTestWithDXGI, EnsureNV12SinkSubtype) {
+ if (ShouldSkipTest())
+ return;
+
+ // Ensures that the stream which is added to the preview sink has a media type
+ // with a subtype of NV12
+ const GUID expected_subtype = MFVideoFormat_NV12;
+ PrepareMFDeviceWithOneVideoStream(expected_subtype);
+
+ EXPECT_CALL(*(engine_.Get()), OnStartPreview());
+ EXPECT_CALL(*client_, OnStarted());
+
+ EXPECT_CALL(*(capture_source_.get()), DoSetCurrentDeviceMediaType(0, _))
+ .WillOnce(Invoke(
+ [expected_subtype](DWORD stream_index, IMFMediaType* media_type) {
+ GUID source_video_media_subtype;
+ media_type->GetGUID(MF_MT_SUBTYPE, &source_video_media_subtype);
+ EXPECT_EQ(source_video_media_subtype, expected_subtype);
+ return S_OK;
+ }));
+
+ EXPECT_CALL(*(capture_preview_sink_.get()), DoAddStream(0, _, _, _))
+ .WillOnce(Invoke([expected_subtype](DWORD stream_index,
+ IMFMediaType* media_type,
+ IMFAttributes* attributes,
+ DWORD* sink_stream_index) {
+ GUID sink_video_media_subtype;
+ media_type->GetGUID(MF_MT_SUBTYPE, &sink_video_media_subtype);
+ EXPECT_EQ(sink_video_media_subtype, expected_subtype);
+ return S_OK;
+ }));
+
+ VideoCaptureFormat format(gfx::Size(640, 480), 30, media::PIXEL_FORMAT_NV12);
+ VideoCaptureParams video_capture_params;
+ video_capture_params.requested_format = format;
+ device_->AllocateAndStart(video_capture_params, std::move(client_));
+ capture_preview_sink_->sample_callback->OnSample(nullptr);
+}
+
} // namespace media
diff --git a/chromium/media/capture/video/win/video_capture_device_utils_win.cc b/chromium/media/capture/video/win/video_capture_device_utils_win.cc
index 0db1bd8d5e3..ee58fbc383e 100644
--- a/chromium/media/capture/video/win/video_capture_device_utils_win.cc
+++ b/chromium/media/capture/video/win/video_capture_device_utils_win.cc
@@ -82,10 +82,6 @@ double PlatformExposureTimeToCaptureStep(long log_step,
int GetCameraRotation(VideoFacingMode facing) {
int rotation = 0;
- if (!IsAutoRotationEnabled()) {
- return rotation;
- }
-
// Before Win10, we can't distinguish if the selected camera is an internal or
// external one. So we assume it's internal and do the frame rotation if the
// auto rotation is enabled to cover most user cases.
diff --git a/chromium/media/capture/video/win/video_capture_device_win.cc b/chromium/media/capture/video/win/video_capture_device_win.cc
index 53f44af0164..5f6d211a2f4 100644
--- a/chromium/media/capture/video/win/video_capture_device_win.cc
+++ b/chromium/media/capture/video/win/video_capture_device_win.cc
@@ -857,6 +857,11 @@ void VideoCaptureDeviceWin::FrameReceived(const uint8_t* buffer,
if (timestamp == kNoTimestamp)
timestamp = base::TimeTicks::Now() - first_ref_time_;
+ // We always calculate camera rotation for the first frame. We also cache the
+ // latest value to use when AutoRotation is turned off.
+ if (!camera_rotation_.has_value() || IsAutoRotationEnabled())
+ camera_rotation_ = GetCameraRotation(device_descriptor_.facing);
+
// TODO(julien.isorce): retrieve the color space information using the
// DirectShow api, AM_MEDIA_TYPE::VIDEOINFOHEADER2::dwControlFlags. If
// AMCONTROL_COLORINFO_PRESENT, then reinterpret dwControlFlags as a
@@ -864,8 +869,8 @@ void VideoCaptureDeviceWin::FrameReceived(const uint8_t* buffer,
// DXVA_VideoTransferMatrix, DXVA_VideoTransferFunction and
// DXVA_NominalRangeto build a gfx::ColorSpace. See http://crbug.com/959992.
client_->OnIncomingCapturedData(buffer, length, format, gfx::ColorSpace(),
- GetCameraRotation(device_descriptor_.facing),
- flip_y, base::TimeTicks::Now(), timestamp);
+ camera_rotation_.value(), flip_y,
+ base::TimeTicks::Now(), timestamp);
while (!take_photo_callbacks_.empty()) {
TakePhotoCallback cb = std::move(take_photo_callbacks_.front());
diff --git a/chromium/media/capture/video/win/video_capture_device_win.h b/chromium/media/capture/video/win/video_capture_device_win.h
index 903c35d31aa..00775a5f60e 100644
--- a/chromium/media/capture/video/win/video_capture_device_win.h
+++ b/chromium/media/capture/video/win/video_capture_device_win.h
@@ -20,6 +20,7 @@
#include "base/containers/queue.h"
#include "base/macros.h"
+#include "base/optional.h"
#include "base/threading/thread_checker.h"
#include "media/capture/video/video_capture_device.h"
#include "media/capture/video/win/capability_list_win.h"
@@ -155,6 +156,8 @@ class VideoCaptureDeviceWin : public VideoCaptureDevice,
bool enable_get_photo_state_;
+ base::Optional<int> camera_rotation_;
+
DISALLOW_IMPLICIT_CONSTRUCTORS(VideoCaptureDeviceWin);
};
diff --git a/chromium/media/capture/video/win/video_capture_dxgi_device_manager.cc b/chromium/media/capture/video/win/video_capture_dxgi_device_manager.cc
index 0dcce42f275..94650aabece 100644
--- a/chromium/media/capture/video/win/video_capture_dxgi_device_manager.cc
+++ b/chromium/media/capture/video/win/video_capture_dxgi_device_manager.cc
@@ -6,6 +6,7 @@
#include <d3d11.h>
#include <mfcaptureengine.h>
+#include <mfreadwrite.h>
#include "base/logging.h"
using Microsoft::WRL::ComPtr;
@@ -71,6 +72,13 @@ void VideoCaptureDXGIDeviceManager::RegisterInCaptureEngineAttributes(
DCHECK(SUCCEEDED(result));
}
+void VideoCaptureDXGIDeviceManager::RegisterInSourceReaderAttributes(
+ IMFAttributes* attributes) {
+ HRESULT result = attributes->SetUnknown(MF_SOURCE_READER_D3D_MANAGER,
+ mf_dxgi_device_manager_.Get());
+ DCHECK(SUCCEEDED(result));
+}
+
void VideoCaptureDXGIDeviceManager::RegisterWithMediaSource(
ComPtr<IMFMediaSource> media_source) {
ComPtr<IMFMediaSourceEx> source_ext;
diff --git a/chromium/media/capture/video/win/video_capture_dxgi_device_manager.h b/chromium/media/capture/video/win/video_capture_dxgi_device_manager.h
index d52c55c2d18..d4c1bde2d44 100644
--- a/chromium/media/capture/video/win/video_capture_dxgi_device_manager.h
+++ b/chromium/media/capture/video/win/video_capture_dxgi_device_manager.h
@@ -27,6 +27,9 @@ class CAPTURE_EXPORT VideoCaptureDXGIDeviceManager
// Registers this manager in capture engine attributes.
void RegisterInCaptureEngineAttributes(IMFAttributes* attributes);
+ // Registers this manager in source reader attributes.
+ void RegisterInSourceReaderAttributes(IMFAttributes* attributes);
+
// Registers this manager with a media source
void RegisterWithMediaSource(
Microsoft::WRL::ComPtr<IMFMediaSource> media_source);
diff --git a/chromium/media/capture/video_capture_types.h b/chromium/media/capture/video_capture_types.h
index 375b6d05911..23e1826aa12 100644
--- a/chromium/media/capture/video_capture_types.h
+++ b/chromium/media/capture/video_capture_types.h
@@ -189,7 +189,8 @@ enum class VideoCaptureError {
kFuchsiaSysmemInvalidBufferSize = 120,
kFuchsiaUnsupportedPixelFormat = 121,
kFuchsiaFailedToMapSysmemBuffer = 122,
- kMaxValue = 122
+ kCrosHalV3DeviceContextDuplicatedClient = 123,
+ kMaxValue = 123
};
// WARNING: Do not change the values assigned to the entries. They are used for
diff --git a/chromium/media/capture/video_capturer_source.cc b/chromium/media/capture/video_capturer_source.cc
index 8adf4b1dd7c..143506b8f8a 100644
--- a/chromium/media/capture/video_capturer_source.cc
+++ b/chromium/media/capture/video_capturer_source.cc
@@ -3,6 +3,7 @@
// found in the LICENSE file.
#include "media/capture/video_capturer_source.h"
+#include "base/callback_helpers.h"
namespace media {
@@ -13,4 +14,8 @@ namespace media {
// to generate symbols across linking units.
VideoCapturerSource::~VideoCapturerSource() = default;
+media::VideoCaptureFeedbackCB VideoCapturerSource::GetFeedbackCallback() const {
+ return base::DoNothing();
+}
+
} // namespace media
diff --git a/chromium/media/capture/video_capturer_source.h b/chromium/media/capture/video_capturer_source.h
index 21f872f7471..c48032be48e 100644
--- a/chromium/media/capture/video_capturer_source.h
+++ b/chromium/media/capture/video_capturer_source.h
@@ -11,6 +11,7 @@
#include "base/callback.h"
#include "base/memory/ref_counted.h"
#include "base/time/time.h"
+#include "media/base/video_frame_feedback.h"
#include "media/capture/capture_export.h"
#include "media/capture/video_capture_types.h"
@@ -69,6 +70,10 @@ class CAPTURE_EXPORT VideoCapturerSource {
const VideoCaptureDeliverFrameCB& new_frame_callback,
const RunningCallback& running_callback) = 0;
+ // Returns a callback for providing the feedback from the consumer.
+ // The callback can be called on any thread.
+ virtual media::VideoCaptureFeedbackCB GetFeedbackCallback() const;
+
// Asks source to send a refresh frame. In cases where source does not provide
// a continuous rate of new frames (e.g. canvas capture, screen capture where
// the screen's content has not changed in a while), consumers may request a
diff --git a/chromium/media/cast/BUILD.gn b/chromium/media/cast/BUILD.gn
index 2ebffd0815d..e8c3619f86b 100644
--- a/chromium/media/cast/BUILD.gn
+++ b/chromium/media/cast/BUILD.gn
@@ -3,6 +3,7 @@
# found in the LICENSE file.
import("//build/config/android/config.gni")
+import("//build/config/chromeos/ui_mode.gni")
import("//build/config/features.gni")
import("//build/config/ui.gni")
import("//testing/test.gni")
@@ -363,7 +364,7 @@ test("cast_unittests") {
}
}
-if (is_win || is_mac || (is_linux && !is_chromeos)) {
+if (is_win || is_mac || is_linux || is_lacros) {
# This is a target for the collection of cast development tools. They are
# not built/linked into the Chromium browser.
group("testing_tools") {
@@ -495,13 +496,13 @@ if (is_win || is_mac || (is_linux && !is_chromeos)) {
"//net",
]
}
-} else { # !(is_win || is_mac || (is_linux && !is_chromeos))
+} else { # !(is_win || is_mac || is_linux || is_lacros)
# The testing tools are only built for the desktop platforms.
group("testing_tools") {
}
}
-if (is_linux && !is_chromeos) {
+if (is_linux || is_lacros) {
test("tap_proxy") {
sources = [ "test/utility/tap_proxy.cc" ]
diff --git a/chromium/media/cast/DIR_METADATA b/chromium/media/cast/DIR_METADATA
new file mode 100644
index 00000000000..9e255559a4f
--- /dev/null
+++ b/chromium/media/cast/DIR_METADATA
@@ -0,0 +1,11 @@
+# Metadata information for this directory.
+#
+# For more information on DIR_METADATA files, see:
+# https://source.chromium.org/chromium/infra/infra/+/master:go/src/infra/tools/dirmd/README.md
+#
+# For the schema of this file, see Metadata message:
+# https://source.chromium.org/chromium/infra/infra/+/master:go/src/infra/tools/dirmd/proto/dir_metadata.proto
+
+monorail {
+ component: "Internals>Cast>Streaming"
+} \ No newline at end of file
diff --git a/chromium/media/cast/OWNERS b/chromium/media/cast/OWNERS
index c0d2923cc1b..90320c9bd1b 100644
--- a/chromium/media/cast/OWNERS
+++ b/chromium/media/cast/OWNERS
@@ -1,4 +1,2 @@
miu@chromium.org
mfoltz@chromium.org
-
-# COMPONENT: Internals>Cast>Streaming
diff --git a/chromium/media/cast/cast_sender_impl.cc b/chromium/media/cast/cast_sender_impl.cc
index bf75331db7e..5882ea199c7 100644
--- a/chromium/media/cast/cast_sender_impl.cc
+++ b/chromium/media/cast/cast_sender_impl.cc
@@ -8,6 +8,7 @@
#include "base/bind.h"
#include "base/callback.h"
+#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/macros.h"
#include "media/base/video_frame.h"
@@ -18,7 +19,7 @@ namespace cast {
// The LocalVideoFrameInput class posts all incoming video frames to the main
// cast thread for processing.
-class LocalVideoFrameInput : public VideoFrameInput {
+class LocalVideoFrameInput final : public VideoFrameInput {
public:
LocalVideoFrameInput(scoped_refptr<CastEnvironment> cast_environment,
base::WeakPtr<VideoSender> video_sender)
@@ -62,7 +63,7 @@ class LocalVideoFrameInput : public VideoFrameInput {
// The LocalAudioFrameInput class posts all incoming audio frames to the main
// cast thread for processing. Therefore frames can be inserted from any thread.
-class LocalAudioFrameInput : public AudioFrameInput {
+class LocalAudioFrameInput final : public AudioFrameInput {
public:
LocalAudioFrameInput(scoped_refptr<CastEnvironment> cast_environment,
base::WeakPtr<AudioSender> audio_sender)
@@ -131,13 +132,15 @@ void CastSenderImpl::InitializeVideo(
VLOG(1) << "CastSenderImpl@" << this << "::InitializeVideo()";
+ // No feedback callback, since it's ignored for CastSender.
video_sender_ = std::make_unique<VideoSender>(
cast_environment_, video_config,
base::BindRepeating(&CastSenderImpl::OnVideoStatusChange,
weak_factory_.GetWeakPtr(), status_change_cb),
create_vea_cb, create_video_encode_mem_cb, transport_sender_,
base::BindRepeating(&CastSenderImpl::SetTargetPlayoutDelay,
- weak_factory_.GetWeakPtr()));
+ weak_factory_.GetWeakPtr()),
+ media::VideoCaptureFeedbackCB());
if (audio_sender_) {
DCHECK(audio_sender_->GetTargetPlayoutDelay() ==
video_sender_->GetTargetPlayoutDelay());
diff --git a/chromium/media/cast/cast_sender_impl.h b/chromium/media/cast/cast_sender_impl.h
index 0d36d971bb7..cb8cbe98fa2 100644
--- a/chromium/media/cast/cast_sender_impl.h
+++ b/chromium/media/cast/cast_sender_impl.h
@@ -21,7 +21,7 @@ class VideoSender;
// This class combines all required sending objects such as the audio and video
// senders, pacer, packet receiver and frame input.
-class CastSenderImpl : public CastSender {
+class CastSenderImpl final : public CastSender {
public:
CastSenderImpl(scoped_refptr<CastEnvironment> cast_environment,
CastTransport* const transport_sender);
diff --git a/chromium/media/cast/logging/encoding_event_subscriber.h b/chromium/media/cast/logging/encoding_event_subscriber.h
index 8e6f81b61d6..289db10844f 100644
--- a/chromium/media/cast/logging/encoding_event_subscriber.h
+++ b/chromium/media/cast/logging/encoding_event_subscriber.h
@@ -45,7 +45,7 @@ using PacketEventList =
// to a storage vector. This helps keep the size of the map small and
// lookup times fast. The storage itself is a circular buffer that will
// overwrite old entries once it has reached the size configured by user.
-class EncodingEventSubscriber : public RawEventSubscriber {
+class EncodingEventSubscriber final : public RawEventSubscriber {
public:
// |event_media_type|: The subscriber will only process events that
// corresponds to this type.
diff --git a/chromium/media/cast/logging/log_event_dispatcher.cc b/chromium/media/cast/logging/log_event_dispatcher.cc
index 37f5a7507d6..554685ebaf3 100644
--- a/chromium/media/cast/logging/log_event_dispatcher.cc
+++ b/chromium/media/cast/logging/log_event_dispatcher.cc
@@ -8,7 +8,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/location.h"
#include "base/synchronization/waitable_event.h"
#include "media/cast/cast_environment.h"
diff --git a/chromium/media/cast/logging/receiver_time_offset_estimator_impl.h b/chromium/media/cast/logging/receiver_time_offset_estimator_impl.h
index bb1dd49a972..9625ae266b1 100644
--- a/chromium/media/cast/logging/receiver_time_offset_estimator_impl.h
+++ b/chromium/media/cast/logging/receiver_time_offset_estimator_impl.h
@@ -36,7 +36,8 @@ const size_t kClockDriftSpeed = 500;
// There is a causal relationship between these events in that these events
// must happen in order. This class obtains the lower and upper bounds for
// the offset by taking the difference of timestamps.
-class ReceiverTimeOffsetEstimatorImpl : public ReceiverTimeOffsetEstimator {
+class ReceiverTimeOffsetEstimatorImpl final
+ : public ReceiverTimeOffsetEstimator {
public:
ReceiverTimeOffsetEstimatorImpl();
diff --git a/chromium/media/cast/logging/simple_event_subscriber.h b/chromium/media/cast/logging/simple_event_subscriber.h
index d594cc28fbb..252efde7882 100644
--- a/chromium/media/cast/logging/simple_event_subscriber.h
+++ b/chromium/media/cast/logging/simple_event_subscriber.h
@@ -19,7 +19,7 @@ namespace cast {
// in std::vector's.
// The user of this class can call the GetXXXEventsAndReset functions to get
// list of events that have acccumulated since last inovcation.
-class SimpleEventSubscriber : public RawEventSubscriber {
+class SimpleEventSubscriber final : public RawEventSubscriber {
public:
SimpleEventSubscriber();
diff --git a/chromium/media/cast/logging/stats_event_subscriber.h b/chromium/media/cast/logging/stats_event_subscriber.h
index 7b9e505c6c6..8c88c920f84 100644
--- a/chromium/media/cast/logging/stats_event_subscriber.h
+++ b/chromium/media/cast/logging/stats_event_subscriber.h
@@ -30,7 +30,7 @@ class StatsEventSubscriberTest;
// A RawEventSubscriber implementation that subscribes to events,
// and aggregates them into stats.
-class StatsEventSubscriber : public RawEventSubscriber {
+class StatsEventSubscriber final : public RawEventSubscriber {
public:
StatsEventSubscriber(EventMediaType event_media_type,
const base::TickClock* clock,
diff --git a/chromium/media/cast/net/cast_transport_impl.cc b/chromium/media/cast/net/cast_transport_impl.cc
index 4013d7a7fd7..f6735eb4a79 100644
--- a/chromium/media/cast/net/cast_transport_impl.cc
+++ b/chromium/media/cast/net/cast_transport_impl.cc
@@ -10,7 +10,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/single_thread_task_runner.h"
#include "build/build_config.h"
#include "media/cast/net/cast_transport_defines.h"
@@ -271,8 +271,9 @@ void CastTransportImpl::ResendPackets(
}
PacketReceiverCallback CastTransportImpl::PacketReceiverForTesting() {
- return base::Bind(base::IgnoreResult(&CastTransportImpl::OnReceivedPacket),
- weak_factory_.GetWeakPtr());
+ return base::BindRepeating(
+ base::IgnoreResult(&CastTransportImpl::OnReceivedPacket),
+ weak_factory_.GetWeakPtr());
}
void CastTransportImpl::SendRawEvents() {
diff --git a/chromium/media/cast/net/cast_transport_impl_unittest.cc b/chromium/media/cast/net/cast_transport_impl_unittest.cc
index 137378e5e52..06c7e3eb6d2 100644
--- a/chromium/media/cast/net/cast_transport_impl_unittest.cc
+++ b/chromium/media/cast/net/cast_transport_impl_unittest.cc
@@ -11,7 +11,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/macros.h"
#include "base/memory/ptr_util.h"
#include "base/test/simple_test_tick_clock.h"
diff --git a/chromium/media/cast/net/pacing/paced_sender.h b/chromium/media/cast/net/pacing/paced_sender.h
index 8732ceff060..7dedb1c62b0 100644
--- a/chromium/media/cast/net/pacing/paced_sender.h
+++ b/chromium/media/cast/net/pacing/paced_sender.h
@@ -95,7 +95,7 @@ class PacedPacketSender {
virtual ~PacedPacketSender() {}
};
-class PacedSender : public PacedPacketSender {
+class PacedSender final : public PacedPacketSender {
public:
// |recent_packet_events| is an externally-owned vector where PacedSender will
// add PacketEvents related to sending, retransmission, and rejection. The
diff --git a/chromium/media/cast/net/rtcp/receiver_rtcp_event_subscriber.h b/chromium/media/cast/net/rtcp/receiver_rtcp_event_subscriber.h
index 83d31df4f22..d299698124f 100644
--- a/chromium/media/cast/net/rtcp/receiver_rtcp_event_subscriber.h
+++ b/chromium/media/cast/net/rtcp/receiver_rtcp_event_subscriber.h
@@ -33,7 +33,7 @@ static const size_t kMaxEventsPerRTCP = 20;
// - Internally, the map is capped at a maximum size configurable by the caller.
// The subscriber only keeps the most recent events (determined by RTP
// timestamp) up to the size limit.
-class ReceiverRtcpEventSubscriber : public RawEventSubscriber {
+class ReceiverRtcpEventSubscriber final : public RawEventSubscriber {
public:
typedef std::pair<RtpTimeTicks, RtcpEvent> RtcpEventPair;
typedef std::vector<std::pair<RtpTimeTicks, RtcpEvent>> RtcpEvents;
diff --git a/chromium/media/cast/net/udp_transport_unittest.cc b/chromium/media/cast/net/udp_transport_unittest.cc
index 54c2a98b1d6..aa655636bb2 100644
--- a/chromium/media/cast/net/udp_transport_unittest.cc
+++ b/chromium/media/cast/net/udp_transport_unittest.cc
@@ -58,8 +58,8 @@ class MockPacketReceiver final : public UdpTransportReceiver {
};
void SendPacket(UdpTransportImpl* transport, Packet packet) {
- base::Closure cb;
- transport->SendPacket(new base::RefCountedData<Packet>(packet), cb);
+ transport->SendPacket(new base::RefCountedData<Packet>(packet),
+ base::OnceClosure());
}
static void UpdateCastTransportStatus(CastTransportStatus status) {
@@ -113,7 +113,6 @@ TEST_F(UdpTransportImplTest, PacketSenderSendAndReceive) {
recv_transport_->StartReceiving(
packet_receiver_on_receiver.packet_receiver());
- base::Closure cb;
SendPacket(send_transport_.get(), packet);
run_loop.Run();
std::unique_ptr<Packet> received_packet =
diff --git a/chromium/media/cast/receiver/audio_decoder.cc b/chromium/media/cast/receiver/audio_decoder.cc
index f634866fa8e..d3b760b81f2 100644
--- a/chromium/media/cast/receiver/audio_decoder.cc
+++ b/chromium/media/cast/receiver/audio_decoder.cc
@@ -9,7 +9,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/macros.h"
@@ -105,7 +105,7 @@ class AudioDecoder::ImplBase
DISALLOW_COPY_AND_ASSIGN(ImplBase);
};
-class AudioDecoder::OpusImpl : public AudioDecoder::ImplBase {
+class AudioDecoder::OpusImpl final : public AudioDecoder::ImplBase {
public:
OpusImpl(const scoped_refptr<CastEnvironment>& cast_environment,
int num_channels,
@@ -174,7 +174,7 @@ class AudioDecoder::OpusImpl : public AudioDecoder::ImplBase {
DISALLOW_COPY_AND_ASSIGN(OpusImpl);
};
-class AudioDecoder::Pcm16Impl : public AudioDecoder::ImplBase {
+class AudioDecoder::Pcm16Impl final : public AudioDecoder::ImplBase {
public:
Pcm16Impl(const scoped_refptr<CastEnvironment>& cast_environment,
int num_channels,
diff --git a/chromium/media/cast/receiver/audio_decoder_unittest.cc b/chromium/media/cast/receiver/audio_decoder_unittest.cc
index 6b4ed20a8ce..39268d66e5e 100644
--- a/chromium/media/cast/receiver/audio_decoder_unittest.cc
+++ b/chromium/media/cast/receiver/audio_decoder_unittest.cc
@@ -8,7 +8,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/stl_util.h"
#include "base/synchronization/condition_variable.h"
#include "base/synchronization/lock.h"
@@ -129,11 +129,12 @@ class AudioDecoderTest : public ::testing::TestWithParam<TestScenario> {
cast_environment_->PostTask(
CastEnvironment::MAIN, FROM_HERE,
- base::BindOnce(
- &AudioDecoder::DecodeFrame, base::Unretained(audio_decoder_.get()),
- std::move(encoded_frame),
- base::Bind(&AudioDecoderTest::OnDecodedFrame,
- base::Unretained(this), num_dropped_frames == 0)));
+ base::BindOnce(&AudioDecoder::DecodeFrame,
+ base::Unretained(audio_decoder_.get()),
+ std::move(encoded_frame),
+ base::BindRepeating(&AudioDecoderTest::OnDecodedFrame,
+ base::Unretained(this),
+ num_dropped_frames == 0)));
}
// Blocks the caller until all audio that has been feed in has been decoded.
diff --git a/chromium/media/cast/receiver/cast_receiver_impl.cc b/chromium/media/cast/receiver/cast_receiver_impl.cc
index 1c526ecf03d..9e279fb4c6e 100644
--- a/chromium/media/cast/receiver/cast_receiver_impl.cc
+++ b/chromium/media/cast/receiver/cast_receiver_impl.cc
@@ -9,8 +9,8 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback.h"
+#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/memory/ptr_util.h"
#include "base/trace_event/trace_event.h"
@@ -145,8 +145,9 @@ void CastReceiverImpl::DecodeEncodedVideoFrame(
const base::TimeTicks playout_time = encoded_frame->reference_time;
video_decoder_->DecodeFrame(
std::move(encoded_frame),
- base::Bind(&CastReceiverImpl::EmitDecodedVideoFrame, cast_environment_,
- callback, frame_id, rtp_timestamp, playout_time));
+ base::BindRepeating(&CastReceiverImpl::EmitDecodedVideoFrame,
+ cast_environment_, callback, frame_id, rtp_timestamp,
+ playout_time));
}
// static
diff --git a/chromium/media/cast/receiver/cast_receiver_impl.h b/chromium/media/cast/receiver/cast_receiver_impl.h
index 8a0f90356e9..81b8304a802 100644
--- a/chromium/media/cast/receiver/cast_receiver_impl.h
+++ b/chromium/media/cast/receiver/cast_receiver_impl.h
@@ -26,7 +26,7 @@ class VideoDecoder;
// This is a pure owner class that groups all required receiver-related objects
// together, such as the paced packet sender, audio/video RTP frame receivers,
// and software decoders (created on-demand).
-class CastReceiverImpl : public CastReceiver {
+class CastReceiverImpl final : public CastReceiver {
public:
CastReceiverImpl(scoped_refptr<CastEnvironment> cast_environment,
const FrameReceiverConfig& audio_config,
diff --git a/chromium/media/cast/receiver/frame_receiver.h b/chromium/media/cast/receiver/frame_receiver.h
index a949f86c011..c5d2cb4f27e 100644
--- a/chromium/media/cast/receiver/frame_receiver.h
+++ b/chromium/media/cast/receiver/frame_receiver.h
@@ -57,7 +57,7 @@ using ReceiveEncodedFrameCallback =
//
// This class is not thread safe. Should only be called from the Main cast
// thread.
-class FrameReceiver : public RtpPayloadFeedback {
+class FrameReceiver final : public RtpPayloadFeedback {
public:
FrameReceiver(const scoped_refptr<CastEnvironment>& cast_environment,
const FrameReceiverConfig& config,
diff --git a/chromium/media/cast/receiver/video_decoder.cc b/chromium/media/cast/receiver/video_decoder.cc
index 1a3557711e0..ecf20f40a04 100644
--- a/chromium/media/cast/receiver/video_decoder.cc
+++ b/chromium/media/cast/receiver/video_decoder.cc
@@ -8,7 +8,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/json/json_reader.h"
#include "base/location.h"
#include "base/logging.h"
@@ -104,7 +104,7 @@ class VideoDecoder::ImplBase
DISALLOW_COPY_AND_ASSIGN(ImplBase);
};
-class VideoDecoder::Vp8Impl : public VideoDecoder::ImplBase {
+class VideoDecoder::Vp8Impl final : public VideoDecoder::ImplBase {
public:
explicit Vp8Impl(const scoped_refptr<CastEnvironment>& cast_environment)
: ImplBase(cast_environment, CODEC_VIDEO_VP8) {
@@ -180,7 +180,7 @@ class VideoDecoder::Vp8Impl : public VideoDecoder::ImplBase {
#ifndef OFFICIAL_BUILD
// A fake video decoder that always output 2x2 black frames.
-class VideoDecoder::FakeImpl : public VideoDecoder::ImplBase {
+class VideoDecoder::FakeImpl final : public VideoDecoder::ImplBase {
public:
explicit FakeImpl(const scoped_refptr<CastEnvironment>& cast_environment)
: ImplBase(cast_environment, CODEC_VIDEO_FAKE),
diff --git a/chromium/media/cast/receiver/video_decoder_unittest.cc b/chromium/media/cast/receiver/video_decoder_unittest.cc
index 1d5bd9fca4f..22579562d32 100644
--- a/chromium/media/cast/receiver/video_decoder_unittest.cc
+++ b/chromium/media/cast/receiver/video_decoder_unittest.cc
@@ -9,7 +9,7 @@
#include <vector>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/macros.h"
#include "base/synchronization/condition_variable.h"
#include "base/synchronization/lock.h"
@@ -108,9 +108,9 @@ class VideoDecoderTest : public ::testing::TestWithParam<Codec> {
base::BindOnce(&VideoDecoder::DecodeFrame,
base::Unretained(video_decoder_.get()),
std::move(encoded_frame),
- base::Bind(&VideoDecoderTest::OnDecodedFrame,
- base::Unretained(this), video_frame,
- num_dropped_frames == 0)));
+ base::BindRepeating(&VideoDecoderTest::OnDecodedFrame,
+ base::Unretained(this), video_frame,
+ num_dropped_frames == 0)));
}
// Blocks the caller until all video that has been feed in has been decoded.
diff --git a/chromium/media/cast/sender/audio_encoder.cc b/chromium/media/cast/sender/audio_encoder.cc
index d3e4e9c34ea..c7544c30e09 100644
--- a/chromium/media/cast/sender/audio_encoder.cc
+++ b/chromium/media/cast/sender/audio_encoder.cc
@@ -12,7 +12,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/macros.h"
@@ -232,7 +232,7 @@ class AudioEncoder::ImplBase
};
#if !defined(OS_IOS)
-class AudioEncoder::OpusImpl : public AudioEncoder::ImplBase {
+class AudioEncoder::OpusImpl final : public AudioEncoder::ImplBase {
public:
OpusImpl(const scoped_refptr<CastEnvironment>& cast_environment,
int num_channels,
@@ -705,7 +705,7 @@ class AudioEncoder::AppleAacImpl : public AudioEncoder::ImplBase {
};
#endif // defined(OS_MAC)
-class AudioEncoder::Pcm16Impl : public AudioEncoder::ImplBase {
+class AudioEncoder::Pcm16Impl final : public AudioEncoder::ImplBase {
public:
Pcm16Impl(const scoped_refptr<CastEnvironment>& cast_environment,
int num_channels,
diff --git a/chromium/media/cast/sender/audio_encoder_unittest.cc b/chromium/media/cast/sender/audio_encoder_unittest.cc
index 2c22d0dbcd9..0f0d377e58c 100644
--- a/chromium/media/cast/sender/audio_encoder_unittest.cc
+++ b/chromium/media/cast/sender/audio_encoder_unittest.cc
@@ -12,7 +12,7 @@
#include <string>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/stl_util.h"
#include "build/build_config.h"
@@ -157,14 +157,11 @@ class AudioEncoderTest : public ::testing::TestWithParam<TestScenario> {
receiver_.reset(new TestEncodedAudioFrameReceiver());
- audio_encoder_.reset(new AudioEncoder(
- cast_environment_,
- kNumChannels,
- kDefaultAudioSamplingRate,
- kDefaultAudioEncoderBitrate,
- codec,
- base::Bind(&TestEncodedAudioFrameReceiver::FrameEncoded,
- base::Unretained(receiver_.get()))));
+ audio_encoder_ = std::make_unique<AudioEncoder>(
+ cast_environment_, kNumChannels, kDefaultAudioSamplingRate,
+ kDefaultAudioEncoderBitrate, codec,
+ base::BindRepeating(&TestEncodedAudioFrameReceiver::FrameEncoded,
+ base::Unretained(receiver_.get())));
receiver_->SetSamplesPerFrame(audio_encoder_->GetSamplesPerFrame());
}
diff --git a/chromium/media/cast/sender/audio_sender.h b/chromium/media/cast/sender/audio_sender.h
index ee3b6b5207c..cdae56680f9 100644
--- a/chromium/media/cast/sender/audio_sender.h
+++ b/chromium/media/cast/sender/audio_sender.h
@@ -29,7 +29,7 @@ class AudioEncoder;
// RTCP packets.
// Additionally it posts a bunch of delayed tasks to the main thread for various
// timeouts.
-class AudioSender : public FrameSender {
+class AudioSender final : public FrameSender {
public:
AudioSender(scoped_refptr<CastEnvironment> cast_environment,
const FrameSenderConfig& audio_config,
diff --git a/chromium/media/cast/sender/audio_sender_unittest.cc b/chromium/media/cast/sender/audio_sender_unittest.cc
index 54ca3921278..c4aaad84f31 100644
--- a/chromium/media/cast/sender/audio_sender_unittest.cc
+++ b/chromium/media/cast/sender/audio_sender_unittest.cc
@@ -10,7 +10,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/macros.h"
#include "base/memory/ptr_util.h"
diff --git a/chromium/media/cast/sender/congestion_control.cc b/chromium/media/cast/sender/congestion_control.cc
index 6b2094b0211..29ff4935339 100644
--- a/chromium/media/cast/sender/congestion_control.cc
+++ b/chromium/media/cast/sender/congestion_control.cc
@@ -28,7 +28,7 @@
namespace media {
namespace cast {
-class AdaptiveCongestionControl : public CongestionControl {
+class AdaptiveCongestionControl final : public CongestionControl {
public:
AdaptiveCongestionControl(const base::TickClock* clock,
int max_bitrate_configured,
@@ -104,7 +104,7 @@ class AdaptiveCongestionControl : public CongestionControl {
DISALLOW_COPY_AND_ASSIGN(AdaptiveCongestionControl);
};
-class FixedCongestionControl : public CongestionControl {
+class FixedCongestionControl final : public CongestionControl {
public:
explicit FixedCongestionControl(int bitrate) : bitrate_(bitrate) {}
~FixedCongestionControl() final = default;
diff --git a/chromium/media/cast/sender/external_video_encoder.cc b/chromium/media/cast/sender/external_video_encoder.cc
index 23e02f0e302..a85cbdf429c 100644
--- a/chromium/media/cast/sender/external_video_encoder.cc
+++ b/chromium/media/cast/sender/external_video_encoder.cc
@@ -84,7 +84,7 @@ struct InProgressExternalVideoFrameEncode {
// to encode media::VideoFrames and emit media::cast::EncodedFrames. All
// methods must be called on the thread associated with the given
// SingleThreadTaskRunner, except for the task_runner() accessor.
-class ExternalVideoEncoder::VEAClientImpl
+class ExternalVideoEncoder::VEAClientImpl final
: public VideoEncodeAccelerator::Client,
public base::RefCountedThreadSafe<VEAClientImpl> {
public:
@@ -219,7 +219,7 @@ class ExternalVideoEncoder::VEAClientImpl
}
frame->BackWithSharedMemory(&input_buffer->first);
- frame->AddDestructionObserver(media::BindToCurrentLoop(base::Bind(
+ frame->AddDestructionObserver(media::BindToCurrentLoop(base::BindOnce(
&ExternalVideoEncoder::VEAClientImpl::ReturnInputBufferToPool, this,
index)));
free_input_buffer_index_.pop_back();
@@ -754,7 +754,7 @@ void ExternalVideoEncoder::OnCreateVideoEncodeAccelerator(
// Create a callback that wraps the StatusChangeCallback. It monitors when a
// fatal error occurs and schedules destruction of the VEAClientImpl.
- StatusChangeCallback wrapped_status_change_cb = base::Bind(
+ StatusChangeCallback wrapped_status_change_cb = base::BindRepeating(
[](base::WeakPtr<ExternalVideoEncoder> self,
const StatusChangeCallback& status_change_cb,
OperationalStatus status) {
diff --git a/chromium/media/cast/sender/external_video_encoder.h b/chromium/media/cast/sender/external_video_encoder.h
index a65706cf219..fbbfdb3c7d5 100644
--- a/chromium/media/cast/sender/external_video_encoder.h
+++ b/chromium/media/cast/sender/external_video_encoder.h
@@ -24,7 +24,7 @@ namespace cast {
// Cast MAIN thread proxy to the internal media::VideoEncodeAccelerator
// implementation running on a separate thread. Encodes media::VideoFrames and
// emits media::cast::EncodedFrames.
-class ExternalVideoEncoder : public VideoEncoder {
+class ExternalVideoEncoder final : public VideoEncoder {
public:
// Returns true if the current platform and system configuration supports
// using ExternalVideoEncoder with the given |video_config|.
@@ -85,7 +85,8 @@ class ExternalVideoEncoder : public VideoEncoder {
// An implementation of SizeAdaptableVideoEncoderBase to proxy for
// ExternalVideoEncoder instances.
-class SizeAdaptableExternalVideoEncoder : public SizeAdaptableVideoEncoderBase {
+class SizeAdaptableExternalVideoEncoder final
+ : public SizeAdaptableVideoEncoderBase {
public:
SizeAdaptableExternalVideoEncoder(
const scoped_refptr<CastEnvironment>& cast_environment,
diff --git a/chromium/media/cast/sender/fake_software_video_encoder.h b/chromium/media/cast/sender/fake_software_video_encoder.h
index 09106a2406b..aafc07938eb 100644
--- a/chromium/media/cast/sender/fake_software_video_encoder.h
+++ b/chromium/media/cast/sender/fake_software_video_encoder.h
@@ -14,7 +14,7 @@
namespace media {
namespace cast {
-class FakeSoftwareVideoEncoder : public SoftwareVideoEncoder {
+class FakeSoftwareVideoEncoder final : public SoftwareVideoEncoder {
public:
FakeSoftwareVideoEncoder(const FrameSenderConfig& video_config);
~FakeSoftwareVideoEncoder() final;
diff --git a/chromium/media/cast/sender/h264_vt_encoder.cc b/chromium/media/cast/sender/h264_vt_encoder.cc
index ff4852a20d0..844c30ef675 100644
--- a/chromium/media/cast/sender/h264_vt_encoder.cc
+++ b/chromium/media/cast/sender/h264_vt_encoder.cc
@@ -11,7 +11,7 @@
#include "base/big_endian.h"
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/power_monitor/power_monitor.h"
@@ -73,8 +73,8 @@ class H264VideoToolboxEncoder::VideoFrameFactoryImpl
DVLOG(1) << "MaybeCreateFrame: Detected frame size change.";
cast_environment_->PostTask(
CastEnvironment::MAIN, FROM_HERE,
- base::Bind(&H264VideoToolboxEncoder::UpdateFrameSize, encoder_,
- frame_size));
+ base::BindOnce(&H264VideoToolboxEncoder::UpdateFrameSize, encoder_,
+ frame_size));
pool_frame_size_ = frame_size;
pool_.reset();
return nullptr;
@@ -170,7 +170,7 @@ H264VideoToolboxEncoder::H264VideoToolboxEncoder(
: STATUS_UNSUPPORTED_CODEC;
cast_environment_->PostTask(
CastEnvironment::MAIN, FROM_HERE,
- base::Bind(status_change_cb_, operational_status));
+ base::BindOnce(status_change_cb_, operational_status));
if (operational_status == STATUS_INITIALIZED) {
// Create the shared video frame factory. It persists for the combined
@@ -204,7 +204,7 @@ void H264VideoToolboxEncoder::ResetCompressionSession() {
// Notify that we're resetting the encoder.
cast_environment_->PostTask(
CastEnvironment::MAIN, FROM_HERE,
- base::Bind(status_change_cb_, STATUS_CODEC_REINIT_PENDING));
+ base::BindOnce(status_change_cb_, STATUS_CODEC_REINIT_PENDING));
// Destroy the current session, if any.
DestroyCompressionSession();
@@ -263,7 +263,7 @@ void H264VideoToolboxEncoder::ResetCompressionSession() {
// Notify that reinitialization has failed.
cast_environment_->PostTask(
CastEnvironment::MAIN, FROM_HERE,
- base::Bind(status_change_cb_, STATUS_CODEC_INIT_FAILED));
+ base::BindOnce(status_change_cb_, STATUS_CODEC_INIT_FAILED));
return;
}
@@ -280,7 +280,7 @@ void H264VideoToolboxEncoder::ResetCompressionSession() {
// Notify that reinitialization is done.
cast_environment_->PostTask(
CastEnvironment::MAIN, FROM_HERE,
- base::Bind(status_change_cb_, STATUS_INITIALIZED));
+ base::BindOnce(status_change_cb_, STATUS_INITIALIZED));
}
void H264VideoToolboxEncoder::ConfigureCompressionSession() {
@@ -502,7 +502,7 @@ void H264VideoToolboxEncoder::CompressionCallback(void* encoder_opaque,
DLOG(ERROR) << " encoding failed: " << status;
encoder->cast_environment_->PostTask(
CastEnvironment::MAIN, FROM_HERE,
- base::Bind(encoder->status_change_cb_, STATUS_CODEC_RUNTIME_ERROR));
+ base::BindOnce(encoder->status_change_cb_, STATUS_CODEC_RUNTIME_ERROR));
} else if ((info & kVTEncodeInfo_FrameDropped)) {
DVLOG(2) << " frame dropped";
} else {
diff --git a/chromium/media/cast/sender/h264_vt_encoder_unittest.cc b/chromium/media/cast/sender/h264_vt_encoder_unittest.cc
index e91ef8f326d..c777a17cb1e 100644
--- a/chromium/media/cast/sender/h264_vt_encoder_unittest.cc
+++ b/chromium/media/cast/sender/h264_vt_encoder_unittest.cc
@@ -5,7 +5,7 @@
#include <stdint.h>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/containers/queue.h"
#include "base/macros.h"
#include "base/memory/ref_counted.h"
@@ -130,9 +130,9 @@ class EndToEndFrameChecker
bool decoder_init_result;
decoder_.Initialize(
config, false, nullptr,
- base::Bind(&SaveDecoderInitResult, &decoder_init_result),
- base::Bind(&EndToEndFrameChecker::CompareFrameWithExpected,
- base::Unretained(this)),
+ base::BindOnce(&SaveDecoderInitResult, &decoder_init_result),
+ base::BindRepeating(&EndToEndFrameChecker::CompareFrameWithExpected,
+ base::Unretained(this)),
base::NullCallback());
base::RunLoop().RunUntilIdle();
EXPECT_TRUE(decoder_init_result);
@@ -145,8 +145,8 @@ class EndToEndFrameChecker
void EncodeDone(std::unique_ptr<SenderEncodedFrame> encoded_frame) {
auto buffer = DecoderBuffer::CopyFrom(encoded_frame->bytes(),
encoded_frame->data.size());
- decoder_.Decode(buffer, base::Bind(&EndToEndFrameChecker::DecodeDone,
- base::Unretained(this)));
+ decoder_.Decode(buffer, base::BindOnce(&EndToEndFrameChecker::DecodeDone,
+ base::Unretained(this)));
}
void CompareFrameWithExpected(scoped_refptr<VideoFrame> frame) {
@@ -220,7 +220,7 @@ class H264VideoToolboxEncoderTest : public ::testing::Test {
task_environment_.GetMainThreadTaskRunner());
encoder_ = std::make_unique<H264VideoToolboxEncoder>(
cast_environment_, video_sender_config_,
- base::Bind(&SaveOperationalStatus, &operational_status_));
+ base::BindRepeating(&SaveOperationalStatus, &operational_status_));
base::RunLoop().RunUntilIdle();
EXPECT_EQ(STATUS_INITIALIZED, operational_status_);
}
diff --git a/chromium/media/cast/sender/sender_encoded_frame.h b/chromium/media/cast/sender/sender_encoded_frame.h
index cd0357aac0f..be31b153c69 100644
--- a/chromium/media/cast/sender/sender_encoded_frame.h
+++ b/chromium/media/cast/sender/sender_encoded_frame.h
@@ -13,7 +13,7 @@ namespace cast {
// Extends EncodedFrame with additional fields used within the sender-side of
// the library.
-struct SenderEncodedFrame : public EncodedFrame {
+struct SenderEncodedFrame final : public EncodedFrame {
SenderEncodedFrame();
~SenderEncodedFrame() final;
diff --git a/chromium/media/cast/sender/video_encoder_impl.cc b/chromium/media/cast/sender/video_encoder_impl.cc
index 4fc74e74b5d..17005ba3138 100644
--- a/chromium/media/cast/sender/video_encoder_impl.cc
+++ b/chromium/media/cast/sender/video_encoder_impl.cc
@@ -7,8 +7,8 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback.h"
+#include "base/callback_helpers.h"
#include "base/check.h"
#include "media/base/video_frame.h"
#include "media/cast/sender/fake_software_video_encoder.h"
diff --git a/chromium/media/cast/sender/video_encoder_impl.h b/chromium/media/cast/sender/video_encoder_impl.h
index f8160ed2941..ae39c398b45 100644
--- a/chromium/media/cast/sender/video_encoder_impl.h
+++ b/chromium/media/cast/sender/video_encoder_impl.h
@@ -20,7 +20,7 @@ namespace cast {
// This object is called external from the main cast thread and internally from
// the video encoder thread.
-class VideoEncoderImpl : public VideoEncoder {
+class VideoEncoderImpl final : public VideoEncoder {
public:
struct CodecDynamicConfig {
bool key_frame_requested;
diff --git a/chromium/media/cast/sender/video_encoder_unittest.cc b/chromium/media/cast/sender/video_encoder_unittest.cc
index 32360e36c8a..290da56b605 100644
--- a/chromium/media/cast/sender/video_encoder_unittest.cc
+++ b/chromium/media/cast/sender/video_encoder_unittest.cc
@@ -72,8 +72,8 @@ class VideoEncoderTest
video_config_.video_codec_params.max_number_of_video_buffers_used = 1;
video_encoder_ = VideoEncoder::Create(
cast_environment_, video_config_,
- base::Bind(&VideoEncoderTest::OnOperationalStatusChange,
- base::Unretained(this)),
+ base::BindRepeating(&VideoEncoderTest::OnOperationalStatusChange,
+ base::Unretained(this)),
base::Bind(
&FakeVideoEncodeAcceleratorFactory::CreateVideoEncodeAccelerator,
base::Unretained(vea_factory_.get())),
diff --git a/chromium/media/cast/sender/video_sender.cc b/chromium/media/cast/sender/video_sender.cc
index 3674c21b149..a2960f8aaee 100644
--- a/chromium/media/cast/sender/video_sender.cc
+++ b/chromium/media/cast/sender/video_sender.cc
@@ -93,7 +93,8 @@ VideoSender::VideoSender(
const CreateVideoEncodeAcceleratorCallback& create_vea_cb,
const CreateVideoEncodeMemoryCallback& create_video_encode_mem_cb,
CastTransport* const transport_sender,
- PlayoutDelayChangeCB playout_delay_change_cb)
+ PlayoutDelayChangeCB playout_delay_change_cb,
+ media::VideoCaptureFeedbackCB feedback_callback)
: FrameSender(
cast_environment,
transport_sender,
@@ -108,6 +109,7 @@ VideoSender::VideoSender(
frames_in_encoder_(0),
last_bitrate_(0),
playout_delay_change_cb_(std::move(playout_delay_change_cb)),
+ feedback_cb_(feedback_callback),
low_latency_mode_(false),
last_reported_encoder_utilization_(-1.0),
last_reported_lossy_utilization_(-1.0) {
@@ -326,10 +328,13 @@ void VideoSender::OnEncodedVideoFrame(
// Key frames are artificially capped to 1.0 because their actual
// utilization is atypical compared to the other frames in the stream, and
// this can misguide the producer of the input video frames.
- video_frame->feedback()->resource_utilization =
+ VideoFrameFeedback feedback;
+ feedback.resource_utilization =
encoded_frame->dependency == EncodedFrame::KEY
? std::min(1.0, attenuated_utilization)
: attenuated_utilization;
+ if (feedback_cb_)
+ feedback_cb_.Run(feedback);
}
SendEncodedFrame(encoder_bitrate, std::move(encoded_frame));
diff --git a/chromium/media/cast/sender/video_sender.h b/chromium/media/cast/sender/video_sender.h
index aa1f9b1eff3..7bcb6358428 100644
--- a/chromium/media/cast/sender/video_sender.h
+++ b/chromium/media/cast/sender/video_sender.h
@@ -13,6 +13,7 @@
#include "base/memory/weak_ptr.h"
#include "base/time/tick_clock.h"
#include "base/time/time.h"
+#include "media/base/video_frame_feedback.h"
#include "media/cast/cast_config.h"
#include "media/cast/cast_sender.h"
#include "media/cast/common/rtp_time.h"
@@ -45,7 +46,8 @@ class VideoSender : public FrameSender {
const CreateVideoEncodeAcceleratorCallback& create_vea_cb,
const CreateVideoEncodeMemoryCallback& create_video_encode_mem_cb,
CastTransport* const transport_sender,
- PlayoutDelayChangeCB playout_delay_change_cb);
+ PlayoutDelayChangeCB playout_delay_change_cb,
+ media::VideoCaptureFeedbackCB feedback_callback);
~VideoSender() override;
@@ -93,6 +95,8 @@ class VideoSender : public FrameSender {
PlayoutDelayChangeCB playout_delay_change_cb_;
+ media::VideoCaptureFeedbackCB feedback_cb_;
+
// Indicates we are operating in a mode where the target playout latency is
// low for best user experience. When operating in low latency mode, we
// prefer dropping frames over increasing target playout time.
diff --git a/chromium/media/cast/sender/video_sender_unittest.cc b/chromium/media/cast/sender/video_sender_unittest.cc
index 78fc6c4b8b9..f4b4b829173 100644
--- a/chromium/media/cast/sender/video_sender_unittest.cc
+++ b/chromium/media/cast/sender/video_sender_unittest.cc
@@ -120,9 +120,21 @@ class PeerVideoSender : public VideoSender {
create_vea_cb,
create_video_encode_mem_cb,
transport_sender,
- base::Bind(&IgnorePlayoutDelayChanges)) {}
+ base::BindRepeating(&IgnorePlayoutDelayChanges),
+ base::BindRepeating(&PeerVideoSender::ProcessFeedback,
+ base::Unretained(this))) {}
+
using VideoSender::OnReceivedCastFeedback;
using VideoSender::OnReceivedPli;
+
+ void ProcessFeedback(const media::VideoFrameFeedback& feedback) {
+ feedback_ = feedback;
+ }
+
+ VideoFrameFeedback GetFeedback() { return feedback_; }
+
+ private:
+ VideoFrameFeedback feedback_;
};
class TransportClient : public CastTransport::Client {
@@ -180,7 +192,7 @@ class VideoSenderTest : public ::testing::Test {
vea_factory_.SetInitializationWillSucceed(expect_init_success);
video_sender_.reset(new PeerVideoSender(
cast_environment_, video_config,
- base::Bind(&SaveOperationalStatus, &operational_status_),
+ base::BindRepeating(&SaveOperationalStatus, &operational_status_),
base::Bind(
&FakeVideoEncodeAcceleratorFactory::CreateVideoEncodeAccelerator,
base::Unretained(&vea_factory_)),
@@ -190,7 +202,7 @@ class VideoSenderTest : public ::testing::Test {
} else {
video_sender_.reset(new PeerVideoSender(
cast_environment_, video_config,
- base::Bind(&SaveOperationalStatus, &operational_status_),
+ base::BindRepeating(&SaveOperationalStatus, &operational_status_),
CreateDefaultVideoEncodeAcceleratorCallback(),
CreateDefaultVideoEncodeMemoryCallback(), transport_sender_.get()));
}
@@ -567,13 +579,12 @@ TEST_F(VideoSenderTest, CheckVideoFrameFactoryIsNull) {
EXPECT_EQ(nullptr, video_sender_->CreateVideoFrameFactory().get());
}
-TEST_F(VideoSenderTest, PopulatesResourceUtilizationInFrameFeedback) {
+TEST_F(VideoSenderTest, ReportsResourceUtilizationInCallback) {
InitEncoder(false, true);
ASSERT_EQ(STATUS_INITIALIZED, operational_status_);
for (int i = 0; i < 3; ++i) {
scoped_refptr<media::VideoFrame> video_frame = GetNewVideoFrame();
- EXPECT_LE(video_frame->feedback()->resource_utilization, 0.0);
const base::TimeTicks reference_time = testing_clock_.NowTicks();
video_sender_->InsertRawVideoFrame(video_frame, reference_time);
@@ -586,7 +597,7 @@ TEST_F(VideoSenderTest, PopulatesResourceUtilizationInFrameFeedback) {
// Check that the resource_utilization value is set and non-negative. Don't
// check for specific values because they are dependent on real-world CPU
// encode time, which can vary across test runs.
- double utilization = video_frame->feedback()->resource_utilization;
+ double utilization = video_sender_->GetFeedback().resource_utilization;
EXPECT_LE(0.0, utilization);
if (i == 0)
EXPECT_GE(1.0, utilization); // Key frames never exceed 1.0.
diff --git a/chromium/media/cast/sender/vp8_encoder.h b/chromium/media/cast/sender/vp8_encoder.h
index 79468ca3485..a6080c41eec 100644
--- a/chromium/media/cast/sender/vp8_encoder.h
+++ b/chromium/media/cast/sender/vp8_encoder.h
@@ -24,7 +24,7 @@ class VideoFrame;
namespace media {
namespace cast {
-class Vp8Encoder : public SoftwareVideoEncoder {
+class Vp8Encoder final : public SoftwareVideoEncoder {
public:
explicit Vp8Encoder(const FrameSenderConfig& video_config);
diff --git a/chromium/media/cdm/BUILD.gn b/chromium/media/cdm/BUILD.gn
index 1a76e0ee452..fd2f3718983 100644
--- a/chromium/media/cdm/BUILD.gn
+++ b/chromium/media/cdm/BUILD.gn
@@ -19,7 +19,6 @@ source_set("cdm") {
# TODO(crbug.com/583067): These files should not be in //media/base.
"//media/base/android",
-
"//media/filters",
"//media/fuchsia/cdm",
]
@@ -134,6 +133,7 @@ source_set("unit_tests") {
deps = [
"//base/test:test_support",
+ "//build:chromeos_buildflags",
"//crypto",
"//media:test_support",
"//testing/gmock",
diff --git a/chromium/media/cdm/cdm_adapter.h b/chromium/media/cdm/cdm_adapter.h
index 2dd11c09d3c..5a8b0e93d77 100644
--- a/chromium/media/cdm/cdm_adapter.h
+++ b/chromium/media/cdm/cdm_adapter.h
@@ -37,11 +37,11 @@ class AudioFramesImpl;
class CdmAuxiliaryHelper;
class CdmWrapper;
-class MEDIA_EXPORT CdmAdapter : public ContentDecryptionModule,
- public CdmContext,
- public Decryptor,
- public cdm::Host_10,
- public cdm::Host_11 {
+class MEDIA_EXPORT CdmAdapter final : public ContentDecryptionModule,
+ public CdmContext,
+ public Decryptor,
+ public cdm::Host_10,
+ public cdm::Host_11 {
public:
using CreateCdmFunc = void* (*)(int cdm_interface_version,
const char* key_system,
diff --git a/chromium/media/cdm/cdm_context_ref_impl.h b/chromium/media/cdm/cdm_context_ref_impl.h
index ec3143a52ff..299690cd3ba 100644
--- a/chromium/media/cdm/cdm_context_ref_impl.h
+++ b/chromium/media/cdm/cdm_context_ref_impl.h
@@ -17,7 +17,7 @@ namespace media {
class ContentDecryptionModule;
-class MEDIA_EXPORT CdmContextRefImpl : public CdmContextRef {
+class MEDIA_EXPORT CdmContextRefImpl final : public CdmContextRef {
public:
explicit CdmContextRefImpl(scoped_refptr<ContentDecryptionModule> cdm);
~CdmContextRefImpl() final;
diff --git a/chromium/media/cdm/cdm_helpers.h b/chromium/media/cdm/cdm_helpers.h
index 26a81fb40a6..f4a5a977153 100644
--- a/chromium/media/cdm/cdm_helpers.h
+++ b/chromium/media/cdm/cdm_helpers.h
@@ -19,7 +19,7 @@ namespace media {
class VideoFrame;
-class DecryptedBlockImpl : public cdm::DecryptedBlock {
+class DecryptedBlockImpl final : public cdm::DecryptedBlock {
public:
DecryptedBlockImpl();
~DecryptedBlockImpl() final;
@@ -102,7 +102,7 @@ class MEDIA_EXPORT VideoFrameImpl : public cdm::VideoFrame,
DISALLOW_COPY_AND_ASSIGN(VideoFrameImpl);
};
-class AudioFramesImpl : public cdm::AudioFrames {
+class AudioFramesImpl final : public cdm::AudioFrames {
public:
AudioFramesImpl();
~AudioFramesImpl() final;
diff --git a/chromium/media/cdm/cdm_paths_unittest.cc b/chromium/media/cdm/cdm_paths_unittest.cc
index ca554e28e18..2e0fe96ea54 100644
--- a/chromium/media/cdm/cdm_paths_unittest.cc
+++ b/chromium/media/cdm/cdm_paths_unittest.cc
@@ -8,6 +8,7 @@
#include "base/strings/string_util.h"
#include "base/strings/utf_string_conversions.h"
#include "build/build_config.h"
+#include "build/chromeos_buildflags.h"
#include "media/media_buildflags.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -16,8 +17,8 @@
// "cdm_platform_specific_path" in cdm_paths.gni.
// TODO(crbug.com/971433). Move the CDMs out of the install directory on
// ChromeOS.
-#if (defined(OS_MAC) || defined(OS_WIN) || \
- (defined(OS_LINUX) && !defined(OS_CHROMEOS)))
+#if (defined(OS_MAC) || defined(OS_WIN) || defined(OS_LINUX) || \
+ BUILDFLAG(IS_LACROS))
#define CDM_USE_PLATFORM_SPECIFIC_PATH
#endif
@@ -36,7 +37,7 @@ const char kComponentPlatform[] =
"mac";
#elif defined(OS_WIN)
"win";
-#elif defined(OS_CHROMEOS)
+#elif BUILDFLAG(IS_ASH)
"cros";
#elif defined(OS_LINUX)
"linux";
diff --git a/chromium/media/cdm/default_cdm_factory.cc b/chromium/media/cdm/default_cdm_factory.cc
index 49b7ae0f8e9..05b10ea1b82 100644
--- a/chromium/media/cdm/default_cdm_factory.cc
+++ b/chromium/media/cdm/default_cdm_factory.cc
@@ -5,7 +5,7 @@
#include "media/cdm/default_cdm_factory.h"
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/location.h"
#include "base/single_thread_task_runner.h"
#include "base/threading/thread_task_runner_handle.h"
diff --git a/chromium/media/cdm/default_cdm_factory.h b/chromium/media/cdm/default_cdm_factory.h
index 62c50f22617..4134fdfb018 100644
--- a/chromium/media/cdm/default_cdm_factory.h
+++ b/chromium/media/cdm/default_cdm_factory.h
@@ -13,7 +13,7 @@ namespace media {
struct CdmConfig;
-class MEDIA_EXPORT DefaultCdmFactory : public CdmFactory {
+class MEDIA_EXPORT DefaultCdmFactory final : public CdmFactory {
public:
DefaultCdmFactory();
~DefaultCdmFactory() final;
diff --git a/chromium/media/cdm/library_cdm/cdm_paths.gni b/chromium/media/cdm/library_cdm/cdm_paths.gni
index 833d9c8b5e6..1842847e7e7 100644
--- a/chromium/media/cdm/library_cdm/cdm_paths.gni
+++ b/chromium/media/cdm/library_cdm/cdm_paths.gni
@@ -2,6 +2,8 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import("//build/config/chromeos/ui_mode.gni")
+
# This file defines output paths for the CDM adapters and CDMs.
# Naming and folder structure below are following the recommendation for Chrome
@@ -9,7 +11,7 @@
# OS name for components is close to "target_os" but has some differences.
# Explicitly define what we use to avoid confusion.
-if (is_chromeos) {
+if (is_ash) {
component_os = "cros"
} else if (is_linux) {
component_os = "linux"
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_video_decoder.cc b/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_video_decoder.cc
index fd52b40c0e4..c1fc8867088 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_video_decoder.cc
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/cdm_video_decoder.cc
@@ -7,7 +7,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/command_line.h"
#include "base/containers/queue.h"
#include "base/feature_list.h"
@@ -34,10 +34,6 @@
#include "media/filters/vpx_video_decoder.h"
#endif
-#if BUILDFLAG(ENABLE_LIBAOM)
-#include "media/filters/aom_video_decoder.h"
-#endif
-
#if BUILDFLAG(ENABLE_DAV1D_DECODER)
#include "media/filters/dav1d_video_decoder.h"
#endif
@@ -165,7 +161,7 @@ void SetupGlobalEnvironmentIfNeeded() {
// |kNestableTasksAllowed| because we could be running the RunLoop in a task,
// e.g. in component builds when we share the same task runner as the host. In
// a static build, this is not necessary.
-class VideoDecoderAdapter : public CdmVideoDecoder {
+class VideoDecoderAdapter final : public CdmVideoDecoder {
public:
VideoDecoderAdapter(CdmHostProxy* cdm_host_proxy,
std::unique_ptr<VideoDecoder> video_decoder)
@@ -321,9 +317,6 @@ std::unique_ptr<CdmVideoDecoder> CreateVideoDecoder(
#if BUILDFLAG(ENABLE_DAV1D_DECODER)
if (config.codec == cdm::kCodecAv1)
video_decoder.reset(new Dav1dVideoDecoder(null_media_log.get()));
-#elif BUILDFLAG(ENABLE_LIBAOM)
- if (config.codec == cdm::kCodecAv1)
- video_decoder.reset(new AomVideoDecoder(null_media_log.get()));
#endif
}
diff --git a/chromium/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_audio_decoder.cc b/chromium/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_audio_decoder.cc
index a043005d57a..34d7927e70e 100644
--- a/chromium/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_audio_decoder.cc
+++ b/chromium/media/cdm/library_cdm/clear_key_cdm/ffmpeg_cdm_audio_decoder.cc
@@ -9,8 +9,8 @@
#include <algorithm>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback.h"
+#include "base/callback_helpers.h"
#include "base/logging.h"
#include "media/base/audio_bus.h"
#include "media/base/audio_timestamp_helper.h"
diff --git a/chromium/media/cdm/win/media_foundation_cdm_factory.cc b/chromium/media/cdm/win/media_foundation_cdm_factory.cc
index 2c95ee4dbdf..3d3a924a06a 100644
--- a/chromium/media/cdm/win/media_foundation_cdm_factory.cc
+++ b/chromium/media/cdm/win/media_foundation_cdm_factory.cc
@@ -10,7 +10,7 @@
#include <propvarutil.h>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/files/file_util.h"
#include "base/strings/utf_string_conversions.h"
#include "base/win/scoped_propvariant.h"
diff --git a/chromium/media/device_monitors/BUILD.gn b/chromium/media/device_monitors/BUILD.gn
index 676212143ac..9345776627b 100644
--- a/chromium/media/device_monitors/BUILD.gn
+++ b/chromium/media/device_monitors/BUILD.gn
@@ -4,25 +4,13 @@
import("//media/media_options.gni")
-# This file depends on the legacy global sources assignment filter. It should
-# be converted to check target platform before assigning source files to the
-# sources variable. Remove this import and set_sources_assignment_filter call
-# when the file has been converted. See https://crbug.com/1018739 for details.
-import("//build/config/deprecated_default_sources_assignment_filter.gni")
-set_sources_assignment_filter(deprecated_default_sources_assignment_filter)
-
source_set("device_monitors") {
# Do not expand the visibility here without double-checking with OWNERS, this
# is a roll-up target which is part of the //media component. Most other DEPs
# should be using //media and not directly DEP this roll-up target.
visibility = [ "//media" ]
- sources = [
- "device_monitor_mac.h",
- "device_monitor_mac.mm",
- "system_message_window_win.cc",
- "system_message_window_win.h",
- ]
+ sources = []
configs += [ "//media:subcomponent_config" ]
@@ -41,13 +29,24 @@ source_set("device_monitors") {
}
if (is_mac) {
+ sources += [
+ "device_monitor_mac.h",
+ "device_monitor_mac.mm",
+ ]
frameworks = [ "Foundation.framework" ]
}
+
+ if (is_win) {
+ sources += [
+ "system_message_window_win.cc",
+ "system_message_window_win.h",
+ ]
+ }
}
source_set("unit_tests") {
testonly = true
- sources = [ "system_message_window_win_unittest.cc" ]
+ sources = []
deps = [
"//base",
@@ -56,4 +55,8 @@ source_set("unit_tests") {
"//testing/gmock",
"//testing/gtest",
]
+
+ if (is_win) {
+ sources += [ "system_message_window_win_unittest.cc" ]
+ }
}
diff --git a/chromium/media/device_monitors/device_monitor_mac.mm b/chromium/media/device_monitors/device_monitor_mac.mm
index f8d651e1cb0..26bab09017d 100644
--- a/chromium/media/device_monitors/device_monitor_mac.mm
+++ b/chromium/media/device_monitors/device_monitor_mac.mm
@@ -8,7 +8,7 @@
#include <set>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/mac/scoped_nsobject.h"
#include "base/macros.h"
diff --git a/chromium/media/ffmpeg/ffmpeg_common.cc b/chromium/media/ffmpeg/ffmpeg_common.cc
index c17dd9f8371..336edbb492f 100644
--- a/chromium/media/ffmpeg/ffmpeg_common.cc
+++ b/chromium/media/ffmpeg/ffmpeg_common.cc
@@ -648,7 +648,7 @@ bool AVStreamToVideoDecoderConfig(const AVStream* stream,
if (side_data.type != AV_PKT_DATA_MASTERING_DISPLAY_METADATA)
continue;
- gl::HDRMetadata hdr_metadata{};
+ gfx::HDRMetadata hdr_metadata{};
AVMasteringDisplayMetadata* metadata =
reinterpret_cast<AVMasteringDisplayMetadata*>(side_data.data);
if (metadata->has_primaries) {
diff --git a/chromium/media/filters/BUILD.gn b/chromium/media/filters/BUILD.gn
index e192b5d546a..573ef0ee042 100644
--- a/chromium/media/filters/BUILD.gn
+++ b/chromium/media/filters/BUILD.gn
@@ -85,6 +85,7 @@ source_set("filters") {
deps = [
"//base/util/type_safety:type_safety",
+ "//build:chromeos_buildflags",
"//cc/base", # For MathUtil.
"//media:media_buildflags",
"//media/base",
@@ -140,14 +141,6 @@ source_set("filters") {
deps += [ "//third_party/libvpx" ]
}
- if (enable_libaom) {
- sources += [
- "aom_video_decoder.cc",
- "aom_video_decoder.h",
- ]
- deps += [ "//third_party/libaom" ]
- }
-
if (enable_dav1d_decoder) {
sources += [
"dav1d_video_decoder.cc",
@@ -223,6 +216,7 @@ source_set("filters") {
"fuchsia/fuchsia_video_decoder.h",
]
deps += [
+ "//components/viz/common",
"//gpu/command_buffer/client",
"//gpu/command_buffer/common",
"//gpu/ipc/common",
@@ -310,6 +304,7 @@ source_set("unit_tests") {
deps = [
"//base/test:test_support",
+ "//build:chromeos_buildflags",
"//media:test_support",
"//testing/gmock",
"//testing/gtest",
@@ -334,8 +329,10 @@ source_set("unit_tests") {
if (is_fuchsia) {
sources += [ "fuchsia/fuchsia_video_decoder_unittest.cc" ]
deps += [
+ "//components/viz/common",
"//components/viz/test:test_support",
"//gpu/command_buffer/client",
+ "//gpu/config",
"//third_party/fuchsia-sdk/sdk/fidl/fuchsia.sysmem",
"//third_party/fuchsia-sdk/sdk/pkg/sys_cpp",
]
@@ -380,10 +377,6 @@ source_set("unit_tests") {
sources += [ "vpx_video_decoder_unittest.cc" ]
}
- if (enable_libaom) {
- sources += [ "aom_video_decoder_unittest.cc" ]
- }
-
if (enable_dav1d_decoder) {
sources += [ "dav1d_video_decoder_unittest.cc" ]
}
diff --git a/chromium/media/filters/android/media_codec_audio_decoder.cc b/chromium/media/filters/android/media_codec_audio_decoder.cc
index 5372f772b23..587b78c1a30 100644
--- a/chromium/media/filters/android/media_codec_audio_decoder.cc
+++ b/chromium/media/filters/android/media_codec_audio_decoder.cc
@@ -8,7 +8,6 @@
#include "base/android/build_info.h"
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/single_thread_task_runner.h"
diff --git a/chromium/media/filters/aom_video_decoder.cc b/chromium/media/filters/aom_video_decoder.cc
deleted file mode 100644
index a9ae561587b..00000000000
--- a/chromium/media/filters/aom_video_decoder.cc
+++ /dev/null
@@ -1,314 +0,0 @@
-// Copyright 2017 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/filters/aom_video_decoder.h"
-
-#include "base/bind.h"
-#include "base/callback.h"
-#include "base/logging.h"
-#include "base/threading/sequenced_task_runner_handle.h"
-#include "media/base/bind_to_current_loop.h"
-#include "media/base/decoder_buffer.h"
-#include "media/base/media_log.h"
-#include "media/base/status.h"
-#include "media/base/video_util.h"
-#include "media/filters/frame_buffer_pool.h"
-#include "third_party/libyuv/include/libyuv/convert.h"
-
-// Include libaom header files.
-extern "C" {
-#include "third_party/libaom/source/libaom/aom/aom_decoder.h"
-#include "third_party/libaom/source/libaom/aom/aom_frame_buffer.h"
-#include "third_party/libaom/source/libaom/aom/aomdx.h"
-}
-
-namespace media {
-
-// Returns the number of threads.
-static int GetAomVideoDecoderThreadCount(const VideoDecoderConfig& config) {
- // For AOM decode when using the default thread count, increase the number
- // of decode threads to equal the maximum number of tiles possible for
- // higher resolution streams.
- return VideoDecoder::GetRecommendedThreadCount(config.coded_size().width() /
- 256);
-}
-
-static VideoPixelFormat AomImgFmtToVideoPixelFormat(const aom_image_t* img) {
- switch (img->fmt) {
- case AOM_IMG_FMT_I420:
- return PIXEL_FORMAT_I420;
- case AOM_IMG_FMT_I422:
- return PIXEL_FORMAT_I422;
- case AOM_IMG_FMT_I444:
- return PIXEL_FORMAT_I444;
-
- case AOM_IMG_FMT_I42016:
- switch (img->bit_depth) {
- case 10:
- return PIXEL_FORMAT_YUV420P10;
- case 12:
- return PIXEL_FORMAT_YUV420P12;
- default:
- DLOG(ERROR) << "Unsupported bit depth: " << img->bit_depth;
- return PIXEL_FORMAT_UNKNOWN;
- }
-
- case AOM_IMG_FMT_I42216:
- switch (img->bit_depth) {
- case 10:
- return PIXEL_FORMAT_YUV422P10;
- case 12:
- return PIXEL_FORMAT_YUV422P12;
- default:
- DLOG(ERROR) << "Unsupported bit depth: " << img->bit_depth;
- return PIXEL_FORMAT_UNKNOWN;
- }
-
- case AOM_IMG_FMT_I44416:
- switch (img->bit_depth) {
- case 10:
- return PIXEL_FORMAT_YUV444P10;
- case 12:
- return PIXEL_FORMAT_YUV444P12;
- default:
- DLOG(ERROR) << "Unsupported bit depth: " << img->bit_depth;
- return PIXEL_FORMAT_UNKNOWN;
- }
-
- default:
- DLOG(ERROR) << "Unsupported pixel format: " << img->fmt;
- return PIXEL_FORMAT_UNKNOWN;
- }
-}
-
-static void SetColorSpaceForFrame(const aom_image_t* img,
- const VideoDecoderConfig& config,
- VideoFrame* frame) {
- gfx::ColorSpace::RangeID range = img->range == AOM_CR_FULL_RANGE
- ? gfx::ColorSpace::RangeID::FULL
- : gfx::ColorSpace::RangeID::LIMITED;
-
- // AOM color space defines match ISO 23001-8:2016 via ISO/IEC 23091-4/ITU-T
- // H.273.
- // http://av1-spec.argondesign.com/av1-spec/av1-spec.html#color-config-semantics
- media::VideoColorSpace color_space(img->cp, img->tc, img->mc, range);
-
- // If the bitstream doesn't specify a color space, use the one from the
- // container.
- if (!color_space.IsSpecified())
- color_space = config.color_space_info();
-
- frame->set_color_space(color_space.ToGfxColorSpace());
-}
-
-static int GetFrameBuffer(void* cb_priv,
- size_t min_size,
- aom_codec_frame_buffer* fb) {
- DCHECK(cb_priv);
- DCHECK(fb);
- FrameBufferPool* pool = static_cast<FrameBufferPool*>(cb_priv);
- fb->data = pool->GetFrameBuffer(min_size, &fb->priv);
- fb->size = min_size;
- return 0;
-}
-
-static int ReleaseFrameBuffer(void* cb_priv, aom_codec_frame_buffer* fb) {
- DCHECK(cb_priv);
- DCHECK(fb);
- if (!fb->priv)
- return -1;
-
- FrameBufferPool* pool = static_cast<FrameBufferPool*>(cb_priv);
- pool->ReleaseFrameBuffer(fb->priv);
- return 0;
-}
-
-AomVideoDecoder::AomVideoDecoder(MediaLog* media_log) : media_log_(media_log) {
- DETACH_FROM_THREAD(thread_checker_);
-}
-
-AomVideoDecoder::~AomVideoDecoder() {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- CloseDecoder();
-}
-
-std::string AomVideoDecoder::GetDisplayName() const {
- return "AomVideoDecoder";
-}
-
-void AomVideoDecoder::Initialize(const VideoDecoderConfig& config,
- bool /* low_delay */,
- CdmContext* /* cdm_context */,
- InitCB init_cb,
- const OutputCB& output_cb,
- const WaitingCB& /* waiting_cb */) {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- DCHECK(config.IsValidConfig());
-
- InitCB bound_init_cb = BindToCurrentLoop(std::move(init_cb));
- if (config.is_encrypted() || config.codec() != kCodecAV1) {
- std::move(bound_init_cb).Run(StatusCode::kDecoderFailedInitialization);
- return;
- }
-
- // Clear any previously initialized decoder.
- CloseDecoder();
-
- aom_codec_dec_cfg_t aom_config = {0};
- aom_config.w = config.coded_size().width();
- aom_config.h = config.coded_size().height();
- aom_config.threads = GetAomVideoDecoderThreadCount(config);
-
- // Misleading name. Required to ensure libaom doesn't output 8-bit samples
- // in uint16_t containers. Without this we have to manually pack the values
- // into uint8_t samples.
- aom_config.allow_lowbitdepth = 1;
-
- // TODO(dalecurtis, tguilbert): Move decoding off the media thread to the
- // offload thread via OffloadingVideoDecoder. https://crbug.com/867613
-
- std::unique_ptr<aom_codec_ctx> context = std::make_unique<aom_codec_ctx>();
- if (aom_codec_dec_init(context.get(), aom_codec_av1_dx(), &aom_config,
- 0 /* flags */) != AOM_CODEC_OK) {
- MEDIA_LOG(ERROR, media_log_) << "aom_codec_dec_init() failed: "
- << aom_codec_error(aom_decoder_.get());
- std::move(bound_init_cb).Run(StatusCode::kDecoderFailedInitialization);
- return;
- }
-
- // Setup codec for zero copy frames.
- if (!memory_pool_)
- memory_pool_ = new FrameBufferPool();
- if (aom_codec_set_frame_buffer_functions(
- context.get(), &GetFrameBuffer, &ReleaseFrameBuffer,
- memory_pool_.get()) != AOM_CODEC_OK) {
- DLOG(ERROR) << "Failed to configure external buffers. "
- << aom_codec_error(context.get());
- std::move(bound_init_cb).Run(StatusCode::kDecoderFailedInitialization);
- return;
- }
-
- config_ = config;
- state_ = DecoderState::kNormal;
- output_cb_ = BindToCurrentLoop(output_cb);
- aom_decoder_ = std::move(context);
- std::move(bound_init_cb).Run(OkStatus());
-}
-
-void AomVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
- DecodeCB decode_cb) {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- DCHECK(buffer);
- DCHECK(decode_cb);
- DCHECK_NE(state_, DecoderState::kUninitialized)
- << "Called Decode() before successful Initialize()";
-
- DecodeCB bound_decode_cb = BindToCurrentLoop(std::move(decode_cb));
-
- if (state_ == DecoderState::kError) {
- std::move(bound_decode_cb).Run(DecodeStatus::DECODE_ERROR);
- return;
- }
-
- // No need to flush since we retrieve all available frames after a packet is
- // provided.
- if (buffer->end_of_stream()) {
- DCHECK_EQ(state_, DecoderState::kNormal);
- state_ = DecoderState::kDecodeFinished;
- std::move(bound_decode_cb).Run(DecodeStatus::OK);
- return;
- }
-
- if (!DecodeBuffer(buffer.get())) {
- state_ = DecoderState::kError;
- std::move(bound_decode_cb).Run(DecodeStatus::DECODE_ERROR);
- return;
- }
-
- // VideoDecoderShim expects |decode_cb| call after |output_cb_|.
- std::move(bound_decode_cb).Run(DecodeStatus::OK);
-}
-
-void AomVideoDecoder::Reset(base::OnceClosure closure) {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- state_ = DecoderState::kNormal;
- timestamps_.clear();
- base::SequencedTaskRunnerHandle::Get()->PostTask(FROM_HERE,
- std::move(closure));
-}
-
-void AomVideoDecoder::CloseDecoder() {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- if (!aom_decoder_)
- return;
- aom_codec_destroy(aom_decoder_.get());
- aom_decoder_.reset();
-
- if (memory_pool_) {
- memory_pool_->Shutdown();
- memory_pool_ = nullptr;
- }
-}
-
-bool AomVideoDecoder::DecodeBuffer(const DecoderBuffer* buffer) {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- DCHECK(!buffer->end_of_stream());
-
- timestamps_.push_back(buffer->timestamp());
- if (aom_codec_decode(aom_decoder_.get(), buffer->data(), buffer->data_size(),
- nullptr) != AOM_CODEC_OK) {
- const char* detail = aom_codec_error_detail(aom_decoder_.get());
- MEDIA_LOG(ERROR, media_log_)
- << "aom_codec_decode() failed: " << aom_codec_error(aom_decoder_.get())
- << (detail ? ", " : "") << (detail ? detail : "")
- << ", input: " << buffer->AsHumanReadableString();
- return false;
- }
-
- aom_codec_iter_t iter = nullptr;
- while (aom_image_t* img = aom_codec_get_frame(aom_decoder_.get(), &iter)) {
- auto frame = CopyImageToVideoFrame(img);
- if (!frame) {
- MEDIA_LOG(DEBUG, media_log_)
- << "Failed to produce video frame from aom_image_t.";
- return false;
- }
-
- // TODO(dalecurtis): Is this true even for low resolutions?
- frame->metadata()->power_efficient = false;
-
- // Ensure the frame memory is returned to the MemoryPool upon discard.
- frame->AddDestructionObserver(
- memory_pool_->CreateFrameCallback(img->fb_priv));
-
- SetColorSpaceForFrame(img, config_, frame.get());
- output_cb_.Run(std::move(frame));
- }
-
- return true;
-}
-
-scoped_refptr<VideoFrame> AomVideoDecoder::CopyImageToVideoFrame(
- const struct aom_image* img) {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
-
- VideoPixelFormat pixel_format = AomImgFmtToVideoPixelFormat(img);
- if (pixel_format == PIXEL_FORMAT_UNKNOWN)
- return nullptr;
-
- // Pull the expected timestamp from the front of the queue.
- DCHECK(!timestamps_.empty());
- const base::TimeDelta timestamp = timestamps_.front();
- timestamps_.pop_front();
-
- const gfx::Rect visible_rect(img->d_w, img->d_h);
- return VideoFrame::WrapExternalYuvData(
- pixel_format, visible_rect.size(), visible_rect,
- GetNaturalSize(visible_rect, config_.GetPixelAspectRatio()),
- img->stride[AOM_PLANE_Y], img->stride[AOM_PLANE_U],
- img->stride[AOM_PLANE_V], img->planes[AOM_PLANE_Y],
- img->planes[AOM_PLANE_U], img->planes[AOM_PLANE_V], timestamp);
-}
-
-} // namespace media
diff --git a/chromium/media/filters/aom_video_decoder.h b/chromium/media/filters/aom_video_decoder.h
deleted file mode 100644
index 42589cece6f..00000000000
--- a/chromium/media/filters/aom_video_decoder.h
+++ /dev/null
@@ -1,89 +0,0 @@
-// Copyright 2017 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef MEDIA_FILTERS_AOM_VIDEO_DECODER_H_
-#define MEDIA_FILTERS_AOM_VIDEO_DECODER_H_
-
-#include "base/callback_forward.h"
-#include "base/containers/circular_deque.h"
-#include "base/macros.h"
-#include "base/threading/thread_checker.h"
-#include "media/base/video_decoder.h"
-#include "media/base/video_decoder_config.h"
-#include "media/base/video_frame.h"
-
-struct aom_codec_ctx;
-struct aom_image;
-
-namespace media {
-class FrameBufferPool;
-class MediaLog;
-
-// libaom video decoder wrapper.
-class MEDIA_EXPORT AomVideoDecoder : public VideoDecoder {
- public:
- explicit AomVideoDecoder(MediaLog* media_log);
- ~AomVideoDecoder() override;
-
- // VideoDecoder implementation.
- std::string GetDisplayName() const override;
- void Initialize(const VideoDecoderConfig& config,
- bool low_delay,
- CdmContext* cdm_context,
- InitCB init_cb,
- const OutputCB& output_cb,
- const WaitingCB& waiting_cb) override;
- void Decode(scoped_refptr<DecoderBuffer> buffer, DecodeCB decode_cb) override;
- void Reset(base::OnceClosure closure) override;
-
- private:
- enum class DecoderState {
- kUninitialized,
- kNormal,
- kFlushCodec,
- kDecodeFinished,
- kError
- };
-
- // Releases any configured decoder and clears |aom_decoder_|.
- void CloseDecoder();
-
- // Invokes the decoder and calls |output_cb_| for any returned frames.
- bool DecodeBuffer(const DecoderBuffer* buffer);
-
- // Copies the contents of |img| into a new VideoFrame; attempts to reuse
- // previously allocated memory via |frame_pool_| for performance.
- scoped_refptr<VideoFrame> CopyImageToVideoFrame(const struct aom_image* img);
-
- THREAD_CHECKER(thread_checker_);
-
- // Used to report error messages to the client.
- MediaLog* const media_log_;
-
- // Current decoder state. Used to ensure methods are called as expected.
- DecoderState state_ = DecoderState::kUninitialized;
-
- // Callback given during Initialize() used for delivering decoded frames.
- OutputCB output_cb_;
-
- // The configuration passed to Initialize(), saved since some fields are
- // needed to annotate video frames after decoding.
- VideoDecoderConfig config_;
-
- // Pool used for memory efficiency when vending frames from the decoder.
- scoped_refptr<FrameBufferPool> memory_pool_;
-
- // Timestamps are FIFO for libaom decoding.
- base::circular_deque<base::TimeDelta> timestamps_;
-
- // The allocated decoder; null before Initialize() and anytime after
- // CloseDecoder().
- std::unique_ptr<aom_codec_ctx> aom_decoder_;
-
- DISALLOW_COPY_AND_ASSIGN(AomVideoDecoder);
-};
-
-} // namespace media
-
-#endif // MEDIA_FILTERS_AOM_VIDEO_DECODER_H_
diff --git a/chromium/media/filters/aom_video_decoder_unittest.cc b/chromium/media/filters/aom_video_decoder_unittest.cc
deleted file mode 100644
index 3de1a5440c0..00000000000
--- a/chromium/media/filters/aom_video_decoder_unittest.cc
+++ /dev/null
@@ -1,288 +0,0 @@
-// Copyright 2017 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include <memory>
-#include <string>
-#include <utility>
-#include <vector>
-
-#include "base/bind.h"
-#include "base/bind_helpers.h"
-#include "base/run_loop.h"
-#include "base/test/task_environment.h"
-#include "build/build_config.h"
-#include "media/base/decoder_buffer.h"
-#include "media/base/limits.h"
-#include "media/base/mock_media_log.h"
-#include "media/base/test_data_util.h"
-#include "media/base/test_helpers.h"
-#include "media/base/video_frame.h"
-#include "media/ffmpeg/ffmpeg_common.h"
-#include "media/filters/aom_video_decoder.h"
-#include "media/filters/in_memory_url_protocol.h"
-#include "testing/gmock/include/gmock/gmock.h"
-
-using ::testing::_;
-
-namespace media {
-
-namespace {
-
-MATCHER(ContainsDecoderErrorLog, "") {
- return CONTAINS_STRING(arg, "aom_codec_decode() failed");
-}
-
-} // namespace
-
-class AomVideoDecoderTest : public testing::Test {
- public:
- AomVideoDecoderTest()
- : decoder_(new AomVideoDecoder(&media_log_)),
- i_frame_buffer_(ReadTestDataFile("av1-I-frame-320x240")) {}
-
- ~AomVideoDecoderTest() override { Destroy(); }
-
- void Initialize() {
- InitializeWithConfig(TestVideoConfig::Normal(kCodecAV1));
- }
-
- void InitializeWithConfigWithResult(const VideoDecoderConfig& config,
- bool success) {
- decoder_->Initialize(config, false, nullptr,
- base::BindOnce(
- [](bool success, Status status) {
- EXPECT_EQ(status.is_ok(), success);
- },
- success),
- base::BindRepeating(&AomVideoDecoderTest::FrameReady,
- base::Unretained(this)),
- base::NullCallback());
- base::RunLoop().RunUntilIdle();
- }
-
- void InitializeWithConfig(const VideoDecoderConfig& config) {
- InitializeWithConfigWithResult(config, true);
- }
-
- void Reinitialize() {
- InitializeWithConfig(TestVideoConfig::Large(kCodecAV1));
- }
-
- void Reset() {
- decoder_->Reset(NewExpectedClosure());
- base::RunLoop().RunUntilIdle();
- }
-
- void Destroy() {
- decoder_.reset();
- base::RunLoop().RunUntilIdle();
- }
-
- // Sets up expectations and actions to put AomVideoDecoder in an active
- // decoding state.
- void ExpectDecodingState() {
- EXPECT_EQ(DecodeStatus::OK, DecodeSingleFrame(i_frame_buffer_));
- ASSERT_EQ(1U, output_frames_.size());
- }
-
- // Sets up expectations and actions to put AomVideoDecoder in an end
- // of stream state.
- void ExpectEndOfStreamState() {
- EXPECT_EQ(DecodeStatus::OK,
- DecodeSingleFrame(DecoderBuffer::CreateEOSBuffer()));
- ASSERT_FALSE(output_frames_.empty());
- }
-
- using InputBuffers = std::vector<scoped_refptr<DecoderBuffer>>;
- using OutputFrames = std::vector<scoped_refptr<VideoFrame>>;
-
- // Decodes all buffers in |input_buffers| and push all successfully decoded
- // output frames into |output_frames|. Returns the last decode status returned
- // by the decoder.
- DecodeStatus DecodeMultipleFrames(const InputBuffers& input_buffers) {
- for (auto iter = input_buffers.begin(); iter != input_buffers.end();
- ++iter) {
- DecodeStatus status = Decode(*iter);
- switch (status) {
- case DecodeStatus::OK:
- break;
- case DecodeStatus::ABORTED:
- NOTREACHED();
- FALLTHROUGH;
- case DecodeStatus::DECODE_ERROR:
- DCHECK(output_frames_.empty());
- return status;
- }
- }
- return DecodeStatus::OK;
- }
-
- // Decodes the single compressed frame in |buffer|.
- DecodeStatus DecodeSingleFrame(scoped_refptr<DecoderBuffer> buffer) {
- InputBuffers input_buffers;
- input_buffers.push_back(std::move(buffer));
- return DecodeMultipleFrames(input_buffers);
- }
-
- // Decodes |i_frame_buffer_| and then decodes the data contained in the file
- // named |test_file_name|. This function expects both buffers to decode to
- // frames that are the same size.
- void DecodeIFrameThenTestFile(const std::string& test_file_name,
- const gfx::Size& expected_size) {
- Initialize();
- scoped_refptr<DecoderBuffer> buffer = ReadTestDataFile(test_file_name);
-
- InputBuffers input_buffers;
- input_buffers.push_back(i_frame_buffer_);
- input_buffers.push_back(buffer);
- input_buffers.push_back(DecoderBuffer::CreateEOSBuffer());
-
- DecodeStatus status = DecodeMultipleFrames(input_buffers);
-
- EXPECT_EQ(DecodeStatus::OK, status);
- ASSERT_EQ(2U, output_frames_.size());
-
- gfx::Size original_size = TestVideoConfig::NormalCodedSize();
- EXPECT_EQ(original_size.width(),
- output_frames_[0]->visible_rect().size().width());
- EXPECT_EQ(original_size.height(),
- output_frames_[0]->visible_rect().size().height());
- EXPECT_EQ(expected_size.width(),
- output_frames_[1]->visible_rect().size().width());
- EXPECT_EQ(expected_size.height(),
- output_frames_[1]->visible_rect().size().height());
- }
-
- DecodeStatus Decode(scoped_refptr<DecoderBuffer> buffer) {
- DecodeStatus status;
- EXPECT_CALL(*this, DecodeDone(_)).WillOnce(testing::SaveArg<0>(&status));
-
- decoder_->Decode(std::move(buffer),
- base::BindOnce(&AomVideoDecoderTest::DecodeDone,
- base::Unretained(this)));
- base::RunLoop().RunUntilIdle();
-
- return status;
- }
-
- void FrameReady(scoped_refptr<VideoFrame> frame) {
- DCHECK(!frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
- output_frames_.push_back(std::move(frame));
- }
-
- MOCK_METHOD1(DecodeDone, void(DecodeStatus));
-
- testing::StrictMock<MockMediaLog> media_log_;
-
- base::test::SingleThreadTaskEnvironment task_environment_;
- std::unique_ptr<AomVideoDecoder> decoder_;
-
- scoped_refptr<DecoderBuffer> i_frame_buffer_;
- OutputFrames output_frames_;
-
- private:
- DISALLOW_COPY_AND_ASSIGN(AomVideoDecoderTest);
-};
-
-TEST_F(AomVideoDecoderTest, Initialize_Normal) {
- Initialize();
-}
-
-TEST_F(AomVideoDecoderTest, Reinitialize_Normal) {
- Initialize();
- Reinitialize();
-}
-
-TEST_F(AomVideoDecoderTest, Reinitialize_AfterDecodeFrame) {
- Initialize();
- ExpectDecodingState();
- Reinitialize();
-}
-
-TEST_F(AomVideoDecoderTest, Reinitialize_AfterReset) {
- Initialize();
- ExpectDecodingState();
- Reset();
- Reinitialize();
-}
-
-TEST_F(AomVideoDecoderTest, DecodeFrame_Normal) {
- Initialize();
-
- // Simulate decoding a single frame.
- EXPECT_EQ(DecodeStatus::OK, DecodeSingleFrame(i_frame_buffer_));
- ASSERT_EQ(1U, output_frames_.size());
-}
-
-// Decode |i_frame_buffer_| and then a frame with a larger width and verify
-// the output size was adjusted.
-// TODO(dalecurtis): Get an I-frame from a larger video.
-TEST_F(AomVideoDecoderTest, DISABLED_DecodeFrame_LargerWidth) {
- DecodeIFrameThenTestFile("av1-I-frame-320x240", gfx::Size(1280, 720));
-}
-
-// Decode a VP9 frame which should trigger a decoder error.
-TEST_F(AomVideoDecoderTest, DecodeFrame_Error) {
- Initialize();
- EXPECT_MEDIA_LOG(ContainsDecoderErrorLog());
- DecodeSingleFrame(ReadTestDataFile("vp9-I-frame-320x240"));
-}
-
-// Test resetting when decoder has initialized but not decoded.
-TEST_F(AomVideoDecoderTest, Reset_Initialized) {
- Initialize();
- Reset();
-}
-
-// Test resetting when decoder has decoded single frame.
-TEST_F(AomVideoDecoderTest, Reset_Decoding) {
- Initialize();
- ExpectDecodingState();
- Reset();
-}
-
-// Test resetting when decoder has hit end of stream.
-TEST_F(AomVideoDecoderTest, Reset_EndOfStream) {
- Initialize();
- ExpectDecodingState();
- ExpectEndOfStreamState();
- Reset();
-}
-
-// Test destruction when decoder has initialized but not decoded.
-TEST_F(AomVideoDecoderTest, Destroy_Initialized) {
- Initialize();
- Destroy();
-}
-
-// Test destruction when decoder has decoded single frame.
-TEST_F(AomVideoDecoderTest, Destroy_Decoding) {
- Initialize();
- ExpectDecodingState();
- Destroy();
-}
-
-// Test destruction when decoder has hit end of stream.
-TEST_F(AomVideoDecoderTest, Destroy_EndOfStream) {
- Initialize();
- ExpectDecodingState();
- ExpectEndOfStreamState();
- Destroy();
-}
-
-TEST_F(AomVideoDecoderTest, FrameValidAfterPoolDestruction) {
- Initialize();
- Decode(i_frame_buffer_);
- Destroy();
-
- ASSERT_FALSE(output_frames_.empty());
-
- // Write to the Y plane. The memory tools should detect a
- // use-after-free if the storage was actually removed by pool destruction.
- memset(output_frames_.front()->data(VideoFrame::kYPlane), 0xff,
- output_frames_.front()->rows(VideoFrame::kYPlane) *
- output_frames_.front()->stride(VideoFrame::kYPlane));
-}
-
-} // namespace media
diff --git a/chromium/media/filters/audio_decoder_stream_unittest.cc b/chromium/media/filters/audio_decoder_stream_unittest.cc
index f842fbf48eb..483b0fa0b6d 100644
--- a/chromium/media/filters/audio_decoder_stream_unittest.cc
+++ b/chromium/media/filters/audio_decoder_stream_unittest.cc
@@ -6,7 +6,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/run_loop.h"
#include "base/test/gmock_callback_support.h"
#include "base/test/task_environment.h"
diff --git a/chromium/media/filters/audio_decoder_unittest.cc b/chromium/media/filters/audio_decoder_unittest.cc
index 72f5e2a9c60..c02f516e192 100644
--- a/chromium/media/filters/audio_decoder_unittest.cc
+++ b/chromium/media/filters/audio_decoder_unittest.cc
@@ -8,7 +8,7 @@
#include <vector>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/containers/circular_deque.h"
#include "base/format_macros.h"
#include "base/hash/md5.h"
@@ -177,8 +177,8 @@ class AudioDecoderTest
base::RunLoop run_loop;
decoder_->Decode(
std::move(buffer),
- base::Bind(&AudioDecoderTest::DecodeFinished, base::Unretained(this),
- run_loop.QuitClosure()));
+ base::BindOnce(&AudioDecoderTest::DecodeFinished,
+ base::Unretained(this), run_loop.QuitClosure()));
run_loop.Run();
ASSERT_FALSE(pending_decode_);
}
@@ -246,15 +246,15 @@ class AudioDecoderTest
void InitializeDecoderWithResult(const AudioDecoderConfig& config,
bool success) {
- decoder_->Initialize(
- config, nullptr,
- base::BindOnce(
- [](bool success, Status status) {
- EXPECT_EQ(status.is_ok(), success);
- },
- success),
- base::Bind(&AudioDecoderTest::OnDecoderOutput, base::Unretained(this)),
- base::DoNothing());
+ decoder_->Initialize(config, nullptr,
+ base::BindOnce(
+ [](bool success, Status status) {
+ EXPECT_EQ(status.is_ok(), success);
+ },
+ success),
+ base::BindRepeating(&AudioDecoderTest::OnDecoderOutput,
+ base::Unretained(this)),
+ base::DoNothing());
base::RunLoop().RunUntilIdle();
}
diff --git a/chromium/media/filters/audio_video_metadata_extractor.cc b/chromium/media/filters/audio_video_metadata_extractor.cc
index 77b3bfcf202..185819eb936 100644
--- a/chromium/media/filters/audio_video_metadata_extractor.cc
+++ b/chromium/media/filters/audio_video_metadata_extractor.cc
@@ -78,7 +78,8 @@ bool AudioVideoMetadataExtractor::Extract(DataSource* source,
DCHECK(!extracted_);
bool read_ok = true;
- media::BlockingUrlProtocol protocol(source, base::Bind(&OnError, &read_ok));
+ media::BlockingUrlProtocol protocol(source,
+ base::BindRepeating(&OnError, &read_ok));
media::FFmpegGlue glue(&protocol);
AVFormatContext* format_context = glue.format_context();
diff --git a/chromium/media/filters/blocking_url_protocol.cc b/chromium/media/filters/blocking_url_protocol.cc
index 0cf42ec3a67..add92a51165 100644
--- a/chromium/media/filters/blocking_url_protocol.cc
+++ b/chromium/media/filters/blocking_url_protocol.cc
@@ -15,7 +15,7 @@
namespace media {
BlockingUrlProtocol::BlockingUrlProtocol(DataSource* data_source,
- const base::Closure& error_cb)
+ const base::RepeatingClosure& error_cb)
: data_source_(data_source),
error_cb_(error_cb),
is_streaming_(data_source_->IsStreaming()),
diff --git a/chromium/media/filters/blocking_url_protocol.h b/chromium/media/filters/blocking_url_protocol.h
index 7e6a1eb4f31..be5cdd0bc47 100644
--- a/chromium/media/filters/blocking_url_protocol.h
+++ b/chromium/media/filters/blocking_url_protocol.h
@@ -25,7 +25,8 @@ class MEDIA_EXPORT BlockingUrlProtocol : public FFmpegURLProtocol {
public:
// Implements FFmpegURLProtocol using the given |data_source|. |error_cb| is
// fired any time DataSource::Read() returns an error.
- BlockingUrlProtocol(DataSource* data_source, const base::Closure& error_cb);
+ BlockingUrlProtocol(DataSource* data_source,
+ const base::RepeatingClosure& error_cb);
virtual ~BlockingUrlProtocol();
// Aborts any pending reads by returning a read error. After this method
@@ -51,7 +52,7 @@ class MEDIA_EXPORT BlockingUrlProtocol : public FFmpegURLProtocol {
base::Lock data_source_lock_;
DataSource* data_source_;
- base::Closure error_cb_;
+ base::RepeatingClosure error_cb_;
const bool is_streaming_;
// Used to unblock the thread during shutdown and when reads complete.
diff --git a/chromium/media/filters/blocking_url_protocol_unittest.cc b/chromium/media/filters/blocking_url_protocol_unittest.cc
index 362e0b772bc..8d7c9a7fa47 100644
--- a/chromium/media/filters/blocking_url_protocol_unittest.cc
+++ b/chromium/media/filters/blocking_url_protocol_unittest.cc
@@ -4,6 +4,8 @@
#include <stdint.h>
+#include <memory>
+
#include "base/bind.h"
#include "base/files/file_path.h"
#include "base/macros.h"
@@ -22,8 +24,8 @@ class BlockingUrlProtocolTest : public testing::Test {
BlockingUrlProtocolTest()
: url_protocol_(new BlockingUrlProtocol(
&data_source_,
- base::Bind(&BlockingUrlProtocolTest::OnDataSourceError,
- base::Unretained(this)))) {
+ base::BindRepeating(&BlockingUrlProtocolTest::OnDataSourceError,
+ base::Unretained(this)))) {
CHECK(data_source_.Initialize(GetTestDataFilePath("bear-320x240.webm")));
}
@@ -113,9 +115,10 @@ TEST_F(BlockingUrlProtocolTest, IsStreaming) {
EXPECT_FALSE(url_protocol_->IsStreaming());
data_source_.force_streaming_for_testing();
- url_protocol_.reset(new BlockingUrlProtocol(
- &data_source_, base::Bind(&BlockingUrlProtocolTest::OnDataSourceError,
- base::Unretained(this))));
+ url_protocol_ = std::make_unique<BlockingUrlProtocol>(
+ &data_source_,
+ base::BindRepeating(&BlockingUrlProtocolTest::OnDataSourceError,
+ base::Unretained(this)));
EXPECT_TRUE(data_source_.IsStreaming());
EXPECT_TRUE(url_protocol_->IsStreaming());
}
diff --git a/chromium/media/filters/chunk_demuxer.cc b/chromium/media/filters/chunk_demuxer.cc
index db2ba40a41c..9f0f2cc5999 100644
--- a/chromium/media/filters/chunk_demuxer.cc
+++ b/chromium/media/filters/chunk_demuxer.cc
@@ -10,7 +10,6 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback_helpers.h"
#include "base/location.h"
#include "base/macros.h"
diff --git a/chromium/media/filters/chunk_demuxer_unittest.cc b/chromium/media/filters/chunk_demuxer_unittest.cc
index 2856b0104cc..2ed13c82723 100644
--- a/chromium/media/filters/chunk_demuxer_unittest.cc
+++ b/chromium/media/filters/chunk_demuxer_unittest.cc
@@ -8,11 +8,12 @@
#include <stdint.h>
#include <algorithm>
+#include <memory>
#include <queue>
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/command_line.h"
#include "base/run_loop.h"
#include "base/stl_util.h"
@@ -177,22 +178,23 @@ class ChunkDemuxerTest : public ::testing::Test {
ChunkDemuxerTest()
: did_progress_(false),
append_window_end_for_next_append_(kInfiniteDuration) {
- init_segment_received_cb_ = base::Bind(
+ init_segment_received_cb_ = base::BindRepeating(
&ChunkDemuxerTest::InitSegmentReceived, base::Unretained(this));
CreateNewDemuxer();
}
void CreateNewDemuxer() {
- base::Closure open_cb =
- base::Bind(&ChunkDemuxerTest::DemuxerOpened, base::Unretained(this));
- base::Closure progress_cb =
- base::Bind(&ChunkDemuxerTest::OnProgress, base::Unretained(this));
+ base::OnceClosure open_cb = base::BindOnce(&ChunkDemuxerTest::DemuxerOpened,
+ base::Unretained(this));
+ base::RepeatingClosure progress_cb = base::BindRepeating(
+ &ChunkDemuxerTest::OnProgress, base::Unretained(this));
Demuxer::EncryptedMediaInitDataCB encrypted_media_init_data_cb =
base::BindRepeating(&ChunkDemuxerTest::OnEncryptedMediaInitData,
base::Unretained(this));
EXPECT_MEDIA_LOG(ChunkDemuxerCtor());
- demuxer_.reset(new ChunkDemuxer(open_cb, progress_cb,
- encrypted_media_init_data_cb, &media_log_));
+ demuxer_ = std::make_unique<ChunkDemuxer>(std::move(open_cb), progress_cb,
+ encrypted_media_init_data_cb,
+ &media_log_);
}
virtual ~ChunkDemuxerTest() {
@@ -4428,7 +4430,7 @@ void DisableAndEnableDemuxerTracks(
task_environment->RunUntilIdle();
}
-}
+} // namespace
TEST_F(ChunkDemuxerTest, StreamStatusNotifications) {
ASSERT_TRUE(InitDemuxer(HAS_AUDIO | HAS_VIDEO));
diff --git a/chromium/media/filters/dav1d_video_decoder.cc b/chromium/media/filters/dav1d_video_decoder.cc
index ca3deeba97d..bb31801e439 100644
--- a/chromium/media/filters/dav1d_video_decoder.cc
+++ b/chromium/media/filters/dav1d_video_decoder.cc
@@ -9,9 +9,9 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/bits.h"
#include "base/callback.h"
+#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/strings/stringprintf.h"
#include "base/threading/sequenced_task_runner_handle.h"
diff --git a/chromium/media/filters/dav1d_video_decoder.h b/chromium/media/filters/dav1d_video_decoder.h
index 67efae99e1e..22c4e1fd381 100644
--- a/chromium/media/filters/dav1d_video_decoder.h
+++ b/chromium/media/filters/dav1d_video_decoder.h
@@ -10,7 +10,7 @@
#include "base/callback_forward.h"
#include "base/macros.h"
#include "base/memory/ref_counted_memory.h"
-#include "base/threading/thread_checker.h"
+#include "base/sequence_checker.h"
#include "media/base/video_decoder.h"
#include "media/base/video_decoder_config.h"
#include "media/base/video_frame.h"
diff --git a/chromium/media/filters/dav1d_video_decoder_unittest.cc b/chromium/media/filters/dav1d_video_decoder_unittest.cc
index 93b53b79867..19572a06873 100644
--- a/chromium/media/filters/dav1d_video_decoder_unittest.cc
+++ b/chromium/media/filters/dav1d_video_decoder_unittest.cc
@@ -8,7 +8,7 @@
#include <vector>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/hash/md5.h"
#include "base/run_loop.h"
#include "base/test/task_environment.h"
diff --git a/chromium/media/filters/decoder_selector.cc b/chromium/media/filters/decoder_selector.cc
index 65901d0e0bf..adba332ab89 100644
--- a/chromium/media/filters/decoder_selector.cc
+++ b/chromium/media/filters/decoder_selector.cc
@@ -13,10 +13,11 @@
#include "base/location.h"
#include "base/logging.h"
#include "base/metrics/histogram_functions.h"
-#include "base/single_thread_task_runner.h"
+#include "base/sequenced_task_runner.h"
#include "base/strings/stringprintf.h"
#include "base/trace_event/trace_event.h"
#include "build/build_config.h"
+#include "build/chromeos_buildflags.h"
#include "media/base/audio_decoder.h"
#include "media/base/cdm_context.h"
#include "media/base/demuxer_stream.h"
@@ -42,7 +43,7 @@ DecoderPriority ResolutionBasedDecoderPriority(const VideoDecoderConfig& config,
const VideoDecoder& decoder) {
#if defined(OS_ANDROID)
constexpr auto kSoftwareDecoderHeightCutoff = 360;
-#elif defined(OS_CHROMEOS)
+#elif BUILDFLAG(IS_ASH)
constexpr auto kSoftwareDecoderHeightCutoff = 360;
#else
constexpr auto kSoftwareDecoderHeightCutoff = 720;
@@ -94,19 +95,20 @@ void SetDefaultDecoderPriorityCB(AudioDecoderSelector::DecoderPriorityCB* out) {
template <DemuxerStream::Type StreamType>
DecoderSelector<StreamType>::DecoderSelector(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
CreateDecodersCB create_decoders_cb,
MediaLog* media_log)
: task_runner_(std::move(task_runner)),
create_decoders_cb_(std::move(create_decoders_cb)),
media_log_(media_log) {
+ DETACH_FROM_SEQUENCE(sequence_checker_);
SetDefaultDecoderPriorityCB(&decoder_priority_cb_);
}
template <DemuxerStream::Type StreamType>
DecoderSelector<StreamType>::~DecoderSelector() {
DVLOG(2) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (select_decoder_cb_)
ReturnNullDecoder();
}
@@ -131,7 +133,7 @@ void DecoderSelector<StreamType>::SelectDecoder(
SelectDecoderCB select_decoder_cb,
typename Decoder::OutputCB output_cb) {
DVLOG(2) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(select_decoder_cb);
DCHECK(!select_decoder_cb_);
select_decoder_cb_ = std::move(select_decoder_cb);
@@ -162,7 +164,7 @@ void DecoderSelector<StreamType>::SelectDecoder(
template <DemuxerStream::Type StreamType>
void DecoderSelector<StreamType>::FinalizeDecoderSelection() {
DVLOG(2) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(!select_decoder_cb_);
is_selecting_decoders_ = false;
@@ -196,7 +198,7 @@ void DecoderSelector<StreamType>::FinalizeDecoderSelection() {
template <DemuxerStream::Type StreamType>
void DecoderSelector<StreamType>::NotifyConfigChanged() {
DVLOG(2) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
is_selecting_for_config_change_ = true;
@@ -211,7 +213,7 @@ template <DemuxerStream::Type StreamType>
void DecoderSelector<StreamType>::PrependDecoder(
std::unique_ptr<Decoder> decoder) {
DVLOG(2) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
// Decoders inserted directly should be given priority over those returned by
// |create_decoders_cb_|.
@@ -242,7 +244,7 @@ void DecoderSelector<StreamType>::CreateDecoders() {
template <DemuxerStream::Type StreamType>
void DecoderSelector<StreamType>::InitializeDecoder() {
DVLOG(2) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(!decoder_);
if (decoders_.empty()) {
@@ -277,7 +279,7 @@ template <DemuxerStream::Type StreamType>
void DecoderSelector<StreamType>::OnDecoderInitializeDone(Status status) {
DVLOG(2) << __func__ << ": " << decoder_->GetDisplayName()
<< " success=" << std::hex << status.code();
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (!status.is_ok()) {
// TODO(tmathmeyer) this was too noisy in media log. Batch all the logs
@@ -298,7 +300,7 @@ void DecoderSelector<StreamType>::OnDecoderInitializeDone(Status status) {
template <DemuxerStream::Type StreamType>
void DecoderSelector<StreamType>::ReturnNullDecoder() {
DVLOG(1) << __func__ << ": No decoder selected";
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
decrypting_demuxer_stream_.reset();
decoder_.reset();
@@ -328,7 +330,7 @@ template <DemuxerStream::Type StreamType>
void DecoderSelector<StreamType>::OnDecryptingDemuxerStreamInitializeDone(
PipelineStatus status) {
DVLOG(2) << __func__ << ": status=" << status;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (status != PIPELINE_OK) {
// Since we already tried every potential decoder without DDS, give up.
diff --git a/chromium/media/filters/decoder_selector.h b/chromium/media/filters/decoder_selector.h
index 55b4242076a..d97df5d9002 100644
--- a/chromium/media/filters/decoder_selector.h
+++ b/chromium/media/filters/decoder_selector.h
@@ -12,6 +12,7 @@
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
+#include "base/sequence_checker.h"
#include "base/time/time.h"
#include "media/base/demuxer_stream.h"
#include "media/base/pipeline_status.h"
@@ -19,7 +20,7 @@
#include "media/filters/decoder_stream_traits.h"
namespace base {
-class SingleThreadTaskRunner;
+class SequencedTaskRunner;
}
namespace media {
@@ -78,7 +79,7 @@ class MEDIA_EXPORT DecoderSelector {
base::OnceCallback<void(std::unique_ptr<Decoder>,
std::unique_ptr<DecryptingDemuxerStream>)>;
- DecoderSelector(scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ DecoderSelector(scoped_refptr<base::SequencedTaskRunner> task_runner,
CreateDecodersCB create_decoders_cb,
MediaLog* media_log);
@@ -133,7 +134,9 @@ class MEDIA_EXPORT DecoderSelector {
void RunSelectDecoderCB();
void FilterAndSortAvailableDecoders();
- scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
+ scoped_refptr<base::SequencedTaskRunner> task_runner_;
+ SEQUENCE_CHECKER(sequence_checker_);
+
CreateDecodersCB create_decoders_cb_;
DecoderPriorityCB decoder_priority_cb_;
MediaLog* media_log_;
diff --git a/chromium/media/filters/decoder_stream.cc b/chromium/media/filters/decoder_stream.cc
index f1f45c939df..c4b4c806d6c 100644
--- a/chromium/media/filters/decoder_stream.cc
+++ b/chromium/media/filters/decoder_stream.cc
@@ -11,7 +11,7 @@
#include "base/feature_list.h"
#include "base/location.h"
#include "base/logging.h"
-#include "base/single_thread_task_runner.h"
+#include "base/sequenced_task_runner.h"
#include "base/trace_event/trace_event.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/cdm_context.h"
@@ -99,17 +99,17 @@ const char* GetStatusString(
template <DemuxerStream::Type StreamType>
DecoderStream<StreamType>::DecoderStream(
std::unique_ptr<DecoderStreamTraits<StreamType>> traits,
- const scoped_refptr<base::SingleThreadTaskRunner>& task_runner,
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
CreateDecodersCB create_decoders_cb,
MediaLog* media_log)
: traits_(std::move(traits)),
- task_runner_(task_runner),
+ task_runner_(std::move(task_runner)),
media_log_(media_log),
state_(STATE_UNINITIALIZED),
stream_(nullptr),
cdm_context_(nullptr),
decoder_produced_a_frame_(false),
- decoder_selector_(task_runner, std::move(create_decoders_cb), media_log),
+ decoder_selector_(task_runner_, std::move(create_decoders_cb), media_log),
decoding_eos_(false),
preparing_output_(false),
pending_decode_requests_(0),
@@ -122,7 +122,7 @@ DecoderStream<StreamType>::DecoderStream(
template <DemuxerStream::Type StreamType>
DecoderStream<StreamType>::~DecoderStream() {
FUNCTION_DVLOG(1);
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
if (init_cb_) {
task_runner_->PostTask(FROM_HERE,
@@ -154,7 +154,7 @@ void DecoderStream<StreamType>::Initialize(DemuxerStream* stream,
StatisticsCB statistics_cb,
WaitingCB waiting_cb) {
FUNCTION_DVLOG(1);
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
DCHECK_EQ(state_, STATE_UNINITIALIZED);
DCHECK(!init_cb_);
DCHECK(init_cb);
@@ -178,7 +178,7 @@ void DecoderStream<StreamType>::Initialize(DemuxerStream* stream,
template <DemuxerStream::Type StreamType>
void DecoderStream<StreamType>::Read(ReadCB read_cb) {
FUNCTION_DVLOG(3);
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
DCHECK(state_ != STATE_UNINITIALIZED && state_ != STATE_INITIALIZING)
<< state_;
// No two reads in the flight at any time.
@@ -216,7 +216,7 @@ void DecoderStream<StreamType>::Read(ReadCB read_cb) {
template <DemuxerStream::Type StreamType>
void DecoderStream<StreamType>::Reset(base::OnceClosure closure) {
FUNCTION_DVLOG(2);
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
DCHECK_NE(state_, STATE_UNINITIALIZED);
DCHECK(!reset_cb_);
@@ -265,14 +265,14 @@ void DecoderStream<StreamType>::Reset(base::OnceClosure closure) {
template <DemuxerStream::Type StreamType>
bool DecoderStream<StreamType>::CanReadWithoutStalling() const {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
return !ready_outputs_.empty() || !unprepared_outputs_.empty() ||
(decoder_ && decoder_->CanReadWithoutStalling());
}
template <>
bool DecoderStream<DemuxerStream::AUDIO>::CanReadWithoutStalling() const {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
return true;
}
@@ -306,7 +306,7 @@ int DecoderStream<DemuxerStream::AUDIO>::GetMaxReadyOutputs() const {
template <DemuxerStream::Type StreamType>
bool DecoderStream<StreamType>::CanDecodeMore() const {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
bool buffers_left = !(fallback_buffers_.empty() && decoding_eos_);
@@ -319,21 +319,21 @@ bool DecoderStream<StreamType>::CanDecodeMore() const {
template <DemuxerStream::Type StreamType>
base::TimeDelta DecoderStream<StreamType>::AverageDuration() const {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
return duration_tracker_.count() ? duration_tracker_.Average()
: base::TimeDelta();
}
template <DemuxerStream::Type StreamType>
void DecoderStream<StreamType>::SetPrepareCB(PrepareCB prepare_cb) {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
prepare_cb_ = std::move(prepare_cb);
}
template <DemuxerStream::Type StreamType>
void DecoderStream<StreamType>::SkipPrepareUntil(
base::TimeDelta start_timestamp) {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
skip_prepare_until_timestamp_ = start_timestamp;
}
@@ -353,7 +353,7 @@ void DecoderStream<StreamType>::OnDecoderSelected(
FUNCTION_DVLOG(1) << ": "
<< (selected_decoder ? selected_decoder->GetDisplayName()
: "No decoder selected.");
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
DCHECK(state_ == STATE_INITIALIZING || state_ == STATE_REINITIALIZING_DECODER)
<< state_;
@@ -581,27 +581,23 @@ void DecoderStream<StreamType>::OnDecodeDone(
return;
default:
- // TODO(liberato): Use |status| better, since it might not be a generic
- // error anymore.
if (!decoder_produced_a_frame_ &&
base::FeatureList::IsEnabled(kFallbackAfterDecodeError)) {
- pending_decode_requests_ = 0;
+ MEDIA_LOG(WARNING, media_log_)
+ << GetStreamTypeString()
+ << " decoder fallback after initial decode error.";
// Prevent all pending decode requests and outputs from those requests
// from being called back.
fallback_weak_factory_.InvalidateWeakPtrs();
-
- std::string fallback_message =
- GetStreamTypeString() +
- " fallback to new decoder after initial decode error.";
- FUNCTION_DVLOG(1) << ": " << fallback_message;
- MEDIA_LOG(WARNING, media_log_) << fallback_message;
+ pending_decode_requests_ = 0;
state_ = STATE_REINITIALIZING_DECODER;
SelectDecoder();
} else {
- std::string error_message = GetStreamTypeString() + " decode error!";
- FUNCTION_DVLOG(1) << ": " << error_message;
- MEDIA_LOG(ERROR, media_log_) << error_message;
+ media_log_->NotifyError(std::move(status));
+ MEDIA_LOG(ERROR, media_log_)
+ << GetStreamTypeString() << " decode error!";
+
state_ = STATE_ERROR;
ClearOutputs();
if (read_cb_)
@@ -701,7 +697,7 @@ void DecoderStream<StreamType>::OnBufferReady(
FUNCTION_DVLOG(3) << ": " << status << ", "
<< (buffer ? buffer->AsHumanReadableString() : "nullptr");
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
DCHECK(pending_demuxer_read_);
if (!decoder_produced_a_frame_) {
DCHECK(state_ == STATE_ERROR || state_ == STATE_REINITIALIZING_DECODER ||
@@ -838,7 +834,7 @@ void DecoderStream<StreamType>::OnBufferReady(
template <DemuxerStream::Type StreamType>
void DecoderStream<StreamType>::ReinitializeDecoder() {
FUNCTION_DVLOG(2);
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
DCHECK_EQ(state_, STATE_FLUSHING_DECODER);
DCHECK_EQ(pending_decode_requests_, 0);
@@ -850,7 +846,7 @@ void DecoderStream<StreamType>::ReinitializeDecoder() {
template <DemuxerStream::Type StreamType>
void DecoderStream<StreamType>::CompleteDecoderReinitialization(bool success) {
FUNCTION_DVLOG(2);
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
DCHECK_EQ(state_, STATE_REINITIALIZING_DECODER);
state_ = success ? STATE_NORMAL : STATE_ERROR;
@@ -888,7 +884,7 @@ void DecoderStream<StreamType>::CompleteDecoderReinitialization(bool success) {
template <DemuxerStream::Type StreamType>
void DecoderStream<StreamType>::ResetDecoder() {
FUNCTION_DVLOG(2);
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
DCHECK(state_ == STATE_NORMAL || state_ == STATE_FLUSHING_DECODER ||
state_ == STATE_ERROR || state_ == STATE_END_OF_STREAM)
<< state_;
@@ -901,7 +897,7 @@ void DecoderStream<StreamType>::ResetDecoder() {
template <DemuxerStream::Type StreamType>
void DecoderStream<StreamType>::OnDecoderReset() {
FUNCTION_DVLOG(2);
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
DCHECK(state_ == STATE_NORMAL || state_ == STATE_FLUSHING_DECODER ||
state_ == STATE_ERROR || state_ == STATE_END_OF_STREAM)
<< state_;
@@ -938,7 +934,7 @@ void DecoderStream<StreamType>::ClearOutputs() {
template <DemuxerStream::Type StreamType>
void DecoderStream<StreamType>::MaybePrepareAnotherOutput() {
FUNCTION_DVLOG(3);
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
DCHECK(state_ == STATE_NORMAL || state_ == STATE_FLUSHING_DECODER ||
state_ == STATE_END_OF_STREAM ||
state_ == STATE_REINITIALIZING_DECODER)
@@ -967,7 +963,7 @@ template <DemuxerStream::Type StreamType>
void DecoderStream<StreamType>::OnPreparedOutputReady(
scoped_refptr<Output> output) {
FUNCTION_DVLOG(2);
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
// Errors and reset invalidate the WeakPtr factory for this function, so it
// should only be called in normal and flush states.
diff --git a/chromium/media/filters/decoder_stream.h b/chromium/media/filters/decoder_stream.h
index cbea02b988e..0d881689024 100644
--- a/chromium/media/filters/decoder_stream.h
+++ b/chromium/media/filters/decoder_stream.h
@@ -28,7 +28,7 @@
#include "media/filters/decoder_stream_traits.h"
namespace base {
-class SingleThreadTaskRunner;
+class SequencedTaskRunner;
}
namespace media {
@@ -64,7 +64,7 @@ class MEDIA_EXPORT DecoderStream {
using ReadCB = base::OnceCallback<void(ReadStatus, scoped_refptr<Output>)>;
DecoderStream(std::unique_ptr<DecoderStreamTraits<StreamType>> traits,
- const scoped_refptr<base::SingleThreadTaskRunner>& task_runner,
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
CreateDecodersCB create_decoders_cb,
MediaLog* media_log);
virtual ~DecoderStream();
@@ -229,7 +229,7 @@ class MEDIA_EXPORT DecoderStream {
std::unique_ptr<DecoderStreamTraits<StreamType>> traits_;
- scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
+ scoped_refptr<base::SequencedTaskRunner> task_runner_;
MediaLog* media_log_;
State state_;
diff --git a/chromium/media/filters/decrypting_audio_decoder.cc b/chromium/media/filters/decrypting_audio_decoder.cc
index f8fb4211c27..e42c00d8c6a 100644
--- a/chromium/media/filters/decrypting_audio_decoder.cc
+++ b/chromium/media/filters/decrypting_audio_decoder.cc
@@ -12,7 +12,7 @@
#include "base/callback_helpers.h"
#include "base/location.h"
#include "base/logging.h"
-#include "base/single_thread_task_runner.h"
+#include "base/sequenced_task_runner.h"
#include "base/strings/string_number_conversions.h"
#include "media/base/audio_buffer.h"
#include "media/base/audio_decoder_config.h"
@@ -34,7 +34,7 @@ static inline bool IsOutOfSync(const base::TimeDelta& timestamp_1,
}
DecryptingAudioDecoder::DecryptingAudioDecoder(
- const scoped_refptr<base::SingleThreadTaskRunner>& task_runner,
+ const scoped_refptr<base::SequencedTaskRunner>& task_runner,
MediaLog* media_log)
: task_runner_(task_runner), media_log_(media_log) {}
@@ -52,7 +52,7 @@ void DecryptingAudioDecoder::Initialize(const AudioDecoderConfig& config,
const OutputCB& output_cb,
const WaitingCB& waiting_cb) {
DVLOG(2) << "Initialize()";
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
DCHECK(!decode_cb_);
DCHECK(!reset_cb_);
@@ -110,7 +110,7 @@ void DecryptingAudioDecoder::Initialize(const AudioDecoderConfig& config,
void DecryptingAudioDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
DecodeCB decode_cb) {
DVLOG(3) << "Decode()";
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
DCHECK(state_ == kIdle || state_ == kDecodeFinished) << state_;
DCHECK(decode_cb);
CHECK(!decode_cb_) << "Overlapping decodes are not supported.";
@@ -138,7 +138,7 @@ void DecryptingAudioDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
void DecryptingAudioDecoder::Reset(base::OnceClosure closure) {
DVLOG(2) << "Reset() - state: " << state_;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
DCHECK(state_ == kIdle || state_ == kPendingDecode ||
state_ == kWaitingForKey || state_ == kDecodeFinished)
<< state_;
@@ -170,7 +170,7 @@ void DecryptingAudioDecoder::Reset(base::OnceClosure closure) {
DecryptingAudioDecoder::~DecryptingAudioDecoder() {
DVLOG(2) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
if (state_ == kUninitialized)
return;
@@ -198,7 +198,7 @@ void DecryptingAudioDecoder::InitializeDecoder() {
void DecryptingAudioDecoder::FinishInitialization(bool success) {
DVLOG(2) << "FinishInitialization()";
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
DCHECK(state_ == kPendingDecoderInit) << state_;
DCHECK(init_cb_);
DCHECK(!reset_cb_); // No Reset() before initialization finished.
@@ -222,7 +222,7 @@ void DecryptingAudioDecoder::FinishInitialization(bool success) {
}
void DecryptingAudioDecoder::DecodePendingBuffer() {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
DCHECK_EQ(state_, kPendingDecode) << state_;
int buffer_size = 0;
@@ -241,7 +241,7 @@ void DecryptingAudioDecoder::DeliverFrame(
Decryptor::Status status,
const Decryptor::AudioFrames& frames) {
DVLOG(3) << "DeliverFrame() - status: " << status;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
DCHECK_EQ(state_, kPendingDecode) << state_;
DCHECK(decode_cb_);
DCHECK(pending_buffer_to_decode_.get());
@@ -319,7 +319,7 @@ void DecryptingAudioDecoder::DeliverFrame(
}
void DecryptingAudioDecoder::OnCdmContextEvent(CdmContext::Event event) {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
if (event != CdmContext::Event::kHasAdditionalUsableKey)
return;
diff --git a/chromium/media/filters/decrypting_audio_decoder.h b/chromium/media/filters/decrypting_audio_decoder.h
index d176cdb88b5..aff41e2c9b7 100644
--- a/chromium/media/filters/decrypting_audio_decoder.h
+++ b/chromium/media/filters/decrypting_audio_decoder.h
@@ -19,7 +19,7 @@
#include "media/base/demuxer_stream.h"
namespace base {
-class SingleThreadTaskRunner;
+class SequencedTaskRunner;
}
namespace media {
@@ -36,7 +36,7 @@ class MediaLog;
class MEDIA_EXPORT DecryptingAudioDecoder : public AudioDecoder {
public:
DecryptingAudioDecoder(
- const scoped_refptr<base::SingleThreadTaskRunner>& task_runner,
+ const scoped_refptr<base::SequencedTaskRunner>& task_runner,
MediaLog* media_log);
~DecryptingAudioDecoder() override;
@@ -91,7 +91,7 @@ class MEDIA_EXPORT DecryptingAudioDecoder : public AudioDecoder {
void ProcessDecodedFrames(const Decryptor::AudioFrames& frames);
// Set in constructor.
- scoped_refptr<base::SingleThreadTaskRunner> const task_runner_;
+ scoped_refptr<base::SequencedTaskRunner> const task_runner_;
MediaLog* const media_log_;
State state_ = kUninitialized;
diff --git a/chromium/media/filters/decrypting_audio_decoder_unittest.cc b/chromium/media/filters/decrypting_audio_decoder_unittest.cc
index 7772462a510..b88ec863f1f 100644
--- a/chromium/media/filters/decrypting_audio_decoder_unittest.cc
+++ b/chromium/media/filters/decrypting_audio_decoder_unittest.cc
@@ -83,16 +83,17 @@ class DecryptingAudioDecoderTest : public testing::Test {
kNoTimestamp);
decoded_frame_list_.push_back(decoded_frame_);
- decoder_->Initialize(config, cdm_context_.get(),
- base::BindOnce(
- [](bool success, Status status) {
- EXPECT_EQ(status.is_ok(), success);
- },
- success),
- base::Bind(&DecryptingAudioDecoderTest::FrameReady,
- base::Unretained(this)),
- base::Bind(&DecryptingAudioDecoderTest::OnWaiting,
- base::Unretained(this)));
+ decoder_->Initialize(
+ config, cdm_context_.get(),
+ base::BindOnce(
+ [](bool success, Status status) {
+ EXPECT_EQ(status.is_ok(), success);
+ },
+ success),
+ base::BindRepeating(&DecryptingAudioDecoderTest::FrameReady,
+ base::Unretained(this)),
+ base::BindRepeating(&DecryptingAudioDecoderTest::OnWaiting,
+ base::Unretained(this)));
base::RunLoop().RunUntilIdle();
}
@@ -129,17 +130,18 @@ class DecryptingAudioDecoderTest : public testing::Test {
decoder_->Initialize(
new_config, cdm_context_.get(),
base::BindOnce([](Status status) { EXPECT_TRUE(status.is_ok()); }),
- base::Bind(&DecryptingAudioDecoderTest::FrameReady,
- base::Unretained(this)),
- base::Bind(&DecryptingAudioDecoderTest::OnWaiting,
- base::Unretained(this)));
+ base::BindRepeating(&DecryptingAudioDecoderTest::FrameReady,
+ base::Unretained(this)),
+ base::BindRepeating(&DecryptingAudioDecoderTest::OnWaiting,
+ base::Unretained(this)));
}
// Decode |buffer| and expect DecodeDone to get called with |status|.
void DecodeAndExpect(scoped_refptr<DecoderBuffer> buffer, StatusCode status) {
EXPECT_CALL(*this, DecodeDone(HasStatusCode(status)));
- decoder_->Decode(buffer, base::Bind(&DecryptingAudioDecoderTest::DecodeDone,
- base::Unretained(this)));
+ decoder_->Decode(buffer,
+ base::BindOnce(&DecryptingAudioDecoderTest::DecodeDone,
+ base::Unretained(this)));
base::RunLoop().RunUntilIdle();
}
@@ -190,8 +192,8 @@ class DecryptingAudioDecoderTest : public testing::Test {
.WillOnce(SaveArg<1>(&pending_audio_decode_cb_));
decoder_->Decode(encrypted_buffer_,
- base::Bind(&DecryptingAudioDecoderTest::DecodeDone,
- base::Unretained(this)));
+ base::BindOnce(&DecryptingAudioDecoderTest::DecodeDone,
+ base::Unretained(this)));
base::RunLoop().RunUntilIdle();
// Make sure the Decode() on the decoder triggers a DecryptAndDecode() on
// the decryptor.
@@ -204,8 +206,8 @@ class DecryptingAudioDecoderTest : public testing::Test {
RunCallback<1>(Decryptor::kNoKey, Decryptor::AudioFrames()));
EXPECT_CALL(*this, OnWaiting(WaitingReason::kNoDecryptionKey));
decoder_->Decode(encrypted_buffer_,
- base::Bind(&DecryptingAudioDecoderTest::DecodeDone,
- base::Unretained(this)));
+ base::BindOnce(&DecryptingAudioDecoderTest::DecodeDone,
+ base::Unretained(this)));
base::RunLoop().RunUntilIdle();
}
diff --git a/chromium/media/filters/decrypting_demuxer_stream.cc b/chromium/media/filters/decrypting_demuxer_stream.cc
index d147053cc2f..febfcbd8d98 100644
--- a/chromium/media/filters/decrypting_demuxer_stream.cc
+++ b/chromium/media/filters/decrypting_demuxer_stream.cc
@@ -8,6 +8,7 @@
#include "base/callback_helpers.h"
#include "base/location.h"
#include "base/logging.h"
+#include "base/sequenced_task_runner.h"
#include "base/single_thread_task_runner.h"
#include "base/strings/string_number_conversions.h"
#include "base/trace_event/trace_event.h"
@@ -27,12 +28,14 @@ static bool IsStreamValid(DemuxerStream* stream) {
}
DecryptingDemuxerStream::DecryptingDemuxerStream(
- const scoped_refptr<base::SingleThreadTaskRunner>& task_runner,
+ const scoped_refptr<base::SequencedTaskRunner>& task_runner,
MediaLog* media_log,
const WaitingCB& waiting_cb)
: task_runner_(task_runner),
media_log_(media_log),
- waiting_cb_(waiting_cb) {}
+ waiting_cb_(waiting_cb) {
+ DETACH_FROM_SEQUENCE(sequence_checker_);
+}
std::string DecryptingDemuxerStream::GetDisplayName() const {
return "DecryptingDemuxerStream";
@@ -42,7 +45,7 @@ void DecryptingDemuxerStream::Initialize(DemuxerStream* stream,
CdmContext* cdm_context,
PipelineStatusCallback status_cb) {
DVLOG(2) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK_EQ(state_, kUninitialized) << state_;
DCHECK(stream);
DCHECK(cdm_context);
@@ -71,7 +74,7 @@ void DecryptingDemuxerStream::Initialize(DemuxerStream* stream,
void DecryptingDemuxerStream::Read(ReadCB read_cb) {
DVLOG(3) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK_EQ(state_, kIdle) << state_;
DCHECK(read_cb);
CHECK(!read_cb_) << "Overlapping reads are not supported.";
@@ -85,7 +88,7 @@ void DecryptingDemuxerStream::Read(ReadCB read_cb) {
void DecryptingDemuxerStream::Reset(base::OnceClosure closure) {
DVLOG(2) << __func__ << " - state: " << state_;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(state_ != kUninitialized) << state_;
DCHECK(!reset_cb_);
@@ -145,7 +148,7 @@ bool DecryptingDemuxerStream::SupportsConfigChanges() {
DecryptingDemuxerStream::~DecryptingDemuxerStream() {
DVLOG(2) << __func__ << " : state_ = " << state_;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (state_ == kUninitialized)
return;
@@ -172,7 +175,7 @@ void DecryptingDemuxerStream::OnBufferReadFromDemuxerStream(
DemuxerStream::Status status,
scoped_refptr<DecoderBuffer> buffer) {
DVLOG(3) << __func__ << ": status = " << status;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK_EQ(state_, kPendingDemuxerRead) << state_;
DCHECK(read_cb_);
DCHECK_EQ(buffer.get() != nullptr, status == kOk) << status;
@@ -233,7 +236,7 @@ void DecryptingDemuxerStream::OnBufferReadFromDemuxerStream(
}
void DecryptingDemuxerStream::DecryptPendingBuffer() {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK_EQ(state_, kPendingDecrypt) << state_;
DCHECK(!pending_buffer_to_decrypt_->end_of_stream());
TRACE_EVENT_ASYNC_BEGIN2(
@@ -250,7 +253,7 @@ void DecryptingDemuxerStream::OnBufferDecrypted(
Decryptor::Status status,
scoped_refptr<DecoderBuffer> decrypted_buffer) {
DVLOG(3) << __func__ << " - status: " << status;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK_EQ(state_, kPendingDecrypt) << state_;
DCHECK(read_cb_);
DCHECK(pending_buffer_to_decrypt_);
@@ -318,7 +321,7 @@ void DecryptingDemuxerStream::OnBufferDecrypted(
}
void DecryptingDemuxerStream::OnCdmContextEvent(CdmContext::Event event) {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (event != CdmContext::Event::kHasAdditionalUsableKey)
return;
diff --git a/chromium/media/filters/decrypting_demuxer_stream.h b/chromium/media/filters/decrypting_demuxer_stream.h
index f8e9a2b7e35..762e593697d 100644
--- a/chromium/media/filters/decrypting_demuxer_stream.h
+++ b/chromium/media/filters/decrypting_demuxer_stream.h
@@ -9,6 +9,7 @@
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
+#include "base/sequence_checker.h"
#include "media/base/audio_decoder_config.h"
#include "media/base/callback_registry.h"
#include "media/base/cdm_context.h"
@@ -19,7 +20,7 @@
#include "media/base/waiting.h"
namespace base {
-class SingleThreadTaskRunner;
+class SequencedTaskRunner;
}
namespace media {
@@ -35,7 +36,7 @@ class MediaLog;
class MEDIA_EXPORT DecryptingDemuxerStream : public DemuxerStream {
public:
DecryptingDemuxerStream(
- const scoped_refptr<base::SingleThreadTaskRunner>& task_runner,
+ const scoped_refptr<base::SequencedTaskRunner>& task_runner,
MediaLog* media_log,
const WaitingCB& waiting_cb);
@@ -143,7 +144,8 @@ class MEDIA_EXPORT DecryptingDemuxerStream : public DemuxerStream {
void CompletePendingDecrypt(Decryptor::Status status);
void CompleteWaitingForDecryptionKey();
- scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
+ scoped_refptr<base::SequencedTaskRunner> task_runner_;
+ SEQUENCE_CHECKER(sequence_checker_);
MediaLog* const media_log_;
WaitingCB waiting_cb_;
diff --git a/chromium/media/filters/decrypting_demuxer_stream_unittest.cc b/chromium/media/filters/decrypting_demuxer_stream_unittest.cc
index 8ea826b71fb..bcfb290e56c 100644
--- a/chromium/media/filters/decrypting_demuxer_stream_unittest.cc
+++ b/chromium/media/filters/decrypting_demuxer_stream_unittest.cc
@@ -77,8 +77,8 @@ class DecryptingDemuxerStreamTest : public testing::Test {
: demuxer_stream_(new DecryptingDemuxerStream(
task_environment_.GetMainThreadTaskRunner(),
&media_log_,
- base::Bind(&DecryptingDemuxerStreamTest::OnWaiting,
- base::Unretained(this)))),
+ base::BindRepeating(&DecryptingDemuxerStreamTest::OnWaiting,
+ base::Unretained(this)))),
cdm_context_(new StrictMock<MockCdmContext>()),
decryptor_(new StrictMock<MockDecryptor>()),
is_initialized_(false),
diff --git a/chromium/media/filters/decrypting_media_resource.cc b/chromium/media/filters/decrypting_media_resource.cc
index 26bdaa0587d..3a8ca993299 100644
--- a/chromium/media/filters/decrypting_media_resource.cc
+++ b/chromium/media/filters/decrypting_media_resource.cc
@@ -8,7 +8,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/logging.h"
#include "media/base/cdm_context.h"
#include "media/base/demuxer_stream.h"
@@ -22,7 +22,7 @@ DecryptingMediaResource::DecryptingMediaResource(
MediaResource* media_resource,
CdmContext* cdm_context,
MediaLog* media_log,
- scoped_refptr<base::SingleThreadTaskRunner> task_runner)
+ scoped_refptr<base::SequencedTaskRunner> task_runner)
: media_resource_(media_resource),
cdm_context_(cdm_context),
media_log_(media_log),
@@ -33,7 +33,7 @@ DecryptingMediaResource::DecryptingMediaResource(
DCHECK(cdm_context_->GetDecryptor());
DCHECK(cdm_context_->GetDecryptor()->CanAlwaysDecrypt());
DCHECK(media_log_);
- DCHECK(task_runner->BelongsToCurrentThread());
+ DCHECK(task_runner->RunsTasksInCurrentSequence());
}
DecryptingMediaResource::~DecryptingMediaResource() = default;
diff --git a/chromium/media/filters/decrypting_media_resource.h b/chromium/media/filters/decrypting_media_resource.h
index 90c8b409874..8261a89db7e 100644
--- a/chromium/media/filters/decrypting_media_resource.h
+++ b/chromium/media/filters/decrypting_media_resource.h
@@ -10,14 +10,10 @@
#include "base/callback.h"
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
-#include "base/single_thread_task_runner.h"
+#include "base/sequenced_task_runner.h"
#include "media/base/media_resource.h"
#include "media/base/pipeline.h"
-namespace base {
-class SingleThreadTaskRunner;
-} // namespace base
-
namespace media {
class CdmContext;
@@ -36,11 +32,10 @@ class MEDIA_EXPORT DecryptingMediaResource : public MediaResource {
public:
using InitCB = base::OnceCallback<void(bool success)>;
- DecryptingMediaResource(
- MediaResource* media_resource,
- CdmContext* cdm_context,
- MediaLog* media_log,
- scoped_refptr<base::SingleThreadTaskRunner> task_runner);
+ DecryptingMediaResource(MediaResource* media_resource,
+ CdmContext* cdm_context,
+ MediaLog* media_log,
+ scoped_refptr<base::SequencedTaskRunner> task_runner);
~DecryptingMediaResource() override;
// MediaResource implementation:
@@ -58,7 +53,7 @@ class MEDIA_EXPORT DecryptingMediaResource : public MediaResource {
MediaResource* const media_resource_;
CdmContext* const cdm_context_;
MediaLog* const media_log_;
- scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
+ scoped_refptr<base::SequencedTaskRunner> task_runner_;
// Number of DecryptingDemuxerStreams that have yet to be initialized.
int num_dds_pending_init_ = 0;
diff --git a/chromium/media/filters/decrypting_media_resource_unittest.cc b/chromium/media/filters/decrypting_media_resource_unittest.cc
index 1049434d9c6..f47f105e969 100644
--- a/chromium/media/filters/decrypting_media_resource_unittest.cc
+++ b/chromium/media/filters/decrypting_media_resource_unittest.cc
@@ -7,7 +7,7 @@
#include <vector>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/test/gmock_callback_support.h"
#include "base/test/mock_callback.h"
#include "base/test/task_environment.h"
diff --git a/chromium/media/filters/decrypting_video_decoder.cc b/chromium/media/filters/decrypting_video_decoder.cc
index b78b7a39d43..43601b56c5d 100644
--- a/chromium/media/filters/decrypting_video_decoder.cc
+++ b/chromium/media/filters/decrypting_video_decoder.cc
@@ -8,7 +8,7 @@
#include "base/callback_helpers.h"
#include "base/location.h"
#include "base/logging.h"
-#include "base/single_thread_task_runner.h"
+#include "base/sequenced_task_runner.h"
#include "base/strings/string_number_conversions.h"
#include "base/trace_event/trace_event.h"
#include "media/base/bind_to_current_loop.h"
@@ -22,9 +22,11 @@ namespace media {
const char DecryptingVideoDecoder::kDecoderName[] = "DecryptingVideoDecoder";
DecryptingVideoDecoder::DecryptingVideoDecoder(
- const scoped_refptr<base::SingleThreadTaskRunner>& task_runner,
+ const scoped_refptr<base::SequencedTaskRunner>& task_runner,
MediaLog* media_log)
- : task_runner_(task_runner), media_log_(media_log) {}
+ : task_runner_(task_runner), media_log_(media_log) {
+ DETACH_FROM_SEQUENCE(sequence_checker_);
+}
std::string DecryptingVideoDecoder::GetDisplayName() const {
return kDecoderName;
@@ -38,7 +40,7 @@ void DecryptingVideoDecoder::Initialize(const VideoDecoderConfig& config,
const WaitingCB& waiting_cb) {
DVLOG(2) << __func__ << ": " << config.AsHumanReadableString();
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(state_ == kUninitialized || state_ == kIdle ||
state_ == kDecodeFinished)
<< state_;
@@ -101,7 +103,7 @@ bool DecryptingVideoDecoder::SupportsDecryption() const {
void DecryptingVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
DecodeCB decode_cb) {
DVLOG(3) << "Decode()";
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(state_ == kIdle || state_ == kDecodeFinished || state_ == kError)
<< state_;
DCHECK(decode_cb);
@@ -127,7 +129,7 @@ void DecryptingVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
void DecryptingVideoDecoder::Reset(base::OnceClosure closure) {
DVLOG(2) << "Reset() - state: " << state_;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(state_ == kIdle || state_ == kPendingDecode ||
state_ == kWaitingForKey || state_ == kDecodeFinished ||
state_ == kError)
@@ -160,7 +162,7 @@ void DecryptingVideoDecoder::Reset(base::OnceClosure closure) {
}
DecryptingVideoDecoder::~DecryptingVideoDecoder() {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (state_ == kUninitialized)
return;
@@ -185,7 +187,7 @@ DecryptingVideoDecoder::~DecryptingVideoDecoder() {
void DecryptingVideoDecoder::FinishInitialization(bool success) {
DVLOG(2) << "FinishInitialization()";
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK_EQ(state_, kPendingDecoderInit) << state_;
DCHECK(init_cb_);
DCHECK(!reset_cb_); // No Reset() before initialization finished.
@@ -206,7 +208,7 @@ void DecryptingVideoDecoder::FinishInitialization(bool success) {
}
void DecryptingVideoDecoder::DecodePendingBuffer() {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK_EQ(state_, kPendingDecode) << state_;
// Note: Traces require a unique ID per decode, if we ever support multiple
@@ -228,7 +230,7 @@ void DecryptingVideoDecoder::DecodePendingBuffer() {
void DecryptingVideoDecoder::DeliverFrame(Decryptor::Status status,
scoped_refptr<VideoFrame> frame) {
DVLOG(3) << "DeliverFrame() - status: " << status;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK_EQ(state_, kPendingDecode) << state_;
DCHECK(decode_cb_);
DCHECK(pending_buffer_to_decode_.get());
@@ -321,7 +323,7 @@ void DecryptingVideoDecoder::DeliverFrame(Decryptor::Status status,
void DecryptingVideoDecoder::OnCdmContextEvent(CdmContext::Event event) {
DVLOG(2) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (event != CdmContext::Event::kHasAdditionalUsableKey)
return;
diff --git a/chromium/media/filters/decrypting_video_decoder.h b/chromium/media/filters/decrypting_video_decoder.h
index bdc40e5edbc..6f7c92402ec 100644
--- a/chromium/media/filters/decrypting_video_decoder.h
+++ b/chromium/media/filters/decrypting_video_decoder.h
@@ -10,6 +10,7 @@
#include "base/callback.h"
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
+#include "base/sequence_checker.h"
#include "media/base/callback_registry.h"
#include "media/base/cdm_context.h"
#include "media/base/decryptor.h"
@@ -17,7 +18,7 @@
#include "media/base/video_decoder_config.h"
namespace base {
-class SingleThreadTaskRunner;
+class SequencedTaskRunner;
}
namespace media {
@@ -33,7 +34,7 @@ class MediaLog;
class MEDIA_EXPORT DecryptingVideoDecoder : public VideoDecoder {
public:
DecryptingVideoDecoder(
- const scoped_refptr<base::SingleThreadTaskRunner>& task_runner,
+ const scoped_refptr<base::SequencedTaskRunner>& task_runner,
MediaLog* media_log);
~DecryptingVideoDecoder() override;
@@ -85,9 +86,11 @@ class MEDIA_EXPORT DecryptingVideoDecoder : public VideoDecoder {
void CompleteWaitingForDecryptionKey();
// Set in constructor.
- scoped_refptr<base::SingleThreadTaskRunner> const task_runner_;
+ scoped_refptr<base::SequencedTaskRunner> const task_runner_;
MediaLog* const media_log_;
+ SEQUENCE_CHECKER(sequence_checker_);
+
State state_ = kUninitialized;
InitCB init_cb_;
diff --git a/chromium/media/filters/decrypting_video_decoder_unittest.cc b/chromium/media/filters/decrypting_video_decoder_unittest.cc
index caa4221e88e..525fdb247f5 100644
--- a/chromium/media/filters/decrypting_video_decoder_unittest.cc
+++ b/chromium/media/filters/decrypting_video_decoder_unittest.cc
@@ -78,16 +78,17 @@ class DecryptingVideoDecoderTest : public testing::Test {
// can succeed or fail.
void InitializeAndExpectResult(const VideoDecoderConfig& config,
bool success) {
- decoder_->Initialize(config, false, cdm_context_.get(),
- base::BindOnce(
- [](bool success, Status status) {
- EXPECT_EQ(status.is_ok(), success);
- },
- success),
- base::Bind(&DecryptingVideoDecoderTest::FrameReady,
- base::Unretained(this)),
- base::Bind(&DecryptingVideoDecoderTest::OnWaiting,
- base::Unretained(this)));
+ decoder_->Initialize(
+ config, false, cdm_context_.get(),
+ base::BindOnce(
+ [](bool success, Status status) {
+ EXPECT_EQ(status.is_ok(), success);
+ },
+ success),
+ base::BindRepeating(&DecryptingVideoDecoderTest::FrameReady,
+ base::Unretained(this)),
+ base::BindRepeating(&DecryptingVideoDecoderTest::OnWaiting,
+ base::Unretained(this)));
base::RunLoop().RunUntilIdle();
}
diff --git a/chromium/media/filters/demuxer_perftest.cc b/chromium/media/filters/demuxer_perftest.cc
index fe3dc42d898..d444f4d06f9 100644
--- a/chromium/media/filters/demuxer_perftest.cc
+++ b/chromium/media/filters/demuxer_perftest.cc
@@ -184,7 +184,7 @@ static void RunDemuxerBenchmark(const std::string& filename) {
Demuxer::EncryptedMediaInitDataCB encrypted_media_init_data_cb =
base::BindRepeating(&OnEncryptedMediaInitData);
Demuxer::MediaTracksUpdatedCB tracks_updated_cb =
- base::Bind(&OnMediaTracksUpdated);
+ base::BindRepeating(&OnMediaTracksUpdated);
FFmpegDemuxer demuxer(base::ThreadTaskRunnerHandle::Get(), &data_source,
encrypted_media_init_data_cb, tracks_updated_cb,
&media_log_, true);
diff --git a/chromium/media/filters/fake_video_decoder.cc b/chromium/media/filters/fake_video_decoder.cc
index cfdd7ba4fa0..e2f4726f28f 100644
--- a/chromium/media/filters/fake_video_decoder.cc
+++ b/chromium/media/filters/fake_video_decoder.cc
@@ -22,13 +22,14 @@ FakeVideoDecoder::FakeVideoDecoder(const std::string& decoder_name,
hold_decode_(false),
total_bytes_decoded_(0),
fail_to_initialize_(false) {
+ DETACH_FROM_SEQUENCE(sequence_checker_);
DVLOG(1) << decoder_name_ << ": " << __func__;
DCHECK_GE(decoding_delay, 0);
}
FakeVideoDecoder::~FakeVideoDecoder() {
DVLOG(1) << decoder_name_ << ": " << __func__;
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (state_ == STATE_UNINITIALIZED)
return;
@@ -74,7 +75,7 @@ void FakeVideoDecoder::Initialize(const VideoDecoderConfig& config,
const OutputCB& output_cb,
const WaitingCB& waiting_cb) {
DVLOG(1) << decoder_name_ << ": " << __func__;
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(config.IsValidConfig());
DCHECK(held_decode_callbacks_.empty())
<< "No reinitialization during pending decode.";
@@ -112,7 +113,7 @@ void FakeVideoDecoder::Initialize(const VideoDecoderConfig& config,
void FakeVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
DecodeCB decode_cb) {
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(reset_cb_.IsNull());
DCHECK_LE(decoded_frames_.size(),
decoding_delay_ + held_decode_callbacks_.size());
@@ -147,7 +148,7 @@ scoped_refptr<VideoFrame> FakeVideoDecoder::MakeVideoFrame(
}
void FakeVideoDecoder::Reset(base::OnceClosure closure) {
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(reset_cb_.IsNull());
reset_cb_.SetCallback(BindToCurrentLoop(std::move(closure)));
@@ -161,22 +162,22 @@ void FakeVideoDecoder::Reset(base::OnceClosure closure) {
}
void FakeVideoDecoder::HoldNextInit() {
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
init_cb_.HoldCallback();
}
void FakeVideoDecoder::HoldDecode() {
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
hold_decode_ = true;
}
void FakeVideoDecoder::HoldNextReset() {
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
reset_cb_.HoldCallback();
}
void FakeVideoDecoder::SatisfyInit() {
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(held_decode_callbacks_.empty());
DCHECK(reset_cb_.IsNull());
@@ -184,7 +185,7 @@ void FakeVideoDecoder::SatisfyInit() {
}
void FakeVideoDecoder::SatisfyDecode() {
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(hold_decode_);
hold_decode_ = false;
@@ -195,7 +196,7 @@ void FakeVideoDecoder::SatisfyDecode() {
}
void FakeVideoDecoder::SatisfySingleDecode() {
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(!held_decode_callbacks_.empty());
DecodeCB decode_cb = std::move(held_decode_callbacks_.front());
@@ -207,13 +208,13 @@ void FakeVideoDecoder::SatisfySingleDecode() {
}
void FakeVideoDecoder::SatisfyReset() {
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(held_decode_callbacks_.empty());
reset_cb_.RunHeldCallback();
}
void FakeVideoDecoder::SimulateError() {
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
state_ = STATE_ERROR;
while (!held_decode_callbacks_.empty()) {
@@ -234,7 +235,7 @@ int FakeVideoDecoder::GetMaxDecodeRequests() const {
void FakeVideoDecoder::OnFrameDecoded(int buffer_size,
DecodeCB decode_cb,
Status status) {
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (status.is_ok()) {
total_bytes_decoded_ += buffer_size;
@@ -245,7 +246,7 @@ void FakeVideoDecoder::OnFrameDecoded(int buffer_size,
}
void FakeVideoDecoder::RunOrHoldDecode(DecodeCB decode_cb) {
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (hold_decode_) {
held_decode_callbacks_.push_back(std::move(decode_cb));
@@ -256,7 +257,7 @@ void FakeVideoDecoder::RunOrHoldDecode(DecodeCB decode_cb) {
}
void FakeVideoDecoder::RunDecodeCallback(DecodeCB decode_cb) {
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (!reset_cb_.IsNull()) {
DCHECK(decoded_frames_.empty());
@@ -289,7 +290,7 @@ void FakeVideoDecoder::RunDecodeCallback(DecodeCB decode_cb) {
}
void FakeVideoDecoder::DoReset() {
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(held_decode_callbacks_.empty());
DCHECK(!reset_cb_.IsNull());
diff --git a/chromium/media/filters/fake_video_decoder.h b/chromium/media/filters/fake_video_decoder.h
index e8467171aab..898f4dab778 100644
--- a/chromium/media/filters/fake_video_decoder.h
+++ b/chromium/media/filters/fake_video_decoder.h
@@ -14,7 +14,7 @@
#include "base/callback_helpers.h"
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
-#include "base/threading/thread_checker.h"
+#include "base/sequence_checker.h"
#include "media/base/callback_holder.h"
#include "media/base/decoder_buffer.h"
#include "media/base/pipeline_status.h"
@@ -106,7 +106,7 @@ class FakeVideoDecoder : public VideoDecoder {
void DoReset();
- base::ThreadChecker thread_checker_;
+ SEQUENCE_CHECKER(sequence_checker_);
const std::string decoder_name_;
const size_t decoding_delay_;
diff --git a/chromium/media/filters/fake_video_decoder_unittest.cc b/chromium/media/filters/fake_video_decoder_unittest.cc
index 1c7fa61808e..49130158ce1 100644
--- a/chromium/media/filters/fake_video_decoder_unittest.cc
+++ b/chromium/media/filters/fake_video_decoder_unittest.cc
@@ -4,8 +4,11 @@
#include "media/filters/fake_video_decoder.h"
+#include <memory>
+#include <utility>
+
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/macros.h"
#include "base/run_loop.h"
#include "base/test/task_environment.h"
@@ -37,8 +40,8 @@ class FakeVideoDecoderTest
"FakeVideoDecoder",
GetParam().decoding_delay,
GetParam().max_decode_requests,
- base::Bind(&FakeVideoDecoderTest::OnBytesDecoded,
- base::Unretained(this)))),
+ base::BindRepeating(&FakeVideoDecoderTest::OnBytesDecoded,
+ base::Unretained(this)))),
num_input_buffers_(0),
num_decoded_frames_(0),
num_bytes_decoded_(0),
@@ -53,15 +56,15 @@ class FakeVideoDecoderTest
void InitializeWithConfigAndExpectResult(const VideoDecoderConfig& config,
bool success) {
- decoder_->Initialize(
- config, false, nullptr,
- base::BindOnce(
- [](bool success, Status status) {
- EXPECT_EQ(status.is_ok(), success);
- },
- success),
- base::Bind(&FakeVideoDecoderTest::FrameReady, base::Unretained(this)),
- base::NullCallback());
+ decoder_->Initialize(config, false, nullptr,
+ base::BindOnce(
+ [](bool success, Status status) {
+ EXPECT_EQ(status.is_ok(), success);
+ },
+ success),
+ base::BindRepeating(&FakeVideoDecoderTest::FrameReady,
+ base::Unretained(this)),
+ base::NullCallback());
base::RunLoop().RunUntilIdle();
current_config_ = config;
}
@@ -158,7 +161,7 @@ class FakeVideoDecoderTest
void ReadAllFrames() {
do {
Decode();
- } while (num_input_buffers_ <= kTotalBuffers); // All input buffers + EOS.
+ } while (num_input_buffers_ <= kTotalBuffers); // All input buffers + EOS.
}
void EnterPendingReadState() {
@@ -284,10 +287,10 @@ TEST_P(FakeVideoDecoderTest, Read_DecodingDelay) {
}
TEST_P(FakeVideoDecoderTest, Read_ZeroDelay) {
- decoder_.reset(
- new FakeVideoDecoder("FakeVideoDecoder", 0, 1,
- base::Bind(&FakeVideoDecoderTest::OnBytesDecoded,
- base::Unretained(this))));
+ decoder_ = std::make_unique<FakeVideoDecoder>(
+ "FakeVideoDecoder", 0, 1,
+ base::BindRepeating(&FakeVideoDecoderTest::OnBytesDecoded,
+ base::Unretained(this)));
Initialize();
while (num_input_buffers_ < kTotalBuffers) {
diff --git a/chromium/media/filters/ffmpeg_audio_decoder.cc b/chromium/media/filters/ffmpeg_audio_decoder.cc
index 4fc03e7be31..e5daea40328 100644
--- a/chromium/media/filters/ffmpeg_audio_decoder.cc
+++ b/chromium/media/filters/ffmpeg_audio_decoder.cc
@@ -48,16 +48,18 @@ static void ReleaseAudioBufferImpl(void* opaque, uint8_t* data) {
}
FFmpegAudioDecoder::FFmpegAudioDecoder(
- const scoped_refptr<base::SingleThreadTaskRunner>& task_runner,
+ const scoped_refptr<base::SequencedTaskRunner>& task_runner,
MediaLog* media_log)
: task_runner_(task_runner),
state_(kUninitialized),
av_sample_format_(0),
media_log_(media_log),
- pool_(new AudioBufferMemoryPool()) {}
+ pool_(new AudioBufferMemoryPool()) {
+ DETACH_FROM_SEQUENCE(sequence_checker_);
+}
FFmpegAudioDecoder::~FFmpegAudioDecoder() {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (state_ != kUninitialized)
ReleaseFFmpegResources();
@@ -72,7 +74,7 @@ void FFmpegAudioDecoder::Initialize(const AudioDecoderConfig& config,
InitCB init_cb,
const OutputCB& output_cb,
const WaitingCB& /* waiting_cb */) {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(config.IsValidConfig());
InitCB bound_init_cb = BindToCurrentLoop(std::move(init_cb));
@@ -108,7 +110,7 @@ void FFmpegAudioDecoder::Initialize(const AudioDecoderConfig& config,
void FFmpegAudioDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
DecodeCB decode_cb) {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(decode_cb);
CHECK_NE(state_, kUninitialized);
DecodeCB decode_cb_bound = BindToCurrentLoop(std::move(decode_cb));
@@ -128,7 +130,7 @@ void FFmpegAudioDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
}
void FFmpegAudioDecoder::Reset(base::OnceClosure closure) {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
avcodec_flush_buffers(codec_context_.get());
state_ = kNormal;
@@ -138,7 +140,7 @@ void FFmpegAudioDecoder::Reset(base::OnceClosure closure) {
void FFmpegAudioDecoder::DecodeBuffer(const DecoderBuffer& buffer,
DecodeCB decode_cb) {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK_NE(state_, kUninitialized);
DCHECK_NE(state_, kDecodeFinished);
DCHECK_NE(state_, kError);
diff --git a/chromium/media/filters/ffmpeg_audio_decoder.h b/chromium/media/filters/ffmpeg_audio_decoder.h
index 0db124bf47f..2111b89a797 100644
--- a/chromium/media/filters/ffmpeg_audio_decoder.h
+++ b/chromium/media/filters/ffmpeg_audio_decoder.h
@@ -10,6 +10,7 @@
#include "base/callback.h"
#include "base/macros.h"
+#include "base/sequence_checker.h"
#include "base/time/time.h"
#include "media/base/audio_buffer.h"
#include "media/base/audio_decoder.h"
@@ -22,7 +23,7 @@ struct AVCodecContext;
struct AVFrame;
namespace base {
-class SingleThreadTaskRunner;
+class SequencedTaskRunner;
}
namespace media {
@@ -34,7 +35,7 @@ class FFmpegDecodingLoop;
class MEDIA_EXPORT FFmpegAudioDecoder : public AudioDecoder {
public:
FFmpegAudioDecoder(
- const scoped_refptr<base::SingleThreadTaskRunner>& task_runner,
+ const scoped_refptr<base::SequencedTaskRunner>& task_runner,
MediaLog* media_log);
~FFmpegAudioDecoder() override;
@@ -99,7 +100,8 @@ class MEDIA_EXPORT FFmpegAudioDecoder : public AudioDecoder {
void ResetTimestampState(const AudioDecoderConfig& config);
- scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
+ scoped_refptr<base::SequencedTaskRunner> task_runner_;
+ SEQUENCE_CHECKER(sequence_checker_);
OutputCB output_cb_;
diff --git a/chromium/media/filters/ffmpeg_demuxer.cc b/chromium/media/filters/ffmpeg_demuxer.cc
index d34db63f3ef..fa39c5698ed 100644
--- a/chromium/media/filters/ffmpeg_demuxer.cc
+++ b/chromium/media/filters/ffmpeg_demuxer.cc
@@ -17,7 +17,7 @@
#include "base/metrics/histogram_functions.h"
#include "base/metrics/histogram_macros.h"
#include "base/numerics/safe_conversions.h"
-#include "base/single_thread_task_runner.h"
+#include "base/sequenced_task_runner.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_util.h"
#include "base/sys_byteorder.h"
@@ -343,7 +343,7 @@ FFmpegDemuxerStream::~FFmpegDemuxerStream() {
}
void FFmpegDemuxerStream::EnqueuePacket(ScopedAVPacket packet) {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
DCHECK(packet->size);
DCHECK(packet->data);
@@ -462,7 +462,7 @@ void FFmpegDemuxerStream::EnqueuePacket(ScopedAVPacket packet) {
}
if (packet_end - header_start < MPEG1AudioStreamParser::kHeaderSize ||
- !MPEG1AudioStreamParser::ParseHeader(nullptr, header_start,
+ !MPEG1AudioStreamParser::ParseHeader(nullptr, nullptr, header_start,
nullptr)) {
LIMITED_MEDIA_LOG(INFO, media_log_, num_discarded_packet_warnings_, 5)
<< "Discarding invalid MP3 packet, ts: "
@@ -663,13 +663,13 @@ void FFmpegDemuxerStream::EnqueuePacket(ScopedAVPacket packet) {
}
void FFmpegDemuxerStream::SetEndOfStream() {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
end_of_stream_ = true;
SatisfyPendingRead();
}
void FFmpegDemuxerStream::FlushBuffers(bool preserve_packet_position) {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
DCHECK(preserve_packet_position || !read_cb_)
<< "There should be no pending read";
@@ -697,7 +697,7 @@ void FFmpegDemuxerStream::Abort() {
}
void FFmpegDemuxerStream::Stop() {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
buffer_queue_.Clear();
demuxer_ = nullptr;
stream_ = nullptr;
@@ -709,17 +709,17 @@ void FFmpegDemuxerStream::Stop() {
}
DemuxerStream::Type FFmpegDemuxerStream::type() const {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
return type_;
}
DemuxerStream::Liveness FFmpegDemuxerStream::liveness() const {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
return liveness_;
}
void FFmpegDemuxerStream::Read(ReadCB read_cb) {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
CHECK(!read_cb_) << "Overlapping reads are not supported";
read_cb_ = BindToCurrentLoop(std::move(read_cb));
@@ -748,7 +748,7 @@ void FFmpegDemuxerStream::Read(ReadCB read_cb) {
}
void FFmpegDemuxerStream::EnableBitstreamConverter() {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
InitBitstreamConverter();
@@ -806,26 +806,26 @@ void FFmpegDemuxerStream::InitBitstreamConverter() {
bool FFmpegDemuxerStream::SupportsConfigChanges() { return false; }
AudioDecoderConfig FFmpegDemuxerStream::audio_decoder_config() {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
DCHECK_EQ(type_, AUDIO);
DCHECK(audio_config_.get());
return *audio_config_;
}
VideoDecoderConfig FFmpegDemuxerStream::video_decoder_config() {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
DCHECK_EQ(type_, VIDEO);
DCHECK(video_config_.get());
return *video_config_;
}
bool FFmpegDemuxerStream::IsEnabled() const {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
return is_enabled_;
}
void FFmpegDemuxerStream::SetEnabled(bool enabled, base::TimeDelta timestamp) {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
DCHECK(demuxer_);
DCHECK(demuxer_->ffmpeg_task_runner());
if (enabled == is_enabled_)
@@ -845,7 +845,7 @@ void FFmpegDemuxerStream::SetEnabled(bool enabled, base::TimeDelta timestamp) {
}
void FFmpegDemuxerStream::SetLiveness(Liveness liveness) {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
DCHECK_EQ(liveness_, LIVENESS_UNKNOWN);
liveness_ = liveness;
}
@@ -855,7 +855,7 @@ Ranges<base::TimeDelta> FFmpegDemuxerStream::GetBufferedRanges() const {
}
void FFmpegDemuxerStream::SatisfyPendingRead() {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
if (read_cb_) {
if (!buffer_queue_.IsEmpty()) {
std::move(read_cb_).Run(DemuxerStream::kOk, buffer_queue_.Pop());
@@ -901,7 +901,7 @@ base::TimeDelta FFmpegDemuxerStream::ConvertStreamTimestamp(
// FFmpegDemuxer
//
FFmpegDemuxer::FFmpegDemuxer(
- const scoped_refptr<base::SingleThreadTaskRunner>& task_runner,
+ const scoped_refptr<base::SequencedTaskRunner>& task_runner,
DataSource* data_source,
const EncryptedMediaInitDataCB& encrypted_media_init_data_cb,
MediaTracksUpdatedCB media_tracks_updated_cb,
@@ -944,7 +944,7 @@ std::string FFmpegDemuxer::GetDisplayName() const {
void FFmpegDemuxer::Initialize(DemuxerHost* host,
PipelineStatusCallback init_cb) {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
host_ = host;
weak_this_ = cancel_pending_seek_factory_.GetWeakPtr();
init_cb_ = std::move(init_cb);
@@ -952,7 +952,7 @@ void FFmpegDemuxer::Initialize(DemuxerHost* host,
// Give a WeakPtr to BlockingUrlProtocol since we'll need to release it on the
// blocking thread pool.
url_protocol_ = std::make_unique<BlockingUrlProtocol>(
- data_source_, BindToCurrentLoop(base::Bind(
+ data_source_, BindToCurrentLoop(base::BindRepeating(
&FFmpegDemuxer::OnDataSourceError, weak_this_)));
glue_ = std::make_unique<FFmpegGlue>(url_protocol_.get());
AVFormatContext* format_context = glue_->format_context();
@@ -978,7 +978,7 @@ void FFmpegDemuxer::Initialize(DemuxerHost* host,
}
void FFmpegDemuxer::AbortPendingReads() {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
// If Stop() has been called, then drop this call.
if (stopped_)
@@ -1012,7 +1012,7 @@ void FFmpegDemuxer::AbortPendingReads() {
}
void FFmpegDemuxer::Stop() {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
if (init_cb_)
RunInitCB(PIPELINE_ERROR_ABORT);
@@ -1043,7 +1043,7 @@ void FFmpegDemuxer::Stop() {
void FFmpegDemuxer::StartWaitingForSeek(base::TimeDelta seek_time) {}
void FFmpegDemuxer::CancelPendingSeek(base::TimeDelta seek_time) {
- if (task_runner_->BelongsToCurrentThread()) {
+ if (task_runner_->RunsTasksInCurrentSequence()) {
AbortPendingReads();
} else {
// Don't use GetWeakPtr() here since we are on the wrong thread.
@@ -1054,7 +1054,7 @@ void FFmpegDemuxer::CancelPendingSeek(base::TimeDelta seek_time) {
}
void FFmpegDemuxer::Seek(base::TimeDelta time, PipelineStatusCallback cb) {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
DCHECK(!pending_seek_cb_);
TRACE_EVENT_ASYNC_BEGIN0("media", "FFmpegDemuxer::Seek", this);
pending_seek_cb_ = std::move(cb);
@@ -1064,7 +1064,7 @@ void FFmpegDemuxer::Seek(base::TimeDelta time, PipelineStatusCallback cb) {
void FFmpegDemuxer::SeekInternal(base::TimeDelta time,
base::OnceClosure seek_cb) {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
// FFmpeg requires seeks to be adjusted according to the lowest starting time.
// Since EnqueuePacket() rebased negative timestamps by the start time, we
@@ -1120,7 +1120,7 @@ base::Time FFmpegDemuxer::GetTimelineOffset() const {
}
std::vector<DemuxerStream*> FFmpegDemuxer::GetAllStreams() {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
std::vector<DemuxerStream*> result;
// Put enabled streams at the beginning of the list so that
// MediaResource::GetFirstStream returns the enabled stream if there is one.
@@ -1174,12 +1174,12 @@ void FFmpegDemuxer::OnEncryptedMediaInitData(
}
void FFmpegDemuxer::NotifyCapacityAvailable() {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
ReadFrameIfNeeded();
}
void FFmpegDemuxer::NotifyBufferingChanged() {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
Ranges<base::TimeDelta> buffered;
bool initialized_buffered_ranges = false;
for (const auto& stream : streams_) {
@@ -1226,7 +1226,7 @@ static int CalculateBitrate(AVFormatContext* format_context,
}
void FFmpegDemuxer::OnOpenContextDone(bool result) {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
if (stopped_) {
MEDIA_LOG(ERROR, media_log_) << GetDisplayName() << ": bad state";
RunInitCB(PIPELINE_ERROR_ABORT);
@@ -1258,7 +1258,7 @@ void FFmpegDemuxer::OnOpenContextDone(bool result) {
}
void FFmpegDemuxer::OnFindStreamInfoDone(int result) {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
if (stopped_ || !data_source_) {
MEDIA_LOG(ERROR, media_log_) << GetDisplayName() << ": bad state";
RunInitCB(PIPELINE_ERROR_ABORT);
@@ -1640,7 +1640,7 @@ FFmpegDemuxerStream* FFmpegDemuxer::FindPreferredStreamForSeeking(
}
void FFmpegDemuxer::OnSeekFrameSuccess() {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
DCHECK(pending_seek_cb_);
if (stopped_) {
@@ -1667,7 +1667,7 @@ void FFmpegDemuxer::FindAndEnableProperTracks(
base::TimeDelta curr_time,
DemuxerStream::Type track_type,
TrackChangeCB change_completed_cb) {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
std::set<FFmpegDemuxerStream*> enabled_streams;
for (const auto& id : track_ids) {
@@ -1752,7 +1752,7 @@ void FFmpegDemuxer::OnSelectedVideoTrackChanged(
}
void FFmpegDemuxer::ReadFrameIfNeeded() {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
// Make sure we have work to do before reading.
if (stopped_ || !StreamsHaveAvailableCapacity() || pending_read_ ||
@@ -1776,7 +1776,7 @@ void FFmpegDemuxer::ReadFrameIfNeeded() {
}
void FFmpegDemuxer::OnReadFrameDone(ScopedAVPacket packet, int result) {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
DCHECK(pending_read_);
pending_read_ = false;
@@ -1847,7 +1847,7 @@ void FFmpegDemuxer::OnReadFrameDone(ScopedAVPacket packet, int result) {
}
bool FFmpegDemuxer::StreamsHaveAvailableCapacity() {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
for (const auto& stream : streams_) {
if (stream && stream->IsEnabled() && stream->HasAvailableCapacity())
return true;
@@ -1856,9 +1856,10 @@ bool FFmpegDemuxer::StreamsHaveAvailableCapacity() {
}
bool FFmpegDemuxer::IsMaxMemoryUsageReached() const {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
- size_t memory_left = GetDemuxerMemoryLimit();
+ size_t memory_left =
+ GetDemuxerMemoryLimit(Demuxer::DemuxerTypes::kFFmpegDemuxer);
for (const auto& stream : streams_) {
if (!stream)
continue;
@@ -1872,7 +1873,7 @@ bool FFmpegDemuxer::IsMaxMemoryUsageReached() const {
}
void FFmpegDemuxer::StreamHasEnded() {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
for (const auto& stream : streams_) {
if (stream)
stream->SetEndOfStream();
@@ -1891,7 +1892,7 @@ void FFmpegDemuxer::NotifyDemuxerError(PipelineStatus status) {
}
void FFmpegDemuxer::SetLiveness(DemuxerStream::Liveness liveness) {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
for (const auto& stream : streams_) {
if (stream)
stream->SetLiveness(liveness);
@@ -1899,7 +1900,7 @@ void FFmpegDemuxer::SetLiveness(DemuxerStream::Liveness liveness) {
}
void FFmpegDemuxer::RunInitCB(PipelineStatus status) {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
DCHECK(init_cb_);
TRACE_EVENT_ASYNC_END1("media", "FFmpegDemuxer::Initialize", this, "status",
PipelineStatusToString(status));
@@ -1907,7 +1908,7 @@ void FFmpegDemuxer::RunInitCB(PipelineStatus status) {
}
void FFmpegDemuxer::RunPendingSeekCB(PipelineStatus status) {
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK(task_runner_->RunsTasksInCurrentSequence());
DCHECK(pending_seek_cb_);
TRACE_EVENT_ASYNC_END1("media", "FFmpegDemuxer::Seek", this, "status",
PipelineStatusToString(status));
diff --git a/chromium/media/filters/ffmpeg_demuxer.h b/chromium/media/filters/ffmpeg_demuxer.h
index 0c714728db1..c2fc118b3ca 100644
--- a/chromium/media/filters/ffmpeg_demuxer.h
+++ b/chromium/media/filters/ffmpeg_demuxer.h
@@ -34,7 +34,6 @@
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
#include "base/sequenced_task_runner.h"
-#include "base/single_thread_task_runner.h"
#include "media/base/audio_decoder_config.h"
#include "media/base/decoder_buffer.h"
#include "media/base/decoder_buffer_queue.h"
@@ -173,7 +172,7 @@ class MEDIA_EXPORT FFmpegDemuxerStream : public DemuxerStream {
void InitBitstreamConverter();
FFmpegDemuxer* demuxer_;
- scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
+ scoped_refptr<base::SequencedTaskRunner> task_runner_;
AVStream* stream_;
base::TimeDelta start_time_;
std::unique_ptr<AudioDecoderConfig> audio_config_;
@@ -210,7 +209,7 @@ class MEDIA_EXPORT FFmpegDemuxerStream : public DemuxerStream {
class MEDIA_EXPORT FFmpegDemuxer : public Demuxer {
public:
- FFmpegDemuxer(const scoped_refptr<base::SingleThreadTaskRunner>& task_runner,
+ FFmpegDemuxer(const scoped_refptr<base::SequencedTaskRunner>& task_runner,
DataSource* data_source,
const EncryptedMediaInitDataCB& encrypted_media_init_data_cb,
MediaTracksUpdatedCB media_tracks_updated_cb,
@@ -342,7 +341,7 @@ class MEDIA_EXPORT FFmpegDemuxer : public Demuxer {
DemuxerHost* host_ = nullptr;
- scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
+ scoped_refptr<base::SequencedTaskRunner> task_runner_;
// Task runner on which all blocking FFmpeg operations are executed; retrieved
// from base::ThreadPoolInstance.
diff --git a/chromium/media/filters/ffmpeg_demuxer_unittest.cc b/chromium/media/filters/ffmpeg_demuxer_unittest.cc
index d871814468a..5bdd717becc 100644
--- a/chromium/media/filters/ffmpeg_demuxer_unittest.cc
+++ b/chromium/media/filters/ffmpeg_demuxer_unittest.cc
@@ -23,6 +23,7 @@
#include "base/threading/thread.h"
#include "base/threading/thread_task_runner_handle.h"
#include "build/build_config.h"
+#include "build/chromeos_buildflags.h"
#include "media/base/decrypt_config.h"
#include "media/base/demuxer_stream.h"
#include "media/base/media_client.h"
@@ -337,7 +338,7 @@ class FFmpegDemuxerTest : public testing::Test {
base::BindRepeating(&FFmpegDemuxerTest::OnEncryptedMediaInitData,
base::Unretained(this));
- Demuxer::MediaTracksUpdatedCB tracks_updated_cb = base::Bind(
+ Demuxer::MediaTracksUpdatedCB tracks_updated_cb = base::BindRepeating(
&FFmpegDemuxerTest::OnMediaTracksUpdated, base::Unretained(this));
demuxer_.reset(new FFmpegDemuxer(
@@ -778,7 +779,7 @@ TEST_F(FFmpegDemuxerTest, Read_AudioNegativeStartTimeAndOpusDiscard_Sync) {
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
TEST_F(FFmpegDemuxerTest, TestAudioNegativeTimestamps) {
// Note: This test will _crash_ the browser if negative timestamp
// values are skipped, since this file is heavily truncated to avoid
@@ -794,7 +795,7 @@ TEST_F(FFmpegDemuxerTest, TestAudioNegativeTimestamps) {
Read(audio, FROM_HERE, 104, 77619, true);
Read(audio, FROM_HERE, 104, 103492, true);
}
-#endif // defined(OS_CHROMEOS)
+#endif // BUILDFLAG(IS_ASH)
// Similar to the test above, but using an opus clip plus h264 b-frames to
// ensure we don't apply chained ogg workarounds to other content.
diff --git a/chromium/media/filters/ffmpeg_glue_unittest.cc b/chromium/media/filters/ffmpeg_glue_unittest.cc
index 0e56adf9bee..53db86c0dc8 100644
--- a/chromium/media/filters/ffmpeg_glue_unittest.cc
+++ b/chromium/media/filters/ffmpeg_glue_unittest.cc
@@ -11,6 +11,7 @@
#include "base/check.h"
#include "base/macros.h"
#include "base/test/metrics/histogram_tester.h"
+#include "build/chromeos_buildflags.h"
#include "media/base/container_names.h"
#include "media/base/mock_filters.h"
#include "media/base/test_data_util.h"
@@ -315,7 +316,7 @@ TEST_F(FFmpegGlueContainerTest, AAC) {
ExpectContainer(container_names::CONTAINER_AAC);
}
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
TEST_F(FFmpegGlueContainerTest, AVI) {
InitializeAndOpen("bear.avi");
ExpectContainer(container_names::CONTAINER_AVI);
@@ -325,7 +326,7 @@ TEST_F(FFmpegGlueContainerTest, AMR) {
InitializeAndOpen("bear.amr");
ExpectContainer(container_names::CONTAINER_AMR);
}
-#endif // defined(OS_CHROMEOS)
+#endif // BUILDFLAG(IS_ASH)
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
// Probe something unsupported to ensure we fall back to the our internal guess.
diff --git a/chromium/media/filters/ffmpeg_video_decoder.cc b/chromium/media/filters/ffmpeg_video_decoder.cc
index 721b8b697a3..d703189571a 100644
--- a/chromium/media/filters/ffmpeg_video_decoder.cc
+++ b/chromium/media/filters/ffmpeg_video_decoder.cc
@@ -12,8 +12,8 @@
#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/location.h"
-#include "base/single_thread_task_runner.h"
-#include "base/threading/thread_task_runner_handle.h"
+#include "base/sequenced_task_runner.h"
+#include "base/threading/sequenced_task_runner_handle.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/decoder_buffer.h"
#include "media/base/limits.h"
@@ -86,7 +86,7 @@ bool FFmpegVideoDecoder::IsCodecSupported(VideoCodec codec) {
FFmpegVideoDecoder::FFmpegVideoDecoder(MediaLog* media_log)
: media_log_(media_log), state_(kUninitialized), decode_nalus_(false) {
DVLOG(1) << __func__;
- thread_checker_.DetachFromThread();
+ DETACH_FROM_SEQUENCE(sequence_checker_);
}
int FFmpegVideoDecoder::GetVideoBuffer(struct AVCodecContext* codec_context,
@@ -207,7 +207,7 @@ void FFmpegVideoDecoder::Initialize(const VideoDecoderConfig& config,
const OutputCB& output_cb,
const WaitingCB& /* waiting_cb */) {
DVLOG(1) << __func__ << ": " << config.AsHumanReadableString();
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(config.IsValidConfig());
DCHECK(output_cb);
@@ -233,7 +233,7 @@ void FFmpegVideoDecoder::Initialize(const VideoDecoderConfig& config,
void FFmpegVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
DecodeCB decode_cb) {
DVLOG(3) << __func__;
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(buffer.get());
DCHECK(decode_cb);
CHECK_NE(state_, kUninitialized);
@@ -286,16 +286,17 @@ void FFmpegVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
void FFmpegVideoDecoder::Reset(base::OnceClosure closure) {
DVLOG(2) << __func__;
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
avcodec_flush_buffers(codec_context_.get());
state_ = kNormal;
// PostTask() to avoid calling |closure| immediately.
- base::ThreadTaskRunnerHandle::Get()->PostTask(FROM_HERE, std::move(closure));
+ base::SequencedTaskRunnerHandle::Get()->PostTask(FROM_HERE,
+ std::move(closure));
}
FFmpegVideoDecoder::~FFmpegVideoDecoder() {
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (state_ != kUninitialized)
ReleaseFFmpegResources();
diff --git a/chromium/media/filters/ffmpeg_video_decoder.h b/chromium/media/filters/ffmpeg_video_decoder.h
index f13ce417f8b..4ea30459b48 100644
--- a/chromium/media/filters/ffmpeg_video_decoder.h
+++ b/chromium/media/filters/ffmpeg_video_decoder.h
@@ -11,7 +11,7 @@
#include "base/callback.h"
#include "base/macros.h"
#include "base/memory/ref_counted.h"
-#include "base/threading/thread_checker.h"
+#include "base/sequence_checker.h"
#include "media/base/video_decoder.h"
#include "media/base/video_decoder_config.h"
#include "media/base/video_frame_pool.h"
@@ -75,7 +75,8 @@ class MEDIA_EXPORT FFmpegVideoDecoder : public VideoDecoder {
// Releases resources associated with |codec_context_|.
void ReleaseFFmpegResources();
- base::ThreadChecker thread_checker_;
+ SEQUENCE_CHECKER(sequence_checker_);
+
MediaLog* media_log_;
DecoderState state_;
diff --git a/chromium/media/filters/ffmpeg_video_decoder_unittest.cc b/chromium/media/filters/ffmpeg_video_decoder_unittest.cc
index 2d9be601eb9..599fb12c7fe 100644
--- a/chromium/media/filters/ffmpeg_video_decoder_unittest.cc
+++ b/chromium/media/filters/ffmpeg_video_decoder_unittest.cc
@@ -9,7 +9,6 @@
#include <vector>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback_helpers.h"
#include "base/macros.h"
#include "base/memory/ref_counted.h"
@@ -81,7 +80,8 @@ class FFmpegVideoDecoderTest : public testing::Test {
EXPECT_EQ(status.is_ok(), success);
},
success),
- base::Bind(&FFmpegVideoDecoderTest::FrameReady, base::Unretained(this)),
+ base::BindRepeating(&FFmpegVideoDecoderTest::FrameReady,
+ base::Unretained(this)),
base::NullCallback());
base::RunLoop().RunUntilIdle();
}
diff --git a/chromium/media/filters/frame_buffer_pool.cc b/chromium/media/filters/frame_buffer_pool.cc
index 03740545fc0..c6aa7d03b4a 100644
--- a/chromium/media/filters/frame_buffer_pool.cc
+++ b/chromium/media/filters/frame_buffer_pool.cc
@@ -108,14 +108,14 @@ uint8_t* FrameBufferPool::AllocateAlphaPlaneForFrameBuffer(size_t min_size,
return frame_buffer->alpha_data.get();
}
-base::Closure FrameBufferPool::CreateFrameCallback(void* fb_priv) {
+base::OnceClosure FrameBufferPool::CreateFrameCallback(void* fb_priv) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
auto* frame_buffer = static_cast<FrameBuffer*>(fb_priv);
++frame_buffer->held_by_frame;
- return base::Bind(&FrameBufferPool::OnVideoFrameDestroyed, this,
- base::SequencedTaskRunnerHandle::Get(), frame_buffer);
+ return base::BindOnce(&FrameBufferPool::OnVideoFrameDestroyed, this,
+ base::SequencedTaskRunnerHandle::Get(), frame_buffer);
}
bool FrameBufferPool::OnMemoryDump(
diff --git a/chromium/media/filters/frame_buffer_pool.h b/chromium/media/filters/frame_buffer_pool.h
index b108dd24baf..9dddec6fb5d 100644
--- a/chromium/media/filters/frame_buffer_pool.h
+++ b/chromium/media/filters/frame_buffer_pool.h
@@ -47,7 +47,7 @@ class MEDIA_EXPORT FrameBufferPool
// Generates a "no_longer_needed" closure that holds a reference to this pool;
// |fb_priv| must be a value previously returned by GetFrameBuffer().
- base::Closure CreateFrameCallback(void* fb_priv);
+ base::OnceClosure CreateFrameCallback(void* fb_priv);
size_t get_pool_size_for_testing() const { return frame_buffers_.size(); }
diff --git a/chromium/media/filters/frame_buffer_pool_unittest.cc b/chromium/media/filters/frame_buffer_pool_unittest.cc
index 409029a6f1b..8644ce37092 100644
--- a/chromium/media/filters/frame_buffer_pool_unittest.cc
+++ b/chromium/media/filters/frame_buffer_pool_unittest.cc
@@ -53,7 +53,7 @@ TEST(FrameBufferPool, BasicFunctionality) {
memset(alpha, 0, kBufferSize);
// This will release all memory since we're in the shutdown state.
- frame_release_cb.Run();
+ std::move(frame_release_cb).Run();
EXPECT_EQ(0u, pool->get_pool_size_for_testing());
}
@@ -74,7 +74,7 @@ TEST(FrameBufferPool, DeferredDestruction) {
auto frame_release_cb = pool->CreateFrameCallback(priv1);
pool->ReleaseFrameBuffer(priv1);
priv1 = buf1 = nullptr;
- frame_release_cb.Run();
+ std::move(frame_release_cb).Run();
// Frame buffers should not be immediately deleted upon return.
EXPECT_EQ(3u, pool->get_pool_size_for_testing());
@@ -87,7 +87,7 @@ TEST(FrameBufferPool, DeferredDestruction) {
frame_release_cb = pool->CreateFrameCallback(priv2);
pool->ReleaseFrameBuffer(priv2);
priv2 = buf2 = nullptr;
- frame_release_cb.Run();
+ std::move(frame_release_cb).Run();
EXPECT_EQ(3u, pool->get_pool_size_for_testing());
test_clock.Advance(
@@ -97,7 +97,7 @@ TEST(FrameBufferPool, DeferredDestruction) {
frame_release_cb = pool->CreateFrameCallback(priv3);
pool->ReleaseFrameBuffer(priv3);
priv3 = buf3 = nullptr;
- frame_release_cb.Run();
+ std::move(frame_release_cb).Run();
EXPECT_EQ(1u, pool->get_pool_size_for_testing());
pool->Shutdown();
diff --git a/chromium/media/filters/fuchsia/DEPS b/chromium/media/filters/fuchsia/DEPS
new file mode 100644
index 00000000000..03a54a2749f
--- /dev/null
+++ b/chromium/media/filters/fuchsia/DEPS
@@ -0,0 +1,3 @@
+include_rules = [
+ "+components/viz/common/gpu/raster_context_provider.h",
+] \ No newline at end of file
diff --git a/chromium/media/filters/fuchsia/DIR_METADATA b/chromium/media/filters/fuchsia/DIR_METADATA
new file mode 100644
index 00000000000..e88f62328ca
--- /dev/null
+++ b/chromium/media/filters/fuchsia/DIR_METADATA
@@ -0,0 +1,10 @@
+# Metadata information for this directory.
+#
+# For more information on DIR_METADATA files, see:
+# https://source.chromium.org/chromium/infra/infra/+/master:go/src/infra/tools/dirmd/README.md
+#
+# For the schema of this file, see Metadata message:
+# https://source.chromium.org/chromium/infra/infra/+/master:go/src/infra/tools/dirmd/proto/dir_metadata.proto
+
+team_email: "cr-fuchsia@chromium.org"
+os: FUCHSIA \ No newline at end of file
diff --git a/chromium/media/filters/fuchsia/OWNERS b/chromium/media/filters/fuchsia/OWNERS
index c1b584511a6..3ebcc4268bd 100644
--- a/chromium/media/filters/fuchsia/OWNERS
+++ b/chromium/media/filters/fuchsia/OWNERS
@@ -1,4 +1 @@
-file://build/fuchsia/OWNERS
-# COMPONENT: Fuchsia
-# OS: Fuchsia
-# TEAM: cr-fuchsia@chromium.org
+file://build/fuchsia/OWNERS \ No newline at end of file
diff --git a/chromium/media/filters/fuchsia/fuchsia_video_decoder.cc b/chromium/media/filters/fuchsia/fuchsia_video_decoder.cc
index 280e089faa2..d17b79cb657 100644
--- a/chromium/media/filters/fuchsia/fuchsia_video_decoder.cc
+++ b/chromium/media/filters/fuchsia/fuchsia_video_decoder.cc
@@ -24,6 +24,7 @@
#include "base/memory/weak_ptr.h"
#include "base/process/process_metrics.h"
#include "base/threading/sequenced_task_runner_handle.h"
+#include "components/viz/common/gpu/raster_context_provider.h"
#include "gpu/command_buffer/client/context_support.h"
#include "gpu/command_buffer/client/shared_image_interface.h"
#include "gpu/command_buffer/common/shared_image_usage.h"
@@ -51,31 +52,49 @@ namespace media {
namespace {
-// Maximum number of frames we expect to keep while playing video. Higher values
-// require more memory for output buffers. Lower values make it more likely that
-// renderer will stall because decoded frames are not available on time.
-const uint32_t kMaxUsedOutputFrames = 6;
+// Number of output buffers allocated "for camping". This value is passed to
+// sysmem to ensure that we get one output buffer for the frame currently
+// displayed on the screen.
+const uint32_t kOutputBuffersForCamping = 1;
+
+// Maximum number of frames we expect to have queued up while playing video.
+// Higher values require more memory for output buffers. Lower values make it
+// more likely that renderer will stall because decoded frames are not available
+// on time.
+const uint32_t kMaxUsedOutputBuffers = 5;
+
+// Use 2 buffers for decoder input. Limiting total number of buffers to 2 allows
+// to minimize required memory without significant effect on performance.
+const size_t kNumInputBuffers = 2;
+
+// Some codecs do not support splitting video frames across multiple input
+// buffers, so the buffers need to be large enough to fit all video frames. The
+// buffer size is calculated to fit 1080p frame with MinCR=2 (per H264 spec),
+// plus 128KiB for SEI/SPS/PPS. (note that the same size is used for all codecs,
+// not just H264).
+const size_t kInputBufferSize = 1920 * 1080 * 3 / 2 / 2 + 128 * 1024;
// Helper used to hold mailboxes for the output textures. OutputMailbox may
// outlive FuchsiaVideoDecoder if is referenced by a VideoFrame.
class OutputMailbox {
public:
- OutputMailbox(gpu::SharedImageInterface* shared_image_interface,
- gpu::ContextSupport* gpu_context_support,
- std::unique_ptr<gfx::GpuMemoryBuffer> gmb)
- : shared_image_interface_(shared_image_interface),
- gpu_context_support_(gpu_context_support),
- weak_factory_(this) {
+ OutputMailbox(
+ scoped_refptr<viz::RasterContextProvider> raster_context_provider,
+ std::unique_ptr<gfx::GpuMemoryBuffer> gmb)
+ : raster_context_provider_(raster_context_provider), weak_factory_(this) {
uint32_t usage = gpu::SHARED_IMAGE_USAGE_RASTER |
gpu::SHARED_IMAGE_USAGE_OOP_RASTERIZATION |
gpu::SHARED_IMAGE_USAGE_DISPLAY |
gpu::SHARED_IMAGE_USAGE_SCANOUT;
- mailbox_ = shared_image_interface_->CreateSharedImage(
- gmb.get(), nullptr, gfx::ColorSpace(), kTopLeft_GrSurfaceOrigin,
- kPremul_SkAlphaType, usage);
+ mailbox_ =
+ raster_context_provider_->SharedImageInterface()->CreateSharedImage(
+ gmb.get(), nullptr, gfx::ColorSpace(), kTopLeft_GrSurfaceOrigin,
+ kPremul_SkAlphaType, usage);
}
+
~OutputMailbox() {
- shared_image_interface_->DestroySharedImage(sync_token_, mailbox_);
+ raster_context_provider_->SharedImageInterface()->DestroySharedImage(
+ sync_token_, mailbox_);
}
const gpu::Mailbox& mailbox() { return mailbox_; }
@@ -94,7 +113,8 @@ class OutputMailbox {
gpu::MailboxHolder mailboxes[VideoFrame::kMaxPlanes];
mailboxes[0].mailbox = mailbox_;
- mailboxes[0].sync_token = shared_image_interface_->GenUnverifiedSyncToken();
+ mailboxes[0].sync_token = raster_context_provider_->SharedImageInterface()
+ ->GenUnverifiedSyncToken();
auto frame = VideoFrame::WrapNativeTextures(
pixel_format, mailboxes,
@@ -132,7 +152,7 @@ class OutputMailbox {
return;
}
- gpu_context_support_->SignalSyncToken(
+ raster_context_provider_->ContextSupport()->SignalSyncToken(
sync_token_,
BindToCurrentLoop(base::BindOnce(&OutputMailbox::OnSyncTokenSignaled,
weak_factory_.GetWeakPtr())));
@@ -143,8 +163,7 @@ class OutputMailbox {
std::move(reuse_callback_).Run();
}
- gpu::SharedImageInterface* const shared_image_interface_;
- gpu::ContextSupport* const gpu_context_support_;
+ const scoped_refptr<viz::RasterContextProvider> raster_context_provider_;
gpu::Mailbox mailbox_;
gpu::SyncToken sync_token_;
@@ -169,9 +188,9 @@ struct InputDecoderPacket {
class FuchsiaVideoDecoder : public VideoDecoder,
public FuchsiaSecureStreamDecryptor::Client {
public:
- FuchsiaVideoDecoder(gpu::SharedImageInterface* shared_image_interface,
- gpu::ContextSupport* gpu_context_support,
- bool enable_sw_decoding);
+ FuchsiaVideoDecoder(
+ scoped_refptr<viz::RasterContextProvider> raster_context_provider,
+ bool enable_sw_decoding);
~FuchsiaVideoDecoder() override;
// Decoder implementation.
@@ -196,6 +215,7 @@ class FuchsiaVideoDecoder : public VideoDecoder,
bool InitializeDecryptor(CdmContext* cdm_context);
// FuchsiaSecureStreamDecryptor::Client implementation.
+ size_t GetInputBufferSize() override;
void OnDecryptorOutputPacket(StreamProcessorHelper::IoPacket packet) override;
void OnDecryptorEndOfStreamPacket() override;
void OnDecryptorError() override;
@@ -248,8 +268,7 @@ class FuchsiaVideoDecoder : public VideoDecoder,
void ReleaseInputBuffers();
void ReleaseOutputBuffers();
- gpu::SharedImageInterface* const shared_image_interface_;
- gpu::ContextSupport* const gpu_context_support_;
+ const scoped_refptr<viz::RasterContextProvider> raster_context_provider_;
const bool enable_sw_decoding_;
const bool use_overlays_for_video_;
@@ -291,7 +310,6 @@ class FuchsiaVideoDecoder : public VideoDecoder,
uint64_t input_buffer_lifetime_ordinal_ = 1;
std::unique_ptr<SysmemBufferPool::Creator> input_buffer_collection_creator_;
std::unique_ptr<SysmemBufferPool> input_buffer_collection_;
- size_t num_input_buffers_ = 0;
base::flat_map<size_t, InputDecoderPacket> in_flight_input_packets_;
// Output buffers for |decoder_|.
@@ -301,8 +319,7 @@ class FuchsiaVideoDecoder : public VideoDecoder,
gfx::SysmemBufferCollectionId output_buffer_collection_id_;
std::vector<OutputMailbox*> output_mailboxes_;
- int num_used_output_buffers_ = 0;
- int max_used_output_buffers_ = 0;
+ size_t num_used_output_buffers_ = 0;
base::WeakPtr<FuchsiaVideoDecoder> weak_this_;
base::WeakPtrFactory<FuchsiaVideoDecoder> weak_factory_;
@@ -311,17 +328,15 @@ class FuchsiaVideoDecoder : public VideoDecoder,
};
FuchsiaVideoDecoder::FuchsiaVideoDecoder(
- gpu::SharedImageInterface* shared_image_interface,
- gpu::ContextSupport* gpu_context_support,
+ scoped_refptr<viz::RasterContextProvider> raster_context_provider,
bool enable_sw_decoding)
- : shared_image_interface_(shared_image_interface),
- gpu_context_support_(gpu_context_support),
+ : raster_context_provider_(raster_context_provider),
enable_sw_decoding_(enable_sw_decoding),
use_overlays_for_video_(base::CommandLine::ForCurrentProcess()->HasSwitch(
switches::kUseOverlaysForVideo)),
client_native_pixmap_factory_(ui::CreateClientNativePixmapFactoryOzone()),
weak_factory_(this) {
- DCHECK(shared_image_interface_);
+ DCHECK(raster_context_provider_);
weak_this_ = weak_factory_.GetWeakPtr();
}
@@ -495,13 +510,19 @@ bool FuchsiaVideoDecoder::NeedsBitstreamConversion() const {
}
bool FuchsiaVideoDecoder::CanReadWithoutStalling() const {
- return num_used_output_buffers_ < max_used_output_buffers_;
+ return num_used_output_buffers_ < kMaxUsedOutputBuffers;
}
int FuchsiaVideoDecoder::GetMaxDecodeRequests() const {
- // Add one extra request to be able to send new InputBuffer immediately after
- // OnFreeInputPacket().
- return num_input_buffers_ + 1;
+ if (!decryptor_) {
+ // Add one extra request to be able to send a new InputBuffer immediately
+ // after OnFreeInputPacket().
+ return input_writer_queue_.num_buffers() + 1;
+ }
+
+ // For encrypted streams we need enough decode requests to fill the
+ // decryptor's queue and all decoder buffers. Add one extra same as above.
+ return decryptor_->GetMaxDecryptRequests() + kNumInputBuffers + 1;
}
bool FuchsiaVideoDecoder::InitializeDecryptor(CdmContext* cdm_context) {
@@ -527,6 +548,10 @@ bool FuchsiaVideoDecoder::InitializeDecryptor(CdmContext* cdm_context) {
return true;
}
+size_t FuchsiaVideoDecoder::GetInputBufferSize() {
+ return kInputBufferSize;
+}
+
void FuchsiaVideoDecoder::OnDecryptorOutputPacket(
StreamProcessorHelper::IoPacket packet) {
SendInputPacket(nullptr, std::move(packet));
@@ -571,10 +596,18 @@ void FuchsiaVideoDecoder::OnInputConstraints(
// output and decoder input. It is not used directly.
num_tokens = 2;
buffer_constraints.usage.none = fuchsia::sysmem::noneUsage;
+ buffer_constraints.min_buffer_count = kNumInputBuffers;
+ buffer_constraints.has_buffer_memory_constraints = true;
+ buffer_constraints.buffer_memory_constraints.min_size_bytes =
+ kInputBufferSize;
+ buffer_constraints.buffer_memory_constraints.ram_domain_supported = true;
+ buffer_constraints.buffer_memory_constraints.cpu_domain_supported = true;
+ buffer_constraints.buffer_memory_constraints.inaccessible_domain_supported =
+ true;
} else {
num_tokens = 1;
auto writer_constraints = SysmemBufferWriter::GetRecommendedConstraints(
- decoder_input_constraints_.value());
+ kNumInputBuffers, kInputBufferSize);
if (!writer_constraints.has_value()) {
OnError();
return;
@@ -599,29 +632,18 @@ void FuchsiaVideoDecoder::OnInputBufferPoolCreated(
}
input_buffer_collection_ = std::move(pool);
- num_input_buffers_ =
- decoder_input_constraints_->default_settings().packet_count_for_server() +
- decoder_input_constraints_->default_settings().packet_count_for_client();
fuchsia::media::StreamBufferPartialSettings settings;
settings.set_buffer_lifetime_ordinal(input_buffer_lifetime_ordinal_);
settings.set_buffer_constraints_version_ordinal(
decoder_input_constraints_->buffer_constraints_version_ordinal());
settings.set_single_buffer_mode(false);
- settings.set_packet_count_for_server(
- decoder_input_constraints_->default_settings().packet_count_for_server());
- settings.set_packet_count_for_client(
- decoder_input_constraints_->default_settings().packet_count_for_client());
settings.set_sysmem_token(input_buffer_collection_->TakeToken());
decoder_->SetInputBufferPartialSettings(std::move(settings));
if (decryptor_) {
decryptor_->SetOutputBufferCollectionToken(
- input_buffer_collection_->TakeToken(),
- decoder_input_constraints_->default_settings()
- .packet_count_for_client(),
- decoder_input_constraints_->default_settings()
- .packet_count_for_server());
+ input_buffer_collection_->TakeToken());
} else {
input_buffer_collection_->CreateWriter(base::BindOnce(
&FuchsiaVideoDecoder::OnWriterCreated, base::Unretained(this)));
@@ -733,35 +755,12 @@ void FuchsiaVideoDecoder::OnOutputConstraints(
return;
}
- if (!output_constraints.has_buffer_constraints()) {
- DLOG(ERROR) << "Received OnOutputConstraints() which requires buffer "
- "constraints action, but without buffer constraints.";
- OnError();
- return;
- }
-
- const fuchsia::media::StreamBufferConstraints& buffer_constraints =
- output_constraints.buffer_constraints();
-
- if (!buffer_constraints.has_default_settings() ||
- !buffer_constraints.has_packet_count_for_client_max() ||
- !buffer_constraints.default_settings().has_packet_count_for_server() ||
- !buffer_constraints.default_settings().has_packet_count_for_client()) {
- DLOG(ERROR)
- << "Received OnOutputConstraints() with missing required fields.";
- OnError();
- return;
- }
-
ReleaseOutputBuffers();
// mediacodec API expects odd buffer lifetime ordinal, which is incremented by
// 2 for each buffer generation.
output_buffer_lifetime_ordinal_ += 2;
- max_used_output_buffers_ = std::min(
- kMaxUsedOutputFrames, buffer_constraints.packet_count_for_client_max());
-
// Create a new sysmem buffer collection token for the output buffers.
fuchsia::sysmem::BufferCollectionTokenPtr collection_token;
sysmem_allocator_.raw()->AllocateSharedCollection(
@@ -889,10 +888,10 @@ void FuchsiaVideoDecoder::OnOutputPacket(fuchsia::media::Packet output_packet,
buffer_format, gfx::BufferUsage::GPU_READ,
gpu::GpuMemoryBufferImpl::DestructionCallback());
- output_mailboxes_[buffer_index] = new OutputMailbox(
- shared_image_interface_, gpu_context_support_, std::move(gmb));
+ output_mailboxes_[buffer_index] =
+ new OutputMailbox(raster_context_provider_, std::move(gmb));
} else {
- shared_image_interface_->UpdateSharedImage(
+ raster_context_provider_->SharedImageInterface()->UpdateSharedImage(
gpu::SyncToken(), output_mailboxes_[buffer_index]->mailbox());
}
@@ -1021,27 +1020,27 @@ void FuchsiaVideoDecoder::InitializeOutputBufferCollection(
fuchsia::sysmem::BufferCollectionTokenPtr collection_token_for_gpu) {
fuchsia::sysmem::BufferCollectionConstraints buffer_constraints;
buffer_constraints.usage.none = fuchsia::sysmem::noneUsage;
- buffer_constraints.min_buffer_count_for_camping = max_used_output_buffers_;
+ buffer_constraints.min_buffer_count_for_camping = kOutputBuffersForCamping;
+ buffer_constraints.min_buffer_count_for_shared_slack =
+ kMaxUsedOutputBuffers - kOutputBuffersForCamping;
output_buffer_collection_->SetConstraints(
/*has_constraints=*/true, std::move(buffer_constraints));
// Register the new collection with the GPU process.
DCHECK(!output_buffer_collection_id_);
output_buffer_collection_id_ = gfx::SysmemBufferCollectionId::Create();
- shared_image_interface_->RegisterSysmemBufferCollection(
- output_buffer_collection_id_,
- collection_token_for_gpu.Unbind().TakeChannel(),
- gfx::BufferFormat::YUV_420_BIPLANAR, gfx::BufferUsage::GPU_READ,
- true /*register_with_image_pipe*/);
+ raster_context_provider_->SharedImageInterface()
+ ->RegisterSysmemBufferCollection(
+ output_buffer_collection_id_,
+ collection_token_for_gpu.Unbind().TakeChannel(),
+ gfx::BufferFormat::YUV_420_BIPLANAR, gfx::BufferUsage::GPU_READ,
+ use_overlays_for_video_ /*register_with_image_pipe*/);
// Pass new output buffer settings to the codec.
fuchsia::media::StreamBufferPartialSettings settings;
settings.set_buffer_lifetime_ordinal(output_buffer_lifetime_ordinal_);
settings.set_buffer_constraints_version_ordinal(
constraints.buffer_constraints_version_ordinal());
- settings.set_packet_count_for_client(max_used_output_buffers_);
- settings.set_packet_count_for_server(
- constraints.packet_count_for_server_recommended());
settings.set_sysmem_token(std::move(collection_token_for_codec));
decoder_->SetOutputBufferPartialSettings(std::move(settings));
decoder_->CompleteOutputBufferPartialSettings(
@@ -1056,7 +1055,6 @@ void FuchsiaVideoDecoder::ReleaseInputBuffers() {
input_writer_queue_.ResetBuffers();
input_buffer_collection_creator_.reset();
input_buffer_collection_.reset();
- num_input_buffers_ = 0;
// |in_flight_input_packets_| must be destroyed after
// |input_writer_queue_.ResetBuffers()|. Otherwise |input_writer_queue_| may
@@ -1081,8 +1079,8 @@ void FuchsiaVideoDecoder::ReleaseOutputBuffers() {
// Tell the GPU process to drop the buffer collection.
if (output_buffer_collection_id_) {
- shared_image_interface_->ReleaseSysmemBufferCollection(
- output_buffer_collection_id_);
+ raster_context_provider_->SharedImageInterface()
+ ->ReleaseSysmemBufferCollection(output_buffer_collection_id_);
output_buffer_collection_id_ = {};
}
}
@@ -1091,7 +1089,7 @@ void FuchsiaVideoDecoder::OnReuseMailbox(uint32_t buffer_index,
uint32_t packet_index) {
DCHECK(decoder_);
- DCHECK_GT(num_used_output_buffers_, 0);
+ DCHECK_GT(num_used_output_buffers_, 0U);
num_used_output_buffers_--;
fuchsia::media::PacketHeader header;
@@ -1101,19 +1099,17 @@ void FuchsiaVideoDecoder::OnReuseMailbox(uint32_t buffer_index,
}
std::unique_ptr<VideoDecoder> CreateFuchsiaVideoDecoder(
- gpu::SharedImageInterface* shared_image_interface,
- gpu::ContextSupport* gpu_context_support) {
- return std::make_unique<FuchsiaVideoDecoder>(shared_image_interface,
- gpu_context_support,
- /*enable_sw_decoding=*/false);
+ scoped_refptr<viz::RasterContextProvider> raster_context_provider) {
+ return std::make_unique<FuchsiaVideoDecoder>(
+ std::move(raster_context_provider),
+ /*enable_sw_decoding=*/false);
}
std::unique_ptr<VideoDecoder> CreateFuchsiaVideoDecoderForTests(
- gpu::SharedImageInterface* shared_image_interface,
- gpu::ContextSupport* gpu_context_support,
+ scoped_refptr<viz::RasterContextProvider> raster_context_provider,
bool enable_sw_decoding) {
return std::make_unique<FuchsiaVideoDecoder>(
- shared_image_interface, gpu_context_support, enable_sw_decoding);
+ std::move(raster_context_provider), enable_sw_decoding);
}
} // namespace media
diff --git a/chromium/media/filters/fuchsia/fuchsia_video_decoder.h b/chromium/media/filters/fuchsia/fuchsia_video_decoder.h
index d5a59637580..483458179d2 100644
--- a/chromium/media/filters/fuchsia/fuchsia_video_decoder.h
+++ b/chromium/media/filters/fuchsia/fuchsia_video_decoder.h
@@ -7,12 +7,12 @@
#include <memory>
+#include "base/memory/scoped_refptr.h"
#include "media/base/media_export.h"
-namespace gpu {
-class ContextSupport;
-class SharedImageInterface;
-} // namespace gpu
+namespace viz {
+class RasterContextProvider;
+} // namespace viz
namespace media {
@@ -20,17 +20,14 @@ class VideoDecoder;
// Creates VideoDecoder that uses fuchsia.mediacodec API. The returned
// VideoDecoder instance will only try to use hardware video codecs.
-// |shared_image_interface| and |gpu_context_support| must outlive the decoder.
MEDIA_EXPORT std::unique_ptr<VideoDecoder> CreateFuchsiaVideoDecoder(
- gpu::SharedImageInterface* shared_image_interface,
- gpu::ContextSupport* gpu_context_support);
+ scoped_refptr<viz::RasterContextProvider> raster_context_provider);
// Same as above, but also allows to enable software codecs. This is useful for
// FuchsiaVideoDecoder tests that run on systems that don't have hardware
// decoder support.
MEDIA_EXPORT std::unique_ptr<VideoDecoder> CreateFuchsiaVideoDecoderForTests(
- gpu::SharedImageInterface* shared_image_interface,
- gpu::ContextSupport* gpu_context_support,
+ scoped_refptr<viz::RasterContextProvider> raster_context_provider,
bool enable_sw_decoding);
} // namespace media
diff --git a/chromium/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc b/chromium/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc
index cf6ca25801e..5e4c36998e3 100644
--- a/chromium/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc
+++ b/chromium/media/filters/fuchsia/fuchsia_video_decoder_unittest.cc
@@ -8,14 +8,17 @@
#include <lib/sys/cpp/component_context.h>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/containers/flat_map.h"
#include "base/containers/flat_set.h"
#include "base/fuchsia/fuchsia_logging.h"
#include "base/fuchsia/process_context.h"
+#include "base/test/bind.h"
#include "base/test/task_environment.h"
+#include "components/viz/common/gpu/raster_context_provider.h"
#include "components/viz/test/test_context_support.h"
#include "gpu/command_buffer/client/shared_image_interface.h"
+#include "gpu/config/gpu_feature_info.h"
#include "media/base/test_data_util.h"
#include "media/base/test_helpers.h"
#include "media/base/video_decoder.h"
@@ -89,7 +92,7 @@ class TestSharedImageInterface : public gpu::SharedImageInterface {
SkAlphaType alpha_type,
uint32_t usage,
gpu::SurfaceHandle surface_handle) override {
- NOTREACHED();
+ ADD_FAILURE();
return gpu::Mailbox();
}
@@ -101,7 +104,7 @@ class TestSharedImageInterface : public gpu::SharedImageInterface {
SkAlphaType alpha_type,
uint32_t usage,
base::span<const uint8_t> pixel_data) override {
- NOTREACHED();
+ ADD_FAILURE();
return gpu::Mailbox();
}
@@ -128,12 +131,12 @@ class TestSharedImageInterface : public gpu::SharedImageInterface {
void UpdateSharedImage(const gpu::SyncToken& sync_token,
const gpu::Mailbox& mailbox) override {
- NOTREACHED();
+ ADD_FAILURE();
}
void UpdateSharedImage(const gpu::SyncToken& sync_token,
std::unique_ptr<gfx::GpuFence> acquire_fence,
const gpu::Mailbox& mailbox) override {
- NOTREACHED();
+ ADD_FAILURE();
}
void DestroySharedImage(const gpu::SyncToken& sync_token,
@@ -147,12 +150,12 @@ class TestSharedImageInterface : public gpu::SharedImageInterface {
GrSurfaceOrigin surface_origin,
SkAlphaType alpha_type,
uint32_t usage) override {
- NOTREACHED();
+ ADD_FAILURE();
return SwapChainMailboxes();
}
void PresentSwapChain(const gpu::SyncToken& sync_token,
const gpu::Mailbox& mailbox) override {
- NOTREACHED();
+ ADD_FAILURE();
}
void RegisterSysmemBufferCollection(gfx::SysmemBufferCollectionId id,
@@ -173,7 +176,7 @@ class TestSharedImageInterface : public gpu::SharedImageInterface {
}
gpu::SyncToken GenVerifiedSyncToken() override {
- NOTREACHED();
+ ADD_FAILURE();
return gpu::SyncToken();
}
gpu::SyncToken GenUnverifiedSyncToken() override {
@@ -182,10 +185,10 @@ class TestSharedImageInterface : public gpu::SharedImageInterface {
}
void WaitSyncToken(const gpu::SyncToken& sync_token) override {
- NOTREACHED();
+ ADD_FAILURE();
}
- void Flush() override { NOTREACHED(); }
+ void Flush() override { ADD_FAILURE(); }
scoped_refptr<gfx::NativePixmap> GetNativePixmap(
const gpu::Mailbox& mailbox) override {
@@ -200,16 +203,93 @@ class TestSharedImageInterface : public gpu::SharedImageInterface {
base::flat_set<gpu::Mailbox> mailboxes_;
};
+class TestRasterContextProvider
+ : public base::RefCountedThreadSafe<TestRasterContextProvider>,
+ public viz::RasterContextProvider {
+ public:
+ TestRasterContextProvider() {}
+
+ TestRasterContextProvider(TestRasterContextProvider&) = delete;
+ TestRasterContextProvider& operator=(TestRasterContextProvider&) = delete;
+
+ void SetOnDestroyedClosure(base::Closure on_destroyed) {
+ on_destroyed_ = on_destroyed;
+ }
+
+ // viz::RasterContextProvider implementation;
+ void AddRef() const override {
+ base::RefCountedThreadSafe<TestRasterContextProvider>::AddRef();
+ }
+ void Release() const override {
+ base::RefCountedThreadSafe<TestRasterContextProvider>::Release();
+ }
+ gpu::ContextResult BindToCurrentThread() override {
+ ADD_FAILURE();
+ return gpu::ContextResult::kFatalFailure;
+ }
+ void AddObserver(viz::ContextLostObserver* obs) override { ADD_FAILURE(); }
+ void RemoveObserver(viz::ContextLostObserver* obs) override { ADD_FAILURE(); }
+ base::Lock* GetLock() override {
+ ADD_FAILURE();
+ return nullptr;
+ }
+ viz::ContextCacheController* CacheController() override {
+ ADD_FAILURE();
+ return nullptr;
+ }
+ gpu::ContextSupport* ContextSupport() override {
+ return &gpu_context_support_;
+ }
+ class GrDirectContext* GrContext() override {
+ ADD_FAILURE();
+ return nullptr;
+ }
+ gpu::SharedImageInterface* SharedImageInterface() override {
+ return &shared_image_interface_;
+ }
+ const gpu::Capabilities& ContextCapabilities() const override {
+ ADD_FAILURE();
+ static gpu::Capabilities dummy_caps;
+ return dummy_caps;
+ }
+ const gpu::GpuFeatureInfo& GetGpuFeatureInfo() const override {
+ ADD_FAILURE();
+ static gpu::GpuFeatureInfo dummy_feature_info;
+ return dummy_feature_info;
+ }
+ gpu::gles2::GLES2Interface* ContextGL() override {
+ ADD_FAILURE();
+ return nullptr;
+ }
+ gpu::raster::RasterInterface* RasterInterface() override {
+ ADD_FAILURE();
+ return nullptr;
+ }
+
+ private:
+ friend class base::RefCountedThreadSafe<TestRasterContextProvider>;
+
+ ~TestRasterContextProvider() override {
+ if (on_destroyed_)
+ std::move(on_destroyed_).Run();
+ }
+
+ TestSharedImageInterface shared_image_interface_;
+ viz::TestContextSupport gpu_context_support_;
+
+ base::Closure on_destroyed_;
+};
+
} // namespace
class FuchsiaVideoDecoderTest : public testing::Test {
public:
- FuchsiaVideoDecoderTest() {
- decoder_ = CreateFuchsiaVideoDecoderForTests(&shared_image_interface_,
- &gpu_context_support_,
-
- /*enable_sw_decoding=*/true);
- }
+ FuchsiaVideoDecoderTest()
+ : raster_context_provider_(
+ base::MakeRefCounted<TestRasterContextProvider>()),
+ decoder_(
+ CreateFuchsiaVideoDecoderForTests(raster_context_provider_.get(),
+ /*enable_sw_decoding=*/true)) {}
~FuchsiaVideoDecoderTest() override = default;
bool InitializeDecoder(VideoDecoderConfig config) WARN_UNUSED_RESULT {
@@ -285,8 +365,7 @@ class FuchsiaVideoDecoderTest : public testing::Test {
base::test::SingleThreadTaskEnvironment task_environment_{
base::test::SingleThreadTaskEnvironment::MainThreadType::IO};
- TestSharedImageInterface shared_image_interface_;
- viz::TestContextSupport gpu_context_support_;
+ scoped_refptr<TestRasterContextProvider> raster_context_provider_;
std::unique_ptr<VideoDecoder> decoder_;
@@ -396,4 +475,35 @@ TEST_F(FuchsiaVideoDecoderTest, ResetAndReinitializeH264) {
EXPECT_EQ(num_output_frames_, 4U);
}
+// Verifies that the decoder keeps reference to the RasterContextProvider.
+TEST_F(FuchsiaVideoDecoderTest, RasterContextLifetime) {
+ bool context_destroyed = false;
+ raster_context_provider_->SetOnDestroyedClosure(base::BindLambdaForTesting(
+ [&context_destroyed]() { context_destroyed = true; }));
+ ASSERT_TRUE(InitializeDecoder(TestVideoConfig::NormalH264()));
+ ASSERT_FALSE(context_destroyed);
+
+ // Decoder should keep reference to RasterContextProvider.
+ raster_context_provider_.reset();
+ ASSERT_FALSE(context_destroyed);
+
+ // Feed some frames to decoder to get decoded video frames.
+ for (int i = 0; i < 4; ++i) {
+ DecodeBuffer(GetH264Frame(i));
+ }
+ ASSERT_NO_FATAL_FAILURE(WaitDecodeDone());
+
+ // Destroy the decoder. RasterContextProvider will not be destroyed since
+ // it's still referenced by frames in |output_frames_|.
+ decoder_.reset();
+ task_environment_.RunUntilIdle();
+ ASSERT_FALSE(context_destroyed);
+
+ // RasterContextProvider reference should be dropped once all frames are
+ // dropped.
+ output_frames_.clear();
+ task_environment_.RunUntilIdle();
+ ASSERT_TRUE(context_destroyed);
+}
+
} // namespace media
diff --git a/chromium/media/filters/gav1_video_decoder.cc b/chromium/media/filters/gav1_video_decoder.cc
index 806f6fee6b7..8e0e69c3aac 100644
--- a/chromium/media/filters/gav1_video_decoder.cc
+++ b/chromium/media/filters/gav1_video_decoder.cc
@@ -7,8 +7,8 @@
#include <stdint.h>
#include <numeric>
-#include "base/bind_helpers.h"
#include "base/bits.h"
+#include "base/callback_helpers.h"
#include "base/numerics/safe_conversions.h"
#include "base/system/sys_info.h"
#include "base/threading/sequenced_task_runner_handle.h"
diff --git a/chromium/media/filters/gav1_video_decoder_unittest.cc b/chromium/media/filters/gav1_video_decoder_unittest.cc
index c12a6be4590..4169f610d4f 100644
--- a/chromium/media/filters/gav1_video_decoder_unittest.cc
+++ b/chromium/media/filters/gav1_video_decoder_unittest.cc
@@ -8,7 +8,7 @@
#include <vector>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/hash/md5.h"
#include "base/run_loop.h"
#include "base/strings/string_piece.h"
diff --git a/chromium/media/filters/h264_bitstream_buffer.cc b/chromium/media/filters/h264_bitstream_buffer.cc
index a37def808df..ea18646cb99 100644
--- a/chromium/media/filters/h264_bitstream_buffer.cc
+++ b/chromium/media/filters/h264_bitstream_buffer.cc
@@ -8,18 +8,18 @@
namespace media {
-H264BitstreamBuffer::H264BitstreamBuffer() : data_(NULL) {
+H264BitstreamBuffer::H264BitstreamBuffer() : data_(nullptr) {
Reset();
}
H264BitstreamBuffer::~H264BitstreamBuffer() {
free(data_);
- data_ = NULL;
+ data_ = nullptr;
}
void H264BitstreamBuffer::Reset() {
free(data_);
- data_ = NULL;
+ data_ = nullptr;
capacity_ = 0;
pos_ = 0;
diff --git a/chromium/media/filters/media_file_checker.cc b/chromium/media/filters/media_file_checker.cc
index c65d8081dc0..a0f2848bbc6 100644
--- a/chromium/media/filters/media_file_checker.cc
+++ b/chromium/media/filters/media_file_checker.cc
@@ -47,7 +47,7 @@ bool MediaFileChecker::Start(base::TimeDelta check_time) {
bool read_ok = true;
media::BlockingUrlProtocol protocol(
- &source, base::Bind(&OnMediaFileCheckerError, &read_ok));
+ &source, base::BindRepeating(&OnMediaFileCheckerError, &read_ok));
media::FFmpegGlue glue(&protocol);
AVFormatContext* format_context = glue.format_context();
diff --git a/chromium/media/filters/memory_data_source.h b/chromium/media/filters/memory_data_source.h
index 0f87e39d326..716cb776157 100644
--- a/chromium/media/filters/memory_data_source.h
+++ b/chromium/media/filters/memory_data_source.h
@@ -14,7 +14,7 @@ namespace media {
// Basic data source that treats the URL as a file path, and uses the file
// system to read data for a media pipeline.
-class MEDIA_EXPORT MemoryDataSource : public DataSource {
+class MEDIA_EXPORT MemoryDataSource final : public DataSource {
public:
// Construct MemoryDataSource with |data| and |size|. The data is guaranteed
// to be valid during the lifetime of MemoryDataSource.
diff --git a/chromium/media/filters/offloading_video_decoder.cc b/chromium/media/filters/offloading_video_decoder.cc
index 2758ebfd72f..242190f8009 100644
--- a/chromium/media/filters/offloading_video_decoder.cc
+++ b/chromium/media/filters/offloading_video_decoder.cc
@@ -5,7 +5,7 @@
#include "media/filters/offloading_video_decoder.h"
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/sequenced_task_runner.h"
#include "base/synchronization/atomic_flag.h"
#include "base/task/post_task.h"
@@ -63,11 +63,11 @@ OffloadingVideoDecoder::OffloadingVideoDecoder(
: min_offloading_width_(min_offloading_width),
supported_codecs_(std::move(supported_codecs)),
helper_(std::make_unique<CancellationHelper>(std::move(decoder))) {
- DETACH_FROM_THREAD(thread_checker_);
+ DETACH_FROM_SEQUENCE(sequence_checker_);
}
OffloadingVideoDecoder::~OffloadingVideoDecoder() {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
// The |helper_| must always be destroyed on the |offload_task_runner_| since
// we may still have tasks posted to it.
@@ -86,7 +86,7 @@ void OffloadingVideoDecoder::Initialize(const VideoDecoderConfig& config,
InitCB init_cb,
const OutputCB& output_cb,
const WaitingCB& waiting_cb) {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(config.IsValidConfig());
const bool disable_offloading =
@@ -152,7 +152,7 @@ void OffloadingVideoDecoder::Initialize(const VideoDecoderConfig& config,
void OffloadingVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
DecodeCB decode_cb) {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(buffer);
DCHECK(decode_cb);
@@ -169,7 +169,7 @@ void OffloadingVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
}
void OffloadingVideoDecoder::Reset(base::OnceClosure reset_cb) {
- DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
base::OnceClosure bound_reset_cb = BindToCurrentLoop(std::move(reset_cb));
if (!offload_task_runner_) {
diff --git a/chromium/media/filters/offloading_video_decoder.h b/chromium/media/filters/offloading_video_decoder.h
index 3b85489fb07..0ff14bb3d45 100644
--- a/chromium/media/filters/offloading_video_decoder.h
+++ b/chromium/media/filters/offloading_video_decoder.h
@@ -8,7 +8,7 @@
#include "base/callback_forward.h"
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
-#include "base/threading/thread_checker.h"
+#include "base/sequence_checker.h"
#include "media/base/video_codecs.h"
#include "media/base/video_decoder.h"
#include "media/base/video_decoder_config.h"
@@ -110,7 +110,7 @@ class MEDIA_EXPORT OffloadingVideoDecoder : public VideoDecoder {
// Indicates if Initialize() has been called.
bool initialized_ = false;
- THREAD_CHECKER(thread_checker_);
+ SEQUENCE_CHECKER(sequence_checker_);
// A helper class for managing Decode() and Reset() calls to the offloaded
// decoder; it owns the given OffloadableVideoDecoder and is always destructed
diff --git a/chromium/media/filters/offloading_video_decoder_unittest.cc b/chromium/media/filters/offloading_video_decoder_unittest.cc
index 53b435438bf..29813ad3120 100644
--- a/chromium/media/filters/offloading_video_decoder_unittest.cc
+++ b/chromium/media/filters/offloading_video_decoder_unittest.cc
@@ -5,7 +5,7 @@
#include "media/filters/offloading_video_decoder.h"
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/run_loop.h"
#include "base/test/gmock_callback_support.h"
#include "base/test/task_environment.h"
@@ -91,8 +91,8 @@ class OffloadingVideoDecoderTest : public testing::Test {
VideoDecoder::OutputCB ExpectOutputCB() {
EXPECT_CALL(*this, OutputDone(_))
.WillOnce(VerifyOn(task_env_.GetMainThreadTaskRunner()));
- return base::Bind(&OffloadingVideoDecoderTest::OutputDone,
- base::Unretained(this));
+ return base::BindRepeating(&OffloadingVideoDecoderTest::OutputDone,
+ base::Unretained(this));
}
VideoDecoder::DecodeCB ExpectDecodeCB(StatusCode status) {
@@ -131,7 +131,7 @@ class OffloadingVideoDecoderTest : public testing::Test {
// Verify decode works and is called on the right thread.
EXPECT_CALL(*decoder_, Decode_(_, _))
.WillOnce(DoAll(VerifyOn(task_env_.GetMainThreadTaskRunner()),
- RunClosure(base::Bind(output_cb, nullptr)),
+ RunOnceClosure(base::BindOnce(output_cb, nullptr)),
RunOnceCallback<1>(DecodeStatus::OK)));
offloading_decoder_->Decode(DecoderBuffer::CreateEOSBuffer(),
ExpectDecodeCB(DecodeStatus::OK));
@@ -176,7 +176,7 @@ class OffloadingVideoDecoderTest : public testing::Test {
ExpectDecodeCB(DecodeStatus::OK));
EXPECT_CALL(*decoder_, Decode_(_, _))
.WillOnce(DoAll(VerifyNotOn(task_env_.GetMainThreadTaskRunner()),
- RunClosure(base::Bind(output_cb, nullptr)),
+ RunOnceClosure(base::BindOnce(output_cb, nullptr)),
RunOnceCallback<1>(DecodeStatus::OK)));
task_env_.RunUntilIdle();
@@ -242,8 +242,8 @@ TEST_F(OffloadingVideoDecoderTest, OffloadingAfterNoOffloading) {
VideoDecoder::OutputCB output_cb;
offloading_decoder_->Initialize(
TestVideoConfig::Normal(kCodecVP9), false, nullptr, ExpectInitCB(true),
- base::Bind(&OffloadingVideoDecoderTest::OutputDone,
- base::Unretained(this)),
+ base::BindRepeating(&OffloadingVideoDecoderTest::OutputDone,
+ base::Unretained(this)),
base::NullCallback());
EXPECT_CALL(*decoder_, Detach())
.WillOnce(VerifyNotOn(task_env_.GetMainThreadTaskRunner()));
diff --git a/chromium/media/filters/pipeline_controller_unittest.cc b/chromium/media/filters/pipeline_controller_unittest.cc
index ffc509be02d..0bced42367d 100644
--- a/chromium/media/filters/pipeline_controller_unittest.cc
+++ b/chromium/media/filters/pipeline_controller_unittest.cc
@@ -7,7 +7,7 @@
#include <memory>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "base/notreached.h"
diff --git a/chromium/media/filters/source_buffer_state_unittest.cc b/chromium/media/filters/source_buffer_state_unittest.cc
index d5eb3811d37..f0fd900f1fe 100644
--- a/chromium/media/filters/source_buffer_state_unittest.cc
+++ b/chromium/media/filters/source_buffer_state_unittest.cc
@@ -84,16 +84,17 @@ class SourceBufferStateTest : public ::testing::Test {
auto ignore_text_track, auto encrypted_media_init_data_cb,
auto new_segment_cb, auto end_of_segment_cb,
auto media_log) { new_config_cb_ = config_cb; });
- sbs->Init(base::BindOnce(&SourceBufferStateTest::SourceInitDone,
- base::Unretained(this)),
- expected_codecs,
- base::BindRepeating(
- &SourceBufferStateTest::StreamParserEncryptedInitData,
- base::Unretained(this)),
- base::Bind(&SourceBufferStateTest::StreamParserNewTextTrack,
- base::Unretained(this)));
-
- sbs->SetTracksWatcher(base::Bind(
+ sbs->Init(
+ base::BindOnce(&SourceBufferStateTest::SourceInitDone,
+ base::Unretained(this)),
+ expected_codecs,
+ base::BindRepeating(
+ &SourceBufferStateTest::StreamParserEncryptedInitData,
+ base::Unretained(this)),
+ base::BindRepeating(&SourceBufferStateTest::StreamParserNewTextTrack,
+ base::Unretained(this)));
+
+ sbs->SetTracksWatcher(base::BindRepeating(
&SourceBufferStateTest::OnMediaTracksUpdated, base::Unretained(this)));
// These tests are not expected to issue any parse warnings.
diff --git a/chromium/media/filters/source_buffer_stream.cc b/chromium/media/filters/source_buffer_stream.cc
index 53a9012e7ff..488f9e2a62b 100644
--- a/chromium/media/filters/source_buffer_stream.cc
+++ b/chromium/media/filters/source_buffer_stream.cc
@@ -165,9 +165,10 @@ SourceBufferStream::SourceBufferStream(const AudioDecoderConfig& audio_config,
highest_output_buffer_timestamp_(kNoTimestamp),
max_interbuffer_distance_(
base::TimeDelta::FromMilliseconds(kMinimumInterbufferDistanceInMs)),
- memory_limit_(GetDemuxerStreamAudioMemoryLimit()) {
+ memory_limit_(GetDemuxerStreamAudioMemoryLimit(&audio_config)) {
DCHECK(audio_config.IsValidConfig());
audio_configs_.push_back(audio_config);
+ DVLOG(2) << __func__ << ": audio_buffer_size= " << memory_limit_;
}
SourceBufferStream::SourceBufferStream(const VideoDecoderConfig& video_config,
@@ -179,9 +180,12 @@ SourceBufferStream::SourceBufferStream(const VideoDecoderConfig& video_config,
highest_output_buffer_timestamp_(kNoTimestamp),
max_interbuffer_distance_(
base::TimeDelta::FromMilliseconds(kMinimumInterbufferDistanceInMs)),
- memory_limit_(GetDemuxerStreamVideoMemoryLimit()) {
+ memory_limit_(
+ GetDemuxerStreamVideoMemoryLimit(Demuxer::DemuxerTypes::kChunkDemuxer,
+ &video_config)) {
DCHECK(video_config.IsValidConfig());
video_configs_.push_back(video_config);
+ DVLOG(2) << __func__ << ": video_buffer_size= " << memory_limit_;
}
SourceBufferStream::SourceBufferStream(const TextTrackConfig& text_config,
@@ -194,7 +198,8 @@ SourceBufferStream::SourceBufferStream(const TextTrackConfig& text_config,
highest_output_buffer_timestamp_(kNoTimestamp),
max_interbuffer_distance_(
base::TimeDelta::FromMilliseconds(kMinimumInterbufferDistanceInMs)),
- memory_limit_(GetDemuxerStreamAudioMemoryLimit()) {}
+ memory_limit_(
+ GetDemuxerStreamAudioMemoryLimit(nullptr /*audio_config*/)) {}
SourceBufferStream::~SourceBufferStream() = default;
diff --git a/chromium/media/filters/source_buffer_stream.h b/chromium/media/filters/source_buffer_stream.h
index fa70e4882d9..718ae0cb84c 100644
--- a/chromium/media/filters/source_buffer_stream.h
+++ b/chromium/media/filters/source_buffer_stream.h
@@ -484,6 +484,11 @@ class MEDIA_EXPORT SourceBufferStream {
base::MemoryPressureListener::MEMORY_PRESSURE_LEVEL_NONE;
// The maximum amount of data in bytes the stream will keep in memory.
+ // |memory_limit_| is initialized based on the audio/video configuration in
+ // the constructor, but either user-setting of |memory_limit_| or
+ // memory-pressure-based adjustment to determine effective limit in the
+ // eviction heuristic can cause the result to vary from the value set in
+ // constructor.
size_t memory_limit_;
// Indicates that a kConfigChanged status has been reported by GetNextBuffer()
diff --git a/chromium/media/filters/source_buffer_stream_unittest.cc b/chromium/media/filters/source_buffer_stream_unittest.cc
index a7b81d902de..83effce41a9 100644
--- a/chromium/media/filters/source_buffer_stream_unittest.cc
+++ b/chromium/media/filters/source_buffer_stream_unittest.cc
@@ -10,7 +10,7 @@
#include <vector>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/macros.h"
#include "base/numerics/safe_conversions.h"
diff --git a/chromium/media/filters/stream_parser_factory.cc b/chromium/media/filters/stream_parser_factory.cc
index 4598b8ede90..126c1814a16 100644
--- a/chromium/media/filters/stream_parser_factory.cc
+++ b/chromium/media/filters/stream_parser_factory.cc
@@ -373,7 +373,7 @@ static StreamParser* BuildMP2TParser(const std::vector<std::string>& codecs,
}
}
- return new media::mp2t::Mp2tStreamParser(has_sbr);
+ return new media::mp2t::Mp2tStreamParser(codecs, has_sbr);
}
#endif // ENABLE_MSE_MPEG2TS_STREAM_PARSER
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
diff --git a/chromium/media/filters/video_renderer_algorithm.cc b/chromium/media/filters/video_renderer_algorithm.cc
index 08a95cbb2bf..0cef397855b 100644
--- a/chromium/media/filters/video_renderer_algorithm.cc
+++ b/chromium/media/filters/video_renderer_algorithm.cc
@@ -243,42 +243,37 @@ size_t VideoRendererAlgorithm::RemoveExpiredFrames(base::TimeTicks deadline) {
// Even though we may not be able to remove anything due to having only one
// frame, correct any estimates which may have been set during EnqueueFrame().
UpdateFrameStatistics();
+ UpdateEffectiveFramesQueued();
// We always leave at least one frame in the queue, so if there's only one
// frame there's nothing we can expire.
- if (frame_queue_.size() == 1) {
- UpdateEffectiveFramesQueued();
+ if (frame_queue_.size() == 1)
return 0;
- }
DCHECK_GT(average_frame_duration_, base::TimeDelta());
- // Finds and removes all frames which are too old to be used; I.e., the end of
- // their render interval is further than |max_acceptable_drift_| from the
- // given |deadline|. We also always expire anything inserted before the last
- // rendered frame.
+ // Expire everything before the first good frame or everything but the last
+ // frame if there is no good frame.
+ const int first_good_frame = FindFirstGoodFrame();
+ const size_t frames_to_expire =
+ first_good_frame < 0 ? frame_queue_.size() - 1 : first_good_frame;
+ if (!frames_to_expire)
+ return 0;
+
size_t frames_dropped_without_rendering = 0;
- size_t frames_to_expire = 0;
- const base::TimeTicks minimum_start_time =
- deadline - max_acceptable_drift_ - average_frame_duration_;
- for (; frames_to_expire < frame_queue_.size() - 1; ++frames_to_expire) {
- const ReadyFrame& frame = frame_queue_[frames_to_expire];
- if (frame.start_time >= minimum_start_time)
- break;
- if (frame.render_count == frame.drop_count)
- ++frames_dropped_without_rendering;
- }
+ for (size_t i = 0; i < frames_to_expire; ++i) {
+ const ReadyFrame& frame = frame_queue_[i];
- if (!frames_to_expire) {
- UpdateEffectiveFramesQueued();
- return 0;
+ // Don't count frames that are intentionally dropped by cadence as dropped.
+ if (frame.render_count == frame.drop_count &&
+ (!cadence_estimator_.has_cadence() || frame.ideal_render_count)) {
+ ++frames_dropped_without_rendering;
+ }
}
cadence_frame_counter_ += frames_to_expire;
frame_queue_.erase(frame_queue_.begin(),
frame_queue_.begin() + frames_to_expire);
-
- UpdateEffectiveFramesQueued();
return frames_dropped_without_rendering;
}
@@ -759,33 +754,38 @@ void VideoRendererAlgorithm::UpdateEffectiveFramesQueued() {
std::max(min_frames_queued, CountEffectiveFramesQueued());
}
-size_t VideoRendererAlgorithm::CountEffectiveFramesQueued() const {
- // If we don't have cadence, subtract off any frames which are before
- // the last rendered frame or are past their expected rendering time.
- if (!cadence_estimator_.has_cadence()) {
- size_t expired_frames = 0;
- for (; expired_frames < frame_queue_.size(); ++expired_frames) {
- const ReadyFrame& frame = frame_queue_[expired_frames];
- if (frame.end_time.is_null() || frame.end_time > last_deadline_max_)
- break;
+int VideoRendererAlgorithm::FindFirstGoodFrame() const {
+ const auto minimum_start_time =
+ cadence_estimator_.has_cadence()
+ ? last_deadline_max_ - max_acceptable_drift_
+ : last_deadline_max_;
+
+ size_t start_index = 0;
+ for (; start_index < frame_queue_.size(); ++start_index) {
+ const ReadyFrame& frame = frame_queue_[start_index];
+ if ((!cadence_estimator_.has_cadence() ||
+ frame.render_count < frame.ideal_render_count) &&
+ (frame.end_time.is_null() || frame.end_time > minimum_start_time)) {
+ break;
}
- return frame_queue_.size() - expired_frames;
}
- // Find the first usable frame to start counting from.
- const int start_index = FindBestFrameByCadence();
+ return start_index == frame_queue_.size() ? -1 : start_index;
+}
+
+size_t VideoRendererAlgorithm::CountEffectiveFramesQueued() const {
+ const int start_index = FindFirstGoodFrame();
if (start_index < 0)
return 0;
- const base::TimeTicks minimum_start_time =
- last_deadline_max_ - max_acceptable_drift_;
+ if (!cadence_estimator_.has_cadence())
+ return frame_queue_.size() - start_index;
+
+ // We should ignore zero cadence frames in our effective frame count.
size_t renderable_frame_count = 0;
for (size_t i = start_index; i < frame_queue_.size(); ++i) {
- const ReadyFrame& frame = frame_queue_[i];
- if (frame.render_count < frame.ideal_render_count &&
- (frame.end_time.is_null() || frame.end_time > minimum_start_time)) {
+ if (frame_queue_[i].ideal_render_count)
++renderable_frame_count;
- }
}
return renderable_frame_count;
}
diff --git a/chromium/media/filters/video_renderer_algorithm.h b/chromium/media/filters/video_renderer_algorithm.h
index a5e74f447e7..573c397fc4f 100644
--- a/chromium/media/filters/video_renderer_algorithm.h
+++ b/chromium/media/filters/video_renderer_algorithm.h
@@ -267,6 +267,9 @@ class MEDIA_EXPORT VideoRendererAlgorithm {
base::TimeDelta CalculateAbsoluteDriftForFrame(base::TimeTicks deadline_min,
int frame_index) const;
+ // Returns the index of the first usable frame or -1 if no usable frames.
+ int FindFirstGoodFrame() const;
+
// Updates |effective_frames_queued_| which is typically called far more
// frequently (~4x) than the value changes. This must be called whenever
// frames are added or removed from the queue or when any property of a
diff --git a/chromium/media/filters/video_renderer_algorithm_unittest.cc b/chromium/media/filters/video_renderer_algorithm_unittest.cc
index 6707f4eb751..b3e7b5b9d99 100644
--- a/chromium/media/filters/video_renderer_algorithm_unittest.cc
+++ b/chromium/media/filters/video_renderer_algorithm_unittest.cc
@@ -9,7 +9,7 @@
#include <tuple>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/memory/ref_counted.h"
#include "base/stl_util.h"
#include "base/strings/stringprintf.h"
@@ -73,8 +73,8 @@ class VideoRendererAlgorithmTest : public testing::Test {
public:
VideoRendererAlgorithmTest()
: tick_clock_(new base::SimpleTestTickClock()),
- algorithm_(base::Bind(&WallClockTimeSource::GetWallClockTimes,
- base::Unretained(&time_source_)),
+ algorithm_(base::BindRepeating(&WallClockTimeSource::GetWallClockTimes,
+ base::Unretained(&time_source_)),
&media_log_) {
// Always start the TickClock at a non-zero value since null values have
// special connotations.
@@ -1245,12 +1245,12 @@ TEST_F(VideoRendererAlgorithmTest, RemoveExpiredFrames) {
tg.step(2);
// Two frames are removed, one displayed frame (which should not be counted as
// dropped) and one undisplayed one.
- ASSERT_EQ(1u, algorithm_.RemoveExpiredFrames(tg.current()));
+ ASSERT_EQ(2u, algorithm_.RemoveExpiredFrames(tg.current()));
// Since we just removed the last rendered frame, OnLastFrameDropped() should
// be ignored.
algorithm_.OnLastFrameDropped();
frame = RenderAndStep(&tg, &frames_dropped);
- EXPECT_EQ(1u, frames_dropped);
+ EXPECT_EQ(0u, frames_dropped);
EXPECT_EQ(2u, frames_queued());
EXPECT_EQ(1u, EffectiveFramesQueued());
ASSERT_TRUE(frame);
@@ -1336,6 +1336,45 @@ TEST_F(VideoRendererAlgorithmTest, RemoveExpiredFramesCadence) {
EXPECT_EQ(0u, EffectiveFramesQueued());
}
+TEST_F(VideoRendererAlgorithmTest, RemoveExpiredFramesFractionalCadence) {
+ TickGenerator frame_tg(base::TimeTicks(), 60);
+ TickGenerator display_tg(tick_clock_->NowTicks(), 30);
+ disable_cadence_hysteresis();
+
+ constexpr size_t kFrameCount = 5;
+ for (size_t i = 0; i < kFrameCount; ++i)
+ algorithm_.EnqueueFrame(CreateFrame(frame_tg.interval(i)));
+
+ ASSERT_EQ(0u, algorithm_.RemoveExpiredFrames(display_tg.current()));
+ EXPECT_EQ(kFrameCount, EffectiveFramesQueued());
+
+ time_source_.StartTicking();
+
+ size_t frames_dropped = 0;
+ scoped_refptr<VideoFrame> frame = RenderAndStep(&display_tg, &frames_dropped);
+ ASSERT_TRUE(frame);
+ EXPECT_EQ(frame_tg.interval(0), frame->timestamp());
+ EXPECT_EQ(0u, frames_dropped);
+ ASSERT_TRUE(is_using_cadence());
+ EXPECT_EQ((kFrameCount - 1) / 2, EffectiveFramesQueued());
+ EXPECT_EQ(kFrameCount, frames_queued());
+
+ // Advance expiry enough that some frames are removed, but one remains and is
+ // still counted as effective. 1 undisplayed and 1 displayed frame will be
+ // expired.
+ ASSERT_EQ(1u, algorithm_.RemoveExpiredFrames(display_tg.current() +
+ display_tg.interval(1) +
+ max_acceptable_drift() * 1.25));
+ EXPECT_EQ(1u, frames_queued());
+ EXPECT_EQ(1u, EffectiveFramesQueued());
+
+ // Advancing expiry once more should mark the frame as ineffective.
+ display_tg.step(3);
+ ASSERT_EQ(0u, algorithm_.RemoveExpiredFrames(display_tg.current()));
+ EXPECT_EQ(1u, frames_queued());
+ EXPECT_EQ(0u, EffectiveFramesQueued());
+}
+
class VideoRendererAlgorithmCadenceTest
: public VideoRendererAlgorithmTest,
public ::testing::WithParamInterface<::testing::tuple<double, double>> {};
diff --git a/chromium/media/filters/vp9_parser.cc b/chromium/media/filters/vp9_parser.cc
index 6c205c62fe8..69d3c684676 100644
--- a/chromium/media/filters/vp9_parser.cc
+++ b/chromium/media/filters/vp9_parser.cc
@@ -14,7 +14,7 @@
#include <algorithm>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/containers/circular_deque.h"
#include "base/logging.h"
#include "base/numerics/ranges.h"
diff --git a/chromium/media/filters/vpx_video_decoder_fuzzertest.cc b/chromium/media/filters/vpx_video_decoder_fuzzertest.cc
index 577fa38e97d..85cc43f6adc 100644
--- a/chromium/media/filters/vpx_video_decoder_fuzzertest.cc
+++ b/chromium/media/filters/vpx_video_decoder_fuzzertest.cc
@@ -9,7 +9,7 @@
#include "base/at_exit.h"
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/command_line.h"
#include "base/logging.h"
#include "base/run_loop.h"
@@ -104,7 +104,7 @@ extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
decoder.Initialize(
config, true /* low_delay */, nullptr /* cdm_context */,
base::BindOnce(&OnInitDone, run_loop.QuitClosure(), &success),
- base::Bind(&OnOutputComplete), base::NullCallback());
+ base::BindRepeating(&OnOutputComplete), base::NullCallback());
run_loop.Run();
if (!success)
return 0;
diff --git a/chromium/media/filters/vpx_video_decoder_unittest.cc b/chromium/media/filters/vpx_video_decoder_unittest.cc
index 379f2f854e7..be6824bf297 100644
--- a/chromium/media/filters/vpx_video_decoder_unittest.cc
+++ b/chromium/media/filters/vpx_video_decoder_unittest.cc
@@ -6,7 +6,7 @@
#include <vector>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/run_loop.h"
#include "base/test/task_environment.h"
#include "build/build_config.h"
@@ -38,15 +38,15 @@ class VpxVideoDecoderTest : public testing::Test {
void InitializeWithConfigWithResult(const VideoDecoderConfig& config,
bool success) {
- decoder_->Initialize(
- config, false, nullptr,
- base::BindOnce(
- [](bool success, Status status) {
- EXPECT_EQ(status.is_ok(), success);
- },
- success),
- base::Bind(&VpxVideoDecoderTest::FrameReady, base::Unretained(this)),
- base::NullCallback());
+ decoder_->Initialize(config, false, nullptr,
+ base::BindOnce(
+ [](bool success, Status status) {
+ EXPECT_EQ(status.is_ok(), success);
+ },
+ success),
+ base::BindRepeating(&VpxVideoDecoderTest::FrameReady,
+ base::Unretained(this)),
+ base::NullCallback());
base::RunLoop().RunUntilIdle();
}
diff --git a/chromium/media/formats/mp2t/es_parser_mpeg1audio.cc b/chromium/media/formats/mp2t/es_parser_mpeg1audio.cc
index 0a4cb156a6e..4cc67f92956 100644
--- a/chromium/media/formats/mp2t/es_parser_mpeg1audio.cc
+++ b/chromium/media/formats/mp2t/es_parser_mpeg1audio.cc
@@ -120,8 +120,10 @@ bool EsParserMpeg1Audio::LookForMpeg1AudioFrame(
int remaining_size = es_size - offset;
DCHECK_GE(remaining_size, MPEG1AudioStreamParser::kHeaderSize);
MPEG1AudioStreamParser::Header header;
- if (!MPEG1AudioStreamParser::ParseHeader(media_log_, cur_buf, &header))
+ if (!MPEG1AudioStreamParser::ParseHeader(
+ media_log_, &mp3_parse_error_limit_, cur_buf, &header)) {
continue;
+ }
if (remaining_size < header.frame_size) {
// Not a full frame: will resume when we have more data.
@@ -160,8 +162,8 @@ bool EsParserMpeg1Audio::LookForMpeg1AudioFrame(
bool EsParserMpeg1Audio::UpdateAudioConfiguration(
const uint8_t* mpeg1audio_header) {
MPEG1AudioStreamParser::Header header;
- if (!MPEG1AudioStreamParser::ParseHeader(media_log_, mpeg1audio_header,
- &header)) {
+ if (!MPEG1AudioStreamParser::ParseHeader(media_log_, &mp3_parse_error_limit_,
+ mpeg1audio_header, &header)) {
return false;
}
diff --git a/chromium/media/formats/mp2t/es_parser_mpeg1audio.h b/chromium/media/formats/mp2t/es_parser_mpeg1audio.h
index 6a3d9c5db10..c367a21ddf6 100644
--- a/chromium/media/formats/mp2t/es_parser_mpeg1audio.h
+++ b/chromium/media/formats/mp2t/es_parser_mpeg1audio.h
@@ -68,6 +68,8 @@ class MEDIA_EXPORT EsParserMpeg1Audio : public EsParser {
MediaLog* media_log_;
+ size_t mp3_parse_error_limit_ = 0;
+
// Callbacks:
// - to signal a new audio configuration,
// - to send ES buffers.
diff --git a/chromium/media/formats/mp2t/mp2t_stream_parser.cc b/chromium/media/formats/mp2t/mp2t_stream_parser.cc
index 8b0f2f96e2e..b118f05ae6a 100644
--- a/chromium/media/formats/mp2t/mp2t_stream_parser.cc
+++ b/chromium/media/formats/mp2t/mp2t_stream_parser.cc
@@ -192,17 +192,56 @@ Mp2tStreamParser::BufferQueueWithConfig::BufferQueueWithConfig(
Mp2tStreamParser::BufferQueueWithConfig::~BufferQueueWithConfig() {
}
-Mp2tStreamParser::Mp2tStreamParser(bool sbr_in_mimetype)
- : sbr_in_mimetype_(sbr_in_mimetype),
- selected_audio_pid_(-1),
- selected_video_pid_(-1),
- is_initialized_(false),
- segment_started_(false) {
-}
+Mp2tStreamParser::Mp2tStreamParser(
+ const std::vector<std::string>& allowed_codecs,
+ bool sbr_in_mimetype)
+ : sbr_in_mimetype_(sbr_in_mimetype),
+ selected_audio_pid_(-1),
+ selected_video_pid_(-1),
+ is_initialized_(false),
+ segment_started_(false) {
+ for (const std::string& codec_name : allowed_codecs) {
+ switch (StringToVideoCodec(codec_name)) {
+ case VideoCodec::kCodecH264:
+ allowed_stream_types_.insert(kStreamTypeAVC);
+#if BUILDFLAG(ENABLE_HLS_SAMPLE_AES)
+ allowed_stream_types_.insert(kStreamTypeAVCWithSampleAES);
+#endif
+ continue;
+ case VideoCodec::kUnknownVideoCodec:
+ // Probably audio.
+ break;
+ default:
+ DLOG(WARNING) << "Unsupported video codec " << codec_name;
+ continue;
+ }
-Mp2tStreamParser::~Mp2tStreamParser() {
+ switch (StringToAudioCodec(codec_name)) {
+ case AudioCodec::kCodecAAC:
+ allowed_stream_types_.insert(kStreamTypeAAC);
+#if BUILDFLAG(ENABLE_HLS_SAMPLE_AES)
+ allowed_stream_types_.insert(kStreamTypeAACWithSampleAES);
+#endif
+ continue;
+ case AudioCodec::kCodecMP3:
+ allowed_stream_types_.insert(kStreamTypeMpeg1Audio);
+ allowed_stream_types_.insert(kStreamTypeMpeg2Audio);
+ continue;
+ case AudioCodec::kUnknownAudioCodec:
+ // Neither audio, nor video.
+ break;
+ default:
+ DLOG(WARNING) << "Unsupported audio codec " << codec_name;
+ continue;
+ }
+
+ // Failed to parse as an audio or a video codec.
+ DLOG(WARNING) << "Unknown codec " << codec_name;
+ }
}
+Mp2tStreamParser::~Mp2tStreamParser() {}
+
void Mp2tStreamParser::Init(
InitCB init_cb,
const NewConfigCB& config_cb,
@@ -481,6 +520,15 @@ void Mp2tStreamParser::RegisterPes(int pes_pid,
if (it != pids_.end())
return;
+ // Ignore stream types not specified in the creation of the SourceBuffer.
+ // See https://crbug.com/1169393.
+ // TODO(https://crbug.com/535738): Remove this hack when MSE stream/mime type
+ // checks have been relaxed.
+ if (allowed_stream_types_.find(stream_type) == allowed_stream_types_.end()) {
+ DVLOG(1) << "Stream type not allowed for this parser: " << stream_type;
+ return;
+ }
+
// Create a stream parser corresponding to the stream type.
bool is_audio = true;
std::unique_ptr<EsParser> es_parser;
diff --git a/chromium/media/formats/mp2t/mp2t_stream_parser.h b/chromium/media/formats/mp2t/mp2t_stream_parser.h
index f0e8356d4ed..db5be654035 100644
--- a/chromium/media/formats/mp2t/mp2t_stream_parser.h
+++ b/chromium/media/formats/mp2t/mp2t_stream_parser.h
@@ -34,7 +34,8 @@ class PidState;
class MEDIA_EXPORT Mp2tStreamParser : public StreamParser {
public:
- explicit Mp2tStreamParser(bool sbr_in_mimetype);
+ explicit Mp2tStreamParser(const std::vector<std::string>& allowed_codecs,
+ bool sbr_in_mimetype);
~Mp2tStreamParser() override;
// StreamParser implementation.
@@ -144,6 +145,9 @@ class MEDIA_EXPORT Mp2tStreamParser : public StreamParser {
EndMediaSegmentCB end_of_segment_cb_;
MediaLog* media_log_;
+ // List of allowed stream types for this parser.
+ std::set<int> allowed_stream_types_;
+
// True when AAC SBR extension is signalled in the mimetype
// (mp4a.40.5 in the codecs parameter).
bool sbr_in_mimetype_;
diff --git a/chromium/media/formats/mp2t/mp2t_stream_parser_unittest.cc b/chromium/media/formats/mp2t/mp2t_stream_parser_unittest.cc
index 3119e4f7bca..f144cfcde86 100644
--- a/chromium/media/formats/mp2t/mp2t_stream_parser_unittest.cc
+++ b/chromium/media/formats/mp2t/mp2t_stream_parser_unittest.cc
@@ -12,7 +12,7 @@
#include <string>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/memory/ref_counted.h"
#include "base/strings/string_number_conversions.h"
@@ -164,6 +164,7 @@ class Mp2tStreamParserTest : public testing::Test {
config_count_(0),
audio_frame_count_(0),
video_frame_count_(0),
+ has_audio_(true),
has_video_(true),
audio_min_dts_(kNoDecodeTimestamp()),
audio_max_dts_(kNoDecodeTimestamp()),
@@ -175,7 +176,7 @@ class Mp2tStreamParserTest : public testing::Test {
current_video_config_(),
capture_buffers(false) {
bool has_sbr = false;
- parser_.reset(new Mp2tStreamParser(has_sbr));
+ parser_.reset(new Mp2tStreamParser({"avc1.64001e", "mp3", "aac"}, has_sbr));
}
protected:
@@ -185,6 +186,7 @@ class Mp2tStreamParserTest : public testing::Test {
int config_count_;
int audio_frame_count_;
int video_frame_count_;
+ bool has_audio_;
bool has_video_;
DecodeTimestamp audio_min_dts_;
DecodeTimestamp audio_max_dts_;
@@ -256,7 +258,7 @@ class Mp2tStreamParserTest : public testing::Test {
EXPECT_TRUE(false);
}
}
- EXPECT_TRUE(found_audio_track);
+ EXPECT_EQ(has_audio_, found_audio_track);
EXPECT_EQ(has_video_, found_video_track);
config_count_++;
return true;
@@ -384,7 +386,9 @@ TEST_F(Mp2tStreamParserTest, UnalignedAppend17) {
InitializeParser();
ParseMpeg2TsFile("bear-1280x720.ts", 17);
parser_->Flush();
+ EXPECT_EQ(audio_frame_count_, 119);
EXPECT_EQ(video_frame_count_, 82);
+
// This stream has no mid-stream configuration change.
EXPECT_EQ(config_count_, 1);
EXPECT_EQ(segment_count_, 1);
@@ -395,7 +399,9 @@ TEST_F(Mp2tStreamParserTest, UnalignedAppend512) {
InitializeParser();
ParseMpeg2TsFile("bear-1280x720.ts", 512);
parser_->Flush();
+ EXPECT_EQ(audio_frame_count_, 119);
EXPECT_EQ(video_frame_count_, 82);
+
// This stream has no mid-stream configuration change.
EXPECT_EQ(config_count_, 1);
EXPECT_EQ(segment_count_, 1);
@@ -405,6 +411,7 @@ TEST_F(Mp2tStreamParserTest, AppendAfterFlush512) {
InitializeParser();
ParseMpeg2TsFile("bear-1280x720.ts", 512);
parser_->Flush();
+ EXPECT_EQ(audio_frame_count_, 119);
EXPECT_EQ(video_frame_count_, 82);
EXPECT_EQ(config_count_, 1);
EXPECT_EQ(segment_count_, 1);
@@ -412,6 +419,7 @@ TEST_F(Mp2tStreamParserTest, AppendAfterFlush512) {
ResetStats();
ParseMpeg2TsFile("bear-1280x720.ts", 512);
parser_->Flush();
+ EXPECT_EQ(audio_frame_count_, 119);
EXPECT_EQ(video_frame_count_, 82);
EXPECT_EQ(config_count_, 1);
EXPECT_EQ(segment_count_, 1);
@@ -462,6 +470,22 @@ TEST_F(Mp2tStreamParserTest, AudioInPrivateStream1) {
EXPECT_EQ(segment_count_, 1);
}
+// Checks the allowed_codecs argument filters streams using disallowed codecs.
+TEST_F(Mp2tStreamParserTest, DisableAudioStream) {
+ // Reset the parser with no audio codec allowed.
+ parser_.reset(new Mp2tStreamParser({"avc1.64001e"}, true));
+ has_audio_ = false;
+
+ InitializeParser();
+ ParseMpeg2TsFile("bear-1280x720.ts", 512);
+ parser_->Flush();
+ EXPECT_EQ(audio_frame_count_, 0);
+ EXPECT_EQ(video_frame_count_, 82);
+
+ // There should be a single configuration, with no audio.
+ EXPECT_EQ(config_count_, 1);
+}
+
#if BUILDFLAG(ENABLE_HLS_SAMPLE_AES)
TEST_F(Mp2tStreamParserTest, HLSSampleAES) {
std::vector<std::string> decrypted_video_buffers;
@@ -487,7 +511,7 @@ TEST_F(Mp2tStreamParserTest, HLSSampleAES) {
decrypted_audio_buffers.push_back(decrypted_audio_buffer);
}
- parser_.reset(new Mp2tStreamParser(false));
+ parser_.reset(new Mp2tStreamParser({"avc1.64001e", "mp3", "aac"}, false));
ResetStats();
InitializeParser();
video_buffer_capture_.clear();
diff --git a/chromium/media/formats/mp4/box_definitions.cc b/chromium/media/formats/mp4/box_definitions.cc
index 50a981fe935..6bc50c65f7a 100644
--- a/chromium/media/formats/mp4/box_definitions.cc
+++ b/chromium/media/formats/mp4/box_definitions.cc
@@ -1003,6 +1003,8 @@ bool SMPTE2086MasteringDisplayMetadataBox::Parse(BoxReader* reader) {
constexpr float kLuminanceMaxUnit = 1 << 8;
constexpr float kLuminanceMinUnit = 1 << 14;
+ RCHECK(reader->ReadFullBoxHeader());
+
// Technically the color coordinates may be in any order. The spec recommends
// RGB and it is assumed that the color coordinates are in such order.
RCHECK(
@@ -1034,6 +1036,11 @@ bool ContentLightLevelInformation::Parse(BoxReader* reader) {
reader->Read2(&max_pic_average_light_level);
}
+bool ContentLightLevel::Parse(BoxReader* reader) {
+ RCHECK(reader->ReadFullBoxHeader());
+ return ContentLightLevelInformation::Parse(reader);
+}
+
FourCC ContentLightLevel::BoxType() const {
return FOURCC_COLL;
}
diff --git a/chromium/media/formats/mp4/box_definitions.h b/chromium/media/formats/mp4/box_definitions.h
index e625ae59724..8cc2c8379c7 100644
--- a/chromium/media/formats/mp4/box_definitions.h
+++ b/chromium/media/formats/mp4/box_definitions.h
@@ -310,6 +310,7 @@ struct MEDIA_EXPORT ContentLightLevelInformation : Box {
// Same as ContentLightLevelInformation, but with a different fourcc.
struct MEDIA_EXPORT ContentLightLevel : ContentLightLevelInformation {
+ bool Parse(BoxReader* reader) override;
FourCC BoxType() const override;
};
diff --git a/chromium/media/formats/mp4/h264_annex_b_to_avc_bitstream_converter.cc b/chromium/media/formats/mp4/h264_annex_b_to_avc_bitstream_converter.cc
index 13b8a0f8aa0..f3c4e8ef0e8 100644
--- a/chromium/media/formats/mp4/h264_annex_b_to_avc_bitstream_converter.cc
+++ b/chromium/media/formats/mp4/h264_annex_b_to_avc_bitstream_converter.cc
@@ -58,6 +58,9 @@ Status H264AnnexBToAvcBitstreamConverter::ConvertChunk(
"Failed to parse H.264 stream");
switch (nalu.nal_unit_type) {
+ case H264NALU::kAUD: {
+ break;
+ }
case H264NALU::kSPS: {
int sps_id = -1;
result = parser_.ParseSPS(&sps_id);
@@ -201,6 +204,6 @@ Status H264AnnexBToAvcBitstreamConverter::ConvertChunk(
*config_changed_out = config_changed;
return Status();
-} // namespace media
+}
} // namespace media \ No newline at end of file
diff --git a/chromium/media/formats/mp4/mp4_stream_parser.cc b/chromium/media/formats/mp4/mp4_stream_parser.cc
index 8899fda6d0a..8c8a0347d99 100644
--- a/chromium/media/formats/mp4/mp4_stream_parser.cc
+++ b/chromium/media/formats/mp4/mp4_stream_parser.cc
@@ -60,17 +60,17 @@ EncryptionScheme GetEncryptionScheme(const ProtectionSchemeInfo& sinf) {
return EncryptionScheme::kUnencrypted;
}
-gl::MasteringMetadata ConvertMdcvToMasteringMetadata(
+gfx::MasteringMetadata ConvertMdcvToMasteringMetadata(
const MasteringDisplayColorVolume& mdcv) {
- gl::MasteringMetadata mastering_metadata;
+ gfx::MasteringMetadata mastering_metadata;
- mastering_metadata.primary_r = gl::MasteringMetadata::Chromaticity(
+ mastering_metadata.primary_r = gfx::MasteringMetadata::Chromaticity(
mdcv.display_primaries_rx, mdcv.display_primaries_ry);
- mastering_metadata.primary_g = gl::MasteringMetadata::Chromaticity(
+ mastering_metadata.primary_g = gfx::MasteringMetadata::Chromaticity(
mdcv.display_primaries_gx, mdcv.display_primaries_gy);
- mastering_metadata.primary_b = gl::MasteringMetadata::Chromaticity(
+ mastering_metadata.primary_b = gfx::MasteringMetadata::Chromaticity(
mdcv.display_primaries_bx, mdcv.display_primaries_by);
- mastering_metadata.white_point = gl::MasteringMetadata::Chromaticity(
+ mastering_metadata.white_point = gfx::MasteringMetadata::Chromaticity(
mdcv.white_point_x, mdcv.white_point_y);
mastering_metadata.luminance_max = mdcv.max_display_mastering_luminance;
@@ -544,7 +544,7 @@ bool MP4StreamParser::ParseMoov(BoxReader* reader) {
if (entry.mastering_display_color_volume ||
entry.content_light_level_information) {
- gl::HDRMetadata hdr_metadata;
+ gfx::HDRMetadata hdr_metadata;
if (entry.mastering_display_color_volume) {
hdr_metadata.mastering_metadata = ConvertMdcvToMasteringMetadata(
*entry.mastering_display_color_volume);
diff --git a/chromium/media/formats/mp4/mp4_stream_parser_unittest.cc b/chromium/media/formats/mp4/mp4_stream_parser_unittest.cc
index cb479c40dc5..18ea82e2884 100644
--- a/chromium/media/formats/mp4/mp4_stream_parser_unittest.cc
+++ b/chromium/media/formats/mp4/mp4_stream_parser_unittest.cc
@@ -13,7 +13,7 @@
#include <tuple>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/memory/ref_counted.h"
#include "base/test/metrics/histogram_tester.h"
@@ -712,8 +712,29 @@ TEST_F(MP4StreamParserTest, Vp9) {
VideoColorSpace::MatrixID::BT2020_NCL,
gfx::ColorSpace::RangeID::LIMITED));
- // TODO(crbug.com/1123430): We need a test file that actually has HDR metadata
- // to test the SmDm and CoLL parsing.
+ ASSERT_TRUE(video_decoder_config_.hdr_metadata().has_value());
+
+ const auto& hdr_metadata = *video_decoder_config_.hdr_metadata();
+ EXPECT_EQ(hdr_metadata.max_content_light_level, 1000u);
+ EXPECT_EQ(hdr_metadata.max_frame_average_light_level, 640u);
+
+ const auto& mastering_metadata = hdr_metadata.mastering_metadata;
+
+ constexpr float kColorCoordinateUnit = 1 / 16.0f;
+ EXPECT_NEAR(mastering_metadata.primary_r.x(), 0.68, kColorCoordinateUnit);
+ EXPECT_NEAR(mastering_metadata.primary_r.y(), 0.31998, kColorCoordinateUnit);
+ EXPECT_NEAR(mastering_metadata.primary_g.x(), 0.26496, kColorCoordinateUnit);
+ EXPECT_NEAR(mastering_metadata.primary_g.y(), 0.68998, kColorCoordinateUnit);
+ EXPECT_NEAR(mastering_metadata.primary_b.x(), 0.15, kColorCoordinateUnit);
+ EXPECT_NEAR(mastering_metadata.primary_b.y(), 0.05998, kColorCoordinateUnit);
+ EXPECT_NEAR(mastering_metadata.white_point.x(), 0.314, kColorCoordinateUnit);
+ EXPECT_NEAR(mastering_metadata.white_point.y(), 0.351, kColorCoordinateUnit);
+
+ constexpr float kLuminanceMaxUnit = 1 / 8.0f;
+ EXPECT_NEAR(mastering_metadata.luminance_max, 1000.0f, kLuminanceMaxUnit);
+
+ constexpr float kLuminanceMinUnit = 1 / 14.0;
+ EXPECT_NEAR(mastering_metadata.luminance_min, 0.01f, kLuminanceMinUnit);
}
TEST_F(MP4StreamParserTest, FourCCToString) {
diff --git a/chromium/media/formats/mp4/track_run_iterator.cc b/chromium/media/formats/mp4/track_run_iterator.cc
index e8205d05185..c909c4a6d5a 100644
--- a/chromium/media/formats/mp4/track_run_iterator.cc
+++ b/chromium/media/formats/mp4/track_run_iterator.cc
@@ -15,6 +15,7 @@
#include "base/stl_util.h"
#include "build/chromecast_buildflags.h"
#include "media/base/decrypt_config.h"
+#include "media/base/demuxer.h"
#include "media/base/demuxer_memory_limit.h"
#include "media/base/encryption_pattern.h"
#include "media/base/encryption_scheme.h"
@@ -424,7 +425,8 @@ bool TrackRunIterator::Init(const MovieFragment& moof) {
// Avoid allocating insane sample counts for invalid media.
const size_t max_sample_count =
- GetDemuxerMemoryLimit() / sizeof(decltype(tri.samples)::value_type);
+ GetDemuxerMemoryLimit(Demuxer::DemuxerTypes::kChunkDemuxer) /
+ sizeof(decltype(tri.samples)::value_type);
RCHECK_MEDIA_LOGGED(
base::strict_cast<size_t>(trun.sample_count) <= max_sample_count,
media_log_, "Metadata overhead exceeds storage limit.");
diff --git a/chromium/media/formats/mpeg/adts_stream_parser.cc b/chromium/media/formats/mpeg/adts_stream_parser.cc
index c629ab63f9b..a47d2242dde 100644
--- a/chromium/media/formats/mpeg/adts_stream_parser.cc
+++ b/chromium/media/formats/mpeg/adts_stream_parser.cc
@@ -13,7 +13,7 @@
namespace media {
-static const uint32_t kADTSStartCodeMask = 0xfff00000;
+constexpr uint32_t kADTSStartCodeMask = 0xfff00000;
ADTSStreamParser::ADTSStreamParser()
: MPEGAudioStreamParserBase(kADTSStartCodeMask, kCodecAAC, 0) {}
@@ -27,7 +27,7 @@ int ADTSStreamParser::ParseFrameHeader(const uint8_t* data,
ChannelLayout* channel_layout,
int* sample_count,
bool* metadata_frame,
- std::vector<uint8_t>* extra_data) const {
+ std::vector<uint8_t>* extra_data) {
DCHECK(data);
DCHECK_GE(size, 0);
@@ -62,11 +62,8 @@ int ADTSStreamParser::ParseFrameHeader(const uint8_t* data,
return -1;
}
- DVLOG(2) << "Header data :" << std::hex
- << " sync 0x" << sync
- << " version 0x" << version
- << " layer 0x" << layer
- << " profile 0x" << profile
+ DVLOG(2) << "Header data :" << std::hex << " sync 0x" << sync << " version 0x"
+ << version << " layer 0x" << layer << " profile 0x" << profile
<< " sample_rate_index 0x" << sample_rate_index
<< " channel_layout_index 0x" << channel_layout_index;
@@ -75,7 +72,7 @@ int ADTSStreamParser::ParseFrameHeader(const uint8_t* data,
sample_rate_index >= kADTSFrequencyTableSize ||
channel_layout_index >= kADTSChannelLayoutTableSize) {
if (media_log()) {
- MEDIA_LOG(DEBUG, media_log())
+ LIMITED_MEDIA_LOG(DEBUG, media_log(), adts_parse_error_limit_, 5)
<< "Invalid header data :" << std::hex << " sync 0x" << sync
<< " version 0x" << version << " layer 0x" << layer
<< " sample_rate_index 0x" << sample_rate_index
diff --git a/chromium/media/formats/mpeg/adts_stream_parser.h b/chromium/media/formats/mpeg/adts_stream_parser.h
index b25f72a0cdf..5015c79626b 100644
--- a/chromium/media/formats/mpeg/adts_stream_parser.h
+++ b/chromium/media/formats/mpeg/adts_stream_parser.h
@@ -26,9 +26,11 @@ class MEDIA_EXPORT ADTSStreamParser : public MPEGAudioStreamParserBase {
ChannelLayout* channel_layout,
int* sample_count,
bool* metadata_frame,
- std::vector<uint8_t>* extra_data) const override;
+ std::vector<uint8_t>* extra_data) override;
private:
+ size_t adts_parse_error_limit_ = 0;
+
DISALLOW_COPY_AND_ASSIGN(ADTSStreamParser);
};
diff --git a/chromium/media/formats/mpeg/mpeg1_audio_stream_parser.cc b/chromium/media/formats/mpeg/mpeg1_audio_stream_parser.cc
index 2a636076f6c..05a84624583 100644
--- a/chromium/media/formats/mpeg/mpeg1_audio_stream_parser.cc
+++ b/chromium/media/formats/mpeg/mpeg1_audio_stream_parser.cc
@@ -8,87 +8,82 @@
namespace media {
-static const uint32_t kMPEG1StartCodeMask = 0xffe00000;
+namespace {
+
+constexpr uint32_t kMPEG1StartCodeMask = 0xffe00000;
// Map that determines which bitrate_index & channel_mode combinations
// are allowed.
// Derived from: http://mpgedit.org/mpgedit/mpeg_format/MP3Format.html
-static const bool kIsAllowed[17][4] = {
- { true, true, true, true }, // free
- { true, false, false, false }, // 32
- { true, false, false, false }, // 48
- { true, false, false, false }, // 56
- { true, true, true, true }, // 64
- { true, false, false, false }, // 80
- { true, true, true, true }, // 96
- { true, true, true, true }, // 112
- { true, true, true, true }, // 128
- { true, true, true, true }, // 160
- { true, true, true, true }, // 192
- { false, true, true, true }, // 224
- { false, true, true, true }, // 256
- { false, true, true, true }, // 320
- { false, true, true, true }, // 384
- { false, false, false, false } // bad
+constexpr bool kIsAllowed[17][4] = {
+ {true, true, true, true}, // free
+ {true, false, false, false}, // 32
+ {true, false, false, false}, // 48
+ {true, false, false, false}, // 56
+ {true, true, true, true}, // 64
+ {true, false, false, false}, // 80
+ {true, true, true, true}, // 96
+ {true, true, true, true}, // 112
+ {true, true, true, true}, // 128
+ {true, true, true, true}, // 160
+ {true, true, true, true}, // 192
+ {false, true, true, true}, // 224
+ {false, true, true, true}, // 256
+ {false, true, true, true}, // 320
+ {false, true, true, true}, // 384
+ {false, false, false, false} // bad
};
// Maps version and layer information in the frame header
// into an index for the |kBitrateMap|.
// Derived from: http://mpgedit.org/mpgedit/mpeg_format/MP3Format.html
-static const int kVersionLayerMap[4][4] = {
- // { reserved, L3, L2, L1 }
- { 5, 4, 4, 3 }, // MPEG 2.5
- { 5, 5, 5, 5 }, // reserved
- { 5, 4, 4, 3 }, // MPEG 2
- { 5, 2, 1, 0 } // MPEG 1
+constexpr int kVersionLayerMap[4][4] = {
+ // { reserved, L3, L2, L1 }
+ {5, 4, 4, 3}, // MPEG 2.5
+ {5, 5, 5, 5}, // reserved
+ {5, 4, 4, 3}, // MPEG 2
+ {5, 2, 1, 0} // MPEG 1
};
// Maps the bitrate index field in the header and an index
// from |kVersionLayerMap| to a frame bitrate.
// Derived from: http://mpgedit.org/mpgedit/mpeg_format/MP3Format.html
-static const int kBitrateMap[16][6] = {
- // { V1L1, V1L2, V1L3, V2L1, V2L2 & V2L3, reserved }
- { 0, 0, 0, 0, 0, 0 },
- { 32, 32, 32, 32, 8, 0 },
- { 64, 48, 40, 48, 16, 0 },
- { 96, 56, 48, 56, 24, 0 },
- { 128, 64, 56, 64, 32, 0 },
- { 160, 80, 64, 80, 40, 0 },
- { 192, 96, 80, 96, 48, 0 },
- { 224, 112, 96, 112, 56, 0 },
- { 256, 128, 112, 128, 64, 0 },
- { 288, 160, 128, 144, 80, 0 },
- { 320, 192, 160, 160, 96, 0 },
- { 352, 224, 192, 176, 112, 0 },
- { 384, 256, 224, 192, 128, 0 },
- { 416, 320, 256, 224, 144, 0 },
- { 448, 384, 320, 256, 160, 0 },
- { 0, 0, 0, 0, 0}
-};
+constexpr int kBitrateMap[16][6] = {
+ // { V1L1, V1L2, V1L3, V2L1, V2L2 & V2L3, reserved }
+ {0, 0, 0, 0, 0, 0}, {32, 32, 32, 32, 8, 0},
+ {64, 48, 40, 48, 16, 0}, {96, 56, 48, 56, 24, 0},
+ {128, 64, 56, 64, 32, 0}, {160, 80, 64, 80, 40, 0},
+ {192, 96, 80, 96, 48, 0}, {224, 112, 96, 112, 56, 0},
+ {256, 128, 112, 128, 64, 0}, {288, 160, 128, 144, 80, 0},
+ {320, 192, 160, 160, 96, 0}, {352, 224, 192, 176, 112, 0},
+ {384, 256, 224, 192, 128, 0}, {416, 320, 256, 224, 144, 0},
+ {448, 384, 320, 256, 160, 0}, {0, 0, 0, 0, 0}};
// Maps the sample rate index and version fields from the frame header
// to a sample rate.
// Derived from: http://mpgedit.org/mpgedit/mpeg_format/MP3Format.html
-static const int kSampleRateMap[4][4] = {
- // { V2.5, reserved, V2, V1 }
- { 11025, 0, 22050, 44100 },
- { 12000, 0, 24000, 48000 },
- { 8000, 0, 16000, 32000 },
- { 0, 0, 0, 0 }
-};
+constexpr int kSampleRateMap[4][4] = {
+ // { V2.5, reserved, V2, V1 }
+ {11025, 0, 22050, 44100},
+ {12000, 0, 24000, 48000},
+ {8000, 0, 16000, 32000},
+ {0, 0, 0, 0}};
// Offset in bytes from the end of the MP3 header to "Xing" or "Info" tags which
// indicate a frame is silent metadata frame. Values taken from FFmpeg.
-static const int kXingHeaderMap[2][2] = {{32, 17}, {17, 9}};
+constexpr int kXingHeaderMap[2][2] = {{32, 17}, {17, 9}};
// Frame header field constants.
-static const int kBitrateFree = 0;
-static const int kBitrateBad = 0xf;
-static const int kSampleRateReserved = 3;
-static const int kCodecDelay = 529;
+constexpr int kBitrateFree = 0;
+constexpr int kBitrateBad = 0xf;
+constexpr int kSampleRateReserved = 3;
+constexpr int kCodecDelay = 529;
+
+} // namespace
// static
bool MPEG1AudioStreamParser::ParseHeader(MediaLog* media_log,
+ size_t* media_log_limit,
const uint8_t* data,
Header* header) {
BitReader reader(data, kHeaderSize);
@@ -103,32 +98,25 @@ bool MPEG1AudioStreamParser::ParseHeader(MediaLog* media_log,
int channel_mode;
int other_flags;
- if (!reader.ReadBits(11, &sync) ||
- !reader.ReadBits(2, &version) ||
- !reader.ReadBits(2, &layer) ||
- !reader.ReadBits(1, &is_protected) ||
+ if (!reader.ReadBits(11, &sync) || !reader.ReadBits(2, &version) ||
+ !reader.ReadBits(2, &layer) || !reader.ReadBits(1, &is_protected) ||
!reader.ReadBits(4, &bitrate_index) ||
!reader.ReadBits(2, &sample_rate_index) ||
- !reader.ReadBits(1, &has_padding) ||
- !reader.ReadBits(1, &is_private) ||
- !reader.ReadBits(2, &channel_mode) ||
- !reader.ReadBits(6, &other_flags)) {
+ !reader.ReadBits(1, &has_padding) || !reader.ReadBits(1, &is_private) ||
+ !reader.ReadBits(2, &channel_mode) || !reader.ReadBits(6, &other_flags)) {
return false;
}
- DVLOG(2) << "Header data :" << std::hex
- << " sync 0x" << sync
- << " version 0x" << version
- << " layer 0x" << layer
- << " bitrate_index 0x" << bitrate_index
- << " sample_rate_index 0x" << sample_rate_index
+ DVLOG(2) << "Header data :" << std::hex << " sync 0x" << sync << " version 0x"
+ << version << " layer 0x" << layer << " bitrate_index 0x"
+ << bitrate_index << " sample_rate_index 0x" << sample_rate_index
<< " channel_mode 0x" << channel_mode;
if (sync != 0x7ff || version == kVersionReserved || layer == kLayerReserved ||
bitrate_index == kBitrateFree || bitrate_index == kBitrateBad ||
sample_rate_index == kSampleRateReserved) {
if (media_log) {
- MEDIA_LOG(ERROR, media_log)
+ LIMITED_MEDIA_LOG(DEBUG, media_log, *media_log_limit, 5)
<< "Invalid MP3 header data :" << std::hex << " sync 0x" << sync
<< " version 0x" << version << " layer 0x" << layer
<< " bitrate_index 0x" << bitrate_index << " sample_rate_index 0x"
@@ -139,7 +127,7 @@ bool MPEG1AudioStreamParser::ParseHeader(MediaLog* media_log,
if (layer == kLayer2 && !kIsAllowed[bitrate_index][channel_mode]) {
if (media_log) {
- MEDIA_LOG(ERROR, media_log)
+ LIMITED_MEDIA_LOG(DEBUG, media_log, *media_log_limit, 5)
<< "Invalid MP3 (bitrate_index, channel_mode)"
<< " combination :" << std::hex << " bitrate_index " << bitrate_index
<< " channel_mode " << channel_mode;
@@ -151,7 +139,7 @@ bool MPEG1AudioStreamParser::ParseHeader(MediaLog* media_log,
if (bitrate == 0) {
if (media_log) {
- MEDIA_LOG(ERROR, media_log)
+ LIMITED_MEDIA_LOG(DEBUG, media_log, *media_log_limit, 5)
<< "Invalid MP3 bitrate :" << std::hex << " version " << version
<< " layer " << layer << " bitrate_index " << bitrate_index;
}
@@ -163,7 +151,7 @@ bool MPEG1AudioStreamParser::ParseHeader(MediaLog* media_log,
int frame_sample_rate = kSampleRateMap[sample_rate_index][version];
if (frame_sample_rate == 0) {
if (media_log) {
- MEDIA_LOG(ERROR, media_log)
+ LIMITED_MEDIA_LOG(DEBUG, media_log, *media_log_limit, 5)
<< "Invalid MP3 sample rate :" << std::hex << " version " << version
<< " sample_rate_index " << sample_rate_index;
}
@@ -183,10 +171,11 @@ bool MPEG1AudioStreamParser::ParseHeader(MediaLog* media_log,
break;
case kLayer3:
- if (version == kVersion2 || version == kVersion2_5)
+ if (version == kVersion2 || version == kVersion2_5) {
samples_per_frame = 576;
- else
+ } else {
samples_per_frame = 1152;
+ }
break;
default:
@@ -226,21 +215,19 @@ bool MPEG1AudioStreamParser::ParseHeader(MediaLog* media_log,
return true;
}
-
MPEG1AudioStreamParser::MPEG1AudioStreamParser()
: MPEGAudioStreamParserBase(kMPEG1StartCodeMask, kCodecMP3, kCodecDelay) {}
MPEG1AudioStreamParser::~MPEG1AudioStreamParser() = default;
-int MPEG1AudioStreamParser::ParseFrameHeader(
- const uint8_t* data,
- int size,
- int* frame_size,
- int* sample_rate,
- ChannelLayout* channel_layout,
- int* sample_count,
- bool* metadata_frame,
- std::vector<uint8_t>* extra_data) const {
+int MPEG1AudioStreamParser::ParseFrameHeader(const uint8_t* data,
+ int size,
+ int* frame_size,
+ int* sample_rate,
+ ChannelLayout* channel_layout,
+ int* sample_count,
+ bool* metadata_frame,
+ std::vector<uint8_t>* extra_data) {
DCHECK(data);
DCHECK_GE(size, 0);
DCHECK(frame_size);
@@ -249,7 +236,7 @@ int MPEG1AudioStreamParser::ParseFrameHeader(
return 0;
Header header;
- if (!ParseHeader(media_log(), data, &header))
+ if (!ParseHeader(media_log(), &mp3_parse_error_limit_, data, &header))
return -1;
*frame_size = header.frame_size;
diff --git a/chromium/media/formats/mpeg/mpeg1_audio_stream_parser.h b/chromium/media/formats/mpeg/mpeg1_audio_stream_parser.h
index f162bd10555..de86e69661a 100644
--- a/chromium/media/formats/mpeg/mpeg1_audio_stream_parser.h
+++ b/chromium/media/formats/mpeg/mpeg1_audio_stream_parser.h
@@ -20,9 +20,7 @@ namespace media {
class MEDIA_EXPORT MPEG1AudioStreamParser : public MPEGAudioStreamParserBase {
public:
// Size of an MPEG-1 frame header in bytes.
- enum {
- kHeaderSize = 4,
- };
+ static constexpr int kHeaderSize = 4;
// Versions and layers as defined in ISO/IEC 11172-3.
enum Version {
@@ -65,6 +63,7 @@ class MEDIA_EXPORT MPEG1AudioStreamParser : public MPEGAudioStreamParserBase {
// Assumption: size of array |data| should be at least |kHeaderSize|.
// Returns false if the header is not valid.
static bool ParseHeader(MediaLog* media_log,
+ size_t* media_log_limit,
const uint8_t* data,
Header* header);
@@ -80,7 +79,9 @@ class MEDIA_EXPORT MPEG1AudioStreamParser : public MPEGAudioStreamParserBase {
ChannelLayout* channel_layout,
int* sample_count,
bool* metadata_frame,
- std::vector<uint8_t>* extra_data) const override;
+ std::vector<uint8_t>* extra_data) override;
+
+ size_t mp3_parse_error_limit_ = 0;
DISALLOW_COPY_AND_ASSIGN(MPEG1AudioStreamParser);
};
diff --git a/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.cc b/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.cc
index b5fed3de239..761ae64d438 100644
--- a/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.cc
+++ b/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.cc
@@ -347,7 +347,7 @@ bool MPEGAudioStreamParserBase::ParseSyncSafeInt(BitReader* reader,
}
int MPEGAudioStreamParserBase::FindNextValidStartCode(const uint8_t* data,
- int size) const {
+ int size) {
const uint8_t* start = data;
const uint8_t* end = data + size;
diff --git a/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.h b/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.h
index 42495dcc059..425d7079aa3 100644
--- a/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.h
+++ b/chromium/media/formats/mpeg/mpeg_audio_stream_parser_base.h
@@ -84,7 +84,7 @@ class MEDIA_EXPORT MPEGAudioStreamParserBase : public StreamParser {
ChannelLayout* channel_layout,
int* sample_count,
bool* metadata_frame,
- std::vector<uint8_t>* extra_data) const = 0;
+ std::vector<uint8_t>* extra_data) = 0;
MediaLog* media_log() const { return media_log_; }
@@ -125,7 +125,7 @@ class MEDIA_EXPORT MPEGAudioStreamParserBase : public StreamParser {
// next start code..
// 0 : If a valid start code was not found and more data is needed.
// < 0 : An error was encountered during parsing.
- int FindNextValidStartCode(const uint8_t* data, int size) const;
+ int FindNextValidStartCode(const uint8_t* data, int size);
// Sends the buffers in |buffers| to |new_buffers_cb_| and then clears
// |buffers|.
diff --git a/chromium/media/formats/webm/webm_colour_parser.cc b/chromium/media/formats/webm/webm_colour_parser.cc
index 966f9548221..134f4f7583a 100644
--- a/chromium/media/formats/webm/webm_colour_parser.cc
+++ b/chromium/media/formats/webm/webm_colour_parser.cc
@@ -192,7 +192,7 @@ WebMColorMetadata WebMColourParser::GetWebMColorMetadata() const {
if (max_content_light_level_ != -1 || max_frame_average_light_level_ != -1 ||
mastering_metadata_parsed_) {
- color_metadata.hdr_metadata = gl::HDRMetadata();
+ color_metadata.hdr_metadata = gfx::HDRMetadata();
if (max_content_light_level_ != -1) {
color_metadata.hdr_metadata->max_content_light_level =
diff --git a/chromium/media/formats/webm/webm_colour_parser.h b/chromium/media/formats/webm/webm_colour_parser.h
index ae474ca724c..dedd824f43b 100644
--- a/chromium/media/formats/webm/webm_colour_parser.h
+++ b/chromium/media/formats/webm/webm_colour_parser.h
@@ -9,7 +9,7 @@
#include "base/optional.h"
#include "media/base/video_color_space.h"
#include "media/formats/webm/webm_parser.h"
-#include "ui/gl/hdr_metadata.h"
+#include "ui/gfx/hdr_metadata.h"
namespace media {
@@ -26,7 +26,7 @@ struct MEDIA_EXPORT WebMColorMetadata {
VideoColorSpace color_space;
- base::Optional<gl::HDRMetadata> hdr_metadata;
+ base::Optional<gfx::HDRMetadata> hdr_metadata;
WebMColorMetadata();
WebMColorMetadata(const WebMColorMetadata& rhs);
@@ -39,7 +39,7 @@ class WebMMasteringMetadataParser : public WebMParserClient {
WebMMasteringMetadataParser();
~WebMMasteringMetadataParser() override;
- gl::MasteringMetadata GetMasteringMetadata() const {
+ gfx::MasteringMetadata GetMasteringMetadata() const {
return mastering_metadata_;
}
@@ -47,7 +47,7 @@ class WebMMasteringMetadataParser : public WebMParserClient {
// WebMParserClient implementation.
bool OnFloat(int id, double val) override;
- gl::MasteringMetadata mastering_metadata_;
+ gfx::MasteringMetadata mastering_metadata_;
DISALLOW_COPY_AND_ASSIGN(WebMMasteringMetadataParser);
};
diff --git a/chromium/media/formats/webm/webm_stream_parser_unittest.cc b/chromium/media/formats/webm/webm_stream_parser_unittest.cc
index 438a8718b68..d29d6641624 100644
--- a/chromium/media/formats/webm/webm_stream_parser_unittest.cc
+++ b/chromium/media/formats/webm/webm_stream_parser_unittest.cc
@@ -176,12 +176,12 @@ TEST_F(WebMStreamParserTest, ColourElement) {
gfx::ColorSpace::RangeID::FULL);
EXPECT_EQ(video_config.color_space_info(), expected_color_space);
- base::Optional<gl::HDRMetadata> hdr_metadata = video_config.hdr_metadata();
+ base::Optional<gfx::HDRMetadata> hdr_metadata = video_config.hdr_metadata();
EXPECT_TRUE(hdr_metadata.has_value());
EXPECT_EQ(hdr_metadata->max_content_light_level, 11u);
EXPECT_EQ(hdr_metadata->max_frame_average_light_level, 12u);
- const gl::MasteringMetadata& mmdata = hdr_metadata->mastering_metadata;
+ const gfx::MasteringMetadata& mmdata = hdr_metadata->mastering_metadata;
EXPECT_FLOAT_EQ(mmdata.primary_r.x(), 0.1f);
EXPECT_FLOAT_EQ(mmdata.primary_r.y(), 0.2f);
EXPECT_FLOAT_EQ(mmdata.primary_g.x(), 0.1f);
diff --git a/chromium/media/fuchsia/OWNERS b/chromium/media/fuchsia/OWNERS
index fa1a1eb92fd..a72652c8a16 100644
--- a/chromium/media/fuchsia/OWNERS
+++ b/chromium/media/fuchsia/OWNERS
@@ -1,6 +1,3 @@
sergeyu@chromium.org
-yucliu@chromium.org
wez@chromium.org
-
-per-file *.cmx=set noparent
-per-file *.cmx=file://fuchsia/SECURITY_OWNERS
+yucliu@chromium.org
diff --git a/chromium/media/fuchsia/audio/BUILD.gn b/chromium/media/fuchsia/audio/BUILD.gn
index b455c993917..eb547274da2 100644
--- a/chromium/media/fuchsia/audio/BUILD.gn
+++ b/chromium/media/fuchsia/audio/BUILD.gn
@@ -15,6 +15,8 @@ source_set("audio") {
sources = [
"fuchsia_audio_capturer_source.cc",
"fuchsia_audio_capturer_source.h",
+ "fuchsia_audio_output_device.cc",
+ "fuchsia_audio_output_device.h",
"fuchsia_audio_renderer.cc",
"fuchsia_audio_renderer.h",
]
@@ -39,6 +41,7 @@ source_set("unittests") {
deps = [
":audio",
+ ":test_support",
"//base",
"//base/test:test_support",
"//media",
@@ -46,5 +49,8 @@ source_set("unittests") {
"//third_party/fuchsia-sdk/sdk/fidl/fuchsia.media.audio",
]
- sources = [ "fuchsia_audio_capturer_source_test.cc" ]
+ sources = [
+ "fuchsia_audio_capturer_source_test.cc",
+ "fuchsia_audio_output_device_test.cc",
+ ]
}
diff --git a/chromium/media/fuchsia/audio/fake_audio_consumer.cc b/chromium/media/fuchsia/audio/fake_audio_consumer.cc
index f8368980332..3babfc3df0f 100644
--- a/chromium/media/fuchsia/audio/fake_audio_consumer.cc
+++ b/chromium/media/fuchsia/audio/fake_audio_consumer.cc
@@ -11,16 +11,10 @@
namespace media {
-namespace {
-
-// Lead time range returned from WatchStatus();
-constexpr base::TimeDelta kMinLeadTime = base::TimeDelta::FromMilliseconds(100);
-constexpr base::TimeDelta kMaxLeadTime = base::TimeDelta::FromMilliseconds(500);
-
-} // namespace
-
-// Buffering delay.
-constexpr base::TimeDelta kBufferDelay = base::TimeDelta::FromMilliseconds(30);
+const base::TimeDelta FakeAudioConsumer::kMinLeadTime =
+ base::TimeDelta::FromMilliseconds(100);
+const base::TimeDelta FakeAudioConsumer::kMaxLeadTime =
+ base::TimeDelta::FromMilliseconds(500);
FakeAudioConsumer::FakeAudioConsumer(
uint64_t session_id,
@@ -61,7 +55,7 @@ void FakeAudioConsumer::Start(fuchsia::media::AudioConsumerStartFlags flags,
if (reference_time != fuchsia::media::NO_TIMESTAMP) {
reference_time_ = base::TimeTicks::FromZxTime(reference_time);
} else {
- reference_time_ = base::TimeTicks::Now() + kBufferDelay;
+ reference_time_ = base::TimeTicks::Now() + kMinLeadTime;
}
if (media_time != fuchsia::media::NO_TIMESTAMP) {
@@ -79,7 +73,7 @@ void FakeAudioConsumer::Start(fuchsia::media::AudioConsumerStartFlags flags,
}
void FakeAudioConsumer::Stop() {
- CHECK(state_ != State::kPlaying);
+ CHECK(state_ != State::kStopped);
state_ = State::kStopped;
OnStatusUpdate();
diff --git a/chromium/media/fuchsia/audio/fake_audio_consumer.h b/chromium/media/fuchsia/audio/fake_audio_consumer.h
index 56615269cbd..c42469a0e90 100644
--- a/chromium/media/fuchsia/audio/fake_audio_consumer.h
+++ b/chromium/media/fuchsia/audio/fake_audio_consumer.h
@@ -30,6 +30,10 @@ class FakeAudioConsumer
public fuchsia::media::testing::StreamSink_TestBase,
public fuchsia::media::audio::testing::VolumeControl_TestBase {
public:
+ // Lead time range returned from WatchStatus().
+ static const base::TimeDelta kMinLeadTime;
+ static const base::TimeDelta kMaxLeadTime;
+
FakeAudioConsumer(
uint64_t session_id,
fidl::InterfaceRequest<fuchsia::media::AudioConsumer> request);
diff --git a/chromium/media/fuchsia/audio/fuchsia_audio_capturer_source.cc b/chromium/media/fuchsia/audio/fuchsia_audio_capturer_source.cc
index 38ec54121b3..80c28b2a6e3 100644
--- a/chromium/media/fuchsia/audio/fuchsia_audio_capturer_source.cc
+++ b/chromium/media/fuchsia/audio/fuchsia_audio_capturer_source.cc
@@ -95,8 +95,8 @@ void FuchsiaAudioCapturerSource::Initialize(const AudioParameters& params,
// Map the buffer.
uint64_t addr;
status = zx::vmar::root_self()->map(
- /*vmar_offset=*/0, buffer_vmo, /*vmo_offset=*/0, capture_buffer_size_,
- ZX_VM_PERM_READ, &addr);
+ ZX_VM_PERM_READ, /*vmar_offset=*/0, buffer_vmo, /*vmo_offset=*/0,
+ capture_buffer_size_, &addr);
if (status != ZX_OK) {
ZX_DLOG(ERROR, status) << "zx_vmar_map";
ReportError("Failed to map capture buffer");
diff --git a/chromium/media/fuchsia/audio/fuchsia_audio_output_device.cc b/chromium/media/fuchsia/audio/fuchsia_audio_output_device.cc
new file mode 100644
index 00000000000..50429b46f06
--- /dev/null
+++ b/chromium/media/fuchsia/audio/fuchsia_audio_output_device.cc
@@ -0,0 +1,492 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/fuchsia/audio/fuchsia_audio_output_device.h"
+
+#include "base/fuchsia/fuchsia_logging.h"
+#include "base/logging.h"
+#include "base/memory/shared_memory_mapping.h"
+#include "base/memory/writable_shared_memory_region.h"
+#include "base/no_destructor.h"
+#include "base/strings/stringprintf.h"
+#include "base/threading/thread.h"
+#include "base/threading/thread_task_runner_handle.h"
+#include "media/base/audio_timestamp_helper.h"
+
+namespace media {
+
+namespace {
+
+// Total number of buffers used for AudioConsumer.
+constexpr size_t kNumBuffers = 4;
+
+// Extra lead time added to min_lead_time reported by AudioConsumer when
+// scheduling PumpSamples() timer. This is necessary to make it more likely
+// that each packet is sent on time, even if the timer is delayed. Higher values
+// increase playback latency, but make underflow less likely. 20ms allows to
+// keep latency reasonably low, while making playback reliable under normal
+// conditions.
+//
+// TODO(crbug.com/1153909): It may be possible to reduce this value to reduce
+// total latency, but that requires that an elevated scheduling profile is
+// applied to this thread.
+constexpr base::TimeDelta kLeadTimeExtra =
+ base::TimeDelta::FromMilliseconds(20);
+
+class DefaultAudioThread {
+ public:
+ DefaultAudioThread() : thread_("FuchsiaAudioOutputDevice") {
+ // TODO(crbug.com/1153909): Consider applying media-specific scheduling
+ // policy to the thread.
+ thread_.StartWithOptions(
+ base::Thread::Options(base::MessagePumpType::IO, 0));
+ }
+ ~DefaultAudioThread() = default;
+
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner() {
+ return thread_.task_runner();
+ }
+
+ private:
+ base::Thread thread_;
+};
+
+scoped_refptr<base::SingleThreadTaskRunner> GetDefaultAudioTaskRunner() {
+ static base::NoDestructor<DefaultAudioThread> default_audio_thread;
+ return default_audio_thread->task_runner();
+}
+
+} // namespace
+
+// static
+scoped_refptr<FuchsiaAudioOutputDevice> FuchsiaAudioOutputDevice::Create(
+ fidl::InterfaceHandle<fuchsia::media::AudioConsumer> audio_consumer_handle,
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner) {
+ scoped_refptr<FuchsiaAudioOutputDevice> result(
+ new FuchsiaAudioOutputDevice(task_runner));
+ task_runner->PostTask(
+ FROM_HERE,
+ base::BindOnce(&FuchsiaAudioOutputDevice::BindAudioConsumerOnAudioThread,
+ result, std::move(audio_consumer_handle)));
+ return result;
+}
+
+// static
+scoped_refptr<FuchsiaAudioOutputDevice>
+FuchsiaAudioOutputDevice::CreateOnDefaultThread(
+ fidl::InterfaceHandle<fuchsia::media::AudioConsumer>
+ audio_consumer_handle) {
+ return Create(std::move(audio_consumer_handle), GetDefaultAudioTaskRunner());
+}
+
+FuchsiaAudioOutputDevice::FuchsiaAudioOutputDevice(
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner)
+ : task_runner_(std::move(task_runner)) {}
+
+FuchsiaAudioOutputDevice::~FuchsiaAudioOutputDevice() = default;
+
+void FuchsiaAudioOutputDevice::Initialize(const AudioParameters& params,
+ RenderCallback* callback) {
+ DCHECK(callback);
+
+ // Save |callback| synchronously here to handle the case when Stop() is called
+ // before the DoInitialize() task is processed.
+ {
+ base::AutoLock auto_lock(callback_lock_);
+ DCHECK(!callback_);
+ callback_ = callback;
+ }
+
+ task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&FuchsiaAudioOutputDevice::InitializeOnAudioThread, this,
+ params));
+}
+
+void FuchsiaAudioOutputDevice::Start() {
+ task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&FuchsiaAudioOutputDevice::StartOnAudioThread, this));
+}
+
+void FuchsiaAudioOutputDevice::Stop() {
+ {
+ base::AutoLock auto_lock(callback_lock_);
+ callback_ = nullptr;
+ }
+
+ task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&FuchsiaAudioOutputDevice::StopOnAudioThread, this));
+}
+
+void FuchsiaAudioOutputDevice::Pause() {
+ task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&FuchsiaAudioOutputDevice::PauseOnAudioThread, this));
+}
+
+void FuchsiaAudioOutputDevice::Play() {
+ task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&FuchsiaAudioOutputDevice::PlayOnAudioThread, this));
+}
+
+void FuchsiaAudioOutputDevice::Flush() {
+ task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&FuchsiaAudioOutputDevice::FlushOnAudioThread, this));
+}
+
+bool FuchsiaAudioOutputDevice::SetVolume(double volume) {
+ task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(&FuchsiaAudioOutputDevice::SetVolumeOnAudioThread, this,
+ volume));
+ return true;
+}
+
+OutputDeviceInfo FuchsiaAudioOutputDevice::GetOutputDeviceInfo() {
+ // AudioConsumer doesn't provide any information about the output device.
+ //
+ // TODO(crbug.com/852834): Update this method when that functionality is
+ // implemented.
+ return OutputDeviceInfo(
+ std::string(), OUTPUT_DEVICE_STATUS_OK,
+ AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY,
+ CHANNEL_LAYOUT_STEREO, 48000, 480));
+}
+
+void FuchsiaAudioOutputDevice::GetOutputDeviceInfoAsync(
+ OutputDeviceInfoCB info_cb) {
+ std::move(info_cb).Run(GetOutputDeviceInfo());
+}
+
+bool FuchsiaAudioOutputDevice::IsOptimizedForHardwareParameters() {
+ // AudioConsumer doesn't provide device parameters (since target device may
+ // change).
+ return false;
+}
+
+bool FuchsiaAudioOutputDevice::CurrentThreadIsRenderingThread() {
+ return task_runner_->BelongsToCurrentThread();
+}
+
+void FuchsiaAudioOutputDevice::BindAudioConsumerOnAudioThread(
+ fidl::InterfaceHandle<fuchsia::media::AudioConsumer>
+ audio_consumer_handle) {
+ DCHECK(CurrentThreadIsRenderingThread());
+ DCHECK(!audio_consumer_);
+
+ audio_consumer_.Bind(std::move(audio_consumer_handle));
+ audio_consumer_.set_error_handler([this](zx_status_t status) {
+ ZX_LOG(ERROR, status) << "AudioConsumer disconnected.";
+ ReportError();
+ });
+}
+
+void FuchsiaAudioOutputDevice::InitializeOnAudioThread(
+ const AudioParameters& params) {
+ DCHECK(CurrentThreadIsRenderingThread());
+
+ params_ = params;
+ audio_bus_ = AudioBus::Create(params_);
+
+ UpdateVolume();
+
+ WatchAudioConsumerStatus();
+}
+
+void FuchsiaAudioOutputDevice::StartOnAudioThread() {
+ DCHECK(CurrentThreadIsRenderingThread());
+
+ if (!audio_consumer_)
+ return;
+
+ CreateStreamSink();
+
+ media_pos_frames_ = 0;
+ audio_consumer_->Start(fuchsia::media::AudioConsumerStartFlags::LOW_LATENCY,
+ fuchsia::media::NO_TIMESTAMP, 0);
+
+ // When AudioConsumer handles the Start() message sent above, it will update
+ // its state and send a WatchStatus() response. OnAudioConsumerStatusChanged()
+ // will then call SchedulePumpSamples() to start sending audio packets.
+}
+
+void FuchsiaAudioOutputDevice::StopOnAudioThread() {
+ DCHECK(CurrentThreadIsRenderingThread());
+
+ if (!audio_consumer_)
+ return;
+
+ audio_consumer_->Stop();
+ pump_samples_timer_.Stop();
+
+ audio_consumer_.Unbind();
+ stream_sink_.Unbind();
+ volume_control_.Unbind();
+}
+
+void FuchsiaAudioOutputDevice::PauseOnAudioThread() {
+ DCHECK(CurrentThreadIsRenderingThread());
+
+ if (!audio_consumer_)
+ return;
+
+ paused_ = true;
+ audio_consumer_->SetRate(0.0);
+ pump_samples_timer_.Stop();
+}
+
+void FuchsiaAudioOutputDevice::PlayOnAudioThread() {
+ DCHECK(CurrentThreadIsRenderingThread());
+
+ if (!audio_consumer_)
+ return;
+
+ paused_ = false;
+ audio_consumer_->SetRate(1.0);
+}
+
+void FuchsiaAudioOutputDevice::FlushOnAudioThread() {
+ DCHECK(CurrentThreadIsRenderingThread());
+
+ if (!stream_sink_)
+ return;
+
+ stream_sink_->DiscardAllPacketsNoReply();
+}
+
+void FuchsiaAudioOutputDevice::SetVolumeOnAudioThread(double volume) {
+ DCHECK(CurrentThreadIsRenderingThread());
+
+ volume_ = volume;
+ if (audio_consumer_)
+ UpdateVolume();
+}
+
+void FuchsiaAudioOutputDevice::CreateStreamSink() {
+ DCHECK(CurrentThreadIsRenderingThread());
+ DCHECK(audio_consumer_);
+
+ // Allocate buffers for the StreamSink.
+ size_t buffer_size = params_.GetBytesPerBuffer(kSampleFormatF32);
+ stream_sink_buffers_.reserve(kNumBuffers);
+ available_buffers_indices_.clear();
+ std::vector<zx::vmo> vmos_for_stream_sink;
+ vmos_for_stream_sink.reserve(kNumBuffers);
+ for (size_t i = 0; i < kNumBuffers; ++i) {
+ auto region = base::WritableSharedMemoryRegion::Create(buffer_size);
+ auto mapping = region.Map();
+ if (!mapping.IsValid()) {
+ LOG(WARNING) << "Failed to allocate VMO of size " << buffer_size;
+ ReportError();
+ return;
+ }
+ stream_sink_buffers_.push_back(std::move(mapping));
+ available_buffers_indices_.push_back(i);
+
+ auto read_only_region =
+ base::WritableSharedMemoryRegion::ConvertToReadOnly(std::move(region));
+
+ vmos_for_stream_sink.push_back(
+ base::ReadOnlySharedMemoryRegion::TakeHandleForSerialization(
+ std::move(read_only_region))
+ .PassPlatformHandle());
+ }
+
+ // Configure StreamSink.
+ fuchsia::media::AudioStreamType stream_type;
+ stream_type.channels = params_.channels();
+ stream_type.frames_per_second = params_.sample_rate();
+ stream_type.sample_format = fuchsia::media::AudioSampleFormat::FLOAT;
+ audio_consumer_->CreateStreamSink(std::move(vmos_for_stream_sink),
+ std::move(stream_type), nullptr,
+ stream_sink_.NewRequest());
+ stream_sink_.set_error_handler([this](zx_status_t status) {
+ ZX_LOG(ERROR, status) << "StreamSink disconnected.";
+ ReportError();
+ });
+}
+
+void FuchsiaAudioOutputDevice::UpdateVolume() {
+ DCHECK(CurrentThreadIsRenderingThread());
+ DCHECK(audio_consumer_);
+ if (!volume_control_) {
+ audio_consumer_->BindVolumeControl(volume_control_.NewRequest());
+ volume_control_.set_error_handler([](zx_status_t status) {
+ ZX_LOG(ERROR, status) << "VolumeControl disconnected.";
+ });
+ }
+ volume_control_->SetVolume(volume_);
+}
+
+void FuchsiaAudioOutputDevice::WatchAudioConsumerStatus() {
+ DCHECK(CurrentThreadIsRenderingThread());
+ audio_consumer_->WatchStatus(fit::bind_member(
+ this, &FuchsiaAudioOutputDevice::OnAudioConsumerStatusChanged));
+}
+
+void FuchsiaAudioOutputDevice::OnAudioConsumerStatusChanged(
+ fuchsia::media::AudioConsumerStatus status) {
+ DCHECK(CurrentThreadIsRenderingThread());
+
+ if (!status.has_min_lead_time()) {
+ DLOG(ERROR) << "AudioConsumerStatus.min_lead_time isn't set.";
+ ReportError();
+ return;
+ }
+
+ min_lead_time_ = base::TimeDelta::FromNanoseconds(status.min_lead_time());
+
+ if (status.has_presentation_timeline()) {
+ timeline_reference_time_ = base::TimeTicks::FromZxTime(
+ status.presentation_timeline().reference_time);
+ timeline_subject_time_ = base::TimeDelta::FromNanoseconds(
+ status.presentation_timeline().subject_time);
+ timeline_reference_delta_ = status.presentation_timeline().reference_delta;
+ timeline_subject_delta_ = status.presentation_timeline().subject_delta;
+ } else {
+ // Reset |timeline_reference_time_| to null value, which is used to indicate
+ // that there is no presentation timeline.
+ timeline_reference_time_ = base::TimeTicks();
+ }
+
+ // Reschedule the timer for the new timeline.
+ pump_samples_timer_.Stop();
+ SchedulePumpSamples();
+
+ WatchAudioConsumerStatus();
+}
+
+void FuchsiaAudioOutputDevice::SchedulePumpSamples() {
+ DCHECK(CurrentThreadIsRenderingThread());
+
+ if (paused_ || timeline_reference_time_.is_null() ||
+ pump_samples_timer_.IsRunning() || available_buffers_indices_.empty()) {
+ return;
+ }
+
+ // Current position in the stream.
+ auto media_pos = AudioTimestampHelper::FramesToTime(media_pos_frames_,
+ params_.sample_rate());
+
+ // Calculate expected playback time for the next sample based on the
+ // presentation timeline provided by the AudioConsumer.
+ // See https://fuchsia.dev/reference/fidl/fuchsia.media#formulas .
+ // AudioConsumer uses monotonic clock (aka base::TimeTicks) as a reference
+ // timeline. Subject timeline corresponds to position within the stream, which
+ // is stored as |media_pos_frames_| and then passed in the |pts| field in each
+ // packet produced in PumpSamples().
+ auto playback_time = timeline_reference_time_ +
+ (media_pos - timeline_subject_time_) *
+ timeline_reference_delta_ / timeline_subject_delta_;
+
+ base::TimeTicks now = base::TimeTicks::Now();
+
+ int skipped_frames = 0;
+
+ // Target time for when PumpSamples() should run.
+ base::TimeTicks target_time = playback_time - min_lead_time_ - kLeadTimeExtra;
+
+ // Check if it's too late to send the next packet. If it is, then advance
+ // current stream position, adding kLeadTimeExtra to ensure the next packet
+ // doesn't miss the deadline.
+ auto lead_time = playback_time - now;
+ if (lead_time < min_lead_time_) {
+ auto new_playback_time = now + min_lead_time_ + kLeadTimeExtra;
+ auto skipped_time = new_playback_time - playback_time;
+ skipped_frames =
+ AudioTimestampHelper::TimeToFrames(skipped_time, params_.sample_rate());
+ media_pos_frames_ += skipped_frames;
+ target_time = now;
+ playback_time += skipped_time;
+ }
+
+ base::TimeDelta delay = target_time - now;
+ pump_samples_timer_.Start(
+ FROM_HERE, delay,
+ base::BindOnce(&FuchsiaAudioOutputDevice::PumpSamples, this,
+ playback_time, skipped_frames));
+}
+
+void FuchsiaAudioOutputDevice::PumpSamples(base::TimeTicks playback_time,
+ int frames_skipped) {
+ DCHECK(CurrentThreadIsRenderingThread());
+
+ auto now = base::TimeTicks::Now();
+
+ // Check if the timer has missed the deadline. It doesn't make sense to try
+ // sending the packet in that case (it's likely to arrive too late).
+ // Reschedule the timer. In this case SchedulePumpSamples() is expected to
+ // schedule PumpSamples() to run immediately with frames_skipped > 0.
+ auto lead_time = playback_time - now;
+ if (lead_time < min_lead_time_) {
+ SchedulePumpSamples();
+ return;
+ }
+
+ int frames_filled;
+ {
+ base::AutoLock auto_lock(callback_lock_);
+
+ // |callback_| may be reset in Stop(). No need to keep rendering the stream
+ // in that case.
+ if (!callback_)
+ return;
+
+ frames_filled = callback_->Render(playback_time - now, now, frames_skipped,
+ audio_bus_.get());
+ }
+
+ if (frames_filled) {
+ DCHECK(!available_buffers_indices_.empty());
+ int buffer_index = available_buffers_indices_.back();
+ available_buffers_indices_.pop_back();
+
+ audio_bus_->ToInterleaved<Float32SampleTypeTraitsNoClip>(
+ frames_filled,
+ static_cast<float*>(stream_sink_buffers_[buffer_index].memory()));
+
+ fuchsia::media::StreamPacket packet;
+ packet.payload_buffer_id = buffer_index;
+ packet.pts = AudioTimestampHelper::FramesToTime(media_pos_frames_,
+ params_.sample_rate())
+ .InNanoseconds();
+ packet.payload_offset = 0;
+ packet.payload_size = frames_filled * sizeof(float) * params_.channels();
+
+ stream_sink_->SendPacket(std::move(packet), [this, buffer_index]() {
+ OnStreamSendDone(buffer_index);
+ });
+
+ media_pos_frames_ += frames_filled;
+ }
+
+ SchedulePumpSamples();
+}
+
+void FuchsiaAudioOutputDevice::OnStreamSendDone(size_t buffer_index) {
+ DCHECK(CurrentThreadIsRenderingThread());
+
+ available_buffers_indices_.push_back(buffer_index);
+ SchedulePumpSamples();
+}
+
+void FuchsiaAudioOutputDevice::ReportError() {
+ DCHECK(CurrentThreadIsRenderingThread());
+
+ audio_consumer_.Unbind();
+ stream_sink_.Unbind();
+ volume_control_.Unbind();
+ pump_samples_timer_.Stop();
+ {
+ base::AutoLock auto_lock(callback_lock_);
+ if (callback_)
+ callback_->OnRenderError();
+ }
+}
+
+} // namespace media \ No newline at end of file
diff --git a/chromium/media/fuchsia/audio/fuchsia_audio_output_device.h b/chromium/media/fuchsia/audio/fuchsia_audio_output_device.h
new file mode 100644
index 00000000000..a08617ade56
--- /dev/null
+++ b/chromium/media/fuchsia/audio/fuchsia_audio_output_device.h
@@ -0,0 +1,169 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_FUCHSIA_AUDIO_FUCHSIA_AUDIO_OUTPUT_DEVICE_H_
+#define MEDIA_FUCHSIA_AUDIO_FUCHSIA_AUDIO_OUTPUT_DEVICE_H_
+
+#include <fuchsia/media/cpp/fidl.h>
+
+#include <memory>
+#include <vector>
+
+#include "base/memory/ref_counted.h"
+#include "base/memory/shared_memory_mapping.h"
+#include "base/synchronization/lock.h"
+#include "base/time/time.h"
+#include "base/timer/timer.h"
+#include "media/base/audio_renderer_sink.h"
+
+namespace base {
+class SingleThreadTaskRunner;
+class WritableSharedMemoryMapping;
+} // namespace base
+
+namespace media {
+
+// AudioRendererSink implementation for Fuchsia. It sends audio to AudioConsumer
+// provided by the OS. Unlike AudioOutputDevice (used by default on other
+// platforms) this class sends to the system directly from the renderer process,
+// without additional IPC layer to the audio service.
+// All work is performed on the TaskRunner passed to Create(). It must be an IO
+// thread to allow FIDL usage. AudioRendererSink can be used on a different
+// thread.
+class FuchsiaAudioOutputDevice : public AudioRendererSink {
+ public:
+ static scoped_refptr<FuchsiaAudioOutputDevice> Create(
+ fidl::InterfaceHandle<fuchsia::media::AudioConsumer>
+ audio_consumer_handle,
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner);
+
+ // Same as above, but creates a FuchsiaAudioOutputDevice that runs on the
+ // default audio thread.
+ static scoped_refptr<FuchsiaAudioOutputDevice> CreateOnDefaultThread(
+ fidl::InterfaceHandle<fuchsia::media::AudioConsumer>
+ audio_consumer_handle);
+
+ // AudioRendererSink implementation.
+ void Initialize(const AudioParameters& params,
+ RenderCallback* callback) override;
+ void Start() override;
+ void Stop() override;
+ void Pause() override;
+ void Play() override;
+ void Flush() override;
+ bool SetVolume(double volume) override;
+ OutputDeviceInfo GetOutputDeviceInfo() override;
+ void GetOutputDeviceInfoAsync(OutputDeviceInfoCB info_cb) override;
+ bool IsOptimizedForHardwareParameters() override;
+ bool CurrentThreadIsRenderingThread() override;
+
+ private:
+ friend class FuchsiaAudioOutputDeviceTest;
+
+ explicit FuchsiaAudioOutputDevice(
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner);
+ ~FuchsiaAudioOutputDevice() override;
+
+ void BindAudioConsumerOnAudioThread(
+ fidl::InterfaceHandle<fuchsia::media::AudioConsumer>
+ audio_consumer_handle);
+
+ // AudioRendererSink handlers for the audio thread.
+ void InitializeOnAudioThread(const AudioParameters& params);
+ void StartOnAudioThread();
+ void StopOnAudioThread();
+ void PauseOnAudioThread();
+ void PlayOnAudioThread();
+ void FlushOnAudioThread();
+ void SetVolumeOnAudioThread(double volume);
+
+ // Initializes |stream_sink_|.
+ void CreateStreamSink();
+
+ // Sends current volume to |volume_control_|.
+ void UpdateVolume();
+
+ // Polls current |audio_consumer_| status.
+ void WatchAudioConsumerStatus();
+
+ // Callback for AudioConsumer::WatchStatus().
+ void OnAudioConsumerStatusChanged(fuchsia::media::AudioConsumerStatus status);
+
+ // Schedules next PumpSamples() to pump next audio packet.
+ void SchedulePumpSamples();
+
+ // Pumps a single packet to AudioConsumer and calls SchedulePumpSamples() to
+ // pump the next packet.
+ void PumpSamples(base::TimeTicks playback_time, int frames_skipped);
+
+ // Callback for StreamSink::SendPacket().
+ void OnStreamSendDone(size_t buffer_index);
+
+ // Reports an error and shuts down the AudioConsumer connection.
+ void ReportError();
+
+ // Task runner used for all activity. Normally this is the audio thread owned
+ // by FuchsiaAudioDeviceFactory. All AudioRendererSink methods are called on
+ // another thread (normally the main renderer thread on which this object is
+ // created).
+ scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
+
+ fuchsia::media::AudioConsumerPtr audio_consumer_;
+ fuchsia::media::StreamSinkPtr stream_sink_;
+ fuchsia::media::audio::VolumeControlPtr volume_control_;
+
+ AudioParameters params_;
+
+ // Lock used to control access to |callback_|.
+ base::Lock callback_lock_;
+
+ // Callback passed to Initialize(). It's set on the main thread (that calls
+ // Initialize() and Stop()), but used on the audio thread (which corresponds
+ // to the |task_runner_|). This is necessary because AudioRendererSink must
+ // guarantee that the callback is not called after Stop(). |callback_lock_| is
+ // used to synchronize access to the |callback_|.
+ RenderCallback* callback_ GUARDED_BY(callback_lock_) = nullptr;
+
+ // Mapped memory for buffers shared with |stream_sink_|.
+ std::vector<base::WritableSharedMemoryMapping> stream_sink_buffers_;
+
+ // Indices of unused buffers in |stream_sink_buffers_|.
+ std::vector<size_t> available_buffers_indices_;
+
+ float volume_ = 1.0;
+
+ // Current position in the stream in samples since the stream was started.
+ size_t media_pos_frames_ = 0;
+
+ // Current minimum lead time returned by the |audio_consumer_|.
+ base::TimeDelta min_lead_time_;
+
+ // Current timeline parameters provided by the |audio_consumer_| in the last
+ // AudioConsumerStatus. See
+ // https://fuchsia.dev/reference/fidl/fuchsia.media#TimelineFunction for
+ // details on how these parameters are used. |timeline_reference_time_| is set
+ // to null value when there is no presentation timeline (i.e. playback isn't
+ // active).
+ base::TimeTicks timeline_reference_time_;
+ base::TimeDelta timeline_subject_time_;
+ uint32_t timeline_reference_delta_;
+ uint32_t timeline_subject_delta_;
+
+ // Set to true between PauseOnAudioThread() and PlayOnAudioThread().
+ // AudioConsumer implementations
+ // should drop |presentation_timeline| when the stream is paused, but the
+ // state is updated asynchronously. This flag is used to avoid sending packets
+ // until the state is updated.
+ bool paused_ = false;
+
+ // Timer for PumpSamples().
+ base::OneShotTimer pump_samples_timer_;
+
+ // AudioBus used in PumpSamples(). Stored here to avoid re-allocating it for
+ // every packet.
+ std::unique_ptr<AudioBus> audio_bus_;
+};
+
+} // namespace media
+
+#endif // MEDIA_FUCHSIA_AUDIO_FUCHSIA_AUDIO_OUTPUT_DEVICE_H_
diff --git a/chromium/media/fuchsia/audio/fuchsia_audio_output_device_test.cc b/chromium/media/fuchsia/audio/fuchsia_audio_output_device_test.cc
new file mode 100644
index 00000000000..d058dc76f68
--- /dev/null
+++ b/chromium/media/fuchsia/audio/fuchsia_audio_output_device_test.cc
@@ -0,0 +1,228 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/fuchsia/audio/fuchsia_audio_output_device.h"
+
+#include "base/logging.h"
+#include "base/memory/ref_counted.h"
+#include "base/test/task_environment.h"
+#include "base/threading/thread.h"
+#include "base/threading/thread_task_runner_handle.h"
+#include "media/base/audio_renderer_sink.h"
+#include "media/base/audio_timestamp_helper.h"
+#include "media/fuchsia/audio/fake_audio_consumer.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+
+constexpr int kSampleRate = 44100;
+constexpr ChannelLayout kChannelLayout = CHANNEL_LAYOUT_STEREO;
+constexpr int kNumChannels = 2;
+constexpr uint64_t kTestSessionId = 42;
+constexpr base::TimeDelta kPeriod = base::TimeDelta::FromMilliseconds(10);
+constexpr int kFramesPerPeriod = 441;
+
+class TestRenderer : public AudioRendererSink::RenderCallback {
+ public:
+ TestRenderer() = default;
+ ~TestRenderer() override = default;
+
+ // AudioRendererSink::Renderer interface.
+ int Render(base::TimeDelta delay,
+ base::TimeTicks delay_timestamp,
+ int prior_frames_skipped,
+ AudioBus* dest) override {
+ EXPECT_EQ(dest->channels(), kNumChannels);
+ frames_skipped_ += prior_frames_skipped;
+ frames_rendered_ += dest->frames();
+
+ EXPECT_GT(delay, base::TimeDelta());
+ auto presentation_time = delay_timestamp + delay;
+ EXPECT_GT(presentation_time, last_presentation_time_);
+ last_presentation_time_ = presentation_time;
+
+ return dest->frames();
+ }
+ void OnRenderError() override { num_render_errors_++; }
+
+ int frames_rendered() const { return frames_rendered_; }
+ void reset_frames_rendered() { frames_rendered_ = 0; }
+
+ int frames_skipped() const { return frames_skipped_; }
+ int num_render_errors() const { return num_render_errors_; }
+
+ base::TimeTicks last_presentation_time() const {
+ return last_presentation_time_;
+ }
+
+ private:
+ int frames_rendered_ = 0;
+ int frames_skipped_ = 0;
+ int num_render_errors_ = 0;
+ base::TimeTicks last_presentation_time_;
+};
+
+class FuchsiaAudioOutputDeviceTest : public testing::Test {
+ public:
+ FuchsiaAudioOutputDeviceTest() {
+ fidl::InterfaceHandle<fuchsia::media::AudioConsumer> audio_consumer;
+ fake_audio_consumer_ = std::make_unique<FakeAudioConsumer>(
+ kTestSessionId, audio_consumer.NewRequest());
+
+ output_device_ = FuchsiaAudioOutputDevice::Create(
+ std::move(audio_consumer), base::ThreadTaskRunnerHandle::Get());
+ }
+
+ ~FuchsiaAudioOutputDeviceTest() override { output_device_->Stop(); }
+
+ protected:
+ void Initialize() {
+ output_device_->Initialize(
+ AudioParameters(AudioParameters::AUDIO_PCM_LOW_LATENCY, kChannelLayout,
+ kSampleRate, kFramesPerPeriod),
+ &renderer_);
+
+ task_environment_.RunUntilIdle();
+ EXPECT_EQ(renderer_.frames_rendered(), 0);
+ }
+
+ void InitializeAndStart() {
+ Initialize();
+
+ // As soon as Start() is processed FuchsiaAudioOutputDevice is expected to
+ // start rendering some samples.
+ output_device_->Start();
+ task_environment_.RunUntilIdle();
+ EXPECT_GT(renderer_.frames_rendered(), 0);
+ }
+
+ void CallPumpSamples() {
+ output_device_->PumpSamples(
+ base::TimeTicks::Now() + base::TimeDelta::FromMilliseconds(200), 0);
+ }
+
+ void ValidatePresentationTime() {
+ // Verify that the current renderer lead time is in the
+ // [min_lead_time, min_lead_time + 30ms] range. 30ms is chosen to allow
+ // FuchsiaAudioOutputDevice to pre-render slightly ahead of the target
+ // time, while keeping latency reasonably low.
+ auto lead_time =
+ renderer_.last_presentation_time() - base::TimeTicks::Now();
+ EXPECT_GT(lead_time, FakeAudioConsumer::kMinLeadTime);
+ EXPECT_LT(lead_time, FakeAudioConsumer::kMinLeadTime +
+ base::TimeDelta::FromMilliseconds(30));
+ }
+
+ base::test::SingleThreadTaskEnvironment task_environment_{
+ base::test::SingleThreadTaskEnvironment::MainThreadType::IO,
+ base::test::TaskEnvironment::TimeSource::MOCK_TIME};
+
+ std::unique_ptr<FakeAudioConsumer> fake_audio_consumer_;
+ TestRenderer renderer_;
+ scoped_refptr<FuchsiaAudioOutputDevice> output_device_;
+};
+
+TEST_F(FuchsiaAudioOutputDeviceTest, Start) {
+ Initialize();
+
+ // Verify that playback doesn't start before Start().
+ task_environment_.FastForwardBy(base::TimeDelta::FromSeconds(2));
+ EXPECT_EQ(renderer_.frames_rendered(), 0);
+
+ // Rendering should start after Start().
+ output_device_->Start();
+ task_environment_.RunUntilIdle();
+ EXPECT_GT(renderer_.frames_rendered(), 0);
+
+ ValidatePresentationTime();
+}
+
+TEST_F(FuchsiaAudioOutputDeviceTest, StartAndPlay) {
+ InitializeAndStart();
+
+ renderer_.reset_frames_rendered();
+
+ // Try advancing time and verify that FuchsiaAudioOutputDevice keeps calling
+ // Render().
+ for (int i = 0; i < 3; ++i) {
+ task_environment_.FastForwardBy(kPeriod);
+ EXPECT_EQ(renderer_.frames_rendered(), kFramesPerPeriod);
+ EXPECT_EQ(renderer_.frames_skipped(), 0);
+ renderer_.reset_frames_rendered();
+ }
+}
+
+TEST_F(FuchsiaAudioOutputDeviceTest, Pause) {
+ InitializeAndStart();
+
+ renderer_.reset_frames_rendered();
+
+ // Advance time and verify that FuchsiaAudioOutputDevice keeps calling
+ // Render().
+ task_environment_.FastForwardBy(kPeriod);
+ EXPECT_EQ(renderer_.frames_rendered(), kFramesPerPeriod);
+ EXPECT_EQ(renderer_.frames_skipped(), 0);
+ renderer_.reset_frames_rendered();
+
+ // Render() should not be called while paused.
+ output_device_->Pause();
+ task_environment_.FastForwardBy(base::TimeDelta::FromSeconds(10));
+ EXPECT_EQ(renderer_.frames_rendered(), 0);
+
+ // Unpause the stream and verify that Render() is being called now.
+ output_device_->Play();
+ task_environment_.FastForwardBy(kPeriod);
+ EXPECT_GT(renderer_.frames_rendered(), 0);
+ EXPECT_EQ(renderer_.frames_skipped(), 0);
+}
+
+TEST_F(FuchsiaAudioOutputDeviceTest, Underflow) {
+ InitializeAndStart();
+
+ renderer_.reset_frames_rendered();
+
+ // Missing the timer once should not cause any issues. Timer tasks can't
+ // always run at the exact scheduled time. FuchsiaAudioOutputDevice should
+ // be resilient to small delays.
+ task_environment_.AdvanceClock(kPeriod * 2);
+ task_environment_.RunUntilIdle();
+ EXPECT_EQ(renderer_.frames_rendered(), kFramesPerPeriod * 2);
+ EXPECT_EQ(renderer_.frames_skipped(), 0);
+ renderer_.reset_frames_rendered();
+
+ // Advance time by 100ms, causing some frames to be skipped.
+ task_environment_.AdvanceClock(kPeriod * 10);
+ task_environment_.RunUntilIdle();
+ EXPECT_EQ(renderer_.frames_rendered(), kFramesPerPeriod);
+ EXPECT_EQ(renderer_.frames_skipped(), kFramesPerPeriod * 9);
+ renderer_.reset_frames_rendered();
+
+ ValidatePresentationTime();
+}
+
+TEST_F(FuchsiaAudioOutputDeviceTest, Error) {
+ InitializeAndStart();
+
+ renderer_.reset_frames_rendered();
+
+ fake_audio_consumer_.reset();
+ task_environment_.RunUntilIdle();
+
+ EXPECT_EQ(renderer_.num_render_errors(), 1);
+ EXPECT_EQ(renderer_.frames_rendered(), 0);
+}
+
+TEST_F(FuchsiaAudioOutputDeviceTest, Stop) {
+ InitializeAndStart();
+
+ renderer_.reset_frames_rendered();
+
+ // Call Stop() and then PumpSamples() immediately after that. The callback
+ // should not be called.
+ output_device_->Stop();
+ CallPumpSamples();
+ EXPECT_EQ(renderer_.frames_rendered(), 0);
+}
+
+} // namespace media
diff --git a/chromium/media/fuchsia/audio/fuchsia_audio_renderer.cc b/chromium/media/fuchsia/audio/fuchsia_audio_renderer.cc
index 14a82f5b668..e71abab05ec 100644
--- a/chromium/media/fuchsia/audio/fuchsia_audio_renderer.cc
+++ b/chromium/media/fuchsia/audio/fuchsia_audio_renderer.cc
@@ -113,6 +113,8 @@ void FuchsiaAudioRenderer::Initialize(DemuxerStream* stream,
OnError(AUDIO_RENDERER_ERROR);
});
+ UpdateVolume();
+
audio_consumer_.events().OnEndOfStream = [this]() { OnEndOfStream(); };
RequestAudioConsumerStatus();
@@ -145,6 +147,18 @@ void FuchsiaAudioRenderer::Initialize(DemuxerStream* stream,
std::move(init_cb_).Run(PIPELINE_OK);
}
+void FuchsiaAudioRenderer::UpdateVolume() {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+ DCHECK(audio_consumer_);
+ if (!volume_control_) {
+ audio_consumer_->BindVolumeControl(volume_control_.NewRequest());
+ volume_control_.set_error_handler([](zx_status_t status) {
+ ZX_LOG(ERROR, status) << "VolumeControl disconnected.";
+ });
+ }
+ volume_control_->SetVolume(volume_);
+}
+
void FuchsiaAudioRenderer::InitializeStreamSink(
const AudioDecoderConfig& config) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
@@ -228,13 +242,9 @@ void FuchsiaAudioRenderer::StartPlaying() {
void FuchsiaAudioRenderer::SetVolume(float volume) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- if (!volume_control_) {
- audio_consumer_->BindVolumeControl(volume_control_.NewRequest());
- volume_control_.set_error_handler([](zx_status_t status) {
- ZX_LOG(ERROR, status) << "VolumeControl disconnected.";
- });
- }
- volume_control_->SetVolume(volume);
+ volume_ = volume;
+ if (audio_consumer_)
+ UpdateVolume();
}
void FuchsiaAudioRenderer::SetLatencyHint(
@@ -275,14 +285,19 @@ void FuchsiaAudioRenderer::StopTicking() {
audio_consumer_->Stop();
base::AutoLock lock(timeline_lock_);
+ UpdateTimelineAfterStop();
SetPlaybackState(PlaybackState::kStopped);
- media_pos_ = CurrentMediaTimeLocked();
}
void FuchsiaAudioRenderer::SetPlaybackRate(double playback_rate) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
audio_consumer_->SetRate(playback_rate);
+
+ if (playback_rate == 0.0) {
+ base::AutoLock lock(timeline_lock_);
+ UpdateTimelineAfterStop();
+ }
}
void FuchsiaAudioRenderer::SetMediaTime(base::TimeDelta time) {
@@ -292,6 +307,12 @@ void FuchsiaAudioRenderer::SetMediaTime(base::TimeDelta time) {
{
base::AutoLock lock(timeline_lock_);
media_pos_ = time;
+
+ // Reset reference timestamp. This is necessary to ensure that the correct
+ // value is returned from GetWallClockTimes() until playback is resumed:
+ // the interface requires to return 0 wall clock between SetMediaTime() and
+ // StartTicking().
+ reference_time_ = base::TimeTicks();
}
FlushInternal();
@@ -300,10 +321,8 @@ void FuchsiaAudioRenderer::SetMediaTime(base::TimeDelta time) {
base::TimeDelta FuchsiaAudioRenderer::CurrentMediaTime() {
base::AutoLock lock(timeline_lock_);
- if (state_ != PlaybackState::kPlaying &&
- state_ != PlaybackState::kEndOfStream) {
+ if (!IsTimeMoving())
return media_pos_;
- }
return CurrentMediaTimeLocked();
}
@@ -316,12 +335,10 @@ bool FuchsiaAudioRenderer::GetWallClockTimes(
base::AutoLock lock(timeline_lock_);
- const bool is_time_moving = (state_ == PlaybackState::kPlaying ||
- state_ == PlaybackState::kEndOfStream) &&
- (media_delta_ > 0);
+ const bool is_time_moving = IsTimeMoving();
if (media_timestamps.empty()) {
- wall_clock_times->push_back(is_time_moving ? now : base::TimeTicks());
+ wall_clock_times->push_back(is_time_moving ? now : reference_time_);
return is_time_moving;
}
@@ -513,6 +530,12 @@ void FuchsiaAudioRenderer::OnDemuxerStreamReadDone(
SetPlaybackState(PlaybackState::kEndOfStream);
}
stream_sink_->EndOfStream();
+
+  // No more data is going to be buffered. Update buffering state to ensure
+ // RendererImpl starts playback in case it was waiting for buffering to
+ // finish.
+ SetBufferState(BUFFERING_HAVE_ENOUGH);
+
return;
}
@@ -603,7 +626,24 @@ void FuchsiaAudioRenderer::OnEndOfStream() {
client_->OnEnded();
}
+bool FuchsiaAudioRenderer::IsTimeMoving() {
+ return (state_ == PlaybackState::kPlaying ||
+ state_ == PlaybackState::kEndOfStream) &&
+ (media_delta_ > 0);
+}
+
+void FuchsiaAudioRenderer::UpdateTimelineAfterStop() {
+ if (!IsTimeMoving())
+ return;
+
+ media_pos_ = CurrentMediaTimeLocked();
+ reference_time_ = base::TimeTicks::Now();
+ media_delta_ = 0;
+}
+
base::TimeDelta FuchsiaAudioRenderer::CurrentMediaTimeLocked() {
+ DCHECK(IsTimeMoving());
+
// Calculate media position using formula specified by the TimelineFunction.
// See https://fuchsia.dev/reference/fidl/fuchsia.media#formulas .
return media_pos_ + (base::TimeTicks::Now() - reference_time_) *
diff --git a/chromium/media/fuchsia/audio/fuchsia_audio_renderer.h b/chromium/media/fuchsia/audio/fuchsia_audio_renderer.h
index 1a873caa0a2..faa75cc9ab9 100644
--- a/chromium/media/fuchsia/audio/fuchsia_audio_renderer.h
+++ b/chromium/media/fuchsia/audio/fuchsia_audio_renderer.h
@@ -85,6 +85,10 @@ class FuchsiaAudioRenderer : public AudioRenderer, public TimeSource {
// Resets AudioConsumer and reports error to the |client_|.
void OnError(PipelineStatus Status);
+ // Connects |volume_control_|, if it hasn't been connected, and then sets
+ // |volume_|.
+ void UpdateVolume();
+
// Initializes |stream_sink_|. Called during initialization and every time
// configuration changes.
void InitializeStreamSink(const AudioDecoderConfig& config);
@@ -114,8 +118,18 @@ class FuchsiaAudioRenderer : public AudioRenderer, public TimeSource {
// End-of-stream event handler for |audio_consumer_|.
void OnEndOfStream();
+ // Returns true if media clock is ticking and the rate is above 0.0.
+ bool IsTimeMoving() EXCLUSIVE_LOCKS_REQUIRED(timeline_lock_);
+
+ // Updates TimelineFunction parameters after StopTicking() or
+ // SetPlaybackRate(0.0). Normally these parameters are provided by
+ // AudioConsumer, but this happens asynchronously, while we need to make sure
+ // that StopTicking() and SetPlaybackRate(0.0) stop the media clock
+ // synchronously.
+ void UpdateTimelineAfterStop() EXCLUSIVE_LOCKS_REQUIRED(timeline_lock_);
+
// Calculates media position based on the TimelineFunction returned from
- // AudioConsumer.
+ // AudioConsumer. Should be called only when IsTimeMoving() is true.
base::TimeDelta CurrentMediaTimeLocked()
EXCLUSIVE_LOCKS_REQUIRED(timeline_lock_);
@@ -129,6 +143,8 @@ class FuchsiaAudioRenderer : public AudioRenderer, public TimeSource {
fuchsia::media::StreamSinkPtr stream_sink_;
fuchsia::media::audio::VolumeControlPtr volume_control_;
+ float volume_ = 1.0;
+
DemuxerStream* demuxer_stream_ = nullptr;
bool is_demuxer_read_pending_ = false;
bool drop_next_demuxer_read_result_ = false;
diff --git a/chromium/media/fuchsia/cdm/fuchsia_cdm.cc b/chromium/media/fuchsia/cdm/fuchsia_cdm.cc
index 4c62e80c61e..fea4129883f 100644
--- a/chromium/media/fuchsia/cdm/fuchsia_cdm.cc
+++ b/chromium/media/fuchsia/cdm/fuchsia_cdm.cc
@@ -24,6 +24,10 @@ namespace media {
namespace {
+// Audio packets are normally smaller than 128kB (more than enough for 2 seconds
+// at 320kb/s).
+const size_t kAudioStreamBufferSize = 128 * 1024;
+
std::string GetInitDataTypeName(EmeInitDataType type) {
switch (type) {
case EmeInitDataType::WEBM:
@@ -245,7 +249,7 @@ FuchsiaCdm::FuchsiaCdm(fuchsia::media::drm::ContentDecryptionModulePtr cdm,
: cdm_(std::move(cdm)),
ready_cb_(std::move(ready_cb)),
session_callbacks_(std::move(callbacks)),
- decryptor_(cdm_.get()) {
+ decryptor_(this) {
DCHECK(cdm_);
cdm_.events().OnProvisioned =
fit::bind_member(this, &FuchsiaCdm::OnProvisioned);
@@ -292,6 +296,18 @@ std::unique_ptr<FuchsiaSecureStreamDecryptor> FuchsiaCdm::CreateVideoDecryptor(
return decryptor;
}
+std::unique_ptr<FuchsiaClearStreamDecryptor>
+FuchsiaCdm::CreateAudioDecryptor() {
+ fuchsia::media::drm::DecryptorParams params;
+ params.set_require_secure_mode(false);
+ params.mutable_input_details()->set_format_details_version_ordinal(0);
+ fuchsia::media::StreamProcessorPtr stream_processor;
+ cdm_->CreateDecryptor(std::move(params), stream_processor.NewRequest());
+
+ return std::make_unique<FuchsiaClearStreamDecryptor>(
+ std::move(stream_processor), kAudioStreamBufferSize);
+}
+
void FuchsiaCdm::SetServerCertificate(
const std::vector<uint8_t>& certificate,
std::unique_ptr<SimpleCdmPromise> promise) {
diff --git a/chromium/media/fuchsia/cdm/fuchsia_cdm.h b/chromium/media/fuchsia/cdm/fuchsia_cdm.h
index 27b093e3744..c81dc1b1da7 100644
--- a/chromium/media/fuchsia/cdm/fuchsia_cdm.h
+++ b/chromium/media/fuchsia/cdm/fuchsia_cdm.h
@@ -73,6 +73,7 @@ class FuchsiaCdm : public ContentDecryptionModule,
// FuchsiaCdmContext implementation:
std::unique_ptr<FuchsiaSecureStreamDecryptor> CreateVideoDecryptor(
FuchsiaSecureStreamDecryptor::Client* client) override;
+ std::unique_ptr<FuchsiaClearStreamDecryptor> CreateAudioDecryptor() override;
private:
class CdmSession;
diff --git a/chromium/media/fuchsia/cdm/fuchsia_cdm_context.h b/chromium/media/fuchsia/cdm/fuchsia_cdm_context.h
index 9301cf657fc..667b78d71a4 100644
--- a/chromium/media/fuchsia/cdm/fuchsia_cdm_context.h
+++ b/chromium/media/fuchsia/cdm/fuchsia_cdm_context.h
@@ -17,6 +17,8 @@ class FuchsiaCdmContext {
// Creates FuchsiaSecureStreamDecryptor instance for the CDM context.
virtual std::unique_ptr<FuchsiaSecureStreamDecryptor> CreateVideoDecryptor(
FuchsiaSecureStreamDecryptor::Client* client) = 0;
+ virtual std::unique_ptr<FuchsiaClearStreamDecryptor>
+ CreateAudioDecryptor() = 0;
protected:
virtual ~FuchsiaCdmContext() = default;
diff --git a/chromium/media/fuchsia/cdm/fuchsia_decryptor.cc b/chromium/media/fuchsia/cdm/fuchsia_decryptor.cc
index 09c44e6562b..6df4312e094 100644
--- a/chromium/media/fuchsia/cdm/fuchsia_decryptor.cc
+++ b/chromium/media/fuchsia/cdm/fuchsia_decryptor.cc
@@ -11,14 +11,14 @@
#include "base/threading/thread_task_runner_handle.h"
#include "media/base/decoder_buffer.h"
#include "media/base/video_frame.h"
+#include "media/fuchsia/cdm/fuchsia_cdm_context.h"
#include "media/fuchsia/cdm/fuchsia_stream_decryptor.h"
namespace media {
-FuchsiaDecryptor::FuchsiaDecryptor(
- fuchsia::media::drm::ContentDecryptionModule* cdm)
- : cdm_(cdm) {
- DCHECK(cdm_);
+FuchsiaDecryptor::FuchsiaDecryptor(FuchsiaCdmContext* cdm_context)
+ : cdm_context_(cdm_context) {
+ DCHECK(cdm_context_);
}
FuchsiaDecryptor::~FuchsiaDecryptor() {
@@ -38,7 +38,7 @@ void FuchsiaDecryptor::Decrypt(StreamType stream_type,
if (!audio_decryptor_) {
audio_decryptor_task_runner_ = base::ThreadTaskRunnerHandle::Get();
- audio_decryptor_ = FuchsiaClearStreamDecryptor::Create(cdm_);
+ audio_decryptor_ = cdm_context_->CreateAudioDecryptor();
}
audio_decryptor_->Decrypt(std::move(encrypted), std::move(decrypt_cb));
diff --git a/chromium/media/fuchsia/cdm/fuchsia_decryptor.h b/chromium/media/fuchsia/cdm/fuchsia_decryptor.h
index fd0b17e1edb..c9397c3e6fa 100644
--- a/chromium/media/fuchsia/cdm/fuchsia_decryptor.h
+++ b/chromium/media/fuchsia/cdm/fuchsia_decryptor.h
@@ -14,22 +14,15 @@
#include "media/base/decryptor.h"
#include "media/fuchsia/cdm/fuchsia_stream_decryptor.h"
-namespace fuchsia {
-namespace media {
-namespace drm {
-class ContentDecryptionModule;
-} // namespace drm
-} // namespace media
-} // namespace fuchsia
-
namespace media {
+class FuchsiaCdmContext;
class FuchsiaClearStreamDecryptor;
class FuchsiaDecryptor : public Decryptor {
public:
  // Caller should make sure |cdm_context| lives longer than this class.
- explicit FuchsiaDecryptor(fuchsia::media::drm::ContentDecryptionModule* cdm);
+ explicit FuchsiaDecryptor(FuchsiaCdmContext* cdm_context);
~FuchsiaDecryptor() override;
// media::Decryptor implementation:
@@ -50,7 +43,7 @@ class FuchsiaDecryptor : public Decryptor {
bool CanAlwaysDecrypt() override;
private:
- fuchsia::media::drm::ContentDecryptionModule* const cdm_;
+ FuchsiaCdmContext* const cdm_context_;
std::unique_ptr<FuchsiaClearStreamDecryptor> audio_decryptor_;
diff --git a/chromium/media/fuchsia/cdm/fuchsia_stream_decryptor.cc b/chromium/media/fuchsia/cdm/fuchsia_stream_decryptor.cc
index f10135dffde..6a45dc9a521 100644
--- a/chromium/media/fuchsia/cdm/fuchsia_stream_decryptor.cc
+++ b/chromium/media/fuchsia/cdm/fuchsia_stream_decryptor.cc
@@ -21,9 +21,11 @@
namespace media {
namespace {
-// FuchsiaClearStreamDecryptor copies decrypted data immediately once it's
-// available, so it doesn't need more than one output buffer.
-const size_t kMinClearStreamOutputFrames = 1;
+// Minimum number of buffers in the input and output buffer collection.
+// Decryptors provided by fuchsia.media.drm API normally decrypt a single
+// buffer at a time. Second buffer is useful to allow reading/writing a
+// packet while the decryptor is working on another one.
+const size_t kMinBufferCount = 2;
std::string GetEncryptionScheme(EncryptionScheme mode) {
switch (mode) {
@@ -60,7 +62,9 @@ fuchsia::media::EncryptionPattern GetEncryptionPattern(
fuchsia::media::FormatDetails GetClearFormatDetails() {
fuchsia::media::EncryptedFormat encrypted_format;
- encrypted_format.set_scheme(fuchsia::media::ENCRYPTION_SCHEME_UNENCRYPTED);
+ encrypted_format.set_scheme(fuchsia::media::ENCRYPTION_SCHEME_UNENCRYPTED)
+ .set_subsamples({})
+ .set_init_vector({});
fuchsia::media::FormatDetails format;
format.set_format_details_version_ordinal(0);
@@ -94,13 +98,19 @@ fuchsia::media::FormatDetails GetEncryptedFormatDetails(
} // namespace
FuchsiaStreamDecryptorBase::FuchsiaStreamDecryptorBase(
- fuchsia::media::StreamProcessorPtr processor)
- : processor_(std::move(processor), this) {}
+ fuchsia::media::StreamProcessorPtr processor,
+ size_t min_buffer_size)
+ : processor_(std::move(processor), this),
+ min_buffer_size_(min_buffer_size) {}
FuchsiaStreamDecryptorBase::~FuchsiaStreamDecryptorBase() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
}
+int FuchsiaStreamDecryptorBase::GetMaxDecryptRequests() const {
+ return input_writer_queue_.num_buffers() + 1;
+}
+
void FuchsiaStreamDecryptorBase::DecryptInternal(
scoped_refptr<DecoderBuffer> encrypted) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
@@ -123,8 +133,8 @@ void FuchsiaStreamDecryptorBase::AllocateInputBuffers(
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
base::Optional<fuchsia::sysmem::BufferCollectionConstraints>
- buffer_constraints =
- SysmemBufferWriter::GetRecommendedConstraints(stream_constraints);
+ buffer_constraints = SysmemBufferWriter::GetRecommendedConstraints(
+ kMinBufferCount, min_buffer_size_);
if (!buffer_constraints.has_value()) {
OnError();
@@ -212,24 +222,10 @@ void FuchsiaStreamDecryptorBase::ProcessEndOfStream() {
processor_.ProcessEos();
}
-std::unique_ptr<FuchsiaClearStreamDecryptor>
-FuchsiaClearStreamDecryptor::Create(
- fuchsia::media::drm::ContentDecryptionModule* cdm) {
- DCHECK(cdm);
-
- fuchsia::media::drm::DecryptorParams params;
- params.set_require_secure_mode(false);
- params.mutable_input_details()->set_format_details_version_ordinal(0);
- fuchsia::media::StreamProcessorPtr stream_processor;
- cdm->CreateDecryptor(std::move(params), stream_processor.NewRequest());
-
- return std::make_unique<FuchsiaClearStreamDecryptor>(
- std::move(stream_processor));
-}
-
FuchsiaClearStreamDecryptor::FuchsiaClearStreamDecryptor(
- fuchsia::media::StreamProcessorPtr processor)
- : FuchsiaStreamDecryptorBase(std::move(processor)) {}
+ fuchsia::media::StreamProcessorPtr processor,
+ size_t min_buffer_size)
+ : FuchsiaStreamDecryptorBase(std::move(processor), min_buffer_size) {}
FuchsiaClearStreamDecryptor::~FuchsiaClearStreamDecryptor() = default;
@@ -265,19 +261,13 @@ void FuchsiaClearStreamDecryptor::AllocateOutputBuffers(
return;
}
- size_t num_buffers_for_client = std::max(
- kMinClearStreamOutputFrames,
- static_cast<size_t>(stream_constraints.packet_count_for_client_min()));
- size_t num_buffers_for_server =
- stream_constraints.default_settings().packet_count_for_server();
-
output_pool_creator_ =
allocator_.MakeBufferPoolCreator(1 /* num_shared_token */);
output_pool_creator_->Create(
- SysmemBufferReader::GetRecommendedConstraints(num_buffers_for_client),
+ SysmemBufferReader::GetRecommendedConstraints(kMinBufferCount,
+ min_buffer_size_),
base::BindOnce(&FuchsiaClearStreamDecryptor::OnOutputBufferPoolCreated,
- base::Unretained(this), num_buffers_for_client,
- num_buffers_for_server));
+ base::Unretained(this)));
}
void FuchsiaClearStreamDecryptor::OnProcessEos() {
@@ -368,8 +358,6 @@ void FuchsiaClearStreamDecryptor::OnError() {
}
void FuchsiaClearStreamDecryptor::OnOutputBufferPoolCreated(
- size_t num_buffers_for_client,
- size_t num_buffers_for_server,
std::unique_ptr<SysmemBufferPool> pool) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
@@ -383,9 +371,7 @@ void FuchsiaClearStreamDecryptor::OnOutputBufferPoolCreated(
// Provide token before enabling reader. Tokens must be provided to
// StreamProcessor before getting the allocated buffers.
- processor_.CompleteOutputBuffersAllocation(num_buffers_for_client,
- num_buffers_for_server,
- output_pool_->TakeToken());
+ processor_.CompleteOutputBuffersAllocation(output_pool_->TakeToken());
output_pool_->CreateReader(base::BindOnce(
&FuchsiaClearStreamDecryptor::OnOutputBufferPoolReaderCreated,
@@ -409,20 +395,19 @@ void FuchsiaClearStreamDecryptor::OnOutputBufferPoolReaderCreated(
FuchsiaSecureStreamDecryptor::FuchsiaSecureStreamDecryptor(
fuchsia::media::StreamProcessorPtr processor,
Client* client)
- : FuchsiaStreamDecryptorBase(std::move(processor)), client_(client) {}
+ : FuchsiaStreamDecryptorBase(std::move(processor),
+ client->GetInputBufferSize()),
+ client_(client) {}
FuchsiaSecureStreamDecryptor::~FuchsiaSecureStreamDecryptor() = default;
void FuchsiaSecureStreamDecryptor::SetOutputBufferCollectionToken(
- fuchsia::sysmem::BufferCollectionTokenPtr token,
- size_t num_buffers_for_decryptor,
- size_t num_buffers_for_codec) {
+ fuchsia::sysmem::BufferCollectionTokenPtr token) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(!complete_buffer_allocation_callback_);
complete_buffer_allocation_callback_ =
base::BindOnce(&StreamProcessorHelper::CompleteOutputBuffersAllocation,
- base::Unretained(&processor_), num_buffers_for_decryptor,
- num_buffers_for_codec, std::move(token));
+ base::Unretained(&processor_), std::move(token));
if (waiting_output_buffers_) {
std::move(complete_buffer_allocation_callback_).Run();
waiting_output_buffers_ = false;
diff --git a/chromium/media/fuchsia/cdm/fuchsia_stream_decryptor.h b/chromium/media/fuchsia/cdm/fuchsia_stream_decryptor.h
index 530c28e640d..e26fa6897a0 100644
--- a/chromium/media/fuchsia/cdm/fuchsia_stream_decryptor.h
+++ b/chromium/media/fuchsia/cdm/fuchsia_stream_decryptor.h
@@ -21,10 +21,12 @@ class SysmemBufferReader;
// Base class for media stream decryptor implementations.
class FuchsiaStreamDecryptorBase : public StreamProcessorHelper::Client {
public:
- explicit FuchsiaStreamDecryptorBase(
- fuchsia::media::StreamProcessorPtr processor);
+ FuchsiaStreamDecryptorBase(fuchsia::media::StreamProcessorPtr processor,
+ size_t min_buffer_size);
~FuchsiaStreamDecryptorBase() override;
+ int GetMaxDecryptRequests() const;
+
protected:
// StreamProcessorHelper::Client overrides.
void AllocateInputBuffers(
@@ -36,6 +38,8 @@ class FuchsiaStreamDecryptorBase : public StreamProcessorHelper::Client {
StreamProcessorHelper processor_;
+ const size_t min_buffer_size_;
+
BufferAllocator allocator_;
SysmemBufferWriterQueue input_writer_queue_;
@@ -63,9 +67,11 @@ class FuchsiaStreamDecryptorBase : public StreamProcessorHelper::Client {
class FuchsiaClearStreamDecryptor : public FuchsiaStreamDecryptorBase {
public:
static std::unique_ptr<FuchsiaClearStreamDecryptor> Create(
- fuchsia::media::drm::ContentDecryptionModule* cdm);
+ fuchsia::media::drm::ContentDecryptionModule* cdm,
+ size_t min_buffer_size);
- FuchsiaClearStreamDecryptor(fuchsia::media::StreamProcessorPtr processor);
+ FuchsiaClearStreamDecryptor(fuchsia::media::StreamProcessorPtr processor,
+ size_t min_buffer_size);
~FuchsiaClearStreamDecryptor() override;
// Decrypt() behavior should match media::Decryptor interface.
@@ -82,9 +88,7 @@ class FuchsiaClearStreamDecryptor : public FuchsiaStreamDecryptorBase {
void OnNoKey() final;
void OnError() final;
- void OnOutputBufferPoolCreated(size_t num_buffers_for_client,
- size_t num_buffers_for_server,
- std::unique_ptr<SysmemBufferPool> pool);
+ void OnOutputBufferPoolCreated(std::unique_ptr<SysmemBufferPool> pool);
void OnOutputBufferPoolReaderCreated(
std::unique_ptr<SysmemBufferReader> reader);
@@ -108,6 +112,7 @@ class FuchsiaSecureStreamDecryptor : public FuchsiaStreamDecryptorBase {
public:
class Client {
public:
+ virtual size_t GetInputBufferSize() = 0;
virtual void OnDecryptorOutputPacket(
StreamProcessorHelper::IoPacket packet) = 0;
virtual void OnDecryptorEndOfStreamPacket() = 0;
@@ -123,9 +128,7 @@ class FuchsiaSecureStreamDecryptor : public FuchsiaStreamDecryptorBase {
~FuchsiaSecureStreamDecryptor() override;
void SetOutputBufferCollectionToken(
- fuchsia::sysmem::BufferCollectionTokenPtr token,
- size_t num_buffers_for_decryptor,
- size_t num_buffers_for_codec);
+ fuchsia::sysmem::BufferCollectionTokenPtr token);
// Enqueues the specified buffer to the input queue. Caller is allowed to
// queue as many buffers as it needs without waiting for results from the
diff --git a/chromium/media/fuchsia/cdm/service/BUILD.gn b/chromium/media/fuchsia/cdm/service/BUILD.gn
index 3e1b71d39fe..b87b99f7128 100644
--- a/chromium/media/fuchsia/cdm/service/BUILD.gn
+++ b/chromium/media/fuchsia/cdm/service/BUILD.gn
@@ -39,9 +39,9 @@ source_set("unittests") {
]
sources = [
- "fuchsia_cdm_manager_test.cc",
+ "fuchsia_cdm_manager_unittest.cc",
"mock_provision_fetcher.cc",
"mock_provision_fetcher.h",
- "provisioning_fetcher_impl_test.cc",
+ "provisioning_fetcher_impl_unittest.cc",
]
}
diff --git a/chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager.cc b/chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager.cc
index 81f3b0337d6..cb2f70ea362 100644
--- a/chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager.cc
+++ b/chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager.cc
@@ -11,13 +11,17 @@
#include "base/callback.h"
#include "base/containers/flat_set.h"
#include "base/containers/unique_ptr_adapters.h"
+#include "base/files/file_enumerator.h"
#include "base/files/file_path.h"
#include "base/files/file_util.h"
#include "base/fuchsia/file_utils.h"
#include "base/fuchsia/fuchsia_logging.h"
#include "base/hash/hash.h"
#include "base/logging.h"
+#include "base/optional.h"
#include "base/strings/string_number_conversions.h"
+#include "base/task/task_traits.h"
+#include "base/task/thread_pool.h"
#include "media/fuchsia/cdm/service/provisioning_fetcher_impl.h"
#include "url/origin.h"
@@ -25,11 +29,112 @@ namespace media {
namespace {
+struct CdmDirectoryInfo {
+ base::FilePath path;
+ base::Time last_used;
+ uint64_t size_bytes;
+};
+
+// Enumerates all the files in the directory to determine its size and
+// the most recent "last used" time.
+// The implementation is based on base::ComputeDirectorySize(), with the
+// addition of most-recently-modified calculation, and inclusion of directory
+// node sizes toward the total.
+CdmDirectoryInfo GetCdmDirectoryInfo(const base::FilePath& path) {
+ int64_t directory_size = 0;
+ base::Time last_used;
+ base::FileEnumerator enumerator(
+ path, true /* recursive */,
+ base::FileEnumerator::DIRECTORIES | base::FileEnumerator::FILES);
+ while (!enumerator.Next().empty()) {
+ const base::FileEnumerator::FileInfo info = enumerator.GetInfo();
+ if (info.GetSize() > 0)
+ directory_size += info.GetSize();
+ last_used = std::max(last_used, info.GetLastModifiedTime());
+ }
+ return {
+ .path = path,
+ .last_used = last_used,
+ .size_bytes = directory_size,
+ };
+}
+
+void ApplyCdmStorageQuota(base::FilePath cdm_data_path,
+ uint64_t cdm_data_quota_bytes) {
+ // TODO(crbug.com/1148334): Migrate to using a platform-provided quota
+ // mechanism to manage CDM storage.
+ VLOG(2) << "Enumerating CDM data directories.";
+
+ uint64_t directories_size_bytes = 0;
+ std::vector<CdmDirectoryInfo> directories_info;
+
+  // CDM storage consists of per-origin directories, each containing one or
+ // more per-key-system sub-directories. Each per-origin-per-key-system
+ // directory is assumed to be independent of other CDM data.
+ base::FileEnumerator by_origin(cdm_data_path, false /* recursive */,
+ base::FileEnumerator::DIRECTORIES);
+ for (;;) {
+ const base::FilePath origin_directory = by_origin.Next();
+ if (origin_directory.empty())
+ break;
+ base::FileEnumerator by_key_system(origin_directory, false /* recursive */,
+ base::FileEnumerator::DIRECTORIES);
+ for (;;) {
+ const base::FilePath key_system_directory = by_key_system.Next();
+ if (key_system_directory.empty())
+ break;
+ directories_info.push_back(GetCdmDirectoryInfo(key_system_directory));
+ directories_size_bytes += directories_info.back().size_bytes;
+ }
+ }
+
+ if (directories_size_bytes <= cdm_data_quota_bytes)
+ return;
+
+ VLOG(1) << "Removing least recently accessed CDM data.";
+
+  // Enumerate directories starting with the least recently "used",
+  // deleting them until the total amount of CDM data is within quota.
+ std::sort(directories_info.begin(), directories_info.end(),
+ [](const CdmDirectoryInfo& lhs, const CdmDirectoryInfo& rhs) {
+ return lhs.last_used < rhs.last_used;
+ });
+ base::flat_set<base::FilePath> affected_origin_directories;
+ for (const auto& directory_info : directories_info) {
+ if (directories_size_bytes <= cdm_data_quota_bytes)
+ break;
+
+ VLOG(1) << "Removing " << directory_info.path;
+ base::DeletePathRecursively(directory_info.path);
+ affected_origin_directories.insert(directory_info.path.DirName());
+
+ DCHECK_GE(directories_size_bytes, directory_info.size_bytes);
+ directories_size_bytes -= directory_info.size_bytes;
+ }
+
+ // Enumerate all the origin directories that had sub-directories deleted,
+ // and delete any that are now empty.
+ for (const auto& origin_directory : affected_origin_directories) {
+ if (base::IsDirectoryEmpty(origin_directory))
+ base::DeleteFile(origin_directory);
+ }
+}
+
std::string HexEncodeHash(const std::string& name) {
uint32_t hash = base::PersistentHash(name);
return base::HexEncode(&hash, sizeof(uint32_t));
}
+// Returns a nullopt if storage was created successfully.
+base::Optional<base::File::Error> CreateStorageDirectory(base::FilePath path) {
+ base::File::Error error;
+ bool success = base::CreateDirectoryAndGetError(path, &error);
+ if (!success) {
+ return error;
+ }
+ return {};
+}
+
} // namespace
// Manages individual KeySystem connections. Provides data stores and
@@ -93,7 +198,7 @@ class FuchsiaCdmManager::KeySystemClient {
}
fidl::InterfaceHandle<fuchsia::io::Directory> data_directory =
- base::fuchsia::OpenDirectory(storage_path);
+ base::OpenDirectoryHandle(storage_path);
if (!data_directory.is_valid()) {
DLOG(ERROR) << "Unable to OpenDirectory " << storage_path;
return base::nullopt;
@@ -159,10 +264,21 @@ class FuchsiaCdmManager::KeySystemClient {
FuchsiaCdmManager::FuchsiaCdmManager(
CreateKeySystemCallbackMap create_key_system_callbacks_by_name,
- base::FilePath cdm_data_path)
+ base::FilePath cdm_data_path,
+ base::Optional<uint64_t> cdm_data_quota_bytes)
: create_key_system_callbacks_by_name_(
std::move(create_key_system_callbacks_by_name)),
- cdm_data_path_(std::move(cdm_data_path)) {}
+ cdm_data_path_(std::move(cdm_data_path)),
+ cdm_data_quota_bytes_(std::move(cdm_data_quota_bytes)),
+ storage_task_runner_(
+ base::ThreadPool::CreateSequencedTaskRunner({base::MayBlock()})) {
+ // To avoid potential for the CDM directory "cleanup" task removing
+ // CDM data directories that are in active use, the |storage_task_runner_| is
+ // sequenced, thereby ensuring cleanup completes before any CDM activities
+ // start.
+ if (cdm_data_quota_bytes)
+ ApplyCdmStorageQuota(cdm_data_path_, *cdm_data_quota_bytes);
+}
FuchsiaCdmManager::~FuchsiaCdmManager() = default;
@@ -174,24 +290,13 @@ void FuchsiaCdmManager::CreateAndProvision(
request) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
- KeySystemClient* key_system_client = GetOrCreateKeySystemClient(key_system);
- if (!key_system_client) {
- // GetOrCreateKeySystemClient will log the reason for failure.
- return;
- }
-
base::FilePath storage_path = GetStoragePath(key_system, origin);
- base::File::Error error;
- bool success = base::CreateDirectoryAndGetError(storage_path, &error);
- if (!success) {
- DLOG(ERROR) << "Failed to create directory: " << storage_path
- << ", error: " << error;
- return;
- }
- key_system_client->CreateCdm(std::move(storage_path),
- std::move(create_fetcher_cb),
- std::move(request));
+ storage_task_runner_->PostTaskAndReplyWithResult(
+ FROM_HERE, base::BindOnce(&CreateStorageDirectory, storage_path),
+ base::BindOnce(&FuchsiaCdmManager::CreateCdm, weak_factory_.GetWeakPtr(),
+ key_system, std::move(create_fetcher_cb),
+ std::move(request), storage_path));
}
void FuchsiaCdmManager::set_on_key_system_disconnect_for_test_callback(
@@ -241,6 +346,35 @@ base::FilePath FuchsiaCdmManager::GetStoragePath(const std::string& key_system,
.Append(HexEncodeHash(key_system));
}
+void FuchsiaCdmManager::CreateCdm(
+ const std::string& key_system_name,
+ CreateFetcherCB create_fetcher_cb,
+ fidl::InterfaceRequest<fuchsia::media::drm::ContentDecryptionModule>
+ request,
+ base::FilePath storage_path,
+ base::Optional<base::File::Error> storage_creation_error) {
+ DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
+
+ if (storage_creation_error) {
+ DLOG(ERROR) << "Failed to create directory: " << storage_path
+ << ", error: " << *storage_creation_error;
+ request.Close(ZX_ERR_NO_RESOURCES);
+ return;
+ }
+
+ KeySystemClient* key_system_client =
+ GetOrCreateKeySystemClient(key_system_name);
+ if (!key_system_client) {
+ // GetOrCreateKeySystemClient will log the reason for failure.
+ request.Close(ZX_ERR_NOT_FOUND);
+ return;
+ }
+
+ key_system_client->CreateCdm(std::move(storage_path),
+ std::move(create_fetcher_cb),
+ std::move(request));
+}
+
void FuchsiaCdmManager::OnKeySystemClientError(
const std::string& key_system_name) {
if (on_key_system_disconnect_for_test_callback_) {
diff --git a/chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager.h b/chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager.h
index c18629eeafb..40f91be48fa 100644
--- a/chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager.h
+++ b/chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager.h
@@ -10,8 +10,12 @@
#include "base/callback_forward.h"
#include "base/containers/flat_map.h"
+#include "base/files/file.h"
#include "base/files/file_path.h"
#include "base/macros.h"
+#include "base/memory/weak_ptr.h"
+#include "base/optional.h"
+#include "base/sequenced_task_runner.h"
#include "base/threading/thread_checker.h"
#include "media/base/provision_fetcher.h"
@@ -32,9 +36,12 @@ class FuchsiaCdmManager {
using CreateKeySystemCallbackMap =
base::flat_map<std::string, CreateKeySystemCallback>;
+ // |cdm_data_quota_bytes| is currently only applied once, when the manager is
+ // created.
FuchsiaCdmManager(
CreateKeySystemCallbackMap create_key_system_callbacks_by_name,
- base::FilePath cdm_data_path);
+ base::FilePath cdm_data_path,
+ base::Optional<uint64_t> cdm_data_quota_bytes);
~FuchsiaCdmManager();
@@ -63,11 +70,22 @@ class FuchsiaCdmManager {
KeySystemClient* CreateKeySystemClient(const std::string& key_system_name);
base::FilePath GetStoragePath(const std::string& key_system_name,
const url::Origin& origin);
+ void CreateCdm(
+ const std::string& key_system_name,
+ CreateFetcherCB create_fetcher_cb,
+ fidl::InterfaceRequest<fuchsia::media::drm::ContentDecryptionModule>
+ request,
+ base::FilePath storage_path,
+ base::Optional<base::File::Error> storage_creation_error);
void OnKeySystemClientError(const std::string& key_system_name);
// A map of callbacks to create KeySystem channels indexed by their EME name.
const CreateKeySystemCallbackMap create_key_system_callbacks_by_name_;
const base::FilePath cdm_data_path_;
+ const base::Optional<uint64_t> cdm_data_quota_bytes_;
+
+ // Used for operations on the CDM data directory.
+ const scoped_refptr<base::SequencedTaskRunner> storage_task_runner_;
// A map of the active KeySystem clients indexed by their EME name. Entries
// in this map will be added on the first CreateAndProvision call for that
@@ -79,6 +97,7 @@ class FuchsiaCdmManager {
on_key_system_disconnect_for_test_callback_;
THREAD_CHECKER(thread_checker_);
+ base::WeakPtrFactory<FuchsiaCdmManager> weak_factory_{this};
};
} // namespace media
diff --git a/chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager_test.cc b/chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager_unittest.cc
index 9455d7e26cc..64a011a535e 100644
--- a/chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager_test.cc
+++ b/chromium/media/fuchsia/cdm/service/fuchsia_cdm_manager_unittest.cc
@@ -12,9 +12,10 @@
#include "base/bind.h"
#include "base/callback.h"
+#include "base/files/file_util.h"
#include "base/files/scoped_temp_dir.h"
#include "base/run_loop.h"
-#include "base/test/bind_test_util.h"
+#include "base/test/bind.h"
#include "base/test/task_environment.h"
#include "media/fuchsia/cdm/service/mock_provision_fetcher.h"
#include "testing/gmock/include/gmock/gmock.h"
@@ -76,7 +77,8 @@ class FuchsiaCdmManagerTest : public ::testing::Test {
FuchsiaCdmManagerTest() { EXPECT_TRUE(temp_dir_.CreateUniqueTempDir()); }
std::unique_ptr<FuchsiaCdmManager> CreateFuchsiaCdmManager(
- std::vector<base::StringPiece> key_systems) {
+ std::vector<base::StringPiece> key_systems,
+ base::Optional<uint64_t> cdm_data_quota_bytes = base::nullopt) {
FuchsiaCdmManager::CreateKeySystemCallbackMap create_key_system_callbacks;
for (const base::StringPiece& name : key_systems) {
@@ -86,7 +88,8 @@ class FuchsiaCdmManagerTest : public ::testing::Test {
base::Unretained(&key_system)));
}
return std::make_unique<FuchsiaCdmManager>(
- std::move(create_key_system_callbacks), temp_dir_.GetPath());
+ std::move(create_key_system_callbacks), temp_dir_.GetPath(),
+ cdm_data_quota_bytes);
}
protected:
@@ -96,8 +99,8 @@ class FuchsiaCdmManagerTest : public ::testing::Test {
return mock_key_systems_[key_system_name];
}
- base::test::SingleThreadTaskEnvironment task_environment_{
- base::test::SingleThreadTaskEnvironment::MainThreadType::IO};
+ base::test::TaskEnvironment task_environment_{
+ base::test::TaskEnvironment::MainThreadType::IO};
MockKeySystemMap mock_key_systems_;
base::ScopedTempDir temp_dir_;
@@ -109,7 +112,7 @@ TEST_F(FuchsiaCdmManagerTest, NoKeySystems) {
base::RunLoop run_loop;
drm::ContentDecryptionModulePtr cdm_ptr;
cdm_ptr.set_error_handler([&](zx_status_t status) {
- EXPECT_EQ(status, ZX_ERR_PEER_CLOSED);
+ EXPECT_EQ(status, ZX_ERR_NOT_FOUND);
run_loop.Quit();
});
@@ -211,7 +214,14 @@ TEST_F(FuchsiaCdmManagerTest, SameOriginShareDataStore) {
base::RunLoop run_loop;
drm::ContentDecryptionModulePtr cdm1, cdm2;
- cdm2.set_error_handler([&](zx_status_t) { run_loop.Quit(); });
+ auto error_handler = [&](zx_status_t status) {
+ EXPECT_EQ(status, ZX_ERR_PEER_CLOSED);
+ if (!cdm1.is_bound() && !cdm2.is_bound()) {
+ run_loop.Quit();
+ }
+ };
+ cdm1.set_error_handler(error_handler);
+ cdm2.set_error_handler(error_handler);
EXPECT_CALL(mock_key_system(kKeySystem), AddDataStore(Eq(1u), _, _))
.WillOnce(
@@ -240,7 +250,15 @@ TEST_F(FuchsiaCdmManagerTest, DifferentOriginDoNotShareDataStore) {
base::RunLoop run_loop;
drm::ContentDecryptionModulePtr cdm1, cdm2;
- cdm2.set_error_handler([&](zx_status_t) { run_loop.Quit(); });
+ auto error_handler = [&](zx_status_t status) {
+ EXPECT_EQ(status, ZX_ERR_PEER_CLOSED);
+ if (!cdm1.is_bound() && !cdm2.is_bound()) {
+ run_loop.Quit();
+ }
+ };
+ cdm1.set_error_handler(error_handler);
+ cdm2.set_error_handler(error_handler);
+
EXPECT_CALL(mock_key_system(kKeySystem), AddDataStore(Eq(1u), _, _))
.WillOnce(
WithArgs<2>(Invoke([](drm::KeySystem::AddDataStoreCallback callback) {
@@ -269,5 +287,135 @@ TEST_F(FuchsiaCdmManagerTest, DifferentOriginDoNotShareDataStore) {
run_loop.Run();
}
+
+void CreateDummyCdmDirectory(const base::FilePath& cdm_data_path,
+ base::StringPiece origin,
+ base::StringPiece key_system,
+ uint64_t size) {
+ const base::FilePath path = cdm_data_path.Append(origin).Append(key_system);
+ CHECK(base::CreateDirectory(path));
+ if (size) {
+ std::vector<uint8_t> zeroes(size);
+ CHECK(base::WriteFile(path.Append("zeroes"), zeroes));
+ }
+}
+
+// Verify that the least recently used CDM data directories are removed, until
+// the quota is met. Also verify that old directories are removed regardless
+// of whether they are empty or not.
+TEST_F(FuchsiaCdmManagerTest, CdmDataQuotaBytes) {
+ constexpr uint64_t kTestQuotaBytes = 1024;
+ constexpr char kOriginDirectory1[] = "origin1";
+ constexpr char kOriginDirectory2[] = "origin2";
+ constexpr char kKeySystemDirectory1[] = "key_system1";
+ constexpr char kKeySystemDirectory2[] = "key_system2";
+ constexpr char kEmptyKeySystemDirectory[] = "empty_key_system";
+
+ // Create fake CDM data directories for two origins, each with two key
+ // systems, with each directory consuming 50% of the total quota, so that
+ // two directories must be removed to meet quota.
+
+ // Create least-recently-used directories & their contents.
+ const base::FilePath temp_path = temp_dir_.GetPath();
+ CreateDummyCdmDirectory(temp_path, kOriginDirectory1, kKeySystemDirectory1,
+ kTestQuotaBytes / 2);
+ CreateDummyCdmDirectory(temp_path, kOriginDirectory2, kKeySystemDirectory2,
+ kTestQuotaBytes / 2);
+ CreateDummyCdmDirectory(temp_path, kOriginDirectory1,
+ kEmptyKeySystemDirectory, 0);
+
+ // Sleep to account for coarse-grained filesystem timestamps.
+ base::PlatformThread::Sleep(base::TimeDelta::FromSeconds(1));
+
+ // Create the recently-used directories.
+ CreateDummyCdmDirectory(temp_path, kOriginDirectory1, kKeySystemDirectory2,
+ kTestQuotaBytes / 2);
+ CreateDummyCdmDirectory(temp_path, kOriginDirectory2, kKeySystemDirectory1,
+ kTestQuotaBytes / 2);
+ CreateDummyCdmDirectory(temp_path, kOriginDirectory2,
+ kEmptyKeySystemDirectory, 0);
+
+ // Create the CDM manager, to run the data directory quota enforcement.
+ std::unique_ptr<FuchsiaCdmManager> cdm_manager =
+ CreateFuchsiaCdmManager({}, kTestQuotaBytes);
+
+ // Use a CreateAndProvision() request as a proxy to wait for quota enforcement
+ // to finish being applied.
+ base::RunLoop run_loop;
+ drm::ContentDecryptionModulePtr cdm_ptr;
+ cdm_ptr.set_error_handler([&](zx_status_t status) {
+ EXPECT_EQ(status, ZX_ERR_NOT_FOUND);
+ run_loop.Quit();
+ });
+
+ cdm_manager->CreateAndProvision(
+ "com.key_system", url::Origin(),
+ base::BindRepeating(&CreateMockProvisionFetcher), cdm_ptr.NewRequest());
+ run_loop.Run();
+
+ EXPECT_FALSE(base::PathExists(
+ temp_path.Append(kOriginDirectory1).Append(kKeySystemDirectory1)));
+ EXPECT_FALSE(base::PathExists(
+ temp_path.Append(kOriginDirectory2).Append(kKeySystemDirectory2)));
+
+ EXPECT_TRUE(base::PathExists(
+ temp_path.Append(kOriginDirectory1).Append(kKeySystemDirectory2)));
+ EXPECT_TRUE(base::PathExists(
+ temp_path.Append(kOriginDirectory2).Append(kKeySystemDirectory1)));
+
+ // Empty directories are currently always treated as old, causing them all to
+ // be deleted if the CDM data directory exceeds its quota.
+ EXPECT_FALSE(base::PathExists(
+ temp_path.Append(kOriginDirectory1).Append(kEmptyKeySystemDirectory)));
+ EXPECT_FALSE(base::PathExists(
+ temp_path.Append(kOriginDirectory2).Append(kEmptyKeySystemDirectory)));
+}
+
+// Verify that if all key-system sub-directories for a given origin have been
+// deleted then the origin's directory is also deleted.
+TEST_F(FuchsiaCdmManagerTest, EmptyOriginDirectory) {
+ constexpr uint64_t kTestQuotaBytes = 1024;
+ constexpr char kInactiveOriginDirectory[] = "origin1";
+ constexpr char kActiveOriginDirectory[] = "origin2";
+ constexpr char kKeySystemDirectory1[] = "key_system1";
+ constexpr char kKeySystemDirectory2[] = "key_system2";
+
+ // Create dummy data for an inactive origin.
+ const base::FilePath temp_path = temp_dir_.GetPath();
+ CreateDummyCdmDirectory(temp_path, kInactiveOriginDirectory,
+ kKeySystemDirectory1, kTestQuotaBytes / 2);
+ CreateDummyCdmDirectory(temp_path, kInactiveOriginDirectory,
+ kKeySystemDirectory2, kTestQuotaBytes / 2);
+
+ // Sleep to account for coarse-grained filesystem timestamps.
+ base::PlatformThread::Sleep(base::TimeDelta::FromSeconds(1));
+
+ // Create dummy data for a recently-used, active origin.
+ CreateDummyCdmDirectory(temp_path, kActiveOriginDirectory,
+ kKeySystemDirectory2, kTestQuotaBytes);
+
+ // Create the CDM manager, to run the data directory quota enforcement.
+ std::unique_ptr<FuchsiaCdmManager> cdm_manager =
+ CreateFuchsiaCdmManager({}, kTestQuotaBytes);
+
+ // Use a CreateAndProvision() request as a proxy to wait for quota enforcement
+ // to finish being applied.
+ base::RunLoop run_loop;
+ drm::ContentDecryptionModulePtr cdm_ptr;
+ cdm_ptr.set_error_handler([&](zx_status_t status) {
+ EXPECT_EQ(status, ZX_ERR_NOT_FOUND);
+ run_loop.Quit();
+ });
+
+ cdm_manager->CreateAndProvision(
+ "com.key_system", url::Origin(),
+ base::BindRepeating(&CreateMockProvisionFetcher), cdm_ptr.NewRequest());
+ run_loop.Run();
+
+ EXPECT_FALSE(base::PathExists(temp_path.Append(kInactiveOriginDirectory)));
+ EXPECT_TRUE(base::PathExists(
+ temp_path.Append(kActiveOriginDirectory).Append(kKeySystemDirectory2)));
+}
+
} // namespace
} // namespace media
diff --git a/chromium/media/fuchsia/cdm/service/mock_provision_fetcher.h b/chromium/media/fuchsia/cdm/service/mock_provision_fetcher.h
index 4f76f67d5fc..dab6c73215e 100644
--- a/chromium/media/fuchsia/cdm/service/mock_provision_fetcher.h
+++ b/chromium/media/fuchsia/cdm/service/mock_provision_fetcher.h
@@ -29,7 +29,7 @@ class MockProvisionFetcher : public ProvisionFetcher {
MOCK_METHOD(void,
Retrieve,
- (const std::string& default_url,
+ (const GURL& default_url,
const std::string& request_data,
ResponseCB response_cb),
(override));
diff --git a/chromium/media/fuchsia/cdm/service/provisioning_fetcher_impl.cc b/chromium/media/fuchsia/cdm/service/provisioning_fetcher_impl.cc
index 87adae00cad..d234d4c8b92 100644
--- a/chromium/media/fuchsia/cdm/service/provisioning_fetcher_impl.cc
+++ b/chromium/media/fuchsia/cdm/service/provisioning_fetcher_impl.cc
@@ -64,7 +64,7 @@ void ProvisioningFetcherImpl::Fetch(
retrieve_in_progress_ = true;
fetcher_->Retrieve(
- request.default_provisioning_server_url.value(), request_str,
+ GURL(request.default_provisioning_server_url.value()), request_str,
base::BindRepeating(&ProvisioningFetcherImpl::OnRetrieveComplete,
base::Unretained(this),
base::Passed(std::move(callback))));
diff --git a/chromium/media/fuchsia/cdm/service/provisioning_fetcher_impl_test.cc b/chromium/media/fuchsia/cdm/service/provisioning_fetcher_impl_unittest.cc
index 8d1975ae76a..9b3f753bd3d 100644
--- a/chromium/media/fuchsia/cdm/service/provisioning_fetcher_impl_test.cc
+++ b/chromium/media/fuchsia/cdm/service/provisioning_fetcher_impl_unittest.cc
@@ -9,7 +9,7 @@
#include "base/bind.h"
#include "base/location.h"
-#include "base/test/bind_test_util.h"
+#include "base/test/bind.h"
#include "base/test/task_environment.h"
#include "fuchsia/base/mem_buffer_util.h"
#include "media/fuchsia/cdm/service/mock_provision_fetcher.h"
@@ -27,7 +27,7 @@ using ::testing::Invoke;
using ::testing::WithArgs;
using MockProvisionFetcher = ::media::testing::MockProvisionFetcher;
-constexpr char kTestDefaultUrl[] = "http://test_default_url.com";
+constexpr char kTestDefaultUrl[] = "http://test_default_url.com/";
constexpr char kTestRequest[] = "test_request_message";
constexpr char kTestResponse[] = "test_response_message";
diff --git a/chromium/media/fuchsia/common/stream_processor_helper.cc b/chromium/media/fuchsia/common/stream_processor_helper.cc
index ad53611b478..13259ed07ce 100644
--- a/chromium/media/fuchsia/common/stream_processor_helper.cc
+++ b/chromium/media/fuchsia/common/stream_processor_helper.cc
@@ -343,30 +343,20 @@ void StreamProcessorHelper::CompleteInputBuffersAllocation(
settings.set_buffer_constraints_version_ordinal(
input_buffer_constraints_.buffer_constraints_version_ordinal());
settings.set_single_buffer_mode(false);
- settings.set_packet_count_for_server(
- input_buffer_constraints_.default_settings().packet_count_for_server());
- settings.set_packet_count_for_client(
- input_buffer_constraints_.default_settings().packet_count_for_client());
settings.set_sysmem_token(std::move(sysmem_token));
processor_->SetInputBufferPartialSettings(std::move(settings));
}
void StreamProcessorHelper::CompleteOutputBuffersAllocation(
- size_t num_buffers_for_client,
- size_t num_buffers_for_server,
fuchsia::sysmem::BufferCollectionTokenPtr collection_token) {
DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
DCHECK(!output_buffer_constraints_.IsEmpty());
- DCHECK_LE(num_buffers_for_client,
- output_buffer_constraints_.packet_count_for_client_max());
// Pass new output buffer settings to the stream processor.
fuchsia::media::StreamBufferPartialSettings settings;
settings.set_buffer_lifetime_ordinal(output_buffer_lifetime_ordinal_);
settings.set_buffer_constraints_version_ordinal(
output_buffer_constraints_.buffer_constraints_version_ordinal());
- settings.set_packet_count_for_client(num_buffers_for_client);
- settings.set_packet_count_for_server(num_buffers_for_server);
settings.set_sysmem_token(std::move(collection_token));
processor_->SetOutputBufferPartialSettings(std::move(settings));
processor_->CompleteOutputBufferPartialSettings(
diff --git a/chromium/media/fuchsia/common/stream_processor_helper.h b/chromium/media/fuchsia/common/stream_processor_helper.h
index 73902f8d15b..3b333d15daa 100644
--- a/chromium/media/fuchsia/common/stream_processor_helper.h
+++ b/chromium/media/fuchsia/common/stream_processor_helper.h
@@ -122,10 +122,11 @@ class StreamProcessorHelper {
void CompleteInputBuffersAllocation(
fuchsia::sysmem::BufferCollectionTokenPtr token);
void CompleteOutputBuffersAllocation(
- size_t num_buffers_for_client,
- size_t num_buffers_for_server,
fuchsia::sysmem::BufferCollectionTokenPtr token);
+ // Closes the current stream and starts a new one. After that all packets
+ // passed to Process() will be sent with a new |stream_lifetime_ordinal|
+ // value.
void Reset();
private:
diff --git a/chromium/media/fuchsia/common/sysmem_buffer_reader.cc b/chromium/media/fuchsia/common/sysmem_buffer_reader.cc
index 406096a2617..3bc7cf268ac 100644
--- a/chromium/media/fuchsia/common/sysmem_buffer_reader.cc
+++ b/chromium/media/fuchsia/common/sysmem_buffer_reader.cc
@@ -93,10 +93,19 @@ std::unique_ptr<SysmemBufferReader> SysmemBufferReader::Create(
// static
fuchsia::sysmem::BufferCollectionConstraints
-SysmemBufferReader::GetRecommendedConstraints(size_t max_used_output_frames) {
+SysmemBufferReader::GetRecommendedConstraints(
+ size_t min_buffer_count,
+ base::Optional<size_t> min_buffer_size) {
fuchsia::sysmem::BufferCollectionConstraints buffer_constraints;
buffer_constraints.usage.cpu = fuchsia::sysmem::cpuUsageRead;
- buffer_constraints.min_buffer_count_for_camping = max_used_output_frames;
+ buffer_constraints.min_buffer_count = min_buffer_count;
+ if (min_buffer_size) {
+ buffer_constraints.has_buffer_memory_constraints = true;
+ buffer_constraints.buffer_memory_constraints.min_size_bytes =
+ min_buffer_size.value();
+ buffer_constraints.buffer_memory_constraints.ram_domain_supported = true;
+ buffer_constraints.buffer_memory_constraints.cpu_domain_supported = true;
+ }
return buffer_constraints;
}
diff --git a/chromium/media/fuchsia/common/sysmem_buffer_reader.h b/chromium/media/fuchsia/common/sysmem_buffer_reader.h
index bc9727facd0..c4c4e0b10a4 100644
--- a/chromium/media/fuchsia/common/sysmem_buffer_reader.h
+++ b/chromium/media/fuchsia/common/sysmem_buffer_reader.h
@@ -12,14 +12,20 @@
#include "base/containers/span.h"
#include "base/memory/read_only_shared_memory_region.h"
+#include "base/optional.h"
namespace media {
// Helper class to read content from fuchsia::sysmem::BufferCollection.
class SysmemBufferReader {
public:
+ // Returns sysmem buffer constraints with the specified |min_buffer_count|.
+ // Currently it doesn't request buffers for camping or any shared slack, so
+ // the clients are expected to read incoming buffers (using Read() or
+ // GetMappingForBuffer()) and then release them back to the source.
static fuchsia::sysmem::BufferCollectionConstraints GetRecommendedConstraints(
- size_t max_used_output_frames);
+ size_t min_buffer_count,
+ base::Optional<size_t> min_buffer_size);
static std::unique_ptr<SysmemBufferReader> Create(
fuchsia::sysmem::BufferCollectionInfo_2 info);
diff --git a/chromium/media/fuchsia/common/sysmem_buffer_writer.cc b/chromium/media/fuchsia/common/sysmem_buffer_writer.cc
index 0942b9f5337..d2a1a28e4f0 100644
--- a/chromium/media/fuchsia/common/sysmem_buffer_writer.cc
+++ b/chromium/media/fuchsia/common/sysmem_buffer_writer.cc
@@ -44,8 +44,8 @@ class SysmemBufferWriter::Buffer {
size_t bytes_to_map = base::bits::Align(offset + size, base::GetPageSize());
uintptr_t addr;
zx_status_t status = zx::vmar::root_self()->map(
- /*vmar_offset=*/0, vmo, /*vmo_offset=*/0, bytes_to_map,
- ZX_VM_PERM_READ | ZX_VM_PERM_WRITE, &addr);
+ ZX_VM_PERM_READ | ZX_VM_PERM_WRITE, /*vmar_offset=*/0, vmo,
+ /*vmo_offset=*/0, bytes_to_map, &addr);
if (status != ZX_OK) {
ZX_DLOG(ERROR, status) << "zx_vmar_map";
return false;
@@ -168,33 +168,19 @@ std::unique_ptr<SysmemBufferWriter> SysmemBufferWriter::Create(
// static
base::Optional<fuchsia::sysmem::BufferCollectionConstraints>
-SysmemBufferWriter::GetRecommendedConstraints(
- const fuchsia::media::StreamBufferConstraints& stream_constraints) {
+SysmemBufferWriter::GetRecommendedConstraints(size_t min_buffer_count,
+ size_t min_buffer_size) {
fuchsia::sysmem::BufferCollectionConstraints buffer_constraints;
- if (!stream_constraints.has_default_settings() ||
- !stream_constraints.default_settings().has_packet_count_for_client()) {
- DLOG(ERROR)
- << "Received StreamBufferConstaints with missing required fields.";
- return base::nullopt;
- }
-
// Currently we have to map buffers VMOs to write to them (see ZX-4854) and
// memory cannot be mapped as write-only (see ZX-4872), so request RW access
// even though we will never need to read from these buffers.
buffer_constraints.usage.cpu =
fuchsia::sysmem::cpuUsageRead | fuchsia::sysmem::cpuUsageWrite;
- buffer_constraints.min_buffer_count_for_camping =
- stream_constraints.default_settings().packet_count_for_client();
+ buffer_constraints.min_buffer_count = min_buffer_count;
buffer_constraints.has_buffer_memory_constraints = true;
-
- const int kDefaultPacketSize = 512 * 1024;
- buffer_constraints.buffer_memory_constraints.min_size_bytes =
- stream_constraints.has_per_packet_buffer_bytes_recommended()
- ? stream_constraints.per_packet_buffer_bytes_recommended()
- : kDefaultPacketSize;
-
+ buffer_constraints.buffer_memory_constraints.min_size_bytes = min_buffer_size;
buffer_constraints.buffer_memory_constraints.ram_domain_supported = true;
buffer_constraints.buffer_memory_constraints.cpu_domain_supported = true;
diff --git a/chromium/media/fuchsia/common/sysmem_buffer_writer.h b/chromium/media/fuchsia/common/sysmem_buffer_writer.h
index aaee25c1449..376468d1b75 100644
--- a/chromium/media/fuchsia/common/sysmem_buffer_writer.h
+++ b/chromium/media/fuchsia/common/sysmem_buffer_writer.h
@@ -22,8 +22,7 @@ class SysmemBufferWriter {
class Buffer;
static base::Optional<fuchsia::sysmem::BufferCollectionConstraints>
- GetRecommendedConstraints(
- const fuchsia::media::StreamBufferConstraints& stream_constraints);
+ GetRecommendedConstraints(size_t min_buffer_count, size_t min_buffer_size);
static std::unique_ptr<SysmemBufferWriter> Create(
fuchsia::sysmem::BufferCollectionInfo_2 info);
diff --git a/chromium/media/fuchsia/common/sysmem_buffer_writer_queue.cc b/chromium/media/fuchsia/common/sysmem_buffer_writer_queue.cc
index 34fedaae1f8..defb54d059d 100644
--- a/chromium/media/fuchsia/common/sysmem_buffer_writer_queue.cc
+++ b/chromium/media/fuchsia/common/sysmem_buffer_writer_queue.cc
@@ -37,9 +37,9 @@ struct SysmemBufferWriterQueue::PendingBuffer {
// be released.
bool is_complete = false;
- // Index of the last buffer in the sysmem buffer collection that was used for
- // this input buffer. Valid only when |bytes_left()==0|.
- size_t tail_sysmem_buffer_index = 0;
+ // Index of the last buffer in the sysmem buffer collection that was used to
+ // send this input buffer. Should be set only when |bytes_left()==0|.
+ base::Optional<size_t> tail_sysmem_buffer_index;
};
SysmemBufferWriterQueue::SysmemBufferWriterQueue() = default;
@@ -137,6 +137,11 @@ void SysmemBufferWriterQueue::ResetPositionAndPause() {
for (auto& buffer : pending_buffers_) {
buffer.buffer_pos = 0;
buffer.is_complete = false;
+
+ // All packets that were pending will need to be resent. Reset
+ // |tail_sysmem_buffer_index| to ensure that these packets are not removed
+ // from the queue in ReleaseBuffer().
+ buffer.tail_sysmem_buffer_index = base::nullopt;
}
input_queue_position_ = 0;
is_paused_ = true;
diff --git a/chromium/media/fuchsia/common/sysmem_buffer_writer_queue.h b/chromium/media/fuchsia/common/sysmem_buffer_writer_queue.h
index b3d7d0e03db..761d2b5aae1 100644
--- a/chromium/media/fuchsia/common/sysmem_buffer_writer_queue.h
+++ b/chromium/media/fuchsia/common/sysmem_buffer_writer_queue.h
@@ -55,12 +55,18 @@ class SysmemBufferWriterQueue {
void ResetBuffers();
// Resets pending queue position to the start of the queue and pauses the
- // writer. All pending buffers will be resent when Unpause() is
- // called.
+ // writer. All pending buffers will be resent when Unpause() is called.
+ // This method is used to handle OnStreamFailed event received from
+ // StreamProcessor, particularly to handle NoKey error in CDM. When that event
+ // is received the StreamProcessor client should assumes that all queued
+ // packets were not processed. Once the error condition is resolved (e.g. by
+ // adding a new decryption key), the client should start a new stream and
+ // resend all failed packets, which is achieved by calling Unpause()
void ResetPositionAndPause();
- // Normally this should be called after restarting a stream in a
- // StreamProcessor.
+ // Resumes sending packets on stream that was previously paused with
+ // ResetPositionAndPause(). Should be called after starting a new stream in
+ // the StreamProcessor (e.g. by calling StreamProcessorHelper::Reset()).
void Unpause();
// Number of buffers in the sysmem collection or 0 if sysmem buffers has not
diff --git a/chromium/media/fuchsia/media_unittests.cmx b/chromium/media/fuchsia/media_unittests.test-cmx
index 923550a61f5..923550a61f5 100644
--- a/chromium/media/fuchsia/media_unittests.cmx
+++ b/chromium/media/fuchsia/media_unittests.test-cmx
diff --git a/chromium/media/gpu/BUILD.gn b/chromium/media/gpu/BUILD.gn
index e3d961939c3..c82217f283b 100644
--- a/chromium/media/gpu/BUILD.gn
+++ b/chromium/media/gpu/BUILD.gn
@@ -3,6 +3,7 @@
# found in the LICENSE file.
import("//build/buildflag_header.gni")
+import("//build/config/chromeos/ui_mode.gni")
import("//build/config/features.gni")
import("//build/config/ui.gni")
import("//gpu/vulkan/features.gni")
@@ -16,6 +17,7 @@ buildflag_header("buildflags") {
flags = [
"USE_VAAPI=$use_vaapi",
+ "USE_VAAPI_IMAGE_CODECS=$use_vaapi_image_codecs",
"USE_V4L2_CODEC=$use_v4l2_codec",
"USE_LIBV4L2=$use_v4lplugin",
]
@@ -27,22 +29,22 @@ component("gpu") {
# Only local test code, GPU-related IPC code in the media layer, and
# media-related content code should access //media/gpu.
visibility = [
+ ":*",
"//chrome/gpu",
"//chromecast/*",
"//components/arc/mojom:media",
"//components/arc/mojom:media_mojolpm",
"//components/arc/video_accelerator:common",
- "//components/mirroring/service:mirroring_service",
"//components/chromeos_camera/*",
+ "//components/mirroring/service:mirroring_service",
"//components/viz/service/main",
"//content/gpu:*",
"//content/renderer:*",
"//media/gpu/ipc/*",
- "//media/gpu/vaapi/*",
"//media/gpu/test/*",
+ "//media/gpu/vaapi/*",
"//media/mojo/*",
"//remoting/codec:encoder",
- ":*",
]
if (is_mac) {
@@ -268,6 +270,15 @@ source_set("common") {
"vp9_reference_frame_vector.h",
]
+ if (proprietary_codecs && enable_platform_hevc) {
+ sources += [
+ "h265_decoder.cc",
+ "h265_decoder.h",
+ "h265_dpb.cc",
+ "h265_dpb.h",
+ ]
+ }
+
visibility = [
":gpu",
"//media/gpu/*",
@@ -289,6 +300,7 @@ source_set("common") {
deps = [
":buildflags",
"//base",
+ "//build:chromeos_buildflags",
"//media",
"//media/parsers",
"//ui/gfx:buffer_types",
@@ -437,6 +449,7 @@ if (use_v4l2_codec || use_vaapi || is_mac || is_win) {
"test:helpers",
"//base",
"//base/test:test_support",
+ "//build:chromeos_buildflags",
"//media:test_support",
"//media/gpu",
"//media/gpu/test:test_helpers",
@@ -494,8 +507,10 @@ source_set("unit_tests") {
"//ui/gl:test_support",
]
sources = [ "h264_decoder_unittest.cc" ]
-
- if (use_v4l2_codec || use_vaapi) {
+ if (proprietary_codecs && enable_platform_hevc) {
+ sources += [ "h265_decoder_unittest.cc" ]
+ }
+ if (is_ash && (use_v4l2_codec || use_vaapi)) {
deps += [ "//media/gpu/chromeos:unit_tests" ]
}
if (use_vaapi) {
@@ -513,6 +528,7 @@ source_set("unit_tests") {
sources += [
"windows/d3d11_copying_texture_wrapper_unittest.cc",
"windows/d3d11_decoder_configurator_unittest.cc",
+ "windows/d3d11_picture_buffer_unittest.cc",
"windows/d3d11_texture_selector_unittest.cc",
"windows/d3d11_texture_wrapper_unittest.cc",
"windows/d3d11_video_decoder_unittest.cc",
@@ -592,3 +608,14 @@ if (use_v4l2_codec || use_vaapi) {
]
}
}
+
+if (proprietary_codecs && enable_platform_hevc) {
+ fuzzer_test("media_h265_decoder_fuzzer") {
+ sources = [ "h265_decoder_fuzzertest.cc" ]
+ deps = [
+ ":gpu",
+ "//base",
+ "//media",
+ ]
+ }
+}
diff --git a/chromium/media/gpu/android/android_video_surface_chooser_impl.cc b/chromium/media/gpu/android/android_video_surface_chooser_impl.cc
index 872ec8d52f3..fb0f8010f23 100644
--- a/chromium/media/gpu/android/android_video_surface_chooser_impl.cc
+++ b/chromium/media/gpu/android/android_video_surface_chooser_impl.cc
@@ -213,19 +213,20 @@ void AndroidVideoSurfaceChooserImpl::SwitchToOverlay(
// We bind all of our callbacks with weak ptrs, since we don't know how long
// the client will hold on to overlays. They could, in principle, show up
// long after the client is destroyed too, if codec destruction hangs.
- config.ready_cb = base::Bind(&AndroidVideoSurfaceChooserImpl::OnOverlayReady,
- weak_factory_.GetWeakPtr());
+ config.ready_cb =
+ base::BindOnce(&AndroidVideoSurfaceChooserImpl::OnOverlayReady,
+ weak_factory_.GetWeakPtr());
config.failed_cb =
- base::Bind(&AndroidVideoSurfaceChooserImpl::OnOverlayFailed,
- weak_factory_.GetWeakPtr());
+ base::BindOnce(&AndroidVideoSurfaceChooserImpl::OnOverlayFailed,
+ weak_factory_.GetWeakPtr());
config.rect = current_state_.initial_position;
config.secure = current_state_.is_secure;
// Request power efficient overlays and callbacks if we're supposed to.
config.power_efficient = needs_power_efficient;
- config.power_cb =
- base::Bind(&AndroidVideoSurfaceChooserImpl::OnPowerEfficientState,
- weak_factory_.GetWeakPtr());
+ config.power_cb = base::BindRepeating(
+ &AndroidVideoSurfaceChooserImpl::OnPowerEfficientState,
+ weak_factory_.GetWeakPtr());
overlay_ = overlay_factory_.Run(std::move(config));
if (!overlay_)
diff --git a/chromium/media/gpu/android/android_video_surface_chooser_impl_unittest.cc b/chromium/media/gpu/android/android_video_surface_chooser_impl_unittest.cc
index 9f5eff7e972..d3de47e086c 100644
--- a/chromium/media/gpu/android/android_video_surface_chooser_impl_unittest.cc
+++ b/chromium/media/gpu/android/android_video_surface_chooser_impl_unittest.cc
@@ -189,11 +189,13 @@ class AndroidVideoSurfaceChooserImplTest
std::unique_ptr<MockAndroidOverlay> overlay) {
Factory* factory = new Factory(
std::move(overlay),
- base::Bind(&AndroidVideoSurfaceChooserImplTest::MockOnOverlayCreated,
- base::Unretained(this)));
+ base::BindRepeating(
+ &AndroidVideoSurfaceChooserImplTest::MockOnOverlayCreated,
+ base::Unretained(this)));
// Leaky!
- return base::Bind(&Factory::ReturnOverlay, base::Unretained(factory));
+ return base::BindRepeating(&Factory::ReturnOverlay,
+ base::Unretained(factory));
}
// Called by the factory when it's run.
diff --git a/chromium/media/gpu/android/codec_allocator.cc b/chromium/media/gpu/android/codec_allocator.cc
index ad822d7b76c..724c54a4fff 100644
--- a/chromium/media/gpu/android/codec_allocator.cc
+++ b/chromium/media/gpu/android/codec_allocator.cc
@@ -9,7 +9,7 @@
#include <algorithm>
#include <memory>
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/task/post_task.h"
#include "base/task/task_traits.h"
diff --git a/chromium/media/gpu/android/codec_allocator_unittest.cc b/chromium/media/gpu/android/codec_allocator_unittest.cc
index 439a310fcd4..f9568c33ca9 100644
--- a/chromium/media/gpu/android/codec_allocator_unittest.cc
+++ b/chromium/media/gpu/android/codec_allocator_unittest.cc
@@ -9,7 +9,7 @@
#include <memory>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/check.h"
#include "base/single_thread_task_runner.h"
#include "base/test/simple_test_tick_clock.h"
diff --git a/chromium/media/gpu/android/codec_image.cc b/chromium/media/gpu/android/codec_image.cc
index 00e047b8141..3721ceb5f8e 100644
--- a/chromium/media/gpu/android/codec_image.cc
+++ b/chromium/media/gpu/android/codec_image.cc
@@ -9,7 +9,7 @@
#include <memory>
#include "base/android/scoped_hardware_buffer_fence_sync.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "gpu/command_buffer/service/gles2_cmd_decoder.h"
#include "gpu/command_buffer/service/texture_manager.h"
#include "ui/gl/gl_context.h"
diff --git a/chromium/media/gpu/android/codec_image_unittest.cc b/chromium/media/gpu/android/codec_image_unittest.cc
index 87496e335c9..bc0b5e4c27f 100644
--- a/chromium/media/gpu/android/codec_image_unittest.cc
+++ b/chromium/media/gpu/android/codec_image_unittest.cc
@@ -5,7 +5,7 @@
#include <memory>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/test/mock_callback.h"
#include "base/test/task_environment.h"
#include "base/threading/sequenced_task_runner_handle.h"
diff --git a/chromium/media/gpu/android/codec_output_buffer_renderer.cc b/chromium/media/gpu/android/codec_output_buffer_renderer.cc
index ed08fb6018e..9beaf756533 100644
--- a/chromium/media/gpu/android/codec_output_buffer_renderer.cc
+++ b/chromium/media/gpu/android/codec_output_buffer_renderer.cc
@@ -6,7 +6,7 @@
#include <string.h>
#include "base/android/scoped_hardware_buffer_fence_sync.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/optional.h"
#include "gpu/command_buffer/service/gles2_cmd_decoder.h"
#include "gpu/command_buffer/service/texture_manager.h"
diff --git a/chromium/media/gpu/android/direct_shared_image_video_provider.cc b/chromium/media/gpu/android/direct_shared_image_video_provider.cc
index b7daff68eb5..2a700882f09 100644
--- a/chromium/media/gpu/android/direct_shared_image_video_provider.cc
+++ b/chromium/media/gpu/android/direct_shared_image_video_provider.cc
@@ -7,8 +7,8 @@
#include <memory>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback.h"
+#include "base/callback_helpers.h"
#include "base/memory/ref_counted.h"
#include "base/metrics/histogram_functions.h"
#include "base/metrics/histogram_macros.h"
diff --git a/chromium/media/gpu/android/frame_info_helper_unittest.cc b/chromium/media/gpu/android/frame_info_helper_unittest.cc
index ce167cbeed2..94dc6b5fa09 100644
--- a/chromium/media/gpu/android/frame_info_helper_unittest.cc
+++ b/chromium/media/gpu/android/frame_info_helper_unittest.cc
@@ -4,7 +4,7 @@
#include "media/gpu/android/frame_info_helper.h"
-#include "base/test/bind_test_util.h"
+#include "base/test/bind.h"
#include "base/test/task_environment.h"
#include "base/threading/thread_task_runner_handle.h"
#include "gpu/command_buffer/service/mock_texture_owner.h"
@@ -25,7 +25,7 @@ constexpr gfx::Size kTestCodedSize(128, 128);
std::unique_ptr<FrameInfoHelper> CreateHelper() {
auto task_runner = base::ThreadTaskRunnerHandle::Get();
auto get_stub_cb =
- base::Bind([]() -> gpu::CommandBufferStub* { return nullptr; });
+ base::BindRepeating([]() -> gpu::CommandBufferStub* { return nullptr; });
return FrameInfoHelper::Create(std::move(task_runner),
std::move(get_stub_cb));
}
diff --git a/chromium/media/gpu/android/media_codec_video_decoder.cc b/chromium/media/gpu/android/media_codec_video_decoder.cc
index cbc58254c07..686357a8891 100644
--- a/chromium/media/gpu/android/media_codec_video_decoder.cc
+++ b/chromium/media/gpu/android/media_codec_video_decoder.cc
@@ -8,11 +8,11 @@
#include "base/android/build_info.h"
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback.h"
#include "base/callback_helpers.h"
#include "base/command_line.h"
#include "base/logging.h"
+#include "base/memory/ptr_util.h"
#include "base/memory/weak_ptr.h"
#include "base/metrics/histogram_macros.h"
#include "base/threading/sequenced_task_runner_handle.h"
@@ -232,10 +232,10 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
base::FeatureList::IsEnabled(media::kAllowNonSecureOverlays)) {
DVLOG(2) << __func__;
surface_chooser_helper_.chooser()->SetClientCallbacks(
- base::Bind(&MediaCodecVideoDecoder::OnSurfaceChosen,
- weak_factory_.GetWeakPtr()),
- base::Bind(&MediaCodecVideoDecoder::OnSurfaceChosen,
- weak_factory_.GetWeakPtr(), nullptr));
+ base::BindRepeating(&MediaCodecVideoDecoder::OnSurfaceChosen,
+ weak_factory_.GetWeakPtr()),
+ base::BindRepeating(&MediaCodecVideoDecoder::OnSurfaceChosen,
+ weak_factory_.GetWeakPtr(), nullptr));
}
std::unique_ptr<VideoDecoder> MediaCodecVideoDecoder::Create(
@@ -500,9 +500,9 @@ void MediaCodecVideoDecoder::StartLazyInit() {
overlay_mode = VideoFrameFactory::OverlayMode::kDontRequestPromotionHints;
video_frame_factory_->Initialize(
- overlay_mode,
- base::Bind(&MediaCodecVideoDecoder::OnVideoFrameFactoryInitialized,
- weak_factory_.GetWeakPtr()));
+ overlay_mode, base::BindRepeating(
+ &MediaCodecVideoDecoder::OnVideoFrameFactoryInitialized,
+ weak_factory_.GetWeakPtr()));
}
void MediaCodecVideoDecoder::OnVideoFrameFactoryInitialized(
@@ -815,9 +815,10 @@ void MediaCodecVideoDecoder::StartTimerOrPumpCodec() {
// at this frequency is likely overkill in the steady state.
const auto kPollingPeriod = base::TimeDelta::FromMilliseconds(10);
if (!pump_codec_timer_.IsRunning()) {
- pump_codec_timer_.Start(FROM_HERE, kPollingPeriod,
- base::Bind(&MediaCodecVideoDecoder::PumpCodec,
- base::Unretained(this), false));
+ pump_codec_timer_.Start(
+ FROM_HERE, kPollingPeriod,
+ base::BindRepeating(&MediaCodecVideoDecoder::PumpCodec,
+ base::Unretained(this), false));
}
}
diff --git a/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc b/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc
index 190cceeded7..0fc5de42bdf 100644
--- a/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc
+++ b/chromium/media/gpu/android/media_codec_video_decoder_unittest.cc
@@ -6,7 +6,7 @@
#include "base/android/jni_android.h"
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/run_loop.h"
#include "base/test/gmock_callback_support.h"
#include "base/test/mock_callback.h"
@@ -963,7 +963,7 @@ TEST_P(MediaCodecVideoDecoderVp9Test, ColorSpaceIsIncludedInCodecConfig) {
TEST_P(MediaCodecVideoDecoderVp9Test, HdrMetadataIsIncludedInCodecConfig) {
VideoDecoderConfig config = TestVideoConfig::Normal(kCodecVP9);
- gl::HDRMetadata hdr_metadata;
+ gfx::HDRMetadata hdr_metadata;
hdr_metadata.max_frame_average_light_level = 123;
hdr_metadata.max_content_light_level = 456;
hdr_metadata.mastering_metadata.primary_r.set_x(0.1f);
diff --git a/chromium/media/gpu/android/video_frame_factory_impl.cc b/chromium/media/gpu/android/video_frame_factory_impl.cc
index 70b66984f11..6531d808624 100644
--- a/chromium/media/gpu/android/video_frame_factory_impl.cc
+++ b/chromium/media/gpu/android/video_frame_factory_impl.cc
@@ -8,8 +8,8 @@
#include "base/android/android_image_reader_compat.h"
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback.h"
+#include "base/callback_helpers.h"
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
#include "base/single_thread_task_runner.h"
@@ -43,22 +43,16 @@ base::Optional<VideoFrameMetadata::CopyMode> GetVideoFrameCopyMode(
if (!enable_threaded_texture_mailboxes)
return base::nullopt;
- if (features::IsAImageReaderEnabled() &&
- base::FeatureList::IsEnabled(media::kWebViewZeroCopyVideo) &&
- !base::android::AndroidImageReader::LimitAImageReaderMaxSizeToOne()) {
- return VideoFrameMetadata::CopyMode::kCopyMailboxesOnly;
- } else {
- return VideoFrameMetadata::CopyMode::kCopyToNewTexture;
- }
+ return features::IsWebViewZeroCopyVideoEnabled()
+ ? VideoFrameMetadata::CopyMode::kCopyMailboxesOnly
+ : VideoFrameMetadata::CopyMode::kCopyToNewTexture;
}
gpu::TextureOwner::Mode GetTextureOwnerMode(
VideoFrameFactory::OverlayMode overlay_mode,
const base::Optional<VideoFrameMetadata::CopyMode>& copy_mode) {
if (copy_mode == VideoFrameMetadata::kCopyMailboxesOnly) {
- DCHECK(features::IsAImageReaderEnabled() &&
- base::FeatureList::IsEnabled(media::kWebViewZeroCopyVideo) &&
- !base::android::AndroidImageReader::LimitAImageReaderMaxSizeToOne());
+ DCHECK(features::IsWebViewZeroCopyVideoEnabled());
return gpu::TextureOwner::Mode::kAImageReaderInsecureMultithreaded;
}
diff --git a/chromium/media/gpu/android/video_frame_factory_impl_unittest.cc b/chromium/media/gpu/android/video_frame_factory_impl_unittest.cc
index 13231efe252..11c06ac2a2f 100644
--- a/chromium/media/gpu/android/video_frame_factory_impl_unittest.cc
+++ b/chromium/media/gpu/android/video_frame_factory_impl_unittest.cc
@@ -5,7 +5,7 @@
#include "media/gpu/android/video_frame_factory_impl.h"
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/single_thread_task_runner.h"
#include "base/test/gmock_callback_support.h"
#include "base/test/mock_callback.h"
diff --git a/chromium/media/gpu/args.gni b/chromium/media/gpu/args.gni
index 2a87b9980f7..e61e9567d85 100644
--- a/chromium/media/gpu/args.gni
+++ b/chromium/media/gpu/args.gni
@@ -2,6 +2,9 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import("//build/config/chromeos/ui_mode.gni")
+import("//build/config/ui.gni")
+
declare_args() {
# Indicates if V4L plugin is used.
use_v4lplugin = false
@@ -16,5 +19,21 @@ declare_args() {
# Indicates if VA-API-based hardware acceleration is to be used. This
# is typically the case on x86-based ChromeOS devices.
- use_vaapi = false
+ # VA-API should also be compiled by default on x11-using linux devices
+ # using x86/x64.
+ use_vaapi =
+ is_linux && use_x11 && (current_cpu == "x86" || current_cpu == "x64")
+
+ # Indicates if ChromeOS protected media support exists. This is used
+ # to enable the CDM daemon in Chrome OS as well as support for
+ # encrypted content with HW video decoders.
+ use_chromeos_protected_media = false
+}
+
+# GN requires args that depend on other args to be declared in successive
+# declare_args() blocks.
+declare_args() {
+ # VA-API also allows decoding of images, but we don't want to use this
+ # outside of chromeos, even if video decoding is enabled.
+ use_vaapi_image_codecs = use_vaapi && is_ash
}
diff --git a/chromium/media/gpu/chromeos/BUILD.gn b/chromium/media/gpu/chromeos/BUILD.gn
index 633a3c74e1e..d3f918b9552 100644
--- a/chromium/media/gpu/chromeos/BUILD.gn
+++ b/chromium/media/gpu/chromeos/BUILD.gn
@@ -161,6 +161,7 @@ test("image_processor_test") {
deps = [
":chromeos",
"//base/test:test_support",
+ "//build:chromeos_buildflags",
"//media:test_support",
"//media/gpu:buildflags",
"//media/gpu/test:frame_file_writer",
diff --git a/chromium/media/gpu/chromeos/dmabuf_video_frame_pool.h b/chromium/media/gpu/chromeos/dmabuf_video_frame_pool.h
index 92aa1d63484..b874b62005f 100644
--- a/chromium/media/gpu/chromeos/dmabuf_video_frame_pool.h
+++ b/chromium/media/gpu/chromeos/dmabuf_video_frame_pool.h
@@ -47,7 +47,8 @@ class MEDIA_GPU_EXPORT DmabufVideoFramePool {
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
- size_t max_num_frames) = 0;
+ size_t max_num_frames,
+ bool use_protected) = 0;
// Returns a frame from the pool with the parameters assigned by
// SetFrameFormat() and zero timestamp. Returns nullptr if the pool is
diff --git a/chromium/media/gpu/chromeos/fourcc.h b/chromium/media/gpu/chromeos/fourcc.h
index 652f203e02a..f9e5d691174 100644
--- a/chromium/media/gpu/chromeos/fourcc.h
+++ b/chromium/media/gpu/chromeos/fourcc.h
@@ -31,7 +31,7 @@ class MEDIA_GPU_EXPORT Fourcc {
public:
enum Value : uint32_t {
// RGB formats.
- // https://linuxtv.org/downloads/v4l-dvb-apis/uapi/v4l/pixfmt-rgb.html
+ // https://linuxtv.org/downloads/v4l-dvb-apis-new/userspace-api/v4l/pixfmt-rgb.html
// Maps to PIXEL_FORMAT_ARGB, V4L2_PIX_FMT_ABGR32, VA_FOURCC_BGRA.
// 32bpp BGRA (byte-order), 1 plane.
AR24 = ComposeFourcc('A', 'R', '2', '4'),
@@ -56,7 +56,7 @@ class MEDIA_GPU_EXPORT Fourcc {
RGB4 = ComposeFourcc('R', 'G', 'B', '4'),
// YUV420 single-planar formats.
- // https://linuxtv.org/downloads/v4l-dvb-apis/uapi/v4l/pixfmt-yuv420.html
+ // https://linuxtv.org/downloads/v4l-dvb-apis-new/userspace-api/v4l/pixfmt-yuv420.html
// Maps to PIXEL_FORMAT_I420, V4L2_PIX_FMT_YUV420, VA_FOURCC_I420.
// 12bpp YUV planar 1x1 Y, 2x2 UV samples.
YU12 = ComposeFourcc('Y', 'U', '1', '2'),
@@ -65,20 +65,20 @@ class MEDIA_GPU_EXPORT Fourcc {
YV12 = ComposeFourcc('Y', 'V', '1', '2'),
// YUV420 multi-planar format.
- // https://linuxtv.org/downloads/v4l-dvb-apis/uapi/v4l/pixfmt-yuv420m.htm
+ // https://linuxtv.org/downloads/v4l-dvb-apis-new/userspace-api/v4l/pixfmt-yuv420m.html
// Maps to PIXEL_FORMAT_I420, V4L2_PIX_FMT_YUV420M.
YM12 = ComposeFourcc('Y', 'M', '1', '2'),
// Maps to PIXEL_FORMAT_YV12, V4L2_PIX_FMT_YVU420M.
YM21 = ComposeFourcc('Y', 'M', '2', '1'),
// YUYV format.
- // https://linuxtv.org/downloads/v4l-dvb-apis/uapi/v4l/pixfmt-yuyv.html
+ // https://linuxtv.org/downloads/v4l-dvb-apis-new/userspace-api/v4l/pixfmt-yuyv.html
// Maps to PIXEL_FORMAT_YUY2, V4L2_PIX_FMT_YUYV, VA_FOURCC_YUY2.
// 16bpp YUV planar (YUV 4:2:2), YUYV (byte-order), 1 plane.
YUYV = ComposeFourcc('Y', 'U', 'Y', 'V'),
// NV12 single-planar format.
- // https://linuxtv.org/downloads/v4l-dvb-apis/uapi/v4l/pixfmt-nv12.html
+ // https://linuxtv.org/downloads/v4l-dvb-apis-new/userspace-api/v4l/pixfmt-nv12.html
// Maps to PIXEL_FORMAT_NV12, V4L2_PIX_FMT_NV12, VA_FOURCC_NV12.
// 12bpp with Y plane followed by a 2x2 interleaved UV plane.
NV12 = ComposeFourcc('N', 'V', '1', '2'),
@@ -87,20 +87,20 @@ class MEDIA_GPU_EXPORT Fourcc {
NV21 = ComposeFourcc('N', 'V', '2', '1'),
// NV12 multi-planar format.
- // https://linuxtv.org/downloads/v4l-dvb-apis/uapi/v4l/pixfmt-nv12m.html
+ // https://linuxtv.org/downloads/v4l-dvb-apis-new/userspace-api/v4l/pixfmt-nv12m.html
// Maps to PIXEL_FORMAT_NV12, V4L2_PIX_FMT_NV12M,
NM12 = ComposeFourcc('N', 'M', '1', '2'),
// Maps to PIXEL_FORMAT_NV21, V4L2_PIX_FMT_NV21M.
NM21 = ComposeFourcc('N', 'M', '2', '1'),
// YUV422 multi-planar format.
- // https://linuxtv.org/downloads/v4l-dvb-apis/uapi/v4l/pixfmt-yuv422m.html
+ // https://linuxtv.org/downloads/v4l-dvb-apis-new/userspace-api/v4l/pixfmt-yuv422m.html
// Maps to PIXEL_FORMAT_I422, V4L2_PIX_FMT_YUV422M
// 16bpp YUV planar 1x1 Y, 2x1 UV samples.
YM16 = ComposeFourcc('Y', 'M', '1', '6'),
// V4L2 proprietary format.
- // https://linuxtv.org/downloads/v4l-dvb-apis/uapi/v4l/pixfmt-reserved.html
+ // https://linuxtv.org/downloads/v4l-dvb-apis-new/userspace-api/v4l/pixfmt-reserved.html
// Maps to V4L2_PIX_FMT_MT21C.
// It is used for MT8173 hardware video decoder output and should be
// converted by MT8173 image processor for compositor to render.
diff --git a/chromium/media/gpu/chromeos/image_processor.cc b/chromium/media/gpu/chromeos/image_processor.cc
index c3227c88154..6b3dccc35e6 100644
--- a/chromium/media/gpu/chromeos/image_processor.cc
+++ b/chromium/media/gpu/chromeos/image_processor.cc
@@ -9,7 +9,7 @@
#include <sstream>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/memory/ptr_util.h"
#include "base/task/post_task.h"
#include "base/task/task_traits.h"
@@ -21,12 +21,6 @@ namespace media {
namespace {
-std::ostream& operator<<(std::ostream& ostream,
- const VideoFrame::StorageType& storage_type) {
- ostream << VideoFrame::StorageTypeToString(storage_type);
- return ostream;
-}
-
// Verify if the format of |frame| matches |config|.
bool CheckVideoFrameFormat(const ImageProcessor::PortConfig& config,
const VideoFrame& frame) {
@@ -45,12 +39,6 @@ bool CheckVideoFrameFormat(const ImageProcessor::PortConfig& config,
return false;
}
- if (frame.storage_type() != config.storage_type()) {
- VLOGF(1) << "Invalid frame.storage_type=" << frame.storage_type()
- << ", input_storage_type=" << config.storage_type();
- return false;
- }
-
return true;
}
diff --git a/chromium/media/gpu/chromeos/image_processor_test.cc b/chromium/media/gpu/chromeos/image_processor_test.cc
index 84c3535203b..c13ee59f2d3 100644
--- a/chromium/media/gpu/chromeos/image_processor_test.cc
+++ b/chromium/media/gpu/chromeos/image_processor_test.cc
@@ -12,6 +12,7 @@
#include "base/test/launcher/unit_test_launcher.h"
#include "base/test/test_suite.h"
#include "build/build_config.h"
+#include "build/chromeos_buildflags.h"
#include "media/base/video_frame.h"
#include "media/base/video_frame_layout.h"
#include "media/base/video_types.h"
@@ -222,7 +223,7 @@ TEST_P(ImageProcessorParamTest, ConvertOneTime_MemToMem) {
EXPECT_TRUE(ip_client->WaitForFrameProcessors());
}
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
// We don't yet have the function to create Dmabuf-backed VideoFrame on
// platforms except ChromeOS. So MemToDmabuf test is limited on ChromeOS.
TEST_P(ImageProcessorParamTest, ConvertOneTime_DmabufToMem) {
@@ -286,7 +287,7 @@ TEST_P(ImageProcessorParamTest, ConvertOneTime_GmbToGmb) {
EXPECT_EQ(ip_client->GetNumOfProcessedImages(), 1u);
EXPECT_TRUE(ip_client->WaitForFrameProcessors());
}
-#endif // defined(OS_CHROMEOS)
+#endif // BUILDFLAG(IS_ASH)
// BGRA -> NV12
// I420 -> NV12
@@ -334,7 +335,7 @@ INSTANTIATE_TEST_SUITE_P(
std::make_tuple(kNV12Image180, kNV12Image90),
std::make_tuple(kNV12Image180, kNV12Image)));
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
// TODO(hiroh): Add more tests.
// MEM->DMABUF (V4L2VideoEncodeAccelerator),
#endif
diff --git a/chromium/media/gpu/chromeos/image_processor_with_pool.cc b/chromium/media/gpu/chromeos/image_processor_with_pool.cc
index b070ab0057e..2c03db29ef3 100644
--- a/chromium/media/gpu/chromeos/image_processor_with_pool.cc
+++ b/chromium/media/gpu/chromeos/image_processor_with_pool.cc
@@ -19,7 +19,8 @@ std::unique_ptr<ImageProcessorWithPool> ImageProcessorWithPool::Create(
const scoped_refptr<base::SequencedTaskRunner> task_runner) {
const ImageProcessor::PortConfig& config = image_processor->output_config();
base::Optional<GpuBufferLayout> layout = frame_pool->Initialize(
- config.fourcc, config.size, config.visible_rect, config.size, num_frames);
+ config.fourcc, config.size, config.visible_rect, config.size, num_frames,
+ /*use_protected=*/false);
if (!layout || layout->size() != config.size) {
VLOGF(1) << "Failed to request frame with correct size. "
<< config.size.ToString() << " != "
diff --git a/chromium/media/gpu/chromeos/mailbox_video_frame_converter.cc b/chromium/media/gpu/chromeos/mailbox_video_frame_converter.cc
index b941310d99e..515146c48ba 100644
--- a/chromium/media/gpu/chromeos/mailbox_video_frame_converter.cc
+++ b/chromium/media/gpu/chromeos/mailbox_video_frame_converter.cc
@@ -5,7 +5,7 @@
#include "media/gpu/chromeos/mailbox_video_frame_converter.h"
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/location.h"
#include "base/memory/ptr_util.h"
#include "base/single_thread_task_runner.h"
diff --git a/chromium/media/gpu/chromeos/platform_video_frame_pool.cc b/chromium/media/gpu/chromeos/platform_video_frame_pool.cc
index 9fd415c4252..5c1cdd7792c 100644
--- a/chromium/media/gpu/chromeos/platform_video_frame_pool.cc
+++ b/chromium/media/gpu/chromeos/platform_video_frame_pool.cc
@@ -13,6 +13,7 @@
#include "media/gpu/chromeos/gpu_buffer_layout.h"
#include "media/gpu/chromeos/platform_video_frame_utils.h"
#include "media/gpu/macros.h"
+#include "media/media_buildflags.h"
namespace media {
@@ -25,10 +26,20 @@ scoped_refptr<VideoFrame> DefaultCreateFrame(
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
+ bool use_protected,
base::TimeDelta timestamp) {
- return CreateGpuMemoryBufferVideoFrame(
+ scoped_refptr<VideoFrame> frame = CreateGpuMemoryBufferVideoFrame(
gpu_memory_buffer_factory, format, coded_size, visible_rect, natural_size,
- timestamp, gfx::BufferUsage::SCANOUT_VDA_WRITE);
+ timestamp,
+ use_protected ? gfx::BufferUsage::PROTECTED_SCANOUT_VDA_WRITE
+ : gfx::BufferUsage::SCANOUT_VDA_WRITE);
+ if (frame && use_protected) {
+ media::VideoFrameMetadata frame_metadata;
+ frame_metadata.protected_video = true;
+ frame_metadata.hw_protected = true;
+ frame->set_metadata(frame_metadata);
+ }
+ return frame;
}
} // namespace
@@ -89,7 +100,7 @@ scoped_refptr<VideoFrame> PlatformVideoFramePool::GetFrame() {
scoped_refptr<VideoFrame> new_frame =
create_frame_cb_.Run(gpu_memory_buffer_factory_, format, coded_size,
gfx::Rect(GetRectSizeFromOrigin(visible_rect_)),
- coded_size, base::TimeDelta());
+ coded_size, use_protected_, base::TimeDelta());
if (!new_frame)
return nullptr;
@@ -114,6 +125,16 @@ scoped_refptr<VideoFrame> PlatformVideoFramePool::GetFrame() {
// Clear all metadata before returning to client, in case origin frame has any
// unrelated metadata.
wrapped_frame->clear_metadata();
+
+ // We need to put this metadata in the wrapped frame if we are in protected
+ // mode.
+ if (use_protected_) {
+ media::VideoFrameMetadata frame_metadata;
+ frame_metadata.protected_video = true;
+ frame_metadata.hw_protected = true;
+ wrapped_frame->set_metadata(frame_metadata);
+ }
+
return wrapped_frame;
}
@@ -122,7 +143,8 @@ base::Optional<GpuBufferLayout> PlatformVideoFramePool::Initialize(
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
- size_t max_num_frames) {
+ size_t max_num_frames,
+ bool use_protected) {
DVLOGF(4);
base::AutoLock auto_lock(lock_);
@@ -133,6 +155,13 @@ base::Optional<GpuBufferLayout> PlatformVideoFramePool::Initialize(
return base::nullopt;
}
+#if !BUILDFLAG(USE_CHROMEOS_PROTECTED_MEDIA)
+ if (use_protected) {
+ VLOGF(1) << "Protected buffers unsupported";
+ return base::nullopt;
+ }
+#endif
+
// If the frame layout changed we need to allocate new frames so we will clear
// the pool here. If only the visible rect or natural size changed, we don't
// need to allocate new frames (unless the change in the visible rect causes a
@@ -145,15 +174,15 @@ base::Optional<GpuBufferLayout> PlatformVideoFramePool::Initialize(
// hardware overlay purposes. The caveat is that different visible rectangles
// can map to the same framebuffer size, i.e., all the visible rectangles with
// the same bottom-right corner map to the same framebuffer size.
- if (!IsSameFormat_Locked(format, coded_size, visible_rect)) {
+ if (!IsSameFormat_Locked(format, coded_size, visible_rect, use_protected)) {
DVLOGF(4) << "The video frame format is changed. Clearing the pool.";
free_frames_.clear();
// Create a temporary frame in order to know VideoFrameLayout that
// VideoFrame that will be allocated in GetFrame() has.
- auto frame =
- create_frame_cb_.Run(gpu_memory_buffer_factory_, format, coded_size,
- visible_rect, natural_size, base::TimeDelta());
+ auto frame = create_frame_cb_.Run(gpu_memory_buffer_factory_, format,
+ coded_size, visible_rect, natural_size,
+ use_protected, base::TimeDelta());
if (!frame) {
VLOGF(1) << "Failed to create video frame " << format << " (fourcc "
<< fourcc.ToString() << ")";
@@ -167,6 +196,7 @@ base::Optional<GpuBufferLayout> PlatformVideoFramePool::Initialize(
visible_rect_ = visible_rect;
natural_size_ = natural_size;
max_num_frames_ = max_num_frames;
+ use_protected_ = use_protected;
// The pool might become available because of |max_num_frames_| increased.
// Notify the client if so.
@@ -236,7 +266,8 @@ void PlatformVideoFramePool::OnFrameReleased(
frames_in_use_.erase(it);
if (IsSameFormat_Locked(origin_frame->format(), origin_frame->coded_size(),
- origin_frame->visible_rect())) {
+ origin_frame->visible_rect(),
+ origin_frame->metadata()->hw_protected)) {
InsertFreeFrame_Locked(std::move(origin_frame));
}
@@ -261,10 +292,10 @@ size_t PlatformVideoFramePool::GetTotalNumFrames_Locked() const {
return free_frames_.size() + frames_in_use_.size();
}
-bool PlatformVideoFramePool::IsSameFormat_Locked(
- VideoPixelFormat format,
- const gfx::Size& coded_size,
- const gfx::Rect& visible_rect) const {
+bool PlatformVideoFramePool::IsSameFormat_Locked(VideoPixelFormat format,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ bool use_protected) const {
DVLOGF(4);
lock_.AssertAcquired();
@@ -272,7 +303,8 @@ bool PlatformVideoFramePool::IsSameFormat_Locked(
frame_layout_->fourcc().ToVideoPixelFormat() == format &&
frame_layout_->size() == coded_size &&
GetRectSizeFromOrigin(visible_rect_) ==
- GetRectSizeFromOrigin(visible_rect);
+ GetRectSizeFromOrigin(visible_rect) &&
+ use_protected_ == use_protected;
}
size_t PlatformVideoFramePool::GetPoolSizeForTesting() {
diff --git a/chromium/media/gpu/chromeos/platform_video_frame_pool.h b/chromium/media/gpu/chromeos/platform_video_frame_pool.h
index 4406b8f5cb6..ed8c875baa5 100644
--- a/chromium/media/gpu/chromeos/platform_video_frame_pool.h
+++ b/chromium/media/gpu/chromeos/platform_video_frame_pool.h
@@ -52,7 +52,8 @@ class MEDIA_GPU_EXPORT PlatformVideoFramePool : public DmabufVideoFramePool {
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
- size_t max_num_frames) override;
+ size_t max_num_frames,
+ bool use_protected) override;
scoped_refptr<VideoFrame> GetFrame() override;
bool IsExhausted() override;
void NotifyWhenFrameAvailable(base::OnceClosure cb) override;
@@ -87,7 +88,8 @@ class MEDIA_GPU_EXPORT PlatformVideoFramePool : public DmabufVideoFramePool {
size_t GetTotalNumFrames_Locked() const EXCLUSIVE_LOCKS_REQUIRED(lock_);
bool IsSameFormat_Locked(VideoPixelFormat format,
const gfx::Size& coded_size,
- const gfx::Rect& visible_rect) const
+ const gfx::Rect& visible_rect,
+ bool use_protected) const
EXCLUSIVE_LOCKS_REQUIRED(lock_);
bool IsExhausted_Locked() EXCLUSIVE_LOCKS_REQUIRED(lock_);
@@ -98,6 +100,7 @@ class MEDIA_GPU_EXPORT PlatformVideoFramePool : public DmabufVideoFramePool {
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
+ bool use_protected,
base::TimeDelta timestamp)>;
CreateFrameCB create_frame_cb_;
@@ -128,6 +131,9 @@ class MEDIA_GPU_EXPORT PlatformVideoFramePool : public DmabufVideoFramePool {
// The maximum number of frames created by the pool.
size_t max_num_frames_ GUARDED_BY(lock_) = 0;
+ // If we are using HW protected buffers.
+ bool use_protected_ GUARDED_BY(lock_) = false;
+
// Callback which is called when the pool is not exhausted.
base::OnceClosure frame_available_cb_ GUARDED_BY(lock_);
diff --git a/chromium/media/gpu/chromeos/platform_video_frame_pool_unittest.cc b/chromium/media/gpu/chromeos/platform_video_frame_pool_unittest.cc
index f25359cb1e2..fbb9f6589ac 100644
--- a/chromium/media/gpu/chromeos/platform_video_frame_pool_unittest.cc
+++ b/chromium/media/gpu/chromeos/platform_video_frame_pool_unittest.cc
@@ -9,7 +9,7 @@
#include <memory>
#include <vector>
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/test/task_environment.h"
#include "base/threading/thread_task_runner_handle.h"
#include "gpu/command_buffer/common/mailbox_holder.h"
@@ -30,6 +30,7 @@ scoped_refptr<VideoFrame> CreateGpuMemoryBufferVideoFrame(
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
+ bool use_protected,
base::TimeDelta timestamp) {
base::Optional<gfx::BufferFormat> gfx_format =
VideoPixelFormatToGfxBufferFormat(format);
@@ -68,7 +69,8 @@ class PlatformVideoFramePoolTest
visible_rect_ = visible_rect;
natural_size_ = visible_rect.size();
layout_ = pool_->Initialize(fourcc, coded_size, visible_rect_,
- natural_size_, kNumFrames);
+ natural_size_, kNumFrames,
+ /*use_protected=*/false);
return !!layout_;
}
@@ -288,7 +290,8 @@ TEST_P(PlatformVideoFramePoolTest, InitializeFail) {
SetCreateFrameCB(base::BindRepeating(
[](gpu::GpuMemoryBufferFactory* factory, VideoPixelFormat format,
const gfx::Size& coded_size, const gfx::Rect& visible_rect,
- const gfx::Size& natural_size, base::TimeDelta timestamp) {
+ const gfx::Size& natural_size, bool use_protected,
+ base::TimeDelta timestamp) {
auto frame = scoped_refptr<VideoFrame>(nullptr);
return frame;
}));
diff --git a/chromium/media/gpu/chromeos/platform_video_frame_utils.cc b/chromium/media/gpu/chromeos/platform_video_frame_utils.cc
index 3a70bda71e3..ce31a4e5454 100644
--- a/chromium/media/gpu/chromeos/platform_video_frame_utils.cc
+++ b/chromium/media/gpu/chromeos/platform_video_frame_utils.cc
@@ -10,7 +10,6 @@
#include <limits>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback_helpers.h"
#include "base/files/file.h"
#include "base/files/file_path.h"
diff --git a/chromium/media/gpu/chromeos/vd_video_decode_accelerator.cc b/chromium/media/gpu/chromeos/vd_video_decode_accelerator.cc
index fb085cba78d..4b5ca97fd64 100644
--- a/chromium/media/gpu/chromeos/vd_video_decode_accelerator.cc
+++ b/chromium/media/gpu/chromeos/vd_video_decode_accelerator.cc
@@ -291,6 +291,12 @@ void VdVideoDecodeAccelerator::RequestFrames(
notify_layout_changed_cb_ = std::move(notify_layout_changed_cb);
import_frame_cb_ = std::move(import_frame_cb);
+ // Stop tracking currently-allocated pictures, otherwise the count will be
+ // corrupted as we import new frames with the same IDs as the old ones.
+ // The client should still have its own reference to the frame data, which
+ // will keep it valid for as long as it needs it.
+ picture_at_client_.clear();
+
// After calling ProvidePictureBuffersWithVisibleRect(), the client might
// still send buffers with old coded size. We temporarily store at
// |pending_coded_size_|.
@@ -375,6 +381,16 @@ void VdVideoDecodeAccelerator::ImportBufferForPicture(
base::BindOnce(&VdVideoDecodeAccelerator::OnFrameReleasedThunk,
weak_this_, client_task_runner_, std::move(origin_frame)));
+ // This should not happen - picture_at_client_ should either be initially
+ // empty, or be cleared as RequestFrames() is called. However for extra safety
+ // let's make sure the slot for the picture buffer ID is free, otherwise we
+ // might lose track of the reference count and keep frames out of the pool
+ // forever.
+ if (picture_at_client_.erase(picture_buffer_id) > 0) {
+ VLOGF(1) << "Picture " << picture_buffer_id
+ << " still referenced, dropping it.";
+ }
+
DCHECK(import_frame_cb_);
import_frame_cb_.Run(std::move(wrapped_frame));
}
diff --git a/chromium/media/gpu/chromeos/vda_video_frame_pool.cc b/chromium/media/gpu/chromeos/vda_video_frame_pool.cc
index 076b25f7798..f7ba7977be4 100644
--- a/chromium/media/gpu/chromeos/vda_video_frame_pool.cc
+++ b/chromium/media/gpu/chromeos/vda_video_frame_pool.cc
@@ -34,10 +34,16 @@ base::Optional<GpuBufferLayout> VdaVideoFramePool::Initialize(
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
- size_t max_num_frames) {
+ size_t max_num_frames,
+ bool use_protected) {
DVLOGF(3);
DCHECK_CALLED_ON_VALID_SEQUENCE(parent_sequence_checker_);
+ if (use_protected) {
+ LOG(ERROR) << "Cannot allocated protected buffers for VDA";
+ return base::nullopt;
+ }
+
visible_rect_ = visible_rect;
natural_size_ = natural_size;
@@ -46,6 +52,12 @@ base::Optional<GpuBufferLayout> VdaVideoFramePool::Initialize(
DVLOGF(3) << "Arguments related to frame layout are not changed, skip.";
return layout_;
}
+
+ // Invalidate weak pointers so the re-import callbacks of the frames we are
+ // about to stop managing do not run and add them back to us.
+ weak_this_factory_.InvalidateWeakPtrs();
+ weak_this_ = weak_this_factory_.GetWeakPtr();
+
max_num_frames_ = max_num_frames;
fourcc_ = fourcc;
coded_size_ = coded_size;
diff --git a/chromium/media/gpu/chromeos/vda_video_frame_pool.h b/chromium/media/gpu/chromeos/vda_video_frame_pool.h
index d1d426bf7f2..fd2995a9af7 100644
--- a/chromium/media/gpu/chromeos/vda_video_frame_pool.h
+++ b/chromium/media/gpu/chromeos/vda_video_frame_pool.h
@@ -66,7 +66,8 @@ class VdaVideoFramePool : public DmabufVideoFramePool {
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
- size_t max_num_frames) override;
+ size_t max_num_frames,
+ bool use_protected) override;
scoped_refptr<VideoFrame> GetFrame() override;
bool IsExhausted() override;
void NotifyWhenFrameAvailable(base::OnceClosure cb) override;
diff --git a/chromium/media/gpu/chromeos/video_decoder_pipeline.cc b/chromium/media/gpu/chromeos/video_decoder_pipeline.cc
index 7c52f3c93bb..8ff5ac44366 100644
--- a/chromium/media/gpu/chromeos/video_decoder_pipeline.cc
+++ b/chromium/media/gpu/chromeos/video_decoder_pipeline.cc
@@ -22,6 +22,7 @@
#include "media/gpu/chromeos/image_processor_factory.h"
#include "media/gpu/chromeos/platform_video_frame_pool.h"
#include "media/gpu/macros.h"
+#include "media/media_buildflags.h"
namespace media {
namespace {
@@ -198,6 +199,13 @@ void VideoDecoderPipeline::Initialize(const VideoDecoderConfig& config,
std::move(init_cb).Run(StatusCode::kDecoderUnsupportedConfig);
return;
}
+#if BUILDFLAG(USE_CHROMEOS_PROTECTED_MEDIA)
+ if (config.is_encrypted() && !cdm_context) {
+ VLOGF(1) << "Encrypted streams require a CdmContext";
+ std::move(init_cb).Run(StatusCode::kDecoderUnsupportedConfig);
+ return;
+ }
+#else // BUILDFLAG(USE_CHROMEOS_PROTECTED_MEDIA)
if (config.is_encrypted()) {
VLOGF(1) << "Encrypted streams are not supported for this VD";
std::move(init_cb).Run(StatusCode::kEncryptedContentUnsupported);
@@ -208,16 +216,18 @@ void VideoDecoderPipeline::Initialize(const VideoDecoderConfig& config,
std::move(init_cb).Run(StatusCode::kEncryptedContentUnsupported);
return;
}
+#endif // !BUILDFLAG(USE_CHROMEOS_PROTECTED_MEDIA)
needs_bitstream_conversion_ = (config.codec() == kCodecH264);
decoder_task_runner_->PostTask(
- FROM_HERE,
- base::BindOnce(&VideoDecoderPipeline::InitializeTask, decoder_weak_this_,
- config, std::move(init_cb), std::move(output_cb)));
+ FROM_HERE, base::BindOnce(&VideoDecoderPipeline::InitializeTask,
+ decoder_weak_this_, config, cdm_context,
+ std::move(init_cb), std::move(output_cb)));
}
void VideoDecoderPipeline::InitializeTask(const VideoDecoderConfig& config,
+ CdmContext* cdm_context,
InitCB init_cb,
const OutputCB& output_cb) {
DVLOGF(3);
@@ -232,18 +242,19 @@ void VideoDecoderPipeline::InitializeTask(const VideoDecoderConfig& config,
// resolution. Subsequent initializations are marked by |decoder_| already
// existing.
if (!decoder_) {
- CreateAndInitializeVD(config, Status());
+ CreateAndInitializeVD(config, cdm_context, Status());
} else {
decoder_->Initialize(
- config,
+ config, cdm_context,
base::BindOnce(&VideoDecoderPipeline::OnInitializeDone,
- decoder_weak_this_, config, Status()),
+ decoder_weak_this_, config, cdm_context, Status()),
base::BindRepeating(&VideoDecoderPipeline::OnFrameDecoded,
decoder_weak_this_));
}
}
void VideoDecoderPipeline::CreateAndInitializeVD(VideoDecoderConfig config,
+ CdmContext* cdm_context,
Status parent_error) {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
DCHECK(init_cb_);
@@ -269,19 +280,22 @@ void VideoDecoderPipeline::CreateAndInitializeVD(VideoDecoderConfig config,
DVLOGF(2) << "|decoder_| creation failed, trying again with the next "
"available create function.";
return CreateAndInitializeVD(
- config, AppendOrForwardStatus(parent_error,
- StatusCode::kDecoderFailedCreation));
+ config, cdm_context,
+ AppendOrForwardStatus(parent_error,
+ StatusCode::kDecoderFailedCreation));
}
decoder_->Initialize(
- config,
+ config, cdm_context,
base::BindOnce(&VideoDecoderPipeline::OnInitializeDone,
- decoder_weak_this_, config, std::move(parent_error)),
+ decoder_weak_this_, config, cdm_context,
+ std::move(parent_error)),
base::BindRepeating(&VideoDecoderPipeline::OnFrameDecoded,
decoder_weak_this_));
}
void VideoDecoderPipeline::OnInitializeDone(VideoDecoderConfig config,
+ CdmContext* cdm_context,
Status parent_error,
Status status) {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
@@ -300,7 +314,7 @@ void VideoDecoderPipeline::OnInitializeDone(VideoDecoderConfig config,
DVLOGF(3) << "|decoder_| initialization failed, trying again with the next "
"available create function.";
decoder_ = nullptr;
- CreateAndInitializeVD(config,
+ CreateAndInitializeVD(config, cdm_context,
AppendOrForwardStatus(parent_error, std::move(status)));
}
diff --git a/chromium/media/gpu/chromeos/video_decoder_pipeline.h b/chromium/media/gpu/chromeos/video_decoder_pipeline.h
index c3af61afa4e..d286ae2e6b1 100644
--- a/chromium/media/gpu/chromeos/video_decoder_pipeline.h
+++ b/chromium/media/gpu/chromeos/video_decoder_pipeline.h
@@ -85,6 +85,7 @@ class MEDIA_GPU_EXPORT DecoderInterface {
// TODO(akahuang): Add an error notification method to handle misused case.
// 4) |init_cb| may be called before this returns.
virtual void Initialize(const VideoDecoderConfig& config,
+ CdmContext* cdm_context,
InitCB init_cb,
const OutputCB& output_cb) = 0;
@@ -180,13 +181,17 @@ class MEDIA_GPU_EXPORT VideoDecoderPipeline : public VideoDecoder,
GetCreateDecoderFunctionsCB get_create_decoder_functions_cb);
void InitializeTask(const VideoDecoderConfig& config,
+ CdmContext* cdm_context,
InitCB init_cb,
const OutputCB& output_cb);
void ResetTask(base::OnceClosure closure);
void DecodeTask(scoped_refptr<DecoderBuffer> buffer, DecodeCB decode_cb);
- void CreateAndInitializeVD(VideoDecoderConfig config, Status parent_error);
+ void CreateAndInitializeVD(VideoDecoderConfig config,
+ CdmContext* cdm_context,
+ Status parent_error);
void OnInitializeDone(VideoDecoderConfig config,
+ CdmContext* cdm_context,
Status parent_error,
Status status);
diff --git a/chromium/media/gpu/chromeos/video_decoder_pipeline_unittest.cc b/chromium/media/gpu/chromeos/video_decoder_pipeline_unittest.cc
index b95a52d7e64..f46ef9ea434 100644
--- a/chromium/media/gpu/chromeos/video_decoder_pipeline_unittest.cc
+++ b/chromium/media/gpu/chromeos/video_decoder_pipeline_unittest.cc
@@ -5,10 +5,11 @@
#include "media/gpu/chromeos/video_decoder_pipeline.h"
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/test/gmock_callback_support.h"
#include "base/test/task_environment.h"
#include "base/threading/thread_task_runner_handle.h"
+#include "media/base/cdm_context.h"
#include "media/base/media_util.h"
#include "media/base/status.h"
#include "media/base/video_decoder_config.h"
@@ -34,12 +35,13 @@ class MockVideoFramePool : public DmabufVideoFramePool {
~MockVideoFramePool() override = default;
// DmabufVideoFramePool implementation.
- MOCK_METHOD5(Initialize,
+ MOCK_METHOD6(Initialize,
base::Optional<GpuBufferLayout>(const Fourcc&,
const gfx::Size&,
const gfx::Rect&,
const gfx::Size&,
- size_t));
+ size_t,
+ bool));
MOCK_METHOD0(GetFrame, scoped_refptr<VideoFrame>());
MOCK_METHOD0(IsExhausted, bool());
MOCK_METHOD1(NotifyWhenFrameAvailable, void(base::OnceClosure));
@@ -54,8 +56,9 @@ class MockDecoder : public DecoderInterface {
base::WeakPtr<DecoderInterface::Client>(nullptr)) {}
~MockDecoder() override = default;
- MOCK_METHOD3(Initialize,
- void(const VideoDecoderConfig&, InitCB, const OutputCB&));
+ MOCK_METHOD4(
+ Initialize,
+ void(const VideoDecoderConfig&, CdmContext*, InitCB, const OutputCB&));
MOCK_METHOD2(Decode, void(scoped_refptr<DecoderBuffer>, DecodeCB));
MOCK_METHOD1(Reset, void(base::OnceClosure));
MOCK_METHOD0(ApplyResolutionChange, void());
@@ -125,8 +128,8 @@ class VideoDecoderPipelineTest
scoped_refptr<base::SequencedTaskRunner> /* decoder_task_runner */,
base::WeakPtr<DecoderInterface::Client> /* client */) {
std::unique_ptr<MockDecoder> decoder(new MockDecoder());
- EXPECT_CALL(*decoder, Initialize(_, _, _))
- .WillOnce(::testing::WithArgs<1>([](VideoDecoder::InitCB init_cb) {
+ EXPECT_CALL(*decoder, Initialize(_, _, _, _))
+ .WillOnce(::testing::WithArgs<2>([](VideoDecoder::InitCB init_cb) {
std::move(init_cb).Run(OkStatus());
}));
return std::move(decoder);
@@ -137,8 +140,8 @@ class VideoDecoderPipelineTest
scoped_refptr<base::SequencedTaskRunner> /* decoder_task_runner */,
base::WeakPtr<DecoderInterface::Client> /* client */) {
std::unique_ptr<MockDecoder> decoder(new MockDecoder());
- EXPECT_CALL(*decoder, Initialize(_, _, _))
- .WillOnce(::testing::WithArgs<1>([](VideoDecoder::InitCB init_cb) {
+ EXPECT_CALL(*decoder, Initialize(_, _, _, _))
+ .WillOnce(::testing::WithArgs<2>([](VideoDecoder::InitCB init_cb) {
std::move(init_cb).Run(StatusCode::kDecoderFailedInitialization);
}));
return std::move(decoder);
diff --git a/chromium/media/gpu/gpu_video_decode_accelerator_factory.cc b/chromium/media/gpu/gpu_video_decode_accelerator_factory.cc
index a24ee0fe3c4..37cf3a0e089 100644
--- a/chromium/media/gpu/gpu_video_decode_accelerator_factory.cc
+++ b/chromium/media/gpu/gpu_video_decode_accelerator_factory.cc
@@ -74,7 +74,7 @@ gpu::VideoDecodeAcceleratorCapabilities GetDecoderCapabilitiesInternal(
#endif
#elif defined(OS_MAC)
capabilities.supported_profiles =
- VTVideoDecodeAccelerator::GetSupportedProfiles();
+ VTVideoDecodeAccelerator::GetSupportedProfiles(workarounds);
#endif
return GpuVideoAcceleratorUtil::ConvertMediaToGpuDecodeCapabilities(
@@ -232,7 +232,8 @@ GpuVideoDecodeAcceleratorFactory::CreateVTVDA(
const gpu::GpuPreferences& gpu_preferences,
MediaLog* media_log) const {
std::unique_ptr<VideoDecodeAccelerator> decoder;
- decoder.reset(new VTVideoDecodeAccelerator(gl_client_, media_log));
+ decoder.reset(
+ new VTVideoDecodeAccelerator(gl_client_, workarounds, media_log));
return decoder;
}
#endif
diff --git a/chromium/media/gpu/h264_decoder.cc b/chromium/media/gpu/h264_decoder.cc
index c224a1daff2..f679f986eba 100644
--- a/chromium/media/gpu/h264_decoder.cc
+++ b/chromium/media/gpu/h264_decoder.cc
@@ -6,7 +6,6 @@
#include <limits>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback_helpers.h"
#include "base/feature_list.h"
#include "base/logging.h"
diff --git a/chromium/media/gpu/h265_decoder.cc b/chromium/media/gpu/h265_decoder.cc
new file mode 100644
index 00000000000..4c44016a0c6
--- /dev/null
+++ b/chromium/media/gpu/h265_decoder.cc
@@ -0,0 +1,888 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <algorithm>
+
+#include "base/logging.h"
+#include "media/base/limits.h"
+#include "media/gpu/h265_decoder.h"
+
+namespace media {
+
+namespace {
+
+struct POCAscCompare {
+ bool operator()(const scoped_refptr<H265Picture>& a,
+ const scoped_refptr<H265Picture>& b) const {
+ return a->pic_order_cnt_val_ < b->pic_order_cnt_val_;
+ }
+};
+
+} // namespace
+
+H265Decoder::H265Accelerator::H265Accelerator() = default;
+
+H265Decoder::H265Accelerator::~H265Accelerator() = default;
+
+H265Decoder::H265Accelerator::Status H265Decoder::H265Accelerator::SetStream(
+ base::span<const uint8_t> stream,
+ const DecryptConfig* decrypt_config) {
+ return H265Decoder::H265Accelerator::Status::kNotSupported;
+}
+
+H265Decoder::H265Decoder(std::unique_ptr<H265Accelerator> accelerator,
+ VideoCodecProfile profile,
+ const VideoColorSpace& container_color_space)
+ : state_(kAfterReset),
+ container_color_space_(container_color_space),
+ profile_(profile),
+ accelerator_(std::move(accelerator)) {
+ DCHECK(accelerator_);
+ Reset();
+}
+
+H265Decoder::~H265Decoder() = default;
+
+#define SET_ERROR_AND_RETURN() \
+ do { \
+ DVLOG(1) << "Error during decode"; \
+ state_ = kError; \
+ return H265Decoder::kDecodeError; \
+ } while (0)
+
+#define CHECK_ACCELERATOR_RESULT(func) \
+ do { \
+ H265Accelerator::Status result = (func); \
+ switch (result) { \
+ case H265Accelerator::Status::kOk: \
+ break; \
+ case H265Accelerator::Status::kTryAgain: \
+ DVLOG(1) << #func " needs to try again"; \
+ return H265Decoder::kTryAgain; \
+ case H265Accelerator::Status::kFail: /* fallthrough */ \
+ case H265Accelerator::Status::kNotSupported: \
+ SET_ERROR_AND_RETURN(); \
+ } \
+ } while (0)
+
+void H265Decoder::SetStream(int32_t id, const DecoderBuffer& decoder_buffer) {
+ const uint8_t* ptr = decoder_buffer.data();
+ const size_t size = decoder_buffer.data_size();
+ const DecryptConfig* decrypt_config = decoder_buffer.decrypt_config();
+
+ DCHECK(ptr);
+ DCHECK(size);
+ DVLOG(4) << "New input stream id: " << id << " at: " << (void*)ptr
+ << " size: " << size;
+ stream_id_ = id;
+ current_stream_ = ptr;
+ current_stream_size_ = size;
+ current_stream_has_been_changed_ = true;
+ if (decrypt_config) {
+ parser_.SetEncryptedStream(ptr, size, decrypt_config->subsamples());
+ current_decrypt_config_ = decrypt_config->Clone();
+ } else {
+ parser_.SetStream(ptr, size);
+ current_decrypt_config_ = nullptr;
+ }
+}
+
+void H265Decoder::Reset() {
+ curr_pic_ = nullptr;
+ curr_nalu_ = nullptr;
+ curr_slice_hdr_ = nullptr;
+ last_slice_hdr_ = nullptr;
+ curr_sps_id_ = -1;
+ curr_pps_id_ = -1;
+
+ prev_tid0_pic_ = nullptr;
+ ref_pic_list_.clear();
+ ref_pic_list0_.clear();
+ ref_pic_list1_.clear();
+
+ dpb_.Clear();
+ parser_.Reset();
+ accelerator_->Reset();
+
+ state_ = kAfterReset;
+}
+
+H265Decoder::DecodeResult H265Decoder::Decode() {
+ if (state_ == kError) {
+ DVLOG(1) << "Decoder in error state";
+ return kDecodeError;
+ }
+
+ if (current_stream_has_been_changed_) {
+ // Calling H265Accelerator::SetStream() here instead of when the stream is
+ // originally set in case the accelerator needs to return kTryAgain.
+ H265Accelerator::Status result = accelerator_->SetStream(
+ base::span<const uint8_t>(current_stream_, current_stream_size_),
+ current_decrypt_config_.get());
+ switch (result) {
+ case H265Accelerator::Status::kOk: // fallthrough
+ case H265Accelerator::Status::kNotSupported:
+ // kNotSupported means the accelerator can't handle this stream,
+ // so everything will be done through the parser.
+ break;
+ case H265Accelerator::Status::kTryAgain:
+ DVLOG(1) << "SetStream() needs to try again";
+ return H265Decoder::kTryAgain;
+ case H265Accelerator::Status::kFail:
+ SET_ERROR_AND_RETURN();
+ }
+
+ // Reset the flag so that this is only called again next time SetStream()
+ // is called.
+ current_stream_has_been_changed_ = false;
+ }
+
+ while (true) {
+ H265Parser::Result par_res;
+
+ if (!curr_nalu_) {
+ curr_nalu_ = std::make_unique<H265NALU>();
+ par_res = parser_.AdvanceToNextNALU(curr_nalu_.get());
+ if (par_res == H265Parser::kEOStream) {
+ curr_nalu_.reset();
+ // We receive one frame per buffer, so we can output the frame now.
+ CHECK_ACCELERATOR_RESULT(FinishPrevFrameIfPresent());
+ return kRanOutOfStreamData;
+ }
+ if (par_res != H265Parser::kOk) {
+ curr_nalu_.reset();
+ SET_ERROR_AND_RETURN();
+ }
+
+ DVLOG(4) << "New NALU: " << static_cast<int>(curr_nalu_->nal_unit_type);
+ }
+
+ // 8.1.2 We only want nuh_layer_id of zero.
+ if (curr_nalu_->nuh_layer_id) {
+ DVLOG(4) << "Skipping NALU with nuh_layer_id="
+ << curr_nalu_->nuh_layer_id;
+ curr_nalu_.reset();
+ continue;
+ }
+
+ switch (curr_nalu_->nal_unit_type) {
+ case H265NALU::BLA_W_LP: // fallthrough
+ case H265NALU::BLA_W_RADL:
+ case H265NALU::BLA_N_LP:
+ case H265NALU::IDR_W_RADL:
+ case H265NALU::IDR_N_LP:
+ case H265NALU::TRAIL_N:
+ case H265NALU::TRAIL_R:
+ case H265NALU::TSA_N:
+ case H265NALU::TSA_R:
+ case H265NALU::STSA_N:
+ case H265NALU::STSA_R:
+ case H265NALU::RADL_N:
+ case H265NALU::RADL_R:
+ case H265NALU::RASL_N:
+ case H265NALU::RASL_R:
+ case H265NALU::CRA_NUT:
+ if (!curr_slice_hdr_) {
+ curr_slice_hdr_.reset(new H265SliceHeader());
+ if (last_slice_hdr_) {
+ // This is a multi-slice picture, so we should copy all of the prior
+ // slice header data to the new slice and use those as the default
+ // values that don't have syntax elements present.
+ memcpy(curr_slice_hdr_.get(), last_slice_hdr_.get(),
+ sizeof(H265SliceHeader));
+ last_slice_hdr_.reset();
+ }
+ par_res =
+ parser_.ParseSliceHeader(*curr_nalu_, curr_slice_hdr_.get());
+ if (par_res == H265Parser::kMissingParameterSet) {
+ // We may still be able to recover if we skip until we find the
+ // SPS/PPS.
+ curr_slice_hdr_.reset();
+ last_slice_hdr_.reset();
+ break;
+ }
+ if (par_res != H265Parser::kOk)
+ SET_ERROR_AND_RETURN();
+ if (!curr_slice_hdr_->irap_pic && state_ == kAfterReset) {
+ // We can't resume from a non-IRAP picture.
+ curr_slice_hdr_.reset();
+ last_slice_hdr_.reset();
+ break;
+ }
+
+ state_ = kTryPreprocessCurrentSlice;
+ if (curr_slice_hdr_->slice_pic_parameter_set_id != curr_pps_id_) {
+ bool need_new_buffers = false;
+ if (!ProcessPPS(curr_slice_hdr_->slice_pic_parameter_set_id,
+ &need_new_buffers)) {
+ SET_ERROR_AND_RETURN();
+ }
+
+ if (need_new_buffers) {
+ curr_pic_ = nullptr;
+ return kConfigChange;
+ }
+ }
+ }
+
+ if (state_ == kTryPreprocessCurrentSlice) {
+ CHECK_ACCELERATOR_RESULT(PreprocessCurrentSlice());
+ state_ = kEnsurePicture;
+ }
+
+ if (state_ == kEnsurePicture) {
+ if (curr_pic_) {
+ // |curr_pic_| already exists, so skip to ProcessCurrentSlice().
+ state_ = kTryCurrentSlice;
+ } else {
+ // New picture, try to start a new one or tell client we need more
+ // surfaces.
+ curr_pic_ = accelerator_->CreateH265Picture();
+ if (!curr_pic_)
+ return kRanOutOfSurfaces;
+ if (current_decrypt_config_)
+ curr_pic_->set_decrypt_config(current_decrypt_config_->Clone());
+
+ curr_pic_->first_picture_ = first_picture_;
+ first_picture_ = false;
+ state_ = kTryNewFrame;
+ }
+ }
+
+ if (state_ == kTryNewFrame) {
+ CHECK_ACCELERATOR_RESULT(StartNewFrame(curr_slice_hdr_.get()));
+ state_ = kTryCurrentSlice;
+ }
+
+ DCHECK_EQ(state_, kTryCurrentSlice);
+ CHECK_ACCELERATOR_RESULT(ProcessCurrentSlice());
+ state_ = kDecoding;
+ last_slice_hdr_.swap(curr_slice_hdr_);
+ curr_slice_hdr_.reset();
+ break;
+ case H265NALU::SPS_NUT:
+ CHECK_ACCELERATOR_RESULT(FinishPrevFrameIfPresent());
+ int sps_id;
+ par_res = parser_.ParseSPS(&sps_id);
+ if (par_res != H265Parser::kOk)
+ SET_ERROR_AND_RETURN();
+
+ break;
+ case H265NALU::PPS_NUT:
+ CHECK_ACCELERATOR_RESULT(FinishPrevFrameIfPresent());
+ int pps_id;
+ par_res = parser_.ParsePPS(*curr_nalu_, &pps_id);
+ if (par_res != H265Parser::kOk)
+ SET_ERROR_AND_RETURN();
+
+ break;
+ case H265NALU::EOS_NUT:
+ first_picture_ = true;
+ FALLTHROUGH;
+ case H265NALU::EOB_NUT: // fallthrough
+ case H265NALU::AUD_NUT:
+ case H265NALU::RSV_NVCL41:
+ case H265NALU::RSV_NVCL42:
+ case H265NALU::RSV_NVCL43:
+ case H265NALU::RSV_NVCL44:
+ case H265NALU::UNSPEC48:
+ case H265NALU::UNSPEC49:
+ case H265NALU::UNSPEC50:
+ case H265NALU::UNSPEC51:
+ case H265NALU::UNSPEC52:
+ case H265NALU::UNSPEC53:
+ case H265NALU::UNSPEC54:
+ case H265NALU::UNSPEC55:
+ CHECK_ACCELERATOR_RESULT(FinishPrevFrameIfPresent());
+ break;
+ default:
+ DVLOG(4) << "Skipping NALU type: " << curr_nalu_->nal_unit_type;
+ break;
+ }
+
+ DVLOG(4) << "NALU done";
+ curr_nalu_.reset();
+ }
+}
+
+gfx::Size H265Decoder::GetPicSize() const {
+ return pic_size_;
+}
+
+gfx::Rect H265Decoder::GetVisibleRect() const {
+ return visible_rect_;
+}
+
+VideoCodecProfile H265Decoder::GetProfile() const {
+ return profile_;
+}
+
+size_t H265Decoder::GetRequiredNumOfPictures() const {
+ constexpr size_t kPicsInPipeline = limits::kMaxVideoFrames + 1;
+ return GetNumReferenceFrames() + kPicsInPipeline;
+}
+
+size_t H265Decoder::GetNumReferenceFrames() const {
+ // Use the maximum number of pictures in the Decoded Picture Buffer.
+ return dpb_.max_num_pics();
+}
+
+bool H265Decoder::ProcessPPS(int pps_id, bool* need_new_buffers) {
+ DVLOG(4) << "Processing PPS id:" << pps_id;
+
+ const H265PPS* pps = parser_.GetPPS(pps_id);
+ // Slice header parsing already verified this should exist.
+ DCHECK(pps);
+
+ const H265SPS* sps = parser_.GetSPS(pps->pps_seq_parameter_set_id);
+ // PPS parsing already verified this should exist.
+ DCHECK(sps);
+
+ if (need_new_buffers)
+ *need_new_buffers = false;
+
+ gfx::Size new_pic_size = sps->GetCodedSize();
+ gfx::Rect new_visible_rect = sps->GetVisibleRect();
+ if (visible_rect_ != new_visible_rect) {
+ DVLOG(2) << "New visible rect: " << new_visible_rect.ToString();
+ visible_rect_ = new_visible_rect;
+ }
+
+ // Equation 7-8
+ max_pic_order_cnt_lsb_ =
+ std::pow(2, sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
+
+ VideoCodecProfile new_profile = H265Parser::ProfileIDCToVideoCodecProfile(
+ sps->profile_tier_level.general_profile_idc);
+
+ if (pic_size_ != new_pic_size || dpb_.max_num_pics() != sps->max_dpb_size ||
+ profile_ != new_profile) {
+ if (!Flush())
+ return false;
+ DVLOG(1) << "Codec profile: " << GetProfileName(new_profile)
+ << ", level(x30): " << sps->profile_tier_level.general_level_idc
+ << ", DPB size: " << sps->max_dpb_size
+ << ", Picture size: " << new_pic_size.ToString();
+ profile_ = new_profile;
+ pic_size_ = new_pic_size;
+ dpb_.set_max_num_pics(sps->max_dpb_size);
+ if (need_new_buffers)
+ *need_new_buffers = true;
+ }
+
+ return true;
+}
+
+H265Decoder::H265Accelerator::Status H265Decoder::PreprocessCurrentSlice() {
+ const H265SliceHeader* slice_hdr = curr_slice_hdr_.get();
+ DCHECK(slice_hdr);
+
+ if (slice_hdr->first_slice_segment_in_pic_flag) {
+ // New picture, so first finish the previous one before processing it.
+ H265Accelerator::Status result = FinishPrevFrameIfPresent();
+ if (result != H265Accelerator::Status::kOk)
+ return result;
+
+ DCHECK(!curr_pic_);
+ }
+
+ return H265Accelerator::Status::kOk;
+}
+
+H265Decoder::H265Accelerator::Status H265Decoder::ProcessCurrentSlice() {
+ DCHECK(curr_pic_);
+
+ const H265SliceHeader* slice_hdr = curr_slice_hdr_.get();
+ DCHECK(slice_hdr);
+
+ const H265SPS* sps = parser_.GetSPS(curr_sps_id_);
+ DCHECK(sps);
+
+ const H265PPS* pps = parser_.GetPPS(curr_pps_id_);
+ DCHECK(pps);
+ return accelerator_->SubmitSlice(sps, pps, slice_hdr, ref_pic_list0_,
+ ref_pic_list1_, curr_pic_.get(),
+ slice_hdr->nalu_data, slice_hdr->nalu_size,
+ parser_.GetCurrentSubsamples());
+}
+
+void H265Decoder::CalcPicOutputFlags(const H265SliceHeader* slice_hdr) {
+ if (slice_hdr->irap_pic) {
+ // 8.1.3
+ curr_pic_->no_rasl_output_flag_ =
+ (curr_nalu_->nal_unit_type >= H265NALU::BLA_W_LP &&
+ curr_nalu_->nal_unit_type <= H265NALU::IDR_N_LP) ||
+ curr_pic_->first_picture_;
+ } else {
+ curr_pic_->no_rasl_output_flag_ = false;
+ }
+
+ // C.5.2.2
+ if (slice_hdr->irap_pic && curr_pic_->no_rasl_output_flag_ &&
+ !curr_pic_->first_picture_) {
+ curr_pic_->no_output_of_prior_pics_flag_ =
+ (slice_hdr->nal_unit_type == H265NALU::CRA_NUT) ||
+ slice_hdr->no_output_of_prior_pics_flag;
+ } else {
+ curr_pic_->no_output_of_prior_pics_flag_ = false;
+ }
+
+ if ((slice_hdr->nal_unit_type == H265NALU::RASL_N ||
+ slice_hdr->nal_unit_type == H265NALU::RASL_R) &&
+ curr_pic_->no_rasl_output_flag_) {
+ curr_pic_->pic_output_flag_ = false;
+ } else {
+ curr_pic_->pic_output_flag_ = slice_hdr->pic_output_flag;
+ }
+}
+
+void H265Decoder::CalcPictureOrderCount(const H265PPS* pps,
+ const H265SliceHeader* slice_hdr) {
+ // 8.3.1 Decoding process for picture order count.
+ curr_pic_->valid_for_prev_tid0_pic_ =
+ !pps->temporal_id && (slice_hdr->nal_unit_type < H265NALU::RADL_N ||
+ slice_hdr->nal_unit_type > H265NALU::RSV_VCL_N14);
+ curr_pic_->slice_pic_order_cnt_lsb_ = slice_hdr->slice_pic_order_cnt_lsb;
+
+ // Calculate POC for current picture.
+ if ((!slice_hdr->irap_pic || !curr_pic_->no_rasl_output_flag_) &&
+ prev_tid0_pic_) {
+ const int prev_pic_order_cnt_lsb = prev_tid0_pic_->slice_pic_order_cnt_lsb_;
+ const int prev_pic_order_cnt_msb = prev_tid0_pic_->pic_order_cnt_msb_;
+ if ((slice_hdr->slice_pic_order_cnt_lsb < prev_pic_order_cnt_lsb) &&
+ ((prev_pic_order_cnt_lsb - slice_hdr->slice_pic_order_cnt_lsb) >=
+ (max_pic_order_cnt_lsb_ / 2))) {
+ curr_pic_->pic_order_cnt_msb_ =
+ prev_pic_order_cnt_msb + max_pic_order_cnt_lsb_;
+ } else if ((slice_hdr->slice_pic_order_cnt_lsb > prev_pic_order_cnt_lsb) &&
+ ((slice_hdr->slice_pic_order_cnt_lsb - prev_pic_order_cnt_lsb) >
+ (max_pic_order_cnt_lsb_ / 2))) {
+ curr_pic_->pic_order_cnt_msb_ =
+ prev_pic_order_cnt_msb - max_pic_order_cnt_lsb_;
+ } else {
+ curr_pic_->pic_order_cnt_msb_ = prev_pic_order_cnt_msb;
+ }
+ } else {
+ curr_pic_->pic_order_cnt_msb_ = 0;
+ }
+ curr_pic_->pic_order_cnt_val_ =
+ curr_pic_->pic_order_cnt_msb_ + slice_hdr->slice_pic_order_cnt_lsb;
+}
+
+bool H265Decoder::CalcRefPicPocs(const H265SPS* sps,
+ const H265PPS* pps,
+ const H265SliceHeader* slice_hdr) {
+ if (slice_hdr->nal_unit_type == H265NALU::IDR_W_RADL ||
+ slice_hdr->nal_unit_type == H265NALU::IDR_N_LP) {
+ num_poc_st_curr_before_ = num_poc_st_curr_after_ = num_poc_st_foll_ =
+ num_poc_lt_curr_ = num_poc_lt_foll_ = 0;
+ return true;
+ }
+
+ // 8.3.2 - NOTE 2
+ const H265StRefPicSet& curr_st_ref_pic_set = slice_hdr->GetStRefPicSet(sps);
+
+ // Equation 8-5.
+ int i, j, k;
+ for (i = 0, j = 0, k = 0; i < curr_st_ref_pic_set.num_negative_pics; ++i) {
+ if (curr_st_ref_pic_set.used_by_curr_pic_s0[i]) {
+ poc_st_curr_before_[j++] =
+ curr_pic_->pic_order_cnt_val_ + curr_st_ref_pic_set.delta_poc_s0[i];
+ } else {
+ poc_st_foll_[k++] =
+ curr_pic_->pic_order_cnt_val_ + curr_st_ref_pic_set.delta_poc_s0[i];
+ }
+ }
+ num_poc_st_curr_before_ = j;
+ for (i = 0, j = 0; i < curr_st_ref_pic_set.num_positive_pics; ++i) {
+ if (curr_st_ref_pic_set.used_by_curr_pic_s1[i]) {
+ poc_st_curr_after_[j++] =
+ curr_pic_->pic_order_cnt_val_ + curr_st_ref_pic_set.delta_poc_s1[i];
+ } else {
+ poc_st_foll_[k++] =
+ curr_pic_->pic_order_cnt_val_ + curr_st_ref_pic_set.delta_poc_s1[i];
+ }
+ }
+ num_poc_st_curr_after_ = j;
+ num_poc_st_foll_ = k;
+ for (i = 0, j = 0, k = 0;
+ i < slice_hdr->num_long_term_sps + slice_hdr->num_long_term_pics; ++i) {
+ int poc_lt = slice_hdr->poc_lsb_lt[i];
+ if (slice_hdr->delta_poc_msb_present_flag[i]) {
+ poc_lt +=
+ curr_pic_->pic_order_cnt_val_ -
+ (slice_hdr->delta_poc_msb_cycle_lt[i] * max_pic_order_cnt_lsb_) -
+ (curr_pic_->pic_order_cnt_val_ & (max_pic_order_cnt_lsb_ - 1));
+ }
+ if (slice_hdr->used_by_curr_pic_lt[i]) {
+ poc_lt_curr_[j] = poc_lt;
+ curr_delta_poc_msb_present_flag_[j++] =
+ slice_hdr->delta_poc_msb_present_flag[i];
+ } else {
+ poc_lt_foll_[k] = poc_lt;
+ foll_delta_poc_msb_present_flag_[k++] =
+ slice_hdr->delta_poc_msb_present_flag[i];
+ }
+ }
+ num_poc_lt_curr_ = j;
+ num_poc_lt_foll_ = k;
+
+ // Check conformance for |num_pic_total_curr|.
+ if (slice_hdr->nal_unit_type == H265NALU::CRA_NUT ||
+ (slice_hdr->nal_unit_type >= H265NALU::BLA_W_LP &&
+ slice_hdr->nal_unit_type <= H265NALU::BLA_N_LP)) {
+ if (slice_hdr->num_pic_total_curr) {
+ DVLOG(1) << "Invalid value for num_pic_total_curr";
+ return false;
+ }
+ } else if ((slice_hdr->IsBSlice() || slice_hdr->IsPSlice()) &&
+ !slice_hdr->num_pic_total_curr) {
+ DVLOG(1) << "Invalid value for num_pic_total_curr";
+ return false;
+ }
+
+ return true;
+}
+
+bool H265Decoder::BuildRefPicLists(const H265SPS* sps,
+ const H265PPS* pps,
+ const H265SliceHeader* slice_hdr) {
+ scoped_refptr<H265Picture> ref_pic_set_lt_curr[kMaxDpbSize];
+ scoped_refptr<H265Picture> ref_pic_set_lt_foll[kMaxDpbSize];
+ scoped_refptr<H265Picture> ref_pic_set_st_curr_after[kMaxDpbSize];
+ scoped_refptr<H265Picture> ref_pic_set_st_curr_before[kMaxDpbSize];
+ scoped_refptr<H265Picture> ref_pic_set_st_foll[kMaxDpbSize];
+
+ // Mark everything in the DPB as unused for reference now. When we determine
+ // the pics in the ref list, then we will mark them appropriately.
+ dpb_.MarkAllUnusedForReference();
+
+ // Equation 8-6.
+ // We may be missing reference pictures, if so then we just don't specify
+ // them and let the accelerator deal with the missing reference pictures
+ // which is covered in the spec.
+ int total_ref_pics = 0;
+ for (int i = 0; i < num_poc_lt_curr_; ++i) {
+ if (!curr_delta_poc_msb_present_flag_[i])
+ ref_pic_set_lt_curr[i] = dpb_.GetPicByPocMaskedAndMark(
+ poc_lt_curr_[i], sps->max_pic_order_cnt_lsb - 1,
+ H265Picture::kLongTermCurr);
+ else
+ ref_pic_set_lt_curr[i] =
+ dpb_.GetPicByPocAndMark(poc_lt_curr_[i], H265Picture::kLongTermCurr);
+
+ if (ref_pic_set_lt_curr[i])
+ total_ref_pics++;
+ }
+ for (int i = 0; i < num_poc_lt_foll_; ++i) {
+ if (!foll_delta_poc_msb_present_flag_[i])
+ ref_pic_set_lt_foll[i] = dpb_.GetPicByPocMaskedAndMark(
+ poc_lt_foll_[i], sps->max_pic_order_cnt_lsb - 1,
+ H265Picture::kLongTermFoll);
+ else
+ ref_pic_set_lt_foll[i] =
+ dpb_.GetPicByPocAndMark(poc_lt_foll_[i], H265Picture::kLongTermFoll);
+
+ if (ref_pic_set_lt_foll[i])
+ total_ref_pics++;
+ }
+
+ // Equation 8-7.
+ for (int i = 0; i < num_poc_st_curr_before_; ++i) {
+ ref_pic_set_st_curr_before[i] = dpb_.GetPicByPocAndMark(
+ poc_st_curr_before_[i], H265Picture::kShortTermCurrBefore);
+
+ if (ref_pic_set_st_curr_before[i])
+ total_ref_pics++;
+ }
+ for (int i = 0; i < num_poc_st_curr_after_; ++i) {
+ ref_pic_set_st_curr_after[i] = dpb_.GetPicByPocAndMark(
+ poc_st_curr_after_[i], H265Picture::kShortTermCurrAfter);
+ if (ref_pic_set_st_curr_after[i])
+ total_ref_pics++;
+ }
+ for (int i = 0; i < num_poc_st_foll_; ++i) {
+ ref_pic_set_st_foll[i] =
+ dpb_.GetPicByPocAndMark(poc_st_foll_[i], H265Picture::kShortTermFoll);
+ if (ref_pic_set_st_foll[i])
+ total_ref_pics++;
+ }
+
+ // Verify that the total number of reference pictures in the DPB matches the
+ // total count of reference pics. This ensures that a picture is not in more
+ // than one list, per the spec.
+ if (dpb_.GetReferencePicCount() != total_ref_pics) {
+ DVLOG(1) << "Conformance problem, reference pic is in more than one list";
+ return false;
+ }
+
+ ref_pic_list_.clear();
+ dpb_.AppendReferencePics(&ref_pic_list_);
+ ref_pic_list0_.clear();
+ ref_pic_list1_.clear();
+
+ // 8.3.3 Generation of unavailable reference pictures is something we do not
+ // need to handle here. It's handled by the accelerator itself when we do not
+ // specify a reference picture that it needs.
+
+ if (slice_hdr->IsPSlice() || slice_hdr->IsBSlice()) {
+ // 8.3.4 Decoding process for reference picture lists construction
+ int num_rps_curr_temp_list0 =
+ std::max(slice_hdr->num_ref_idx_l0_active_minus1 + 1,
+ slice_hdr->num_pic_total_curr);
+ scoped_refptr<H265Picture> ref_pic_list_temp0[kMaxDpbSize];
+
+ // Equation 8-8.
+ int r_idx = 0;
+ while (r_idx < num_rps_curr_temp_list0) {
+ for (int i = 0;
+ i < num_poc_st_curr_before_ && r_idx < num_rps_curr_temp_list0;
+ ++i, ++r_idx) {
+ ref_pic_list_temp0[r_idx] = ref_pic_set_st_curr_before[i];
+ }
+ for (int i = 0;
+ i < num_poc_st_curr_after_ && r_idx < num_rps_curr_temp_list0;
+ ++i, ++r_idx) {
+ ref_pic_list_temp0[r_idx] = ref_pic_set_st_curr_after[i];
+ }
+ for (int i = 0; i < num_poc_lt_curr_ && r_idx < num_rps_curr_temp_list0;
+ ++i, ++r_idx) {
+ ref_pic_list_temp0[r_idx] = ref_pic_set_lt_curr[i];
+ }
+ }
+
+ // Equation 8-9.
+ for (r_idx = 0; r_idx <= slice_hdr->num_ref_idx_l0_active_minus1; ++r_idx) {
+ ref_pic_list0_.push_back(
+ slice_hdr->ref_pic_lists_modification
+ .ref_pic_list_modification_flag_l0
+ ? ref_pic_list_temp0[slice_hdr->ref_pic_lists_modification
+ .list_entry_l0[r_idx]]
+ : ref_pic_list_temp0[r_idx]);
+ }
+
+ if (slice_hdr->IsBSlice()) {
+ int num_rps_curr_temp_list1 =
+ std::max(slice_hdr->num_ref_idx_l1_active_minus1 + 1,
+ slice_hdr->num_pic_total_curr);
+ scoped_refptr<H265Picture> ref_pic_list_temp1[kMaxDpbSize];
+
+ // Equation 8-10.
+ r_idx = 0;
+ while (r_idx < num_rps_curr_temp_list1) {
+ for (int i = 0;
+ i < num_poc_st_curr_after_ && r_idx < num_rps_curr_temp_list1;
+ ++i, r_idx++) {
+ ref_pic_list_temp1[r_idx] = ref_pic_set_st_curr_after[i];
+ }
+ for (int i = 0;
+ i < num_poc_st_curr_before_ && r_idx < num_rps_curr_temp_list1;
+ ++i, r_idx++) {
+ ref_pic_list_temp1[r_idx] = ref_pic_set_st_curr_before[i];
+ }
+ for (int i = 0; i < num_poc_lt_curr_ && r_idx < num_rps_curr_temp_list1;
+ ++i, r_idx++) {
+ ref_pic_list_temp1[r_idx] = ref_pic_set_lt_curr[i];
+ }
+ }
+
+ // Equation 8-11.
+ for (r_idx = 0; r_idx <= slice_hdr->num_ref_idx_l1_active_minus1;
+ ++r_idx) {
+ ref_pic_list1_.push_back(
+ slice_hdr->ref_pic_lists_modification
+ .ref_pic_list_modification_flag_l1
+ ? ref_pic_list_temp1[slice_hdr->ref_pic_lists_modification
+ .list_entry_l1[r_idx]]
+ : ref_pic_list_temp1[r_idx]);
+ }
+ }
+ }
+
+ return true;
+}
+
+H265Decoder::H265Accelerator::Status H265Decoder::StartNewFrame(
+ const H265SliceHeader* slice_hdr) {
+ CHECK(curr_pic_.get());
+ DCHECK(slice_hdr);
+
+ curr_pps_id_ = slice_hdr->slice_pic_parameter_set_id;
+ const H265PPS* pps = parser_.GetPPS(curr_pps_id_);
+ // Slice header parsing already verified this should exist.
+ DCHECK(pps);
+
+ curr_sps_id_ = pps->pps_seq_parameter_set_id;
+ const H265SPS* sps = parser_.GetSPS(curr_sps_id_);
+ // Slice header parsing already verified this should exist.
+ DCHECK(sps);
+
+ // If this is from a retry on SubmitFrameMetadata, we should not redo all of
+ // these calculations.
+ if (!curr_pic_->processed_) {
+ // Copy slice/pps variables we need to the picture.
+ curr_pic_->nal_unit_type_ = curr_nalu_->nal_unit_type;
+ curr_pic_->irap_pic_ = slice_hdr->irap_pic;
+
+ curr_pic_->set_visible_rect(visible_rect_);
+ curr_pic_->set_bitstream_id(stream_id_);
+ if (sps->GetColorSpace().IsSpecified())
+ curr_pic_->set_colorspace(sps->GetColorSpace());
+ else
+ curr_pic_->set_colorspace(container_color_space_);
+
+ CalcPicOutputFlags(slice_hdr);
+ CalcPictureOrderCount(pps, slice_hdr);
+
+ if (!CalcRefPicPocs(sps, pps, slice_hdr)) {
+ return H265Accelerator::Status::kFail;
+ }
+
+ if (!BuildRefPicLists(sps, pps, slice_hdr)) {
+ return H265Accelerator::Status::kFail;
+ }
+
+ if (!PerformDpbOperations(sps)) {
+ return H265Accelerator::Status::kFail;
+ }
+ curr_pic_->processed_ = true;
+ }
+
+ return accelerator_->SubmitFrameMetadata(sps, pps, slice_hdr, ref_pic_list_,
+ curr_pic_);
+}
+
+H265Decoder::H265Accelerator::Status H265Decoder::FinishPrevFrameIfPresent() {
+ // If we already have a frame waiting to be decoded, decode it and finish.
+ if (curr_pic_) {
+ H265Accelerator::Status result = DecodePicture();
+ if (result != H265Accelerator::Status::kOk)
+ return result;
+
+ scoped_refptr<H265Picture> pic = curr_pic_;
+ curr_pic_ = nullptr;
+ FinishPicture(pic);
+ }
+
+ return H265Accelerator::Status::kOk;
+}
+
+bool H265Decoder::PerformDpbOperations(const H265SPS* sps) {
+ // C.5.2.2
+ if (curr_pic_->irap_pic_ && curr_pic_->no_rasl_output_flag_ &&
+ !curr_pic_->first_picture_) {
+ if (!curr_pic_->no_output_of_prior_pics_flag_) {
+ OutputAllRemainingPics();
+ }
+ dpb_.Clear();
+ } else {
+ int num_to_output;
+ do {
+ dpb_.DeleteUnused();
+ // Get all pictures that haven't been outputted yet.
+ H265Picture::Vector not_outputted;
+ dpb_.AppendPendingOutputPics(&not_outputted);
+ // Sort in output order.
+ std::sort(not_outputted.begin(), not_outputted.end(), POCAscCompare());
+
+ // Calculate how many pictures we need to output.
+ num_to_output = 0;
+ int highest_tid = sps->sps_max_sub_layers_minus1;
+ num_to_output = std::max(num_to_output,
+ static_cast<int>(not_outputted.size()) -
+ sps->sps_max_num_reorder_pics[highest_tid]);
+ num_to_output =
+ std::max(num_to_output,
+ static_cast<int>(dpb_.size()) -
+ sps->sps_max_dec_pic_buffering_minus1[highest_tid]);
+
+ num_to_output =
+ std::min(num_to_output, static_cast<int>(not_outputted.size()));
+ if (!num_to_output && dpb_.IsFull()) {
+ // This is wrong, we should try to output pictures until we can clear
+ // one from the DPB. This is better than failing, but we then may end up
+ // with something out of order.
+ DVLOG(1) << "Forcibly outputting pictures to make room in DPB.";
+ for (const auto& pic : not_outputted) {
+ num_to_output++;
+ if (pic->ref_ == H265Picture::kUnused)
+ break;
+ }
+ }
+
+ // TODO(jkardatzke): There's another output picture requirement regarding
+ // the sps_max_latency_increase_plus1, but I have yet to understand how
+ // that could be larger than the sps_max_num_reorder_pics since the actual
+ // latency value used is the sum of both.
+ not_outputted.resize(num_to_output);
+ for (auto& pic : not_outputted) {
+ if (!OutputPic(pic))
+ return false;
+ }
+
+ dpb_.DeleteUnused();
+ } while (dpb_.IsFull() && num_to_output);
+ }
+
+ if (dpb_.IsFull()) {
+ DVLOG(1) << "Could not free up space in DPB for current picture";
+ return false;
+ }
+
+ // Put the current pic in the DPB.
+ dpb_.StorePicture(curr_pic_, H265Picture::kShortTermFoll);
+ return true;
+}
+
+void H265Decoder::FinishPicture(scoped_refptr<H265Picture> pic) {
+ // 8.3.1
+ if (pic->valid_for_prev_tid0_pic_)
+ prev_tid0_pic_ = pic;
+
+ ref_pic_list_.clear();
+ ref_pic_list0_.clear();
+ ref_pic_list1_.clear();
+
+ last_slice_hdr_.reset();
+}
+
+H265Decoder::H265Accelerator::Status H265Decoder::DecodePicture() {
+ DCHECK(curr_pic_.get());
+ return accelerator_->SubmitDecode(curr_pic_);
+}
+
+bool H265Decoder::OutputPic(scoped_refptr<H265Picture> pic) {
+ DCHECK(!pic->outputted_);
+ pic->outputted_ = true;
+
+ DVLOG(4) << "Posting output task for POC: " << pic->pic_order_cnt_val_;
+ return accelerator_->OutputPicture(std::move(pic));
+}
+
+bool H265Decoder::OutputAllRemainingPics() {
+ // Output all pictures that are waiting to be outputted.
+ H265Picture::Vector to_output;
+ dpb_.AppendPendingOutputPics(&to_output);
+ // Sort them by ascending POC to output in order.
+ std::sort(to_output.begin(), to_output.end(), POCAscCompare());
+
+ for (auto& pic : to_output) {
+ if (!OutputPic(std::move(pic)))
+ return false;
+ }
+ return true;
+}
+
+bool H265Decoder::Flush() {
+ DVLOG(2) << "Decoder flush";
+
+ if (!OutputAllRemainingPics())
+ return false;
+
+ dpb_.Clear();
+ prev_tid0_pic_ = nullptr;
+ return true;
+}
+
+} // namespace media
diff --git a/chromium/media/gpu/h265_decoder.h b/chromium/media/gpu/h265_decoder.h
new file mode 100644
index 00000000000..820e136e8fe
--- /dev/null
+++ b/chromium/media/gpu/h265_decoder.h
@@ -0,0 +1,329 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_H265_DECODER_H_
+#define MEDIA_GPU_H265_DECODER_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <memory>
+#include <vector>
+
+#include "base/containers/span.h"
+#include "base/memory/ref_counted.h"
+#include "media/base/decrypt_config.h"
+#include "media/base/subsample_entry.h"
+#include "media/base/video_codecs.h"
+#include "media/gpu/accelerated_video_decoder.h"
+#include "media/gpu/h265_dpb.h"
+#include "media/gpu/media_gpu_export.h"
+#include "media/video/h265_parser.h"
+#include "ui/gfx/geometry/rect.h"
+#include "ui/gfx/geometry/size.h"
+
+namespace media {
+
+// Clients of this class are expected to pass H265 Annex-B byte stream
+// and are expected to provide an implementation of H265Accelerator for
+// offloading final steps of the decoding process.
+//
+// This class must be created, called and destroyed on a single thread, and
+// does nothing internally on any other thread.
+//
+// It is expected that when a DecoderBuffer is submitted, that it will contain a
+// complete frame of data. Multiple slices per frame are handled. This class can
+// also handle multiple frames in a DecoderBuffer, but that condition should
+// never actually occur.
+class MEDIA_GPU_EXPORT H265Decoder final : public AcceleratedVideoDecoder {
+ public:
+  class MEDIA_GPU_EXPORT H265Accelerator {
+   public:
+    // Methods may return kTryAgain if they need additional data (provided
+    // independently) in order to proceed. Examples are things like not having
+    // an appropriate key to decode encrypted content, or needing to wait
+    // until hardware buffers are available. This is not considered an
+    // unrecoverable error, but rather a pause to allow an application to
+    // independently provide the required data. When H265Decoder::Decode()
+    // is called again, it will attempt to resume processing of the stream
+    // by calling the same method again.
+    enum class Status {
+      // Operation completed successfully.
+      kOk,
+
+      // Operation failed.
+      kFail,
+
+      // Operation failed because some external data is missing. Retry the same
+      // operation later, once the data has been provided.
+      kTryAgain,
+
+      // Operation is not supported. Used by SetStream() to indicate that the
+      // Accelerator can not handle this operation.
+      kNotSupported,
+    };
+
+    H265Accelerator();
+
+    H265Accelerator(const H265Accelerator&) = delete;
+    H265Accelerator& operator=(const H265Accelerator&) = delete;
+
+    virtual ~H265Accelerator();
+
+    // Create a new H265Picture that the decoder client can use for decoding
+    // and pass back to this accelerator for decoding or reference.
+    // When the picture is no longer needed by decoder, it will just drop
+    // its reference to it, and it may do so at any time.
+    // Note that this may return nullptr if accelerator is not able to provide
+    // any new pictures at given time. The decoder is expected to handle
+    // this situation as normal and return from Decode() with kRanOutOfSurfaces.
+    virtual scoped_refptr<H265Picture> CreateH265Picture() = 0;
+
+    // Submit metadata for the current frame, providing the current |sps|, |pps|
+    // and |slice_hdr| for it. |ref_pic_list| contains the set of pictures as
+    // described in 8.3.2 from the lists RefPicSetLtCurr, RefPicSetLtFoll,
+    // RefPicSetStCurrBefore, RefPicSetStCurrAfter and RefPicSetStFoll.
+    // |pic| contains information about the picture for the current frame.
+    // Note that this does not run decode in the accelerator and the decoder
+    // is expected to follow this call with one or more SubmitSlice() calls
+    // before calling SubmitDecode().
+    // Returns kOk if successful, kFail if there are errors, or kTryAgain if
+    // the accelerator needs additional data before being able to proceed.
+    virtual Status SubmitFrameMetadata(const H265SPS* sps,
+                                       const H265PPS* pps,
+                                       const H265SliceHeader* slice_hdr,
+                                       const H265Picture::Vector& ref_pic_list,
+                                       scoped_refptr<H265Picture> pic) = 0;
+
+    // Submit one slice for the current frame, passing the current |pps| and
+    // |pic| (same as in SubmitFrameMetadata()), the parsed header for the
+    // current slice in |slice_hdr|, and the |ref_pic_listX|, as per H265 spec.
+    // |data| pointing to the full slice (including the unparsed header) of
+    // |size| in bytes.
+    // |subsamples| specifies which part of the slice data is encrypted.
+    // This must be called one or more times per frame, before SubmitDecode().
+    // Note that |data| does not have to remain valid after this call returns.
+    // Returns kOk if successful, kFail if there are errors, or kTryAgain if
+    // the accelerator needs additional data before being able to proceed.
+    virtual Status SubmitSlice(
+        const H265SPS* sps,
+        const H265PPS* pps,
+        const H265SliceHeader* slice_hdr,
+        const H265Picture::Vector& ref_pic_list0,
+        const H265Picture::Vector& ref_pic_list1,
+        scoped_refptr<H265Picture> pic,
+        const uint8_t* data,
+        size_t size,
+        const std::vector<SubsampleEntry>& subsamples) = 0;
+
+    // Execute the decode in hardware for |pic|, using all the slices and
+    // metadata submitted via SubmitFrameMetadata() and SubmitSlice() since
+    // the previous call to SubmitDecode().
+    // Returns kOk if successful, kFail if there are errors, or kTryAgain if
+    // the accelerator needs additional data before being able to proceed.
+    virtual Status SubmitDecode(scoped_refptr<H265Picture> pic) = 0;
+
+    // Schedule output (display) of |pic|. Note that returning from this
+    // method does not mean that |pic| has already been outputted (displayed),
+    // but guarantees that all pictures will be outputted in the same order
+    // as this method was called for them. Decoder may drop its reference
+    // to |pic| after calling this method.
+    // Return true if successful.
+    virtual bool OutputPicture(scoped_refptr<H265Picture> pic) = 0;
+
+    // Reset any current state that may be cached in the accelerator, dropping
+    // any cached parameters/slices that have not been committed yet.
+    virtual void Reset() = 0;
+
+    // Notifies the accelerator whenever there is a new stream to process.
+    // |stream| is the data in annex B format, which may include SPS and PPS
+    // NALUs when there is a configuration change. The first frame must contain
+    // the SPS and PPS NALUs. SPS and PPS NALUs may not be encrypted.
+    // |decrypt_config| is the config for decrypting the stream. The accelerator
+    // should use |decrypt_config| to keep track of the parts of |stream| that
+    // are encrypted. If kTryAgain is returned, the decoder will retry this call
+    // later. This method has a default implementation that returns
+    // kNotSupported.
+    virtual Status SetStream(base::span<const uint8_t> stream,
+                             const DecryptConfig* decrypt_config);
+  };
+
+  H265Decoder(std::unique_ptr<H265Accelerator> accelerator,
+              VideoCodecProfile profile,
+              const VideoColorSpace& container_color_space = VideoColorSpace());
+
+  H265Decoder(const H265Decoder&) = delete;
+  H265Decoder& operator=(const H265Decoder&) = delete;
+
+  ~H265Decoder() override;
+
+  // AcceleratedVideoDecoder implementation.
+  void SetStream(int32_t id, const DecoderBuffer& decoder) override;
+  bool Flush() override WARN_UNUSED_RESULT;
+  void Reset() override;
+  DecodeResult Decode() override WARN_UNUSED_RESULT;
+  gfx::Size GetPicSize() const override;
+  gfx::Rect GetVisibleRect() const override;
+  VideoCodecProfile GetProfile() const override;
+  size_t GetRequiredNumOfPictures() const override;
+  size_t GetNumReferenceFrames() const override;
+
+ private:
+  // Internal state of the decoder.
+  enum State {
+    // Ready to decode from any point.
+    kDecoding,
+    // After Reset(), need a resume point.
+    kAfterReset,
+    // The following keep track of what step is next in Decode() processing
+    // in order to resume properly after H265Decoder::kTryAgain (or another
+    // retryable error) is returned. The next time Decode() is called the call
+    // that previously failed will be retried and execution continues from
+    // there (if possible).
+    kTryPreprocessCurrentSlice,
+    kEnsurePicture,
+    kTryNewFrame,
+    kTryCurrentSlice,
+    // Error in decode, can't continue.
+    kError,
+  };
+
+  // Process H265 stream structures.
+  bool ProcessPPS(int pps_id, bool* need_new_buffers);
+
+  // Process current slice header to discover if we need to start a new picture,
+  // finishing up the current one.
+  H265Accelerator::Status PreprocessCurrentSlice();
+
+  // Process current slice as a slice of the current picture.
+  H265Accelerator::Status ProcessCurrentSlice();
+
+  // Start processing a new frame. This also generates all the POC and output
+  // variables for the frame, generates reference picture lists, performs
+  // reference picture marking, DPB management and picture output.
+  H265Accelerator::Status StartNewFrame(const H265SliceHeader* slice_hdr);
+
+  // All data for a frame received, process it and decode.
+  H265Accelerator::Status FinishPrevFrameIfPresent();
+
+  // Called after we are done processing |pic|.
+  void FinishPicture(scoped_refptr<H265Picture> pic);
+
+  // Commits all pending data for HW decoder and starts HW decoder.
+  H265Accelerator::Status DecodePicture();
+
+  // Notifies client that a picture is ready for output.
+  bool OutputPic(scoped_refptr<H265Picture> pic);
+
+  // Output all pictures in DPB that have not been outputted yet.
+  bool OutputAllRemainingPics();
+
+  // Calculates the picture output flags using |slice_hdr| for |curr_pic_|.
+  void CalcPicOutputFlags(const H265SliceHeader* slice_hdr);
+
+  // Calculates picture order count (POC) using |pps| and|slice_hdr| for
+  // |curr_pic_|.
+  void CalcPictureOrderCount(const H265PPS* pps,
+                             const H265SliceHeader* slice_hdr);
+
+  // Calculates the POCs for the reference pictures for |curr_pic_| using
+  // |sps|, |pps| and |slice_hdr| and stores them in the member variables.
+  // Returns false if bitstream conformance is not maintained, true otherwise.
+  bool CalcRefPicPocs(const H265SPS* sps,
+                      const H265PPS* pps,
+                      const H265SliceHeader* slice_hdr);
+
+  // Builds the reference pictures lists for |curr_pic_| using |sps|, |pps|,
+  // |slice_hdr| and the member variables calculated in CalcRefPicPocs. Returns
+  // false if bitstream conformance is not maintained or needed reference
+  // pictures are missing, true otherwise. At the end of this,
+  // |ref_pic_list{0,1}| will be populated with the required reference pictures
+  // for submitting to the accelerator.
+  bool BuildRefPicLists(const H265SPS* sps,
+                        const H265PPS* pps,
+                        const H265SliceHeader* slice_hdr);
+
+  // Performs DPB management operations for |curr_pic_| by removing no longer
+  // needed entries from the DPB and outputting pictures from the DPB. |sps|
+  // should be the corresponding SPS for |curr_pic_|.
+  bool PerformDpbOperations(const H265SPS* sps);
+
+  // Decoder state.
+  State state_;
+
+  // The colorspace for the h265 container.
+  const VideoColorSpace container_color_space_;
+
+  // Parser in use.
+  H265Parser parser_;
+
+  // Most recent call to SetStream().
+  const uint8_t* current_stream_ = nullptr;
+  size_t current_stream_size_ = 0;
+
+  // Decrypting config for the most recent data passed to SetStream().
+  std::unique_ptr<DecryptConfig> current_decrypt_config_;
+
+  // Keep track of when SetStream() is called so that
+  // H265Accelerator::SetStream() can be called.
+  bool current_stream_has_been_changed_ = false;
+
+  // DPB in use.
+  H265DPB dpb_;
+
+  // Current stream buffer id; to be assigned to pictures decoded from it.
+  int32_t stream_id_ = -1;
+
+  // Picture currently being processed/decoded.
+  scoped_refptr<H265Picture> curr_pic_;
+
+  // Used to identify first picture in decoding order or first picture that
+  // follows an EOS NALU.
+  bool first_picture_ = true;
+
+  // Global state values, needed in decoding. See spec.
+  // |prev_tid0_pic_| is updated in FinishPicture() for pictures that are
+  // valid as the previous tid0 picture per spec 8.3.1.
+  scoped_refptr<H265Picture> prev_tid0_pic_;
+  int max_pic_order_cnt_lsb_;
+  bool curr_delta_poc_msb_present_flag_[kMaxDpbSize];
+  bool foll_delta_poc_msb_present_flag_[kMaxDpbSize];
+  // Sizes and POC values of the five reference picture subsets derived by
+  // CalcRefPicPocs() (StCurrBefore, StCurrAfter, StFoll, LtCurr, LtFoll;
+  // see spec 8.3.2).
+  int num_poc_st_curr_before_;
+  int num_poc_st_curr_after_;
+  int num_poc_st_foll_;
+  int num_poc_lt_curr_;
+  int num_poc_lt_foll_;
+  int poc_st_curr_before_[kMaxDpbSize];
+  int poc_st_curr_after_[kMaxDpbSize];
+  int poc_st_foll_[kMaxDpbSize];
+  int poc_lt_curr_[kMaxDpbSize];
+  int poc_lt_foll_[kMaxDpbSize];
+  H265Picture::Vector ref_pic_list0_;
+  H265Picture::Vector ref_pic_list1_;
+
+  // |ref_pic_list_| is the collection of all pictures from StCurrBefore,
+  // StCurrAfter, StFoll, LtCurr and LtFoll.
+  H265Picture::Vector ref_pic_list_;
+
+  // Currently active SPS and PPS.
+  int curr_sps_id_ = -1;
+  int curr_pps_id_ = -1;
+
+  // Current NALU and slice header being processed.
+  std::unique_ptr<H265NALU> curr_nalu_;
+  std::unique_ptr<H265SliceHeader> curr_slice_hdr_;
+  std::unique_ptr<H265SliceHeader> last_slice_hdr_;
+
+  // Output picture size.
+  gfx::Size pic_size_;
+  // Output visible cropping rect.
+  gfx::Rect visible_rect_;
+
+  // Profile of input bitstream.
+  VideoCodecProfile profile_;
+
+  const std::unique_ptr<H265Accelerator> accelerator_;
+};
+
+} // namespace media
+
+#endif // MEDIA_GPU_H265_DECODER_H_ \ No newline at end of file
diff --git a/chromium/media/gpu/h265_decoder_fuzzertest.cc b/chromium/media/gpu/h265_decoder_fuzzertest.cc
new file mode 100644
index 00000000000..23d257774a0
--- /dev/null
+++ b/chromium/media/gpu/h265_decoder_fuzzertest.cc
@@ -0,0 +1,81 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stddef.h>
+
+#include "base/numerics/safe_conversions.h"
+#include "media/base/decoder_buffer.h"
+#include "media/base/video_codecs.h"
+#include "media/gpu/h265_decoder.h"
+
+namespace {
+
+// No-op accelerator: every call succeeds without doing any work, so the
+// fuzzer exercises H265Decoder's parsing and state machine in isolation,
+// without real hardware.
+class FakeH265Accelerator : public media::H265Decoder::H265Accelerator {
+ public:
+  FakeH265Accelerator() = default;
+
+  FakeH265Accelerator(const FakeH265Accelerator&) = delete;
+  FakeH265Accelerator& operator=(const FakeH265Accelerator&) = delete;
+
+  ~FakeH265Accelerator() override = default;
+
+  // media::H265Decoder::H265Accelerator
+  scoped_refptr<media::H265Picture> CreateH265Picture() override {
+    return new media::H265Picture();
+  }
+
+  Status SubmitFrameMetadata(const media::H265SPS* sps,
+                             const media::H265PPS* pps,
+                             const media::H265SliceHeader* slice_hdr,
+                             const media::H265Picture::Vector& ref_pic_list,
+                             scoped_refptr<media::H265Picture> pic) override {
+    return Status::kOk;
+  }
+  Status SubmitSlice(
+      const media::H265SPS* sps,
+      const media::H265PPS* pps,
+      const media::H265SliceHeader* slice_hdr,
+      const media::H265Picture::Vector& ref_pic_list0,
+      const media::H265Picture::Vector& ref_pic_list1,
+      scoped_refptr<media::H265Picture> pic,
+      const uint8_t* data,
+      size_t size,
+      const std::vector<media::SubsampleEntry>& subsamples) override {
+    return Status::kOk;
+  }
+  Status SubmitDecode(scoped_refptr<media::H265Picture> pic) override {
+    return Status::kOk;
+  }
+  bool OutputPicture(scoped_refptr<media::H265Picture> pic) override {
+    return true;
+  }
+  void Reset() override {}
+  Status SetStream(base::span<const uint8_t> stream,
+                   const media::DecryptConfig* decrypt_config) override {
+    return Status::kOk;
+  }
+};
+
+} // namespace
+
+// Entry point for LibFuzzer.
+// Entry point for LibFuzzer. Feeds the raw fuzzer input to H265Decoder as a
+// single DecoderBuffer and runs Decode() to completion.
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+  if (!size)
+    return 0;
+
+  media::H265Decoder decoder(std::make_unique<FakeH265Accelerator>(),
+                             media::HEVCPROFILE_MAIN);
+  scoped_refptr<media::DecoderBuffer> decoder_buffer =
+      media::DecoderBuffer::CopyFrom(data, size);
+  decoder.SetStream(1, *decoder_buffer);
+
+  // Decode should consume all the data unless it returns kConfigChange, and in
+  // that case it needs to be called again.
+  while (true) {
+    if (decoder.Decode() != media::AcceleratedVideoDecoder::kConfigChange)
+      break;
+  }
+
+  return 0;
+}
diff --git a/chromium/media/gpu/h265_decoder_unittest.cc b/chromium/media/gpu/h265_decoder_unittest.cc
new file mode 100644
index 00000000000..ffc0a85da5f
--- /dev/null
+++ b/chromium/media/gpu/h265_decoder_unittest.cc
@@ -0,0 +1,459 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <cstring>
+#include <memory>
+#include <string>
+
+#include "base/check.h"
+#include "base/containers/queue.h"
+#include "base/containers/span.h"
+#include "base/files/file_util.h"
+#include "media/base/test_data_util.h"
+#include "media/gpu/h265_decoder.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+using ::testing::_;
+using ::testing::Args;
+using ::testing::Expectation;
+using ::testing::InSequence;
+using ::testing::Invoke;
+using ::testing::MakeMatcher;
+using ::testing::Matcher;
+using ::testing::MatcherInterface;
+using ::testing::MatchResultListener;
+using ::testing::Mock;
+using ::testing::Return;
+using ::testing::WithArg;
+
+namespace media {
+namespace {
+constexpr char kSpsPps[] = "bear-sps-pps.hevc";
+constexpr char kFrame0[] = "bear-frame0.hevc";
+constexpr char kFrame1[] = "bear-frame1.hevc";
+constexpr char kFrame2[] = "bear-frame2.hevc";
+constexpr char kFrame3[] = "bear-frame3.hevc";
+constexpr char kFrame4[] = "bear-frame4.hevc";
+constexpr char kFrame5[] = "bear-frame5.hevc";
+
+// Checks whether the decrypt config in the picture matches the decrypt config
+// passed to this matcher. |arg| is the H265Picture handed to the accelerator.
+MATCHER_P(DecryptConfigMatches, decrypt_config, "") {
+  return arg->decrypt_config()->Matches(*decrypt_config);
+}
+
+// Two-arg matcher used on SubmitSlice's (size, subsamples) pair: the clear +
+// cypher byte counts across all subsamples must add up to the slice size.
+MATCHER(SubsampleSizeMatches, "Verify subsample sizes match buffer size") {
+  const size_t buffer_size = ::testing::get<0>(arg);
+  const std::vector<SubsampleEntry>& subsamples = ::testing::get<1>(arg);
+  size_t subsample_total_size = 0;
+  for (const auto& sample : subsamples) {
+    subsample_total_size += sample.cypher_bytes;
+    subsample_total_size += sample.clear_bytes;
+  }
+  return subsample_total_size == buffer_size;
+}
+
+// To have better description on mismatch: reports the actual POC of the
+// picture when it does not equal the expected one.
+class HasPocMatcher : public MatcherInterface<scoped_refptr<H265Picture>> {
+ public:
+  explicit HasPocMatcher(int expected_poc) : expected_poc_(expected_poc) {}
+
+  bool MatchAndExplain(scoped_refptr<H265Picture> p,
+                       MatchResultListener* listener) const override {
+    if (p->pic_order_cnt_val_ == expected_poc_)
+      return true;
+    // Explain the mismatch with the picture's actual POC.
+    *listener << "with poc: " << p->pic_order_cnt_val_;
+    return false;
+  }
+
+  void DescribeTo(std::ostream* os) const override {
+    *os << "with poc " << expected_poc_;
+  }
+
+ private:
+  int expected_poc_;
+};
+
+// Convenience factory wrapping HasPocMatcher into a gMock Matcher.
+Matcher<scoped_refptr<H265Picture>> HasPoc(int expected_poc) {
+  return MakeMatcher(new HasPocMatcher(expected_poc));
+}
+
+} // namespace
+
+// gMock accelerator: every H265Accelerator entry point is mockable except
+// Reset(), which is a plain no-op override since no test sets expectations
+// on it.
+class MockH265Accelerator : public H265Decoder::H265Accelerator {
+ public:
+  MockH265Accelerator() = default;
+
+  MOCK_METHOD0(CreateH265Picture, scoped_refptr<H265Picture>());
+  MOCK_METHOD5(SubmitFrameMetadata,
+               Status(const H265SPS* sps,
+                      const H265PPS* pps,
+                      const H265SliceHeader* slice_hdr,
+                      const H265Picture::Vector& ref_pic_list,
+                      scoped_refptr<H265Picture> pic));
+  MOCK_METHOD9(SubmitSlice,
+               Status(const H265SPS* sps,
+                      const H265PPS* pps,
+                      const H265SliceHeader* slice_hdr,
+                      const H265Picture::Vector& ref_pic_list0,
+                      const H265Picture::Vector& ref_pic_list1,
+                      scoped_refptr<H265Picture> pic,
+                      const uint8_t* data,
+                      size_t size,
+                      const std::vector<SubsampleEntry>& subsamples));
+  MOCK_METHOD1(SubmitDecode, Status(scoped_refptr<H265Picture> pic));
+  MOCK_METHOD1(OutputPicture, bool(scoped_refptr<H265Picture>));
+  MOCK_METHOD2(SetStream,
+               Status(base::span<const uint8_t> stream,
+                      const DecryptConfig* decrypt_config));
+
+  void Reset() override {}
+};
+
+// Test H265Decoder by feeding different h265 frame sequences and make sure it
+// behaves as expected.
+class H265DecoderTest : public ::testing::Test {
+ public:
+  H265DecoderTest() = default;
+
+  void SetUp() override;
+
+  // Sets the bitstreams to be decoded, frame by frame. The content of each
+  // file is the encoded bitstream of a single video frame.
+  void SetInputFrameFiles(const std::vector<std::string>& frame_files);
+
+  // Keeps decoding the input bitstream set at |SetInputFrameFiles| until the
+  // decoder has consumed all bitstreams or returned from
+  // |H265Decoder::Decode|. Returns the same result as |H265Decoder::Decode|.
+  // If |set_stream_expect| is true, it will setup EXPECT_CALL for SetStream.
+  AcceleratedVideoDecoder::DecodeResult Decode(bool set_stream_expect = true);
+
+ protected:
+  std::unique_ptr<H265Decoder> decoder_;
+  // Raw pointer for setting expectations; ownership is with |decoder_|.
+  MockH265Accelerator* accelerator_;
+
+ private:
+  // Files queued by SetInputFrameFiles() and consumed one per Decode() pass.
+  base::queue<std::string> input_frame_files_;
+  std::string bitstream_;
+  scoped_refptr<DecoderBuffer> decoder_buffer_;
+};
+
+// Builds the decoder around a mock accelerator. The raw |accelerator_|
+// pointer is kept for expectations; ownership moves into |decoder_|.
+void H265DecoderTest::SetUp() {
+  auto mock_accelerator = std::make_unique<MockH265Accelerator>();
+  accelerator_ = mock_accelerator.get();
+  decoder_.reset(new H265Decoder(std::move(mock_accelerator),
+                                 VIDEO_CODEC_PROFILE_UNKNOWN));
+
+  // Sets default behaviors for mock methods for convenience.
+  ON_CALL(*accelerator_, CreateH265Picture()).WillByDefault(Invoke([]() {
+    return new H265Picture();
+  }));
+  ON_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _))
+      .WillByDefault(Return(H265Decoder::H265Accelerator::Status::kOk));
+  ON_CALL(*accelerator_, SubmitDecode(_))
+      .WillByDefault(Return(H265Decoder::H265Accelerator::Status::kOk));
+  ON_CALL(*accelerator_, OutputPicture(_)).WillByDefault(Return(true));
+  // SubmitSlice additionally validates that the subsample sizes of every
+  // slice add up to its buffer size.
+  ON_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _, _))
+      .With(Args<7, 8>(SubsampleSizeMatches()))
+      .WillByDefault(Return(H265Decoder::H265Accelerator::Status::kOk));
+  ON_CALL(*accelerator_, SetStream(_, _))
+      .WillByDefault(
+          Return(H265Decoder::H265Accelerator::Status::kNotSupported));
+}
+
+// Queues the per-frame bitstream files to be fed to the decoder in order.
+// Iterate by const reference to avoid copying each std::string.
+void H265DecoderTest::SetInputFrameFiles(
+    const std::vector<std::string>& input_frame_files) {
+  for (const auto& f : input_frame_files)
+    input_frame_files_.push(f);
+}
+
+// Feeds queued bitstream files to |decoder_| one at a time, calling Decode()
+// until it returns something other than kRanOutOfStreamData or the queue is
+// exhausted, and returns the last Decode() result. When |set_stream_expect|
+// is true an EXPECT_CALL for SetStream is added for each buffer.
+AcceleratedVideoDecoder::DecodeResult H265DecoderTest::Decode(
+    bool set_stream_expect) {
+  // Counter lives outside the loop so each submitted buffer gets a distinct
+  // id. (Previously it was re-declared inside the loop, making the
+  // post-increment dead and submitting every buffer with id 0.)
+  int32_t bitstream_id = 0;
+  while (true) {
+    auto result = decoder_->Decode();
+    if (result != AcceleratedVideoDecoder::kRanOutOfStreamData ||
+        input_frame_files_.empty())
+      return result;
+    auto input_file = GetTestDataFilePath(input_frame_files_.front());
+    input_frame_files_.pop();
+    CHECK(base::ReadFileToString(input_file, &bitstream_));
+    decoder_buffer_ = DecoderBuffer::CopyFrom(
+        reinterpret_cast<const uint8_t*>(bitstream_.data()), bitstream_.size());
+    EXPECT_NE(decoder_buffer_.get(), nullptr);
+    if (set_stream_expect)
+      EXPECT_CALL(*accelerator_, SetStream(_, _));
+    decoder_->SetStream(bitstream_id++, *decoder_buffer_);
+  }
+}
+
+// Decodes SPS/PPS plus a single IDR frame and checks the expected accelerator
+// call sequence. Also verifies that a null picture from CreateH265Picture()
+// surfaces as kRanOutOfSurfaces.
+TEST_F(H265DecoderTest, DecodeSingleFrame) {
+  SetInputFrameFiles({kSpsPps, kFrame0});
+  EXPECT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
+  EXPECT_EQ(gfx::Size(320, 184), decoder_->GetPicSize());
+  EXPECT_EQ(HEVCPROFILE_MAIN, decoder_->GetProfile());
+  EXPECT_EQ(17u, decoder_->GetRequiredNumOfPictures());
+
+  // Also test running out of surfaces.
+  EXPECT_CALL(*accelerator_, CreateH265Picture()).WillOnce(Return(nullptr));
+  EXPECT_EQ(AcceleratedVideoDecoder::kRanOutOfSurfaces, Decode());
+  EXPECT_TRUE(Mock::VerifyAndClearExpectations(&*accelerator_));
+
+  {
+    InSequence sequence;
+    EXPECT_CALL(*accelerator_, CreateH265Picture()).Times(1);
+    EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _)).Times(1);
+    EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _, _)).Times(1);
+    EXPECT_CALL(*accelerator_, SubmitDecode(HasPoc(0))).Times(1);
+    EXPECT_CALL(*accelerator_, OutputPicture(HasPoc(0)));
+  }
+  EXPECT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, Decode());
+  EXPECT_TRUE(decoder_->Flush());
+}
+
+// Feeds two non-IDR frames before the IDR frame; only one picture is expected
+// to be created/decoded/output, i.e. the leading non-IDR frames are skipped.
+TEST_F(H265DecoderTest, SkipNonIDRFrames) {
+  SetInputFrameFiles({kSpsPps, kFrame1, kFrame2, kFrame0});
+  EXPECT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
+  EXPECT_EQ(gfx::Size(320, 184), decoder_->GetPicSize());
+  EXPECT_EQ(HEVCPROFILE_MAIN, decoder_->GetProfile());
+  EXPECT_EQ(17u, decoder_->GetRequiredNumOfPictures());
+  {
+    InSequence sequence;
+    EXPECT_CALL(*accelerator_, CreateH265Picture()).Times(1);
+    EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _)).Times(1);
+    EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _, _)).Times(1);
+    EXPECT_CALL(*accelerator_, SubmitDecode(HasPoc(0))).Times(1);
+    EXPECT_CALL(*accelerator_, OutputPicture(HasPoc(0)));
+  }
+  EXPECT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, Decode());
+  EXPECT_TRUE(decoder_->Flush());
+}
+
+// Decodes six frames and verifies decode order (bitstream order: POC
+// 0,4,2,1,3,8) against display order (ascending POC, each output gated on its
+// own decode having finished).
+TEST_F(H265DecoderTest, DecodeProfileMain) {
+  SetInputFrameFiles(
+      {kSpsPps, kFrame0, kFrame1, kFrame2, kFrame3, kFrame4, kFrame5});
+  EXPECT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
+  EXPECT_EQ(gfx::Size(320, 184), decoder_->GetPicSize());
+  EXPECT_EQ(HEVCPROFILE_MAIN, decoder_->GetProfile());
+  EXPECT_EQ(17u, decoder_->GetRequiredNumOfPictures());
+
+  EXPECT_CALL(*accelerator_, CreateH265Picture()).Times(6);
+  EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _)).Times(6);
+  EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _, _)).Times(6);
+
+  Expectation decode_poc0, decode_poc1, decode_poc2, decode_poc3, decode_poc4,
+      decode_poc8;
+  {
+    InSequence decode_order;
+    decode_poc0 = EXPECT_CALL(*accelerator_, SubmitDecode(HasPoc(0)));
+    decode_poc4 = EXPECT_CALL(*accelerator_, SubmitDecode(HasPoc(4)));
+    decode_poc2 = EXPECT_CALL(*accelerator_, SubmitDecode(HasPoc(2)));
+    decode_poc1 = EXPECT_CALL(*accelerator_, SubmitDecode(HasPoc(1)));
+    decode_poc3 = EXPECT_CALL(*accelerator_, SubmitDecode(HasPoc(3)));
+    decode_poc8 = EXPECT_CALL(*accelerator_, SubmitDecode(HasPoc(8)));
+  }
+  {
+    InSequence display_order;
+    EXPECT_CALL(*accelerator_, OutputPicture(HasPoc(0))).After(decode_poc0);
+    EXPECT_CALL(*accelerator_, OutputPicture(HasPoc(1))).After(decode_poc1);
+    EXPECT_CALL(*accelerator_, OutputPicture(HasPoc(2))).After(decode_poc2);
+    EXPECT_CALL(*accelerator_, OutputPicture(HasPoc(3))).After(decode_poc3);
+    EXPECT_CALL(*accelerator_, OutputPicture(HasPoc(4))).After(decode_poc4);
+    EXPECT_CALL(*accelerator_, OutputPicture(HasPoc(8))).After(decode_poc8);
+  }
+
+  EXPECT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, Decode());
+  EXPECT_TRUE(decoder_->Flush());
+}
+
+// A false return from the accelerator's OutputPicture() must surface from
+// Decode() as kDecodeError.
+TEST_F(H265DecoderTest, OutputPictureFailureCausesDecodeToFail) {
+  // Provide enough data that Decode() will try to output a frame.
+  SetInputFrameFiles({kSpsPps, kFrame0, kFrame1, kFrame2, kFrame3});
+  EXPECT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
+  EXPECT_CALL(*accelerator_, CreateH265Picture()).Times(4);
+  EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _)).Times(3);
+  EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _, _)).Times(3);
+  EXPECT_CALL(*accelerator_, SubmitDecode(_)).Times(3);
+  EXPECT_CALL(*accelerator_, OutputPicture(_)).WillRepeatedly(Return(false));
+  EXPECT_EQ(AcceleratedVideoDecoder::kDecodeError, Decode());
+}
+
+// Verify that the decryption config is passed to the accelerator.
+TEST_F(H265DecoderTest, SetEncryptedStream) {
+  std::string bitstream, bitstream1, bitstream2;
+  auto input_file1 = GetTestDataFilePath(kSpsPps);
+  CHECK(base::ReadFileToString(input_file1, &bitstream1));
+  auto input_file2 = GetTestDataFilePath(kFrame0);
+  CHECK(base::ReadFileToString(input_file2, &bitstream2));
+  bitstream = bitstream1 + bitstream2;
+
+  const char kAnyKeyId[] = "any_16byte_keyid";
+  const char kAnyIv[] = "any_16byte_iv___";
+  const std::vector<SubsampleEntry> subsamples = {
+      // No encrypted bytes. This test only checks whether the data is passed
+      // through to the accelerator, so making this completely clear.
+      {bitstream.size(), 0},
+  };
+
+  std::unique_ptr<DecryptConfig> decrypt_config =
+      DecryptConfig::CreateCencConfig(kAnyKeyId, kAnyIv, subsamples);
+  // Both the frame metadata and the decode call must carry a picture whose
+  // decrypt config matches the one attached to the buffer.
+  EXPECT_CALL(*accelerator_,
+              SubmitFrameMetadata(_, _, _, _,
+                                  DecryptConfigMatches(decrypt_config.get())))
+      .WillOnce(Return(H265Decoder::H265Accelerator::Status::kOk));
+  EXPECT_CALL(*accelerator_,
+              SubmitDecode(DecryptConfigMatches(decrypt_config.get())))
+      .WillOnce(Return(H265Decoder::H265Accelerator::Status::kOk));
+
+  auto buffer = DecoderBuffer::CopyFrom(
+      reinterpret_cast<const uint8_t*>(bitstream.data()), bitstream.size());
+  ASSERT_NE(buffer.get(), nullptr);
+  buffer->set_decrypt_config(std::move(decrypt_config));
+  decoder_->SetStream(0, *buffer);
+  EXPECT_EQ(AcceleratedVideoDecoder::kConfigChange, decoder_->Decode());
+  EXPECT_EQ(HEVCPROFILE_MAIN, decoder_->GetProfile());
+  EXPECT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, decoder_->Decode());
+  EXPECT_TRUE(decoder_->Flush());
+}
+
+// kTryAgain from SubmitFrameMetadata() pauses decoding; the next Decode()
+// retries only that call (no new picture is created), and once it succeeds
+// decoding proceeds normally.
+TEST_F(H265DecoderTest, SubmitFrameMetadataRetry) {
+  SetInputFrameFiles({kSpsPps, kFrame0});
+  EXPECT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
+  EXPECT_EQ(gfx::Size(320, 184), decoder_->GetPicSize());
+  EXPECT_EQ(HEVCPROFILE_MAIN, decoder_->GetProfile());
+  EXPECT_EQ(17u, decoder_->GetRequiredNumOfPictures());
+
+  {
+    InSequence sequence;
+    EXPECT_CALL(*accelerator_, CreateH265Picture());
+    EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _))
+        .WillOnce(Return(H265Decoder::H265Accelerator::Status::kTryAgain));
+  }
+  EXPECT_EQ(AcceleratedVideoDecoder::kTryAgain, Decode());
+
+  // Try again, assuming key still not set. Only SubmitFrameMetadata()
+  // should be called again.
+  EXPECT_CALL(*accelerator_, CreateH265Picture()).Times(0);
+  EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _))
+      .WillOnce(Return(H265Decoder::H265Accelerator::Status::kTryAgain));
+  EXPECT_EQ(AcceleratedVideoDecoder::kTryAgain, Decode());
+
+  // Assume key has been provided now, next call to Decode() should proceed.
+  {
+    InSequence sequence;
+    EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _));
+    EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _, _));
+    EXPECT_CALL(*accelerator_, SubmitDecode(HasPoc(0)));
+    EXPECT_CALL(*accelerator_, OutputPicture(HasPoc(0)));
+  }
+  EXPECT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, Decode());
+
+  EXPECT_TRUE(decoder_->Flush());
+}
+
+// kTryAgain from SubmitSlice() pauses decoding; the next Decode() retries
+// only SubmitSlice() (no re-creation of the picture or re-submission of
+// metadata), then decoding proceeds once it succeeds.
+TEST_F(H265DecoderTest, SubmitSliceRetry) {
+  SetInputFrameFiles({kSpsPps, kFrame0});
+  EXPECT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
+  EXPECT_EQ(gfx::Size(320, 184), decoder_->GetPicSize());
+  EXPECT_EQ(HEVCPROFILE_MAIN, decoder_->GetProfile());
+  EXPECT_EQ(17u, decoder_->GetRequiredNumOfPictures());
+
+  {
+    InSequence sequence;
+    EXPECT_CALL(*accelerator_, CreateH265Picture());
+    EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _));
+    EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _, _))
+        .WillOnce(Return(H265Decoder::H265Accelerator::Status::kTryAgain));
+  }
+  EXPECT_EQ(AcceleratedVideoDecoder::kTryAgain, Decode());
+
+  // Try again, assuming key still not set. Only SubmitSlice() should be
+  // called again.
+  EXPECT_CALL(*accelerator_, CreateH265Picture()).Times(0);
+  EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _)).Times(0);
+  EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _, _))
+      .WillOnce(Return(H265Decoder::H265Accelerator::Status::kTryAgain));
+  EXPECT_EQ(AcceleratedVideoDecoder::kTryAgain, Decode());
+
+  // Assume key has been provided now, next call to Decode() should proceed.
+  {
+    InSequence sequence;
+    EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _, _));
+    EXPECT_CALL(*accelerator_, SubmitDecode(HasPoc(0)));
+    EXPECT_CALL(*accelerator_, OutputPicture(HasPoc(0)));
+  }
+  EXPECT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, Decode());
+  EXPECT_TRUE(decoder_->Flush());
+}
+
+// kTryAgain from SubmitDecode() pauses decoding; the next Decode() retries
+// only SubmitDecode(). Once it succeeds, decoding of the following frame
+// continues and both pictures are output.
+TEST_F(H265DecoderTest, SubmitDecodeRetry) {
+  SetInputFrameFiles({kSpsPps, kFrame0, kFrame1});
+  EXPECT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode());
+  EXPECT_EQ(gfx::Size(320, 184), decoder_->GetPicSize());
+  EXPECT_EQ(HEVCPROFILE_MAIN, decoder_->GetProfile());
+  EXPECT_EQ(17u, decoder_->GetRequiredNumOfPictures());
+
+  {
+    InSequence sequence;
+    EXPECT_CALL(*accelerator_, CreateH265Picture());
+    EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _));
+    EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _, _));
+    EXPECT_CALL(*accelerator_, SubmitDecode(HasPoc(0)))
+        .WillOnce(Return(H265Decoder::H265Accelerator::Status::kTryAgain));
+  }
+  EXPECT_EQ(AcceleratedVideoDecoder::kTryAgain, Decode());
+
+  // Try again, assuming key still not set. Only SubmitDecode() should be
+  // called again.
+  EXPECT_CALL(*accelerator_, CreateH265Picture()).Times(0);
+  EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _)).Times(0);
+  EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _, _)).Times(0);
+  EXPECT_CALL(*accelerator_, SubmitDecode(HasPoc(0)))
+      .WillOnce(Return(H265Decoder::H265Accelerator::Status::kTryAgain));
+  EXPECT_EQ(AcceleratedVideoDecoder::kTryAgain, Decode());
+
+  // Assume key has been provided now, next call to Decode() should output
+  // the first frame.
+  {
+    InSequence sequence;
+    EXPECT_CALL(*accelerator_, SubmitDecode(HasPoc(0)));
+    EXPECT_CALL(*accelerator_, CreateH265Picture());
+    EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _));
+    EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _, _));
+    EXPECT_CALL(*accelerator_, SubmitDecode(HasPoc(4)));
+    EXPECT_CALL(*accelerator_, OutputPicture(HasPoc(0)));
+    EXPECT_CALL(*accelerator_, OutputPicture(HasPoc(4)));
+  }
+  EXPECT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, Decode());
+  EXPECT_TRUE(decoder_->Flush());
+}
+
+// kTryAgain from the accelerator's SetStream() makes Decode() return
+// kTryAgain; the next Decode() re-delivers the same buffer and, once
+// SetStream() succeeds, decoding proceeds normally.
+TEST_F(H265DecoderTest, SetStreamRetry) {
+  SetInputFrameFiles({kSpsPps, kFrame0});
+
+  EXPECT_CALL(*accelerator_, SetStream(_, _))
+      .WillOnce(Return(H265Decoder::H265Accelerator::Status::kTryAgain))
+      .WillOnce(Return(H265Decoder::H265Accelerator::Status::kOk))
+      .WillOnce(Return(H265Decoder::H265Accelerator::Status::kOk));
+  EXPECT_EQ(AcceleratedVideoDecoder::kTryAgain, Decode(false));
+
+  EXPECT_EQ(AcceleratedVideoDecoder::kConfigChange, Decode(false));
+  EXPECT_EQ(gfx::Size(320, 184), decoder_->GetPicSize());
+  EXPECT_EQ(HEVCPROFILE_MAIN, decoder_->GetProfile());
+  EXPECT_EQ(17u, decoder_->GetRequiredNumOfPictures());
+
+  {
+    InSequence sequence;
+    EXPECT_CALL(*accelerator_, CreateH265Picture()).Times(1);
+    EXPECT_CALL(*accelerator_, SubmitFrameMetadata(_, _, _, _, _)).Times(1);
+    EXPECT_CALL(*accelerator_, SubmitSlice(_, _, _, _, _, _, _, _, _)).Times(1);
+    EXPECT_CALL(*accelerator_, SubmitDecode(HasPoc(0))).Times(1);
+    EXPECT_CALL(*accelerator_, OutputPicture(HasPoc(0))).Times(1);
+  }
+  EXPECT_EQ(AcceleratedVideoDecoder::kRanOutOfStreamData, Decode());
+  EXPECT_TRUE(decoder_->Flush());
+}
+
+} // namespace media
diff --git a/chromium/media/gpu/h265_dpb.cc b/chromium/media/gpu/h265_dpb.cc
new file mode 100644
index 00000000000..f644bb2e4fc
--- /dev/null
+++ b/chromium/media/gpu/h265_dpb.cc
@@ -0,0 +1,107 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <algorithm>
+
+#include "base/logging.h"
+#include "media/gpu/h265_dpb.h"
+#include "media/video/h265_parser.h"
+
+namespace media {
+
+H265Picture::H265Picture() = default;
+H265Picture::~H265Picture() = default;
+
+H265DPB::H265DPB() = default;
+H265DPB::~H265DPB() = default;
+
+VaapiH265Picture* H265Picture::AsVaapiH265Picture() {
+ return nullptr;
+}
+
+void H265DPB::set_max_num_pics(size_t max_num_pics) {
+ DCHECK_LE(max_num_pics, static_cast<size_t>(kMaxDpbSize));
+ max_num_pics_ = max_num_pics;
+ if (pics_.size() > max_num_pics_)
+ pics_.resize(max_num_pics_);
+}
+
+void H265DPB::Clear() {
+ pics_.clear();
+}
+
+void H265DPB::StorePicture(scoped_refptr<H265Picture> pic,
+ H265Picture::ReferenceType ref) {
+ DCHECK_LT(pics_.size(), max_num_pics_);
+ pic->ref_ = ref;
+ DVLOG(3) << "Adding PicNum: " << pic->pic_order_cnt_val_
+ << " ref: " << static_cast<int>(pic->ref_);
+ pics_.push_back(std::move(pic));
+}
+
+void H265DPB::MarkAllUnusedForReference() {
+ for (const auto& pic : pics_)
+ pic->ref_ = H265Picture::kUnused;
+}
+
+void H265DPB::DeleteUnused() {
+ for (auto it = pics_.begin(); it != pics_.end();) {
+ auto& pic = *it;
+ if ((!pic->pic_output_flag_ || pic->outputted_) &&
+ (pic->ref_ == H265Picture::kUnused)) {
+ std::swap(pic, *(pics_.end() - 1));
+ pics_.pop_back();
+ } else {
+ it++;
+ }
+ }
+}
+
+int H265DPB::GetReferencePicCount() {
+ int count = 0;
+ for (const auto& pic : pics_) {
+ if (pic->ref_ != H265Picture::kUnused)
+ count++;
+ }
+ return count;
+}
+
+scoped_refptr<H265Picture> H265DPB::GetPicByPocAndMark(
+ int poc,
+ H265Picture::ReferenceType ref) {
+ return GetPicByPocMaskedAndMark(poc, 0, ref);
+}
+
+scoped_refptr<H265Picture> H265DPB::GetPicByPocMaskedAndMark(
+ int poc,
+ int mask,
+ H265Picture::ReferenceType ref) {
+ for (const auto& pic : pics_) {
+ if ((mask && (pic->pic_order_cnt_val_ & mask) == poc) ||
+ (!mask && pic->pic_order_cnt_val_ == poc)) {
+ pic->ref_ = ref;
+ return pic;
+ }
+ }
+
+ DVLOG(1) << "Missing " << H265Picture::GetReferenceName(ref)
+ << " ref pic num: " << poc;
+ return nullptr;
+}
+
+void H265DPB::AppendPendingOutputPics(H265Picture::Vector* out) {
+ for (const auto& pic : pics_) {
+ if (pic->pic_output_flag_ && !pic->outputted_)
+ out->push_back(pic);
+ }
+}
+
+void H265DPB::AppendReferencePics(H265Picture::Vector* out) {
+ for (const auto& pic : pics_) {
+ if (pic->ref_ != H265Picture::kUnused)
+ out->push_back(pic);
+ }
+}
+
+} // namespace media \ No newline at end of file
diff --git a/chromium/media/gpu/h265_dpb.h b/chromium/media/gpu/h265_dpb.h
new file mode 100644
index 00000000000..78450ec4c9e
--- /dev/null
+++ b/chromium/media/gpu/h265_dpb.h
@@ -0,0 +1,147 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_H265_DPB_H_
+#define MEDIA_GPU_H265_DPB_H_
+
+#include <vector>
+
+#include "base/memory/ref_counted.h"
+#include "media/base/video_color_space.h"
+#include "media/gpu/codec_picture.h"
+#include "media/gpu/media_gpu_export.h"
+
+namespace media {
+
+class VaapiH265Picture;
+
+// A picture (a frame or a field) in the H.265 spec sense.
+// See spec at http://www.itu.int/rec/T-REC-H.265
+class MEDIA_GPU_EXPORT H265Picture : public CodecPicture {
+ public:
+ using Vector = std::vector<scoped_refptr<H265Picture>>;
+
+ H265Picture();
+
+ H265Picture(const H265Picture&) = delete;
+ H265Picture& operator=(const H265Picture&) = delete;
+
+ virtual VaapiH265Picture* AsVaapiH265Picture();
+
+ enum ReferenceType {
+ kUnused = 0,
+ kShortTermCurrBefore = 1,
+ kShortTermCurrAfter = 2,
+ kShortTermFoll = 3,
+ kLongTermCurr = 4,
+ kLongTermFoll = 5,
+ };
+
+ static std::string GetReferenceName(ReferenceType ref) {
+ if (ref == kUnused)
+ return "Unused";
+ else if (ref == kLongTermCurr || ref == kLongTermFoll)
+ return "LongTerm";
+ else
+ return "ShortTerm";
+ }
+
+ bool IsLongTermRef() const {
+ return ref_ == kLongTermCurr || ref_ == kLongTermFoll;
+ }
+ bool IsShortTermRef() const {
+ return ref_ == kShortTermCurrBefore || ref_ == kShortTermCurrAfter ||
+ ref_ == kShortTermFoll;
+ }
+ bool IsUnused() const { return ref_ == kUnused; }
+
+ // Values calculated per H.265 specification or taken from slice header.
+ // See spec for more details on each (some names have been converted from
+ // CamelCase in spec to Chromium-style names).
+ int nal_unit_type_;
+ bool no_rasl_output_flag_{false};
+ bool no_output_of_prior_pics_flag_{false};
+ bool pic_output_flag_{false};
+ bool valid_for_prev_tid0_pic_{false};
+ int slice_pic_order_cnt_lsb_{0};
+ int pic_order_cnt_msb_{0};
+ int pic_order_cnt_val_{0};
+
+ // Our own state variables.
+ bool irap_pic_;
+ bool first_picture_;
+ bool processed_{false};
+
+ ReferenceType ref_{kUnused};
+
+ bool outputted_{false};
+
+ protected:
+ ~H265Picture() override;
+};
+
+// DPB - Decoded Picture Buffer.
+// Stores decoded pictures that will be used for future display and/or
+// reference.
+class H265DPB {
+ public:
+ H265DPB();
+
+ H265DPB(const H265DPB&) = delete;
+ H265DPB& operator=(const H265DPB&) = delete;
+
+ ~H265DPB();
+
+ void set_max_num_pics(size_t max_num_pics);
+ size_t max_num_pics() const { return max_num_pics_; }
+
+ // Removes all entries from the DPB.
+ void Clear();
+
+  // Stores |pic| in the DPB and marks it with the reference type |ref|
+  // (e.g. used for short term or long term reference, or unused for
+  // reference).
+ void StorePicture(scoped_refptr<H265Picture> pic,
+ H265Picture::ReferenceType ref);
+
+ // Mark all pictures in DPB as unused for reference.
+ void MarkAllUnusedForReference();
+
+  // Removes all pictures from the DPB that are marked unused for reference
+  // and either do not have |pic_output_flag_| set or have already been output.
+ void DeleteUnused();
+
+ // Returns the number of pictures in the DPB that are marked for reference.
+ int GetReferencePicCount();
+
+ // Returns a picture in the DPB which has a POC equal to |poc| and marks it
+ // with |ref| reference type. If not found, returns nullptr.
+ scoped_refptr<H265Picture> GetPicByPocAndMark(int poc,
+ H265Picture::ReferenceType ref);
+
+  // Returns a picture in the DPB whose POC, bitwise-ANDed with |mask|, equals
+  // |poc|, and marks it with |ref| reference type. If not found, returns
+  // nullptr. If |mask| is zero, the POC is compared to |poc| directly.
+ scoped_refptr<H265Picture>
+ GetPicByPocMaskedAndMark(int poc, int mask, H265Picture::ReferenceType ref);
+
+ // Appends to |out| all of the pictures in the DPB that are flagged for output
+  // but have not been output yet.
+ void AppendPendingOutputPics(H265Picture::Vector* out);
+
+ // Appends to |out| all of the pictures in the DPB that are not marked as
+ // unused for reference.
+ void AppendReferencePics(H265Picture::Vector* out);
+
+ size_t size() const { return pics_.size(); }
+ bool IsFull() const { return pics_.size() >= max_num_pics_; }
+
+ private:
+ H265Picture::Vector pics_;
+ size_t max_num_pics_{0};
+};
+
+} // namespace media
+
+#endif // MEDIA_GPU_H265_DPB_H_ \ No newline at end of file
diff --git a/chromium/media/gpu/ipc/service/BUILD.gn b/chromium/media/gpu/ipc/service/BUILD.gn
index fe51b141ed4..e644ece9bce 100644
--- a/chromium/media/gpu/ipc/service/BUILD.gn
+++ b/chromium/media/gpu/ipc/service/BUILD.gn
@@ -38,6 +38,7 @@ target(link_target_type, "service") {
"//media/gpu",
]
deps = [
+ "//build:chromeos_buildflags",
"//gpu/command_buffer/service:gles2",
"//gpu/ipc/service",
"//media:media_buildflags",
diff --git a/chromium/media/gpu/ipc/service/vda_video_decoder.cc b/chromium/media/gpu/ipc/service/vda_video_decoder.cc
index 576d1b990f7..4614f4c577d 100644
--- a/chromium/media/gpu/ipc/service/vda_video_decoder.cc
+++ b/chromium/media/gpu/ipc/service/vda_video_decoder.cc
@@ -12,7 +12,9 @@
#include "base/callback_helpers.h"
#include "base/location.h"
#include "base/logging.h"
+#include "base/memory/ptr_util.h"
#include "build/build_config.h"
+#include "build/chromeos_buildflags.h"
#include "gpu/config/gpu_driver_bug_workarounds.h"
#include "gpu/config/gpu_info.h"
#include "gpu/config/gpu_preferences.h"
@@ -277,7 +279,7 @@ void VdaVideoDecoder::Initialize(const VideoDecoderConfig& config,
// (https://crbug.com/929565). We should support reinitialization for profile
// changes. We limit this support as small as possible for safety.
const bool is_profile_change =
-#if defined(OS_CHROMEOS) && BUILDFLAG(USE_VAAPI)
+#if BUILDFLAG(IS_ASH) && BUILDFLAG(USE_VAAPI)
config_.profile() != config.profile();
#else
false;
diff --git a/chromium/media/gpu/mac/vt_config_util.h b/chromium/media/gpu/mac/vt_config_util.h
index acb58d7d846..ac3d6d0bd2d 100644
--- a/chromium/media/gpu/mac/vt_config_util.h
+++ b/chromium/media/gpu/mac/vt_config_util.h
@@ -13,7 +13,7 @@
#include "media/base/video_color_space.h"
#include "media/gpu/media_gpu_export.h"
#include "media/video/video_decode_accelerator.h"
-#include "ui/gl/hdr_metadata.h"
+#include "ui/gfx/hdr_metadata.h"
namespace media {
@@ -21,10 +21,7 @@ MEDIA_GPU_EXPORT CFMutableDictionaryRef
CreateFormatExtensions(CMVideoCodecType codec_type,
VideoCodecProfile profile,
const VideoColorSpace& color_space,
- base::Optional<gl::HDRMetadata> hdr_metadata);
-
-MEDIA_GPU_EXPORT gfx::ColorSpace GetImageBufferColorSpace(
- CVImageBufferRef image_buffer);
+ base::Optional<gfx::HDRMetadata> hdr_metadata);
} // namespace media
diff --git a/chromium/media/gpu/mac/vt_config_util.mm b/chromium/media/gpu/mac/vt_config_util.mm
index 37bf96fd9ec..4a0da1a0d6b 100644
--- a/chromium/media/gpu/mac/vt_config_util.mm
+++ b/chromium/media/gpu/mac/vt_config_util.mm
@@ -9,7 +9,6 @@
#include <simd/simd.h>
#include "base/mac/foundation_util.h"
-#include "base/no_destructor.h"
namespace {
@@ -173,7 +172,7 @@ CFStringRef GetMatrix(media::VideoColorSpace::MatrixID matrix_id) {
}
}
-void SetContentLightLevelInfo(const gl::HDRMetadata& hdr_metadata,
+void SetContentLightLevelInfo(const gfx::HDRMetadata& hdr_metadata,
NSMutableDictionary<NSString*, id>* extensions) {
if (@available(macos 10.13, *)) {
// This is a SMPTEST2086 Content Light Level Information box.
@@ -200,7 +199,7 @@ void SetContentLightLevelInfo(const gl::HDRMetadata& hdr_metadata,
}
}
-void SetMasteringMetadata(const gl::HDRMetadata& hdr_metadata,
+void SetMasteringMetadata(const gfx::HDRMetadata& hdr_metadata,
NSMutableDictionary<NSString*, id>* extensions) {
if (@available(macos 10.13, *)) {
// This is a SMPTEST2086 Mastering Display Color Volume box.
@@ -249,175 +248,6 @@ void SetMasteringMetadata(const gl::HDRMetadata& hdr_metadata,
}
}
-// Read the value for the key in |key| to CFString and convert it to IdType.
-// Use the list of pairs in |cfstr_id_pairs| to do the conversion (by doing a
-// linear lookup).
-template <typename IdType, typename StringIdPair>
-bool GetImageBufferProperty(CVImageBufferRef image_buffer,
- CFStringRef key,
- const std::vector<StringIdPair>& cfstr_id_pairs,
- IdType* value_as_id) {
- CFStringRef value_as_string = reinterpret_cast<CFStringRef>(
- CVBufferGetAttachment(image_buffer, key, nullptr));
- if (!value_as_string)
- return false;
-
- for (const auto& p : cfstr_id_pairs) {
- if (!CFStringCompare(value_as_string, p.cfstr, 0)) {
- *value_as_id = p.id;
- return true;
- }
- }
-
- return false;
-}
-
-gfx::ColorSpace::PrimaryID GetImageBufferPrimary(
- CVImageBufferRef image_buffer) {
- struct CVImagePrimary {
- const CFStringRef cfstr;
- const gfx::ColorSpace::PrimaryID id;
- };
- static const base::NoDestructor<std::vector<CVImagePrimary>>
- kSupportedPrimaries([] {
- std::vector<CVImagePrimary> supported_primaries;
- supported_primaries.push_back({kCVImageBufferColorPrimaries_ITU_R_709_2,
- gfx::ColorSpace::PrimaryID::BT709});
- supported_primaries.push_back({kCVImageBufferColorPrimaries_EBU_3213,
- gfx::ColorSpace::PrimaryID::BT470BG});
- supported_primaries.push_back({kCVImageBufferColorPrimaries_SMPTE_C,
- gfx::ColorSpace::PrimaryID::SMPTE240M});
- if (@available(macos 10.11, *)) {
- supported_primaries.push_back(
- {kCVImageBufferColorPrimaries_ITU_R_2020,
- gfx::ColorSpace::PrimaryID::BT2020});
- }
- return supported_primaries;
- }());
-
- // The named primaries. Default to BT709.
- auto primary_id = gfx::ColorSpace::PrimaryID::BT709;
- if (!GetImageBufferProperty(image_buffer, kCVImageBufferColorPrimariesKey,
- *kSupportedPrimaries, &primary_id)) {
- DLOG(ERROR) << "Failed to find CVImageBufferRef primaries.";
- }
- return primary_id;
-}
-
-gfx::ColorSpace::TransferID GetImageBufferTransferFn(
- CVImageBufferRef image_buffer,
- double* gamma) {
- struct CVImageTransferFn {
- const CFStringRef cfstr;
- const gfx::ColorSpace::TransferID id;
- };
- static const base::NoDestructor<std::vector<CVImageTransferFn>>
- kSupportedTransferFuncs([] {
- std::vector<CVImageTransferFn> supported_transfer_funcs;
- supported_transfer_funcs.push_back(
- {kCVImageBufferTransferFunction_ITU_R_709_2,
- gfx::ColorSpace::TransferID::BT709_APPLE});
- supported_transfer_funcs.push_back(
- {kCVImageBufferTransferFunction_SMPTE_240M_1995,
- gfx::ColorSpace::TransferID::SMPTE240M});
- supported_transfer_funcs.push_back(
- {kCVImageBufferTransferFunction_UseGamma,
- gfx::ColorSpace::TransferID::CUSTOM});
- if (@available(macos 10.11, *)) {
- supported_transfer_funcs.push_back(
- {kCVImageBufferTransferFunction_ITU_R_2020,
- gfx::ColorSpace::TransferID::BT2020_10});
- }
- if (@available(macos 10.12, *)) {
- supported_transfer_funcs.push_back(
- {kCVImageBufferTransferFunction_SMPTE_ST_428_1,
- gfx::ColorSpace::TransferID::SMPTEST428_1});
- }
- if (@available(macos 10.13, *)) {
- supported_transfer_funcs.push_back(
- {kCVImageBufferTransferFunction_SMPTE_ST_2084_PQ,
- gfx::ColorSpace::TransferID::SMPTEST2084});
- supported_transfer_funcs.push_back(
- {kCVImageBufferTransferFunction_ITU_R_2100_HLG,
- gfx::ColorSpace::TransferID::ARIB_STD_B67});
- supported_transfer_funcs.push_back(
- {kCVImageBufferTransferFunction_sRGB,
- gfx::ColorSpace::TransferID::IEC61966_2_1});
- }
- if (@available(macos 10.14, *)) {
- supported_transfer_funcs.push_back(
- {kCVImageBufferTransferFunction_Linear,
- gfx::ColorSpace::TransferID::LINEAR});
- }
-
- return supported_transfer_funcs;
- }());
-
- // The named transfer function.
- auto transfer_id = gfx::ColorSpace::TransferID::BT709;
- if (!GetImageBufferProperty(image_buffer, kCVImageBufferTransferFunctionKey,
- *kSupportedTransferFuncs, &transfer_id)) {
- DLOG(ERROR) << "Failed to find CVImageBufferRef transfer.";
- }
-
- if (transfer_id != gfx::ColorSpace::TransferID::CUSTOM)
- return transfer_id;
-
- // If we fail to retrieve the gamma parameter, fall back to BT709.
- constexpr auto kDefaultTransferFn = gfx::ColorSpace::TransferID::BT709;
- CFNumberRef gamma_number =
- reinterpret_cast<CFNumberRef>(CVBufferGetAttachment(
- image_buffer, kCVImageBufferGammaLevelKey, nullptr));
- if (!gamma_number) {
- DLOG(ERROR) << "Failed to get CVImageBufferRef gamma level.";
- return kDefaultTransferFn;
- }
-
- // CGFloat is a double on 64-bit systems.
- CGFloat gamma_double = 0;
- if (!CFNumberGetValue(gamma_number, kCFNumberCGFloatType, &gamma_double)) {
- DLOG(ERROR) << "Failed to get CVImageBufferRef gamma level as float.";
- return kDefaultTransferFn;
- }
-
- if (gamma_double == 2.2)
- return gfx::ColorSpace::TransferID::GAMMA22;
- if (gamma_double == 2.8)
- return gfx::ColorSpace::TransferID::GAMMA28;
-
- *gamma = gamma_double;
- return transfer_id;
-}
-
-gfx::ColorSpace::MatrixID GetImageBufferMatrix(CVImageBufferRef image_buffer) {
- struct CVImageMatrix {
- const CFStringRef cfstr;
- gfx::ColorSpace::MatrixID id;
- };
- static const base::NoDestructor<std::vector<CVImageMatrix>>
- kSupportedMatrices([] {
- std::vector<CVImageMatrix> supported_matrices;
- supported_matrices.push_back({kCVImageBufferYCbCrMatrix_ITU_R_709_2,
- gfx::ColorSpace::MatrixID::BT709});
- supported_matrices.push_back({kCVImageBufferYCbCrMatrix_ITU_R_601_4,
- gfx::ColorSpace::MatrixID::SMPTE170M});
- supported_matrices.push_back({kCVImageBufferYCbCrMatrix_SMPTE_240M_1995,
- gfx::ColorSpace::MatrixID::SMPTE240M});
- if (@available(macos 10.11, *)) {
- supported_matrices.push_back({kCVImageBufferYCbCrMatrix_ITU_R_2020,
- gfx::ColorSpace::MatrixID::BT2020_NCL});
- }
- return supported_matrices;
- }());
-
- auto matrix_id = gfx::ColorSpace::MatrixID::INVALID;
- if (!GetImageBufferProperty(image_buffer, kCVImageBufferYCbCrMatrixKey,
- *kSupportedMatrices, &matrix_id)) {
- DLOG(ERROR) << "Failed to find CVImageBufferRef YUV matrix.";
- }
- return matrix_id;
-}
-
void SetVp9CodecConfigurationBox(
media::VideoCodecProfile codec_profile,
const media::VideoColorSpace& color_space,
@@ -469,7 +299,7 @@ CFMutableDictionaryRef CreateFormatExtensions(
CMVideoCodecType codec_type,
VideoCodecProfile profile,
const VideoColorSpace& color_space,
- base::Optional<gl::HDRMetadata> hdr_metadata) {
+ base::Optional<gfx::HDRMetadata> hdr_metadata) {
auto* extensions = [[NSMutableDictionary alloc] init];
SetDictionaryValue(extensions, kCMFormatDescriptionExtension_FormatName,
CMVideoCodecTypeToString(codec_type));
@@ -512,40 +342,4 @@ CFMutableDictionaryRef CreateFormatExtensions(
return base::mac::NSToCFCast(extensions);
}
-gfx::ColorSpace GetImageBufferColorSpace(CVImageBufferRef image_buffer) {
- double gamma;
- auto primary_id = GetImageBufferPrimary(image_buffer);
- auto matrix_id = GetImageBufferMatrix(image_buffer);
- auto transfer_id = GetImageBufferTransferFn(image_buffer, &gamma);
-
- // Use a matrix id that is coherent with a primary id. Useful when we fail to
- // parse the matrix. Previously it was always defaulting to MatrixID::BT709
- // See http://crbug.com/788236.
- if (matrix_id == gfx::ColorSpace::MatrixID::INVALID) {
- if (primary_id == gfx::ColorSpace::PrimaryID::BT470BG)
- matrix_id = gfx::ColorSpace::MatrixID::BT470BG;
- else
- matrix_id = gfx::ColorSpace::MatrixID::BT709;
- }
-
- // It is specified to the decoder to use luma=[16,235] chroma=[16,240] via
- // the kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange.
- //
- // TODO(crbug.com/1103432): We'll probably need support for more than limited
- // range content if we want this to be used for more than video sites.
- auto range_id = gfx::ColorSpace::RangeID::LIMITED;
-
- if (transfer_id == gfx::ColorSpace::TransferID::CUSTOM) {
- // Transfer functions can also be specified as a gamma value.
- skcms_TransferFunction custom_tr_fn = {2.2f, 1, 0, 1, 0, 0, 0};
- if (transfer_id == gfx::ColorSpace::TransferID::CUSTOM)
- custom_tr_fn.g = gamma;
-
- return gfx::ColorSpace(primary_id, gfx::ColorSpace::TransferID::CUSTOM,
- matrix_id, range_id, nullptr, &custom_tr_fn);
- }
-
- return gfx::ColorSpace(primary_id, transfer_id, matrix_id, range_id);
-}
-
} // namespace media
diff --git a/chromium/media/gpu/mac/vt_config_util_unittest.cc b/chromium/media/gpu/mac/vt_config_util_unittest.cc
index 17a95d0f08b..6144e692b52 100644
--- a/chromium/media/gpu/mac/vt_config_util_unittest.cc
+++ b/chromium/media/gpu/mac/vt_config_util_unittest.cc
@@ -11,6 +11,7 @@
#include "base/mac/mac_util.h"
#include "base/mac/sdk_forward_declarations.h"
#include "base/strings/sys_string_conversions.h"
+#include "media/base/mac/color_space_util_mac.h"
#include "media/formats/mp4/box_definitions.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -57,7 +58,7 @@ base::span<const uint8_t> GetNestedDataValue(CFDictionaryRef dict,
base::ScopedCFTypeRef<CVImageBufferRef> CreateCVImageBuffer(
media::VideoColorSpace cs) {
base::ScopedCFTypeRef<CFDictionaryRef> fmt(CreateFormatExtensions(
- kCMVideoCodecType_H264, media::H264PROFILE_MAIN, cs, gl::HDRMetadata()));
+ kCMVideoCodecType_H264, media::H264PROFILE_MAIN, cs, gfx::HDRMetadata()));
base::ScopedCFTypeRef<CVImageBufferRef> image_buffer;
OSStatus err =
@@ -74,6 +75,34 @@ base::ScopedCFTypeRef<CVImageBufferRef> CreateCVImageBuffer(
return image_buffer;
}
+base::ScopedCFTypeRef<CMFormatDescriptionRef> CreateFormatDescription(
+ CFStringRef primaries,
+ CFStringRef transfer,
+ CFStringRef matrix) {
+ base::ScopedCFTypeRef<CFMutableDictionaryRef> extensions(
+ CFDictionaryCreateMutable(kCFAllocatorDefault, 0,
+ &kCFTypeDictionaryKeyCallBacks,
+ &kCFTypeDictionaryValueCallBacks));
+
+ if (primaries) {
+ CFDictionarySetValue(
+ extensions, kCMFormatDescriptionExtension_ColorPrimaries, primaries);
+ }
+ if (transfer) {
+ CFDictionarySetValue(
+ extensions, kCMFormatDescriptionExtension_TransferFunction, transfer);
+ }
+ if (matrix) {
+ CFDictionarySetValue(extensions, kCMFormatDescriptionExtension_YCbCrMatrix,
+ matrix);
+ }
+ base::ScopedCFTypeRef<CMFormatDescriptionRef> result;
+ CMFormatDescriptionCreate(nullptr, kCMMediaType_Video,
+ kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
+ extensions.get(), result.InitializeInto());
+ return result;
+}
+
gfx::ColorSpace ToBT709_APPLE(gfx::ColorSpace cs) {
return gfx::ColorSpace(cs.GetPrimaryID(),
gfx::ColorSpace::TransferID::BT709_APPLE,
@@ -136,7 +165,7 @@ TEST(VTConfigUtil, CreateFormatExtensions_H264_BT2020_PQ) {
VideoColorSpace::TransferID::SMPTEST2084,
VideoColorSpace::MatrixID::BT2020_NCL,
gfx::ColorSpace::RangeID::FULL),
- gl::HDRMetadata()));
+ gfx::HDRMetadata()));
EXPECT_EQ("avc1", GetStrValue(fmt, kCMFormatDescriptionExtension_FormatName));
EXPECT_EQ(24, GetIntValue(fmt, kCMFormatDescriptionExtension_Depth));
@@ -160,7 +189,7 @@ TEST(VTConfigUtil, CreateFormatExtensions_H264_BT2020_HLG) {
VideoColorSpace::TransferID::ARIB_STD_B67,
VideoColorSpace::MatrixID::BT2020_NCL,
gfx::ColorSpace::RangeID::FULL),
- gl::HDRMetadata()));
+ gfx::HDRMetadata()));
EXPECT_EQ("avc1", GetStrValue(fmt, kCMFormatDescriptionExtension_FormatName));
EXPECT_EQ(24, GetIntValue(fmt, kCMFormatDescriptionExtension_Depth));
@@ -179,7 +208,7 @@ TEST(VTConfigUtil, CreateFormatExtensions_H264_BT2020_HLG) {
TEST(VTConfigUtil, CreateFormatExtensions_HDRMetadata) {
// Values from real YouTube HDR content.
- gl::HDRMetadata hdr_meta;
+ gfx::HDRMetadata hdr_meta;
hdr_meta.max_content_light_level = 1000;
hdr_meta.max_frame_average_light_level = 600;
auto& mastering = hdr_meta.mastering_metadata;
@@ -364,4 +393,26 @@ TEST(VTConfigUtil, GetImageBufferColorSpace_BT2020_HLG) {
}
}
+TEST(VTConfigUtil, FormatDescriptionInvalid) {
+ if (__builtin_available(macos 10.11, *)) {
+ auto format_descriptor =
+ CreateFormatDescription(CFSTR("Cows"), CFSTR("Go"), CFSTR("Moo"));
+ ASSERT_TRUE(format_descriptor);
+ auto cs = GetFormatDescriptionColorSpace(format_descriptor);
+ EXPECT_EQ(gfx::ColorSpace::CreateREC709(), cs);
+ }
+}
+
+TEST(VTConfigUtil, FormatDescriptionBT709) {
+ if (__builtin_available(macos 10.11, *)) {
+ auto format_descriptor = CreateFormatDescription(
+ kCMFormatDescriptionColorPrimaries_ITU_R_709_2,
+ kCMFormatDescriptionTransferFunction_ITU_R_709_2,
+ kCMFormatDescriptionYCbCrMatrix_ITU_R_709_2);
+ ASSERT_TRUE(format_descriptor);
+ auto cs = GetFormatDescriptionColorSpace(format_descriptor);
+ EXPECT_EQ(ToBT709_APPLE(gfx::ColorSpace::CreateREC709()), cs);
+ }
+}
+
} // namespace media
diff --git a/chromium/media/gpu/mac/vt_video_decode_accelerator_mac.cc b/chromium/media/gpu/mac/vt_video_decode_accelerator_mac.cc
index 33c5513744a..09e92feccf6 100644
--- a/chromium/media/gpu/mac/vt_video_decode_accelerator_mac.cc
+++ b/chromium/media/gpu/mac/vt_video_decode_accelerator_mac.cc
@@ -42,6 +42,7 @@
#include "gpu/command_buffer/service/shared_image_factory.h"
#include "gpu/ipc/service/shared_image_stub.h"
#include "media/base/limits.h"
+#include "media/base/mac/color_space_util_mac.h"
#include "media/base/media_switches.h"
#include "media/filters/vp9_parser.h"
#include "media/gpu/mac/vp9_super_frame_bitstream_filter.h"
@@ -194,7 +195,7 @@ base::ScopedCFTypeRef<CMFormatDescriptionRef> CreateVideoFormatH264(
base::ScopedCFTypeRef<CMFormatDescriptionRef> CreateVideoFormatVP9(
media::VideoColorSpace color_space,
media::VideoCodecProfile profile,
- base::Optional<gl::HDRMetadata> hdr_metadata,
+ base::Optional<gfx::HDRMetadata> hdr_metadata,
const gfx::Size& coded_size) {
base::ScopedCFTypeRef<CFMutableDictionaryRef> format_config(
CreateFormatExtensions(kCMVideoCodecType_VP9, profile, color_space,
@@ -487,8 +488,10 @@ bool VTVideoDecodeAccelerator::FrameOrder::operator()(
VTVideoDecodeAccelerator::VTVideoDecodeAccelerator(
const GpuVideoDecodeGLClient& gl_client,
+ const gpu::GpuDriverBugWorkarounds& workarounds,
MediaLog* media_log)
: gl_client_(gl_client),
+ workarounds_(workarounds),
media_log_(media_log),
gpu_task_runner_(base::ThreadTaskRunnerHandle::Get()),
decoder_thread_("VTDecoderThread"),
@@ -600,7 +603,7 @@ bool VTVideoDecodeAccelerator::Initialize(const Config& config,
}
static const base::NoDestructor<VideoDecodeAccelerator::SupportedProfiles>
- kActualSupportedProfiles(GetSupportedProfiles());
+ kActualSupportedProfiles(GetSupportedProfiles(workarounds_));
if (std::find_if(kActualSupportedProfiles->begin(),
kActualSupportedProfiles->end(), [config](const auto& p) {
return p.profile == config.profile;
@@ -1543,13 +1546,22 @@ bool VTVideoDecodeAccelerator::SendFrame(const Frame& frame) {
gl_params.is_cleared = true;
gpu::SharedImageBackingGLCommon::UnpackStateAttribs gl_attribs;
+ // A GL texture id is needed to create the legacy mailbox, which requires
+ // that the GL context be made current.
+ const bool kCreateLegacyMailbox = true;
+ if (!gl_client_.make_context_current.Run()) {
+ DLOG(ERROR) << "Failed to make context current";
+ NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR);
+ return false;
+ }
+
auto shared_image = std::make_unique<gpu::SharedImageBackingGLImage>(
gl_image, mailbox, viz_resource_format, frame.image_size, color_space,
kTopLeft_GrSurfaceOrigin, kOpaque_SkAlphaType, shared_image_usage,
gl_params, gl_attribs, gl_client_.is_passthrough);
const bool success = shared_image_stub->factory()->RegisterBacking(
- std::move(shared_image), /* legacy_mailbox */ true);
+ std::move(shared_image), kCreateLegacyMailbox);
if (!success) {
DLOG(ERROR) << "Failed to register shared image";
NotifyError(PLATFORM_FAILURE, SFT_PLATFORM_ERROR);
@@ -1691,13 +1703,13 @@ bool VTVideoDecodeAccelerator::TryToSetupDecodeOnSeparateThread(
}
bool VTVideoDecodeAccelerator::SupportsSharedImagePictureBuffers() const {
- // TODO(https://crbug.com/1108909): Enable shared image use on macOS.
- return false;
+ return true;
}
// static
VideoDecodeAccelerator::SupportedProfiles
-VTVideoDecodeAccelerator::GetSupportedProfiles() {
+VTVideoDecodeAccelerator::GetSupportedProfiles(
+ const gpu::GpuDriverBugWorkarounds& workarounds) {
SupportedProfiles profiles;
if (!InitializeVideoToolbox())
return profiles;
@@ -1705,6 +1717,8 @@ VTVideoDecodeAccelerator::GetSupportedProfiles() {
for (const auto& supported_profile : kSupportedProfiles) {
if (supported_profile == VP9PROFILE_PROFILE0 ||
supported_profile == VP9PROFILE_PROFILE2) {
+ if (workarounds.disable_accelerated_vp9_decode)
+ continue;
if (!base::mac::IsAtLeastOS11())
continue;
if (!base::FeatureList::IsEnabled(kVideoToolboxVp9Decoding))
diff --git a/chromium/media/gpu/mac/vt_video_decode_accelerator_mac.h b/chromium/media/gpu/mac/vt_video_decode_accelerator_mac.h
index 5ec5bed52eb..9a9c95f7ae6 100644
--- a/chromium/media/gpu/mac/vt_video_decode_accelerator_mac.h
+++ b/chromium/media/gpu/mac/vt_video_decode_accelerator_mac.h
@@ -17,6 +17,7 @@
#include "base/threading/thread.h"
#include "base/threading/thread_checker.h"
#include "base/trace_event/memory_dump_provider.h"
+#include "gpu/config/gpu_driver_bug_workarounds.h"
#include "media/base/media_log.h"
#include "media/gpu/gpu_video_decode_accelerator_helpers.h"
#include "media/gpu/media_gpu_export.h"
@@ -44,6 +45,7 @@ class VTVideoDecodeAccelerator : public VideoDecodeAccelerator,
public base::trace_event::MemoryDumpProvider {
public:
VTVideoDecodeAccelerator(const GpuVideoDecodeGLClient& gl_client_,
+ const gpu::GpuDriverBugWorkarounds& workarounds,
MediaLog* media_log);
~VTVideoDecodeAccelerator() override;
@@ -74,7 +76,8 @@ class VTVideoDecodeAccelerator : public VideoDecodeAccelerator,
OSStatus status,
CVImageBufferRef image_buffer);
- static VideoDecodeAccelerator::SupportedProfiles GetSupportedProfiles();
+ static VideoDecodeAccelerator::SupportedProfiles GetSupportedProfiles(
+ const gpu::GpuDriverBugWorkarounds& workarounds);
private:
// Logged to UMA, so never reuse values. Make sure to update
@@ -213,6 +216,7 @@ class VTVideoDecodeAccelerator : public VideoDecodeAccelerator,
// GPU thread state.
//
const GpuVideoDecodeGLClient gl_client_;
+ const gpu::GpuDriverBugWorkarounds workarounds_;
MediaLog* media_log_;
VideoDecodeAccelerator::Client* client_ = nullptr;
diff --git a/chromium/media/gpu/mac/vt_video_encode_accelerator_mac.cc b/chromium/media/gpu/mac/vt_video_encode_accelerator_mac.cc
index 9566c166fa0..40301bf2c28 100644
--- a/chromium/media/gpu/mac/vt_video_encode_accelerator_mac.cc
+++ b/chromium/media/gpu/mac/vt_video_encode_accelerator_mac.cc
@@ -146,7 +146,8 @@ bool VTVideoEncodeAccelerator::Initialize(const Config& config,
// Clients are expected to call Flush() before reinitializing the encoder.
DCHECK_EQ(pending_encodes_, 0);
- if (config.input_format != PIXEL_FORMAT_I420) {
+ if (config.input_format != PIXEL_FORMAT_I420 &&
+ config.input_format != PIXEL_FORMAT_NV12) {
DLOG(ERROR) << "Input format not supported= "
<< VideoPixelFormatToString(config.input_format);
return false;
diff --git a/chromium/media/gpu/test/BUILD.gn b/chromium/media/gpu/test/BUILD.gn
index 92f16e166e7..12a8e2a9d39 100644
--- a/chromium/media/gpu/test/BUILD.gn
+++ b/chromium/media/gpu/test/BUILD.gn
@@ -2,6 +2,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import("//build/config/chromeos/ui_mode.gni")
import("//build/config/ui.gni")
import("//media/gpu/args.gni")
@@ -39,6 +40,7 @@ source_set("frame_validator") {
deps = [
":helpers",
":test_helpers",
+ "//build:chromeos_buildflags",
"//media/gpu",
]
}
@@ -51,6 +53,7 @@ source_set("frame_file_writer") {
]
deps = [
":helpers",
+ "//build:chromeos_buildflags",
"//media/gpu",
"//ui/gfx/codec:codec",
]
@@ -153,6 +156,7 @@ static_library("video_player_test_environment") {
":helpers",
":video_player",
":video_test_environment",
+ "//build:chromeos_buildflags",
"//media/gpu",
]
}
@@ -204,6 +208,7 @@ if (use_vaapi || use_v4l2_codec) {
]
deps = [
":helpers",
+ "//build:chromeos_buildflags",
"//media:test_support",
"//media/gpu",
"//media/gpu/chromeos",
@@ -214,7 +219,7 @@ if (use_vaapi || use_v4l2_codec) {
}
}
-if (is_chromeos || use_vaapi) {
+if (is_ash) {
static_library("local_gpu_memory_buffer_manager") {
testonly = true
sources = [
diff --git a/chromium/media/gpu/v4l2/BUILD.gn b/chromium/media/gpu/v4l2/BUILD.gn
index 9a999e43a20..7b5b8bfb21c 100644
--- a/chromium/media/gpu/v4l2/BUILD.gn
+++ b/chromium/media/gpu/v4l2/BUILD.gn
@@ -2,6 +2,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import("//build/config/chromeos/ui_mode.gni")
import("//build/config/features.gni")
import("//build/config/ui.gni")
import("//media/gpu/args.gni")
@@ -108,7 +109,7 @@ source_set("v4l2") {
deps += [ ":libv4l2_stubs" ]
}
- if (is_chromeos) {
+ if (is_ash) {
sources += [
"v4l2_jpeg_encode_accelerator.cc",
"v4l2_jpeg_encode_accelerator.h",
diff --git a/chromium/media/gpu/v4l2/buffer_affinity_tracker.cc b/chromium/media/gpu/v4l2/buffer_affinity_tracker.cc
index 2e1524b2a91..fdb62c9252a 100644
--- a/chromium/media/gpu/v4l2/buffer_affinity_tracker.cc
+++ b/chromium/media/gpu/v4l2/buffer_affinity_tracker.cc
@@ -13,6 +13,8 @@ BufferAffinityTracker::BufferAffinityTracker(size_t nb_buffers) {
resize(0);
}
+BufferAffinityTracker::~BufferAffinityTracker() = default;
+
void BufferAffinityTracker::resize(size_t nb_buffers) {
base::AutoLock lock(lock_);
@@ -47,4 +49,4 @@ base::Optional<size_t> BufferAffinityTracker::get_buffer_for_id(
return v4l2_id;
}
-} // namespace media \ No newline at end of file
+} // namespace media
diff --git a/chromium/media/gpu/v4l2/buffer_affinity_tracker.h b/chromium/media/gpu/v4l2/buffer_affinity_tracker.h
index 3b3c5723539..0516efaf778 100644
--- a/chromium/media/gpu/v4l2/buffer_affinity_tracker.h
+++ b/chromium/media/gpu/v4l2/buffer_affinity_tracker.h
@@ -27,6 +27,7 @@ namespace media {
class BufferAffinityTracker {
public:
explicit BufferAffinityTracker(size_t nb_buffers);
+ ~BufferAffinityTracker();
size_t nb_buffers() const { return nb_buffers_; }
// Resize this tracker and reset its state.
void resize(size_t nb_buffers);
diff --git a/chromium/media/gpu/v4l2/v4l2_device.cc b/chromium/media/gpu/v4l2/v4l2_device.cc
index 9dd1c87aa3a..bd2a23c8775 100644
--- a/chromium/media/gpu/v4l2/v4l2_device.cc
+++ b/chromium/media/gpu/v4l2/v4l2_device.cc
@@ -1204,7 +1204,7 @@ base::Optional<V4L2WritableBufferRef> V4L2Queue::GetFreeBufferForFrame(
}
gfx::GenericSharedMemoryId id;
- if (auto gmb = frame.GetGpuMemoryBuffer()) {
+ if (auto* gmb = frame.GetGpuMemoryBuffer()) {
id = gmb->GetId();
} else if (frame.HasDmaBufs()) {
id = gfx::GenericSharedMemoryId(frame.DmabufFds()[0].get());
diff --git a/chromium/media/gpu/v4l2/v4l2_device_poller.cc b/chromium/media/gpu/v4l2/v4l2_device_poller.cc
index 4981bbc29de..8c5e0751eed 100644
--- a/chromium/media/gpu/v4l2/v4l2_device_poller.cc
+++ b/chromium/media/gpu/v4l2/v4l2_device_poller.cc
@@ -133,8 +133,8 @@ void V4L2DevicePoller::DevicePollTask() {
}
DVLOGF(4) << "Poll returned, calling event callback.";
- client_task_runner_->PostTask(FROM_HERE,
- base::Bind(event_callback_, event_pending));
+ client_task_runner_->PostTask(
+ FROM_HERE, base::BindRepeating(event_callback_, event_pending));
}
}
diff --git a/chromium/media/gpu/v4l2/v4l2_image_processor_backend.cc b/chromium/media/gpu/v4l2/v4l2_image_processor_backend.cc
index 00b6119c8e1..ae7099b966d 100644
--- a/chromium/media/gpu/v4l2/v4l2_image_processor_backend.cc
+++ b/chromium/media/gpu/v4l2/v4l2_image_processor_backend.cc
@@ -17,8 +17,8 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback.h"
+#include "base/callback_helpers.h"
#include "base/numerics/safe_conversions.h"
#include "base/task/post_task.h"
#include "base/task/task_traits.h"
@@ -96,13 +96,23 @@ void FillV4L2BufferByGpuMemoryBufferHandle(
}
bool AllocateV4L2Buffers(V4L2Queue* queue,
- size_t num_buffers,
+ const size_t num_buffers,
v4l2_memory memory_type) {
DCHECK(queue);
- if (queue->AllocateBuffers(num_buffers, memory_type) == 0u)
+
+ size_t requested_buffers = num_buffers;
+
+ // If we are using DMABUFs, then we will try to keep using the same V4L2
+ // buffer for a given input or output frame. In that case, allocate as many
+ // V4L2 buffers as we can to avoid running out of them. Unused buffers won't
+ // use backed memory and are thus virtually free.
+ if (memory_type == V4L2_MEMORY_DMABUF)
+ requested_buffers = VIDEO_MAX_FRAME;
+
+ if (queue->AllocateBuffers(requested_buffers, memory_type) == 0u)
return false;
- if (queue->AllocatedBuffersCount() != num_buffers) {
+ if (queue->AllocatedBuffersCount() < num_buffers) {
VLOGF(1) << "Failed to allocate buffers. Allocated number="
<< queue->AllocatedBuffersCount()
<< ", Requested number=" << num_buffers;
@@ -613,8 +623,26 @@ void V4L2ImageProcessorBackend::ProcessJobsTask() {
}
// We need one input and one output buffer to schedule the job
- auto input_buffer = input_queue_->GetFreeBuffer();
- auto output_buffer = output_queue_->GetFreeBuffer();
+ base::Optional<V4L2WritableBufferRef> input_buffer;
+ // If we are using DMABUF frames, try to always obtain the same V4L2 buffer.
+ if (input_memory_type_ == V4L2_MEMORY_DMABUF) {
+ const VideoFrame& input_frame =
+ *(input_job_queue_.front()->input_frame.get());
+ input_buffer = input_queue_->GetFreeBufferForFrame(input_frame);
+ }
+ if (!input_buffer)
+ input_buffer = input_queue_->GetFreeBuffer();
+
+ base::Optional<V4L2WritableBufferRef> output_buffer;
+ // If we are using DMABUF frames, try to always obtain the same V4L2 buffer.
+ if (output_memory_type_ == V4L2_MEMORY_DMABUF) {
+ const VideoFrame& output_frame =
+ *(input_job_queue_.front()->output_frame.get());
+ output_buffer = output_queue_->GetFreeBufferForFrame(output_frame);
+ }
+ if (!output_buffer)
+ output_buffer = output_queue_->GetFreeBuffer();
+
if (!input_buffer || !output_buffer)
break;
@@ -636,13 +664,15 @@ void V4L2ImageProcessorBackend::Reset() {
bool V4L2ImageProcessorBackend::ApplyCrop(const gfx::Rect& visible_rect,
enum v4l2_buf_type type) {
- struct v4l2_rect rect {};
+ struct v4l2_rect rect;
+ memset(&rect, 0, sizeof(rect));
rect.left = visible_rect.x();
rect.top = visible_rect.y();
rect.width = visible_rect.width();
rect.height = visible_rect.height();
- struct v4l2_selection selection_arg {};
+ struct v4l2_selection selection_arg;
+ memset(&selection_arg, 0, sizeof(selection_arg));
// Multiplanar buffer types are messed up in S_SELECTION API, so all drivers
// don't necessarily work with MPLANE types. This issue is resolved with
// kernel 4.13. As we use kernel < 4.13 today, we use single planar buffer
@@ -658,7 +688,8 @@ bool V4L2ImageProcessorBackend::ApplyCrop(const gfx::Rect& visible_rect,
rect = selection_arg.r;
} else {
DVLOGF(2) << "Fallback to VIDIOC_S/G_CROP";
- struct v4l2_crop crop {};
+ struct v4l2_crop crop;
+ memset(&crop, 0, sizeof(crop));
crop.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
crop.c = rect;
if (device_->Ioctl(VIDIOC_S_CROP, &crop) != 0) {
@@ -683,7 +714,8 @@ bool V4L2ImageProcessorBackend::ReconfigureV4L2Format(
const gfx::Size& size,
const gfx::Rect& visible_rect,
enum v4l2_buf_type type) {
- v4l2_format format{};
+ struct v4l2_format format;
+ memset(&format, 0, sizeof(format));
format.type = type;
if (device_->Ioctl(VIDIOC_G_FMT, &format) != 0) {
VPLOGF(1) << "ioctl() failed: VIDIOC_G_FMT";
diff --git a/chromium/media/gpu/v4l2/v4l2_jpeg_encode_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_jpeg_encode_accelerator.cc
index 384bcb35b22..0998c6980b6 100644
--- a/chromium/media/gpu/v4l2/v4l2_jpeg_encode_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_jpeg_encode_accelerator.cc
@@ -12,7 +12,7 @@
#include <memory>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/numerics/ranges.h"
#include "base/threading/thread_task_runner_handle.h"
#include "media/gpu/chromeos/fourcc.h"
@@ -962,11 +962,17 @@ bool V4L2JpegEncodeAccelerator::EncodedInstanceDmaBuf::Initialize() {
return false;
}
- output_buffer_pixelformat_ = V4L2_PIX_FMT_JPEG_RAW;
+ // We prefer V4L2_PIX_FMT_JPEG because V4L2_PIX_FMT_JPEG_RAW was rejected
+ // upstream.
+ output_buffer_pixelformat_ = V4L2_PIX_FMT_JPEG;
if (!device_->Open(V4L2Device::Type::kJpegEncoder,
output_buffer_pixelformat_)) {
- VLOGF(1) << "Failed to open device";
- return false;
+ output_buffer_pixelformat_ = V4L2_PIX_FMT_JPEG_RAW;
+ if (!device_->Open(V4L2Device::Type::kJpegEncoder,
+ output_buffer_pixelformat_)) {
+ VLOGF(1) << "Failed to open device";
+ return false;
+ }
}
// Capabilities check.
@@ -1147,9 +1153,11 @@ bool V4L2JpegEncodeAccelerator::EncodedInstanceDmaBuf::SetUpJpegParameters(
struct v4l2_ext_controls ctrls;
struct v4l2_ext_control ctrl;
+ struct v4l2_query_ext_ctrl queryctrl;
memset(&ctrls, 0, sizeof(ctrls));
memset(&ctrl, 0, sizeof(ctrl));
+ memset(&queryctrl, 0, sizeof(queryctrl));
ctrls.ctrl_class = V4L2_CTRL_CLASS_JPEG;
ctrls.controls = &ctrl;
@@ -1176,6 +1184,22 @@ bool V4L2JpegEncodeAccelerator::EncodedInstanceDmaBuf::SetUpJpegParameters(
PrepareJpegMarkers(coded_size);
break;
+ case V4L2_PIX_FMT_JPEG:
+ queryctrl.id = V4L2_CID_JPEG_COMPRESSION_QUALITY;
+ queryctrl.type = V4L2_CTRL_TYPE_INTEGER;
+ IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERY_EXT_CTRL, &queryctrl);
+
+ // interpolate the quality value
+ // Map quality value from range 1-100 to min-max.
+ quality = queryctrl.minimum +
+ (quality - 1) * (queryctrl.maximum - queryctrl.minimum) / 99;
+ ctrl.id = V4L2_CID_JPEG_COMPRESSION_QUALITY;
+ ctrl.value = quality;
+ VLOG(1) << "JPEG Quality: max:" << queryctrl.maximum
+ << ", min:" << queryctrl.minimum << ", value:" << quality;
+ IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_EXT_CTRLS, &ctrls);
+ break;
+
default:
NOTREACHED();
}
@@ -1240,7 +1264,9 @@ bool V4L2JpegEncodeAccelerator::EncodedInstanceDmaBuf::SetInputBufferFormat(
format.fmt.pix_mp.num_planes = kMaxNV12Plane;
format.fmt.pix_mp.pixelformat = input_pix_fmt;
format.fmt.pix_mp.field = V4L2_FIELD_ANY;
- format.fmt.pix_mp.width = coded_size.width();
+ // set the input buffer resolution with padding and use selection API to
+ // crop the coded size.
+ format.fmt.pix_mp.width = input_layout.planes()[0].stride;
format.fmt.pix_mp.height = coded_size.height();
auto num_planes = input_layout.num_planes();
@@ -1257,7 +1283,6 @@ bool V4L2JpegEncodeAccelerator::EncodedInstanceDmaBuf::SetInputBufferFormat(
// Save V4L2 returned values.
input_buffer_pixelformat_ = format.fmt.pix_mp.pixelformat;
input_buffer_num_planes_ = format.fmt.pix_mp.num_planes;
- input_buffer_height_ = format.fmt.pix_mp.height;
break;
}
}
@@ -1267,13 +1292,49 @@ bool V4L2JpegEncodeAccelerator::EncodedInstanceDmaBuf::SetInputBufferFormat(
return false;
}
- if (format.fmt.pix_mp.width != static_cast<uint32_t>(coded_size.width()) ||
- format.fmt.pix_mp.height != static_cast<uint32_t>(coded_size.height())) {
- VLOGF(1) << "Width " << coded_size.width() << "->"
- << format.fmt.pix_mp.width << ",Height " << coded_size.height()
- << "->" << format.fmt.pix_mp.height;
+ // It can't allow different width.
+ if (format.fmt.pix_mp.width !=
+ static_cast<uint32_t>(input_layout.planes()[0].stride)) {
+ LOG(WARNING) << "Different stride:" << format.fmt.pix_mp.width
+ << "!=" << input_layout.planes()[0].stride;
return false;
}
+
+ // We can allow our buffer to have larger height than encoder's requirement
+ // because we set the 2nd plane by data_offset now.
+ if (format.fmt.pix_mp.height > static_cast<uint32_t>(coded_size.height())) {
+ if (input_buffer_pixelformat_ == V4L2_PIX_FMT_NV12M) {
+ // Calculate the real buffer height of the DMA buffer from minigbm.
+ uint32_t height_with_padding =
+ input_layout.planes()[0].size / input_layout.planes()[0].stride;
+ if (format.fmt.pix_mp.height > height_with_padding) {
+ LOG(WARNING) << "Encoder requires larger height:"
+ << format.fmt.pix_mp.height << ">" << height_with_padding;
+ return false;
+ }
+ } else {
+ LOG(WARNING) << "Encoder requires larger height:"
+ << format.fmt.pix_mp.height << ">" << coded_size.height();
+ return false;
+ }
+ }
+
+ if ((uint32_t)coded_size.width() != format.fmt.pix_mp.width ||
+ (uint32_t)coded_size.height() != format.fmt.pix_mp.height) {
+ v4l2_selection selection = {};
+ selection.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
+ selection.target = V4L2_SEL_TGT_CROP;
+ selection.flags = V4L2_SEL_FLAG_GE | V4L2_SEL_FLAG_LE;
+ selection.r.left = 0;
+ selection.r.top = 0;
+ selection.r.width = coded_size.width();
+ selection.r.height = coded_size.height();
+ if (device_->Ioctl(VIDIOC_S_SELECTION, &selection) != 0) {
+ LOG(WARNING) << "VIDIOC_S_SELECTION Fail";
+ return false;
+ }
+ }
+
return true;
}
@@ -1295,6 +1356,7 @@ bool V4L2JpegEncodeAccelerator::EncodedInstanceDmaBuf::SetOutputBufferFormat(
format.fmt.pix_mp.height = coded_size.height();
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
DCHECK_EQ(format.fmt.pix_mp.pixelformat, output_buffer_pixelformat_);
+ output_buffer_sizeimage_ = format.fmt.pix_mp.plane_fmt[0].sizeimage;
return true;
}
@@ -1534,10 +1596,32 @@ size_t V4L2JpegEncodeAccelerator::EncodedInstanceDmaBuf::FinalizeJpegImage(
0xFF, JPEG_APP1, static_cast<uint8_t>(exif_segment_size / 256),
static_cast<uint8_t>(exif_segment_size % 256)};
- // Move compressed data first.
- size_t compressed_data_offset = sizeof(kJpegStart) + sizeof(kAppSegment) +
- exif_buffer_size + jpeg_markers_.size();
- memmove(dst_ptr + compressed_data_offset, dst_ptr, buffer_size);
+ if (output_buffer_pixelformat_ == V4L2_PIX_FMT_JPEG_RAW) {
+ // Move compressed data first.
+ size_t compressed_data_offset = sizeof(kJpegStart) + sizeof(kAppSegment) +
+ exif_buffer_size + jpeg_markers_.size();
+ if (buffer_size + compressed_data_offset > output_buffer_sizeimage_) {
+ LOG(WARNING) << "JPEG buffer is too small for the EXIF metadata";
+ return 0;
+ }
+ memmove(dst_ptr + compressed_data_offset, dst_ptr, buffer_size);
+ } else if (output_buffer_pixelformat_ == V4L2_PIX_FMT_JPEG) {
+ // Move data after SOI and APP0 marker for exif room.
+ // The JPEG from V4L2_PIX_FMT_JPEG is
+ // SOI-APP0-DQT-marker1-marker2-...-markerN-compressed stream-EOI
+ // |......| <- src_data_offset = len(SOI) + len(APP0)
+ // |...................| <- data_offset = len(SOI) + len(APP1)
+ size_t data_offset =
+ sizeof(kJpegStart) + sizeof(kAppSegment) + exif_buffer_size;
+ size_t app0_length = 2 + ((dst_ptr[4] << 16) | dst_ptr[5]);
+ size_t src_data_offset = sizeof(kJpegStart) + app0_length;
+ buffer_size -= src_data_offset;
+ if (buffer_size + data_offset > output_buffer_sizeimage_) {
+ LOG(WARNING) << "JPEG buffer is too small for the EXIF metadata";
+ return 0;
+ }
+ memmove(dst_ptr + data_offset, dst_ptr + src_data_offset, buffer_size);
+ }
memcpy(dst_ptr, kJpegStart, sizeof(kJpegStart));
idx += sizeof(kJpegStart);
@@ -1545,7 +1629,10 @@ size_t V4L2JpegEncodeAccelerator::EncodedInstanceDmaBuf::FinalizeJpegImage(
idx += sizeof(kAppSegment);
memcpy(dst_ptr + idx, exif_buffer, exif_buffer_size);
idx += exif_buffer_size;
- } else {
+ } else if (output_buffer_pixelformat_ == V4L2_PIX_FMT_JPEG_RAW) {
+ // For no exif_shm we don't need to do anything for V4L2_PIX_FMT_JPEG.
+ // So we only need to know if the format is V4L2_PIX_FMT_JPEG_RAW.
+
// Application Segment - JFIF standard 1.01.
static const uint8_t kAppSegment[] = {
0xFF, JPEG_APP0, 0x00,
@@ -1595,6 +1682,10 @@ size_t V4L2JpegEncodeAccelerator::EncodedInstanceDmaBuf::FinalizeJpegImage(
}
break;
+ case V4L2_PIX_FMT_JPEG:
+ idx += buffer_size;
+ break;
+
default:
NOTREACHED() << "Unsupported output pixel format";
}
diff --git a/chromium/media/gpu/v4l2/v4l2_jpeg_encode_accelerator.h b/chromium/media/gpu/v4l2/v4l2_jpeg_encode_accelerator.h
index 8fe97d7f59f..f025ca06636 100644
--- a/chromium/media/gpu/v4l2/v4l2_jpeg_encode_accelerator.h
+++ b/chromium/media/gpu/v4l2/v4l2_jpeg_encode_accelerator.h
@@ -358,8 +358,8 @@ class MEDIA_GPU_EXPORT V4L2JpegEncodeAccelerator
// Pixel format of output buffer.
uint32_t output_buffer_pixelformat_;
- // Height of input buffer returned by driver.
- uint32_t input_buffer_height_;
+ // sizeimage of output buffer.
+ uint32_t output_buffer_sizeimage_;
// JPEG Quantization table for V4L2_PIX_FMT_JPEG_RAW.
JpegQuantizationTable quantization_table_[2];
diff --git a/chromium/media/gpu/v4l2/v4l2_mjpeg_decode_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_mjpeg_decode_accelerator.cc
index 9be3d4857bf..e9d961454d6 100644
--- a/chromium/media/gpu/v4l2/v4l2_mjpeg_decode_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_mjpeg_decode_accelerator.cc
@@ -15,7 +15,6 @@
#include "base/big_endian.h"
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback_helpers.h"
#include "base/files/scoped_file.h"
#include "base/numerics/safe_conversions.h"
diff --git a/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc
index 5348cafe3c9..1df3dff74fc 100644
--- a/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_slice_video_decode_accelerator.cc
@@ -17,7 +17,6 @@
#include <memory>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback.h"
#include "base/callback_helpers.h"
#include "base/command_line.h"
diff --git a/chromium/media/gpu/v4l2/v4l2_vda_helpers.cc b/chromium/media/gpu/v4l2/v4l2_vda_helpers.cc
index 91bed939199..ca557f5b5b6 100644
--- a/chromium/media/gpu/v4l2/v4l2_vda_helpers.cc
+++ b/chromium/media/gpu/v4l2/v4l2_vda_helpers.cc
@@ -6,7 +6,7 @@
#include "base/bind.h"
#include "media/base/color_plane_layout.h"
-#include "media/base/video_decoder_config.h"
+#include "media/base/video_codecs.h"
#include "media/gpu/chromeos/fourcc.h"
#include "media/gpu/macros.h"
#include "media/gpu/v4l2/v4l2_device.h"
@@ -231,6 +231,8 @@ bool H264InputBufferFragmentSplitter::AdvanceFrameFragment(const uint8_t* data,
case H264NALU::kAUD:
case H264NALU::kEOSeq:
case H264NALU::kEOStream:
+ case H264NALU::kFiller:
+ case H264NALU::kSPSExt:
case H264NALU::kReserved14:
case H264NALU::kReserved15:
case H264NALU::kReserved16:
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decoder.cc b/chromium/media/gpu/v4l2/v4l2_video_decoder.cc
index c0963ed4061..d921318877e 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decoder.cc
+++ b/chromium/media/gpu/v4l2/v4l2_video_decoder.cc
@@ -113,6 +113,7 @@ V4L2VideoDecoder::~V4L2VideoDecoder() {
}
void V4L2VideoDecoder::Initialize(const VideoDecoderConfig& config,
+ CdmContext* cdm_context,
InitCB init_cb,
const OutputCB& output_cb) {
DCHECK_CALLED_ON_VALID_SEQUENCE(decoder_sequence_checker_);
@@ -120,6 +121,12 @@ void V4L2VideoDecoder::Initialize(const VideoDecoderConfig& config,
DCHECK(state_ == State::kUninitialized || state_ == State::kDecoding);
DVLOGF(3);
+ if (cdm_context || config.is_encrypted()) {
+ VLOGF(1) << "V4L2 decoder does not support encrypted stream";
+ std::move(init_cb).Run(StatusCode::kEncryptedContentUnsupported);
+ return;
+ }
+
// Reset V4L2 device and queue if reinitializing decoder.
if (state_ != State::kUninitialized) {
if (!StopStreamV4L2Queue(true)) {
@@ -340,7 +347,8 @@ bool V4L2VideoDecoder::SetupOutputFormat(const gfx::Size& size,
if (pool) {
base::Optional<GpuBufferLayout> layout = pool->Initialize(
fourcc, adjusted_size, visible_rect,
- GetNaturalSize(visible_rect, pixel_aspect_ratio_), num_output_frames_);
+ GetNaturalSize(visible_rect, pixel_aspect_ratio_), num_output_frames_,
+ /*use_protected=*/false);
if (!layout) {
VLOGF(1) << "Failed to setup format to VFPool";
return false;
@@ -681,7 +689,8 @@ void V4L2VideoDecoder::SetState(State new_state) {
}
if (new_state == State::kError) {
- VLOGF(1) << "Error occurred.";
+ VLOGF(1) << "Error occurred, stopping queues.";
+ StopStreamV4L2Queue(true);
if (backend_)
backend_->ClearPendingRequests(DecodeStatus::DECODE_ERROR);
return;
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decoder.h b/chromium/media/gpu/v4l2/v4l2_video_decoder.h
index b046b17dbd7..0d98e0a52fe 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decoder.h
+++ b/chromium/media/gpu/v4l2/v4l2_video_decoder.h
@@ -23,6 +23,7 @@
#include "base/sequenced_task_runner.h"
#include "base/threading/thread.h"
#include "base/time/time.h"
+#include "media/base/cdm_context.h"
#include "media/base/video_types.h"
#include "media/gpu/chromeos/gpu_buffer_layout.h"
#include "media/gpu/chromeos/video_decoder_pipeline.h"
@@ -51,6 +52,7 @@ class MEDIA_GPU_EXPORT V4L2VideoDecoder
// DecoderInterface implementation.
void Initialize(const VideoDecoderConfig& config,
+ CdmContext* cdm_context,
InitCB init_cb,
const OutputCB& output_cb) override;
void Reset(base::OnceClosure closure) override;
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.cc b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.cc
index 25f77a7aa37..f571d83cd68 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.cc
+++ b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.cc
@@ -106,6 +106,10 @@ void V4L2StatefulVideoDecoderBackend::EnqueueDecodeTask(
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DVLOGF(3);
+ if (!buffer->end_of_stream()) {
+ has_pending_requests_ = true;
+ }
+
decode_request_queue_.push(
DecodeRequest(std::move(buffer), std::move(decode_cb), bitstream_id));
@@ -213,7 +217,10 @@ void V4L2StatefulVideoDecoderBackend::DoDecodeWork() {
}
// The V4L2 input buffer contains a decodable entity, queue it.
- std::move(*current_input_buffer_).QueueMMap();
+ if (!std::move(*current_input_buffer_).QueueMMap()) {
+ LOG(ERROR) << "Error while queuing input buffer!";
+ client_->OnBackendError();
+ }
current_input_buffer_.reset();
// If we can still progress on a decode request, do it.
@@ -297,8 +304,10 @@ void V4L2StatefulVideoDecoderBackend::EnqueueOutputBuffers() {
if (no_buffer)
break;
- if (!ret)
+ if (!ret) {
+ LOG(ERROR) << "Error while queueing output buffer!";
client_->OnBackendError();
+ }
}
DVLOGF(3) << output_queue_->QueuedBuffersCount() << "/"
@@ -406,8 +415,9 @@ void V4L2StatefulVideoDecoderBackend::OnOutputBufferDequeued(
// change event (but not the opposite), so we must make sure both events
// are processed in the correct order.
if (buffer->IsLast()){
- if (!resolution_change_cb_ && !flush_cb_)
- ProcessEventQueue();
+ // Check that we don't have a resolution change event pending. If we do
+ // then this LAST buffer was related to it.
+ ProcessEventQueue();
if (resolution_change_cb_) {
std::move(resolution_change_cb_).Run();
@@ -420,6 +430,19 @@ void V4L2StatefulVideoDecoderBackend::OnOutputBufferDequeued(
EnqueueOutputBuffers();
}
+bool V4L2StatefulVideoDecoderBackend::SendStopCommand() {
+ struct v4l2_decoder_cmd cmd;
+ memset(&cmd, 0, sizeof(cmd));
+ cmd.cmd = V4L2_DEC_CMD_STOP;
+ if (device_->Ioctl(VIDIOC_DECODER_CMD, &cmd) != 0) {
+ LOG(ERROR) << "Failed to issue STOP command";
+ client_->OnBackendError();
+ return false;
+ }
+
+ return true;
+}
+
bool V4L2StatefulVideoDecoderBackend::InitiateFlush(
VideoDecoder::DecodeCB flush_cb) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
@@ -428,29 +451,32 @@ bool V4L2StatefulVideoDecoderBackend::InitiateFlush(
// Submit any pending input buffer at the time of flush.
if (current_input_buffer_) {
- std::move(*current_input_buffer_).QueueMMap();
+ if (!std::move(*current_input_buffer_).QueueMMap()) {
+ LOG(ERROR) << "Error while queuing input buffer!";
+ client_->OnBackendError();
+ }
current_input_buffer_.reset();
}
client_->InitiateFlush();
flush_cb_ = std::move(flush_cb);
- // Special case: if our CAPTURE queue is not streaming, we cannot receive
- // the CAPTURE buffer with the LAST flag set that signals the end of flush.
- // In this case, we should complete the flush immediately.
- if (!output_queue_->IsStreaming())
+ // Special case: if we haven't received any decoding request, we could
+ // complete the flush immediately.
+ if (!has_pending_requests_)
return CompleteFlush();
- // Send the STOP command to the V4L2 device. The device will let us know
- // that the flush is completed by sending us a CAPTURE buffer with the LAST
- // flag set.
- struct v4l2_decoder_cmd cmd;
- memset(&cmd, 0, sizeof(cmd));
- cmd.cmd = V4L2_DEC_CMD_STOP;
- if (device_->Ioctl(VIDIOC_DECODER_CMD, &cmd) != 0) {
- LOG(ERROR) << "Failed to issue STOP command";
- client_->OnBackendError();
- return false;
+ if (output_queue_->IsStreaming()) {
+ // If the CAPTURE queue is streaming, send the STOP command to the V4L2
+ // device. The device will let us know that the flush is completed by
+ // sending us a CAPTURE buffer with the LAST flag set.
+ return SendStopCommand();
+ } else {
+ // If the CAPTURE queue is not streaming, this means we received the flush
+ // request before the initial resolution has been established. The flush
+ // request will be processed in OnChangeResolutionDone(), when the CAPTURE
+ // queue starts streaming.
+ DVLOGF(2) << "Flush request to be processed after CAPTURE queue starts";
}
return true;
@@ -483,6 +509,7 @@ bool V4L2StatefulVideoDecoderBackend::CompleteFlush() {
// Resume decoding if data is available.
ScheduleDecodeWork();
+ has_pending_requests_ = false;
return true;
}
@@ -515,6 +542,11 @@ void V4L2StatefulVideoDecoderBackend::ChangeResolution() {
return;
}
+ if (!gfx::Rect(pic_size).Contains(*visible_rect)) {
+ client_->OnBackendError();
+ return;
+ }
+
auto ctrl = device_->GetCtrl(V4L2_CID_MIN_BUFFERS_FOR_CAPTURE);
constexpr size_t DEFAULT_NUM_OUTPUT_BUFFERS = 7;
const size_t num_output_buffers =
@@ -577,6 +609,16 @@ void V4L2StatefulVideoDecoderBackend::OnChangeResolutionDone(bool success) {
// Enqueue all available output buffers now that they are allocated.
EnqueueOutputBuffers();
+ // If we had a flush request pending before the initial resolution change,
+ // process it now.
+ if (flush_cb_) {
+ DVLOGF(2) << "Processing pending flush request...";
+
+ client_->InitiateFlush();
+ if (!SendStopCommand())
+ return;
+ }
+
// Also try to progress on our work.
DoDecodeWork();
}
@@ -603,6 +645,8 @@ void V4L2StatefulVideoDecoderBackend::ClearPendingRequests(
std::move(decode_request_queue_.front().decode_cb).Run(status);
decode_request_queue_.pop();
}
+
+ has_pending_requests_ = false;
}
// TODO(b:149663704) move into helper function shared between both backends?
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.h b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.h
index 3b41c343495..b91230cec80 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.h
+++ b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateful.h
@@ -107,6 +107,7 @@ class V4L2StatefulVideoDecoderBackend : public V4L2VideoDecoderBackend {
// becomes available.
scoped_refptr<VideoFrame> GetPoolVideoFrame();
+ bool SendStopCommand();
bool InitiateFlush(VideoDecoder::DecodeCB flush_cb);
bool CompleteFlush();
@@ -144,6 +145,12 @@ class V4L2StatefulVideoDecoderBackend : public V4L2VideoDecoderBackend {
// event completes.
base::OnceClosure resolution_change_cb_;
+ // Whether there is any decoding request coming after
+ // initialization/flush/reset is finished.
+ // This flag is set on the first decode request, and reset after a successful
+ // flush or reset.
+ bool has_pending_requests_ = false;
+
base::WeakPtr<V4L2StatefulVideoDecoderBackend> weak_this_;
base::WeakPtrFactory<V4L2StatefulVideoDecoderBackend> weak_this_factory_{
this};
diff --git a/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.cc b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.cc
index a5a46320937..d38a7fc4747 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.cc
+++ b/chromium/media/gpu/v4l2/v4l2_video_decoder_backend_stateless.cc
@@ -11,7 +11,7 @@
#include <memory>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/posix/eintr_wrapper.h"
#include "base/sequenced_task_runner.h"
#include "media/base/decode_status.h"
diff --git a/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc b/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc
index 720bd32e528..b7b8df614bd 100644
--- a/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc
+++ b/chromium/media/gpu/v4l2/v4l2_video_encode_accelerator.cc
@@ -206,7 +206,7 @@ bool V4L2VideoEncodeAccelerator::Initialize(const Config& config,
// V4L2VEA doesn't support temporal layers but we let it pass here to support
// simulcast.
if (config.HasSpatialLayer()) {
- VLOGF(1) << "Spatial layer encoding is supported";
+ VLOGF(1) << "Spatial layer encoding is not yet supported";
return false;
}
@@ -253,7 +253,8 @@ bool V4L2VideoEncodeAccelerator::Initialize(const Config& config,
if (!is_flush_supported_)
VLOGF(2) << "V4L2_ENC_CMD_STOP is not supported.";
- struct v4l2_capability caps {};
+ struct v4l2_capability caps;
+ memset(&caps, 0, sizeof(caps));
const __u32 kCapsRequired = V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_STREAMING;
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps);
if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
@@ -1024,7 +1025,8 @@ void V4L2VideoEncodeAccelerator::Enqueue() {
FROM_HERE, base::BindOnce(std::move(flush_callback_), true));
return;
}
- struct v4l2_encoder_cmd cmd{};
+ struct v4l2_encoder_cmd cmd;
+ memset(&cmd, 0, sizeof(cmd));
cmd.cmd = V4L2_ENC_CMD_STOP;
if (device_->Ioctl(VIDIOC_ENCODER_CMD, &cmd) != 0) {
VPLOGF(1) << "ioctl() failed: VIDIOC_ENCODER_CMD";
@@ -1200,7 +1202,8 @@ void V4L2VideoEncodeAccelerator::PumpBitstreamBuffers() {
child_task_runner_->PostTask(
FROM_HERE, base::BindOnce(std::move(flush_callback_), true));
// Start the encoder again.
- struct v4l2_encoder_cmd cmd{};
+ struct v4l2_encoder_cmd cmd;
+ memset(&cmd, 0, sizeof(cmd));
cmd.cmd = V4L2_ENC_CMD_START;
IOCTL_OR_ERROR_RETURN(VIDIOC_ENCODER_CMD, &cmd);
}
@@ -1482,7 +1485,8 @@ void V4L2VideoEncodeAccelerator::RequestEncodingParametersChangeTask(
}
if (current_framerate_ != framerate) {
- struct v4l2_streamparm parms {};
+ struct v4l2_streamparm parms;
+ memset(&parms, 0, sizeof(parms));
parms.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
// Note that we are provided "frames per second" but V4L2 expects "time per
// frame"; hence we provide the reciprocal of the framerate here.
@@ -1505,9 +1509,10 @@ bool V4L2VideoEncodeAccelerator::SetOutputFormat(
output_buffer_byte_size_ =
GetEncodeBitstreamBufferSize(encoder_input_visible_rect_.size());
+ // Sets 0 to width and height in CAPTURE queue, which should be ignored by the
+ // driver.
base::Optional<struct v4l2_format> format = output_queue_->SetFormat(
- output_format_fourcc_, encoder_input_visible_rect_.size(),
- output_buffer_byte_size_);
+ output_format_fourcc_, gfx::Size(), output_buffer_byte_size_);
if (!format) {
return false;
}
@@ -1587,7 +1592,8 @@ bool V4L2VideoEncodeAccelerator::ApplyCrop() {
visible_rect.width = encoder_input_visible_rect_.width();
visible_rect.height = encoder_input_visible_rect_.height();
- struct v4l2_selection selection_arg{};
+ struct v4l2_selection selection_arg;
+ memset(&selection_arg, 0, sizeof(selection_arg));
selection_arg.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
selection_arg.target = V4L2_SEL_TGT_CROP;
selection_arg.r = visible_rect;
@@ -1599,7 +1605,8 @@ bool V4L2VideoEncodeAccelerator::ApplyCrop() {
visible_rect = selection_arg.r;
} else {
VLOGF(2) << "Fallback to VIDIOC_S/G_CROP";
- struct v4l2_crop crop{};
+ struct v4l2_crop crop;
+ memset(&crop, 0, sizeof(crop));
crop.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
crop.c = visible_rect;
IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_CROP, &crop);
diff --git a/chromium/media/gpu/vaapi/BUILD.gn b/chromium/media/gpu/vaapi/BUILD.gn
index d811256a789..af599438385 100644
--- a/chromium/media/gpu/vaapi/BUILD.gn
+++ b/chromium/media/gpu/vaapi/BUILD.gn
@@ -2,9 +2,11 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import("//build/config/chromeos/ui_mode.gni")
import("//build/config/features.gni")
import("//build/config/ui.gni")
import("//media/gpu/args.gni")
+import("//media/media_options.gni")
import("//testing/test.gni")
import("//tools/generate_stubs/rules.gni")
import("//ui/gl/features.gni")
@@ -70,6 +72,12 @@ source_set("vaapi") {
"vp9_vaapi_video_decoder_delegate.cc",
"vp9_vaapi_video_decoder_delegate.h",
]
+ if (proprietary_codecs && enable_platform_hevc) {
+ sources += [
+ "h265_vaapi_video_decoder_delegate.cc",
+ "h265_vaapi_video_decoder_delegate.h",
+ ]
+ }
configs += [
"//build/config/linux/libva",
@@ -79,6 +87,7 @@ source_set("vaapi") {
deps = [
":common",
"//base",
+ "//build:chromeos_buildflags",
"//gpu/config",
"//gpu/ipc/common",
"//gpu/ipc/service",
@@ -99,7 +108,7 @@ source_set("vaapi") {
"//ui/gl",
]
- if (is_chromeos) {
+ if (is_ash) {
sources += [
"vaapi_jpeg_encode_accelerator.cc",
"vaapi_jpeg_encode_accelerator.h",
@@ -167,6 +176,7 @@ source_set("common") {
]
deps = [
":libva_stubs",
+ "//build:chromeos_buildflags",
"//third_party/libyuv",
"//ui/base:features",
"//ui/gfx:memory_buffer",
@@ -242,7 +252,7 @@ source_set("vaapi_image_decoder_test_common") {
}
# TODO(https://crbug.com/1043007): remove is_chromeos.
-if (is_chromeos) {
+if (is_ash) {
source_set("jpeg_decoder_unit_test") {
testonly = true
sources = [ "vaapi_jpeg_decoder_unittest.cc" ]
@@ -300,13 +310,14 @@ test("vaapi_unittest") {
":vaapi_utils_unittest",
"//base",
"//base/test:test_support",
+ "//build:chromeos_buildflags",
"//gpu",
"//media/gpu/test:helpers",
"//testing/gtest",
]
# TODO(https://crbug.com/1043007): remove is_chromeos.
- if (is_chromeos) {
+ if (is_ash) {
deps += [
":jpeg_decoder_unit_test",
":webp_decoder_unit_test",
diff --git a/chromium/media/gpu/vaapi/fuzzers/jpeg_decoder/BUILD.gn b/chromium/media/gpu/vaapi/fuzzers/jpeg_decoder/BUILD.gn
index 97155c56ad9..37b7d6f0387 100644
--- a/chromium/media/gpu/vaapi/fuzzers/jpeg_decoder/BUILD.gn
+++ b/chromium/media/gpu/vaapi/fuzzers/jpeg_decoder/BUILD.gn
@@ -6,15 +6,11 @@ import("//testing/libfuzzer/fuzzer_test.gni")
import("//third_party/protobuf/proto_library.gni")
proto_library("jpeg_decoder_fuzzer_input") {
- sources = [
- "jpeg_decoder_fuzzer_input.proto",
- ]
+ sources = [ "jpeg_decoder_fuzzer_input.proto" ]
}
fuzzer_test("vaapi_jpeg_decoder_fuzzertest") {
- sources = [
- "jpeg_decoder_fuzzertest.cc",
- ]
+ sources = [ "jpeg_decoder_fuzzertest.cc" ]
deps = [
":jpeg_decoder_fuzzer_input",
"//base",
@@ -25,6 +21,7 @@ fuzzer_test("vaapi_jpeg_decoder_fuzzertest") {
"//media/gpu:video_frame_mapper",
"//media/gpu/chromeos",
"//media/gpu/vaapi",
+ "//media/gpu/vaapi:common",
"//media/parsers",
"//third_party/libprotobuf-mutator",
"//ui/gfx/geometry",
diff --git a/chromium/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.cc b/chromium/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.cc
new file mode 100644
index 00000000000..fca887e04ba
--- /dev/null
+++ b/chromium/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.cc
@@ -0,0 +1,520 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/gpu/vaapi/h265_vaapi_video_decoder_delegate.h"
+
+#include "base/stl_util.h"
+#include "build/chromeos_buildflags.h"
+#include "media/gpu/decode_surface_handler.h"
+#include "media/gpu/macros.h"
+#include "media/gpu/vaapi/vaapi_common.h"
+#include "media/gpu/vaapi/vaapi_wrapper.h"
+
+#include "base/strings/string_number_conversions.h"
+
+namespace media {
+
+namespace {
+// Equation 5-8 in spec.
+int Clip3(int x, int y, int z) {
+ if (z < x)
+ return x;
+ if (z > y)
+ return y;
+ return z;
+}
+
+// Fill |va_pic| with default/neutral values.
+void InitVAPicture(VAPictureHEVC* va_pic) {
+ va_pic->picture_id = VA_INVALID_ID;
+ va_pic->flags = VA_PICTURE_HEVC_INVALID;
+}
+
+constexpr int kInvalidRefPicIndex = -1;
+} // namespace
+
+using DecodeStatus = H265Decoder::H265Accelerator::Status;
+
+H265VaapiVideoDecoderDelegate::H265VaapiVideoDecoderDelegate(
+ DecodeSurfaceHandler<VASurface>* const vaapi_dec,
+ scoped_refptr<VaapiWrapper> vaapi_wrapper)
+ : VaapiVideoDecoderDelegate(vaapi_dec, std::move(vaapi_wrapper)) {
+ ref_pic_list_pocs_.reserve(kMaxRefIdxActive);
+}
+
+H265VaapiVideoDecoderDelegate::~H265VaapiVideoDecoderDelegate() = default;
+
+scoped_refptr<H265Picture> H265VaapiVideoDecoderDelegate::CreateH265Picture() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ const auto va_surface = vaapi_dec_->CreateSurface();
+ if (!va_surface)
+ return nullptr;
+
+ return new VaapiH265Picture(std::move(va_surface));
+}
+
+DecodeStatus H265VaapiVideoDecoderDelegate::SubmitFrameMetadata(
+ const H265SPS* sps,
+ const H265PPS* pps,
+ const H265SliceHeader* slice_hdr,
+ const H265Picture::Vector& ref_pic_list,
+ scoped_refptr<H265Picture> pic) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ DCHECK(!last_slice_data_);
+
+ VAPictureParameterBufferHEVC pic_param;
+ memset(&pic_param, 0, sizeof(pic_param));
+
+ int highest_tid = sps->sps_max_sub_layers_minus1;
+#define FROM_SPS_TO_PP(a) pic_param.a = sps->a
+#define FROM_SPS_TO_PP2(a, b) pic_param.b = sps->a
+#define FROM_PPS_TO_PP(a) pic_param.a = pps->a
+#define FROM_SPS_TO_PP_PF(a) pic_param.pic_fields.bits.a = sps->a
+#define FROM_PPS_TO_PP_PF(a) pic_param.pic_fields.bits.a = pps->a
+#define FROM_SPS_TO_PP_SPF(a) pic_param.slice_parsing_fields.bits.a = sps->a
+#define FROM_PPS_TO_PP_SPF(a) pic_param.slice_parsing_fields.bits.a = pps->a
+#define FROM_PPS_TO_PP_SPF2(a, b) pic_param.slice_parsing_fields.bits.b = pps->a
+ FROM_SPS_TO_PP(pic_width_in_luma_samples);
+ FROM_SPS_TO_PP(pic_height_in_luma_samples);
+ FROM_SPS_TO_PP_PF(chroma_format_idc);
+ FROM_SPS_TO_PP_PF(separate_colour_plane_flag);
+ FROM_SPS_TO_PP_PF(pcm_enabled_flag);
+ FROM_SPS_TO_PP_PF(scaling_list_enabled_flag);
+ FROM_PPS_TO_PP_PF(transform_skip_enabled_flag);
+ FROM_SPS_TO_PP_PF(amp_enabled_flag);
+ FROM_SPS_TO_PP_PF(strong_intra_smoothing_enabled_flag);
+ FROM_PPS_TO_PP_PF(sign_data_hiding_enabled_flag);
+ FROM_PPS_TO_PP_PF(constrained_intra_pred_flag);
+ FROM_PPS_TO_PP_PF(cu_qp_delta_enabled_flag);
+ FROM_PPS_TO_PP_PF(weighted_pred_flag);
+ FROM_PPS_TO_PP_PF(weighted_bipred_flag);
+ FROM_PPS_TO_PP_PF(transquant_bypass_enabled_flag);
+ FROM_PPS_TO_PP_PF(tiles_enabled_flag);
+ FROM_PPS_TO_PP_PF(entropy_coding_sync_enabled_flag);
+ FROM_PPS_TO_PP_PF(pps_loop_filter_across_slices_enabled_flag);
+ FROM_PPS_TO_PP_PF(loop_filter_across_tiles_enabled_flag);
+ FROM_SPS_TO_PP_PF(pcm_loop_filter_disabled_flag);
+ pic_param.pic_fields.bits.NoPicReorderingFlag =
+ (sps->sps_max_num_reorder_pics[highest_tid] == 0) ? 1 : 0;
+
+ FROM_SPS_TO_PP2(sps_max_dec_pic_buffering_minus1[highest_tid],
+ sps_max_dec_pic_buffering_minus1);
+ FROM_SPS_TO_PP(bit_depth_luma_minus8);
+ FROM_SPS_TO_PP(bit_depth_chroma_minus8);
+ FROM_SPS_TO_PP(pcm_sample_bit_depth_luma_minus1);
+ FROM_SPS_TO_PP(pcm_sample_bit_depth_chroma_minus1);
+ FROM_SPS_TO_PP(log2_min_luma_coding_block_size_minus3);
+ FROM_SPS_TO_PP(log2_diff_max_min_luma_coding_block_size);
+ FROM_SPS_TO_PP2(log2_min_luma_transform_block_size_minus2,
+ log2_min_transform_block_size_minus2);
+ FROM_SPS_TO_PP2(log2_diff_max_min_luma_transform_block_size,
+ log2_diff_max_min_transform_block_size);
+ FROM_SPS_TO_PP(log2_min_pcm_luma_coding_block_size_minus3);
+ FROM_SPS_TO_PP(log2_diff_max_min_pcm_luma_coding_block_size);
+ FROM_SPS_TO_PP(max_transform_hierarchy_depth_intra);
+ FROM_SPS_TO_PP(max_transform_hierarchy_depth_inter);
+ FROM_PPS_TO_PP(init_qp_minus26);
+ FROM_PPS_TO_PP(diff_cu_qp_delta_depth);
+ FROM_PPS_TO_PP(pps_cb_qp_offset);
+ FROM_PPS_TO_PP(pps_cr_qp_offset);
+ FROM_PPS_TO_PP(log2_parallel_merge_level_minus2);
+ FROM_PPS_TO_PP(num_tile_columns_minus1);
+ FROM_PPS_TO_PP(num_tile_rows_minus1);
+ if (pps->uniform_spacing_flag) {
+ // We need to calculate this ourselves per 6.5.1 in the spec. We subtract 1
+ // as well so it matches the 'minus1' usage in the struct.
+ for (int i = 0; i <= pps->num_tile_columns_minus1; ++i) {
+ pic_param.column_width_minus1[i] = (((i + 1) * sps->pic_width_in_ctbs_y) /
+ (pps->num_tile_columns_minus1 + 1)) -
+ ((i * sps->pic_width_in_ctbs_y) /
+ (pps->num_tile_columns_minus1 + 1)) -
+ 1;
+ }
+ for (int j = 0; j <= pps->num_tile_rows_minus1; ++j) {
+ pic_param.row_height_minus1[j] =
+ (((j + 1) * sps->pic_height_in_ctbs_y) /
+ (pps->num_tile_rows_minus1 + 1)) -
+ ((j * sps->pic_height_in_ctbs_y) / (pps->num_tile_rows_minus1 + 1)) -
+ 1;
+ }
+ } else {
+ for (int i = 0; i <= pps->num_tile_columns_minus1; ++i)
+ FROM_PPS_TO_PP(column_width_minus1[i]);
+ for (int i = 0; i <= pps->num_tile_rows_minus1; ++i)
+ FROM_PPS_TO_PP(row_height_minus1[i]);
+ }
+ FROM_PPS_TO_PP_SPF(lists_modification_present_flag);
+ FROM_SPS_TO_PP_SPF(long_term_ref_pics_present_flag);
+ FROM_SPS_TO_PP_SPF(sps_temporal_mvp_enabled_flag);
+ FROM_PPS_TO_PP_SPF(cabac_init_present_flag);
+ FROM_PPS_TO_PP_SPF(output_flag_present_flag);
+ FROM_PPS_TO_PP_SPF(dependent_slice_segments_enabled_flag);
+ FROM_PPS_TO_PP_SPF(pps_slice_chroma_qp_offsets_present_flag);
+ FROM_SPS_TO_PP_SPF(sample_adaptive_offset_enabled_flag);
+ FROM_PPS_TO_PP_SPF(deblocking_filter_override_enabled_flag);
+ FROM_PPS_TO_PP_SPF2(pps_deblocking_filter_disabled_flag,
+ pps_disable_deblocking_filter_flag);
+ FROM_PPS_TO_PP_SPF(slice_segment_header_extension_present_flag);
+ pic_param.slice_parsing_fields.bits.RapPicFlag =
+ pic->nal_unit_type_ >= H265NALU::BLA_W_LP &&
+ pic->nal_unit_type_ <= H265NALU::CRA_NUT;
+ pic_param.slice_parsing_fields.bits.IdrPicFlag =
+ pic->nal_unit_type_ >= H265NALU::IDR_W_RADL &&
+ pic->nal_unit_type_ <= H265NALU::IDR_N_LP;
+ pic_param.slice_parsing_fields.bits.IntraPicFlag = pic->irap_pic_;
+
+ FROM_SPS_TO_PP(log2_max_pic_order_cnt_lsb_minus4);
+ FROM_SPS_TO_PP(num_short_term_ref_pic_sets);
+ FROM_SPS_TO_PP2(num_long_term_ref_pics_sps, num_long_term_ref_pic_sps);
+ FROM_PPS_TO_PP(num_ref_idx_l0_default_active_minus1);
+ FROM_PPS_TO_PP(num_ref_idx_l1_default_active_minus1);
+ FROM_PPS_TO_PP(pps_beta_offset_div2);
+ FROM_PPS_TO_PP(pps_tc_offset_div2);
+ FROM_PPS_TO_PP(num_extra_slice_header_bits);
+#undef FROM_SPS_TO_PP
+#undef FROM_SPS_TO_PP2
+#undef FROM_PPS_TO_PP
+#undef FROM_SPS_TO_PP_PF
+#undef FROM_PPS_TO_PP_PF
+#undef FROM_SPS_TO_PP_SPF
+#undef FROM_PPS_TO_PP_SPF
+#undef FROM_PPS_TO_PP_SPF2
+ if (slice_hdr->short_term_ref_pic_set_sps_flag)
+ pic_param.st_rps_bits = 0;
+ else
+ pic_param.st_rps_bits = slice_hdr->st_rps_bits;
+
+ InitVAPicture(&pic_param.CurrPic);
+ FillVAPicture(&pic_param.CurrPic, std::move(pic));
+
+ // Init reference pictures' array.
+ for (size_t i = 0; i < base::size(pic_param.ReferenceFrames); ++i)
+ InitVAPicture(&pic_param.ReferenceFrames[i]);
+
+ // And fill it with picture info from DPB.
+ FillVARefFramesFromRefList(ref_pic_list, pic_param.ReferenceFrames);
+
+ if (!vaapi_wrapper_->SubmitBuffer(VAPictureParameterBufferType, &pic_param)) {
+ DLOG(ERROR) << "Failure on submitting pic param buffer";
+ return DecodeStatus::kFail;
+ }
+
+ if (!sps->scaling_list_enabled_flag)
+ return DecodeStatus::kOk;
+
+ VAIQMatrixBufferHEVC iq_matrix_buf;
+ memset(&iq_matrix_buf, 0, sizeof(iq_matrix_buf));
+
+ // We already populated the IQMatrix with default values in the parser if they
+ // are not present in the stream, so just fill them all in.
+ const H265ScalingListData& scaling_list =
+ pps->pps_scaling_list_data_present_flag ? pps->scaling_list_data
+ : sps->scaling_list_data;
+
+ // We need another one of these since we can't use |scaling_list| above in
+ // the static_assert checks below.
+ H265ScalingListData checker;
+ static_assert((base::size(checker.scaling_list_4x4) ==
+ base::size(iq_matrix_buf.ScalingList4x4)) &&
+ (base::size(checker.scaling_list_4x4[0]) ==
+ base::size(iq_matrix_buf.ScalingList4x4[0])) &&
+ (base::size(checker.scaling_list_8x8) ==
+ base::size(iq_matrix_buf.ScalingList8x8)) &&
+ (base::size(checker.scaling_list_8x8[0]) ==
+ base::size(iq_matrix_buf.ScalingList8x8[0])) &&
+ (base::size(checker.scaling_list_16x16) ==
+ base::size(iq_matrix_buf.ScalingList16x16)) &&
+ (base::size(checker.scaling_list_16x16[0]) ==
+ base::size(iq_matrix_buf.ScalingList16x16[0])) &&
+ (base::size(checker.scaling_list_32x32) / 3 ==
+ base::size(iq_matrix_buf.ScalingList32x32)) &&
+ (base::size(checker.scaling_list_32x32[0]) ==
+ base::size(iq_matrix_buf.ScalingList32x32[0])) &&
+ (base::size(checker.scaling_list_dc_coef_16x16) ==
+ base::size(iq_matrix_buf.ScalingListDC16x16)) &&
+ (base::size(checker.scaling_list_dc_coef_32x32) / 3 ==
+ base::size(iq_matrix_buf.ScalingListDC32x32)),
+ "Mismatched HEVC scaling list matrix sizes");
+
+ for (int i = 0; i < H265ScalingListData::kNumScalingListMatrices; ++i) {
+ for (int j = 0; j < H265ScalingListData::kScalingListSizeId0Count; ++j)
+ iq_matrix_buf.ScalingList4x4[i][j] = scaling_list.scaling_list_4x4[i][j];
+ }
+
+ for (int i = 0; i < H265ScalingListData::kNumScalingListMatrices; ++i) {
+ for (int j = 0; j < H265ScalingListData::kScalingListSizeId1To3Count; ++j)
+ iq_matrix_buf.ScalingList8x8[i][j] = scaling_list.scaling_list_8x8[i][j];
+ }
+
+ for (int i = 0; i < H265ScalingListData::kNumScalingListMatrices; ++i) {
+ for (int j = 0; j < H265ScalingListData::kScalingListSizeId1To3Count; ++j)
+ iq_matrix_buf.ScalingList16x16[i][j] =
+ scaling_list.scaling_list_16x16[i][j];
+ }
+
+ for (int i = 0; i < H265ScalingListData::kNumScalingListMatrices; i += 3) {
+ for (int j = 0; j < H265ScalingListData::kScalingListSizeId1To3Count; ++j)
+ iq_matrix_buf.ScalingList32x32[i / 3][j] =
+ scaling_list.scaling_list_32x32[i][j];
+ }
+
+ for (int i = 0; i < H265ScalingListData::kNumScalingListMatrices; ++i)
+ iq_matrix_buf.ScalingListDC16x16[i] =
+ scaling_list.scaling_list_dc_coef_16x16[i];
+
+ for (int i = 0; i < H265ScalingListData::kNumScalingListMatrices; i += 3) {
+ iq_matrix_buf.ScalingListDC32x32[i / 3] =
+ scaling_list.scaling_list_dc_coef_32x32[i];
+ }
+
+ return vaapi_wrapper_->SubmitBuffer(VAIQMatrixBufferType, &iq_matrix_buf)
+ ? DecodeStatus::kOk
+ : DecodeStatus::kFail;
+}
+
+DecodeStatus H265VaapiVideoDecoderDelegate::SubmitSlice(
+ const H265SPS* sps,
+ const H265PPS* pps,
+ const H265SliceHeader* slice_hdr,
+ const H265Picture::Vector& ref_pic_list0,
+ const H265Picture::Vector& ref_pic_list1,
+ scoped_refptr<H265Picture> pic,
+ const uint8_t* data,
+ size_t size,
+ const std::vector<SubsampleEntry>& subsamples) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ if (!SubmitPriorSliceDataIfPresent(false)) {
+ DLOG(ERROR) << "Failure submitting prior slice data";
+ return DecodeStatus::kFail;
+ }
+
+ memset(&slice_param_, 0, sizeof(slice_param_));
+
+ slice_param_.slice_data_size = slice_hdr->nalu_size;
+ slice_param_.slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+ slice_param_.slice_data_byte_offset = slice_hdr->header_size;
+
+#define SHDR_TO_SP(a) slice_param_.a = slice_hdr->a
+#define SHDR_TO_SP2(a, b) slice_param_.b = slice_hdr->a
+#define SHDR_TO_SP_LSF(a) slice_param_.LongSliceFlags.fields.a = slice_hdr->a
+#define SHDR_TO_SP_LSF2(a, b) \
+ slice_param_.LongSliceFlags.fields.a = slice_hdr->b
+ SHDR_TO_SP(slice_segment_address);
+ const auto ref_pic_list0_size = ref_pic_list0.size();
+ const auto ref_pic_list1_size = ref_pic_list1.size();
+ // Fill in ref pic lists.
+ if (ref_pic_list0_size > base::size(slice_param_.RefPicList[0]) ||
+ ref_pic_list1_size > base::size(slice_param_.RefPicList[1])) {
+ DLOG(ERROR) << "Error, slice reference picture list is larger than 15";
+ return DecodeStatus::kFail;
+ }
+
+ constexpr int kVaInvalidRefPicIndex = 0xFF;
+ std::fill_n(slice_param_.RefPicList[0],
+ base::size(slice_param_.RefPicList[0]), kVaInvalidRefPicIndex);
+ std::fill_n(slice_param_.RefPicList[1],
+ base::size(slice_param_.RefPicList[1]), kVaInvalidRefPicIndex);
+ // There may be null entries in |ref_pic_list0| or |ref_pic_list1| for missing
+ // reference pictures, just leave those marked as 0xFF and the accelerator
+ // will do the right thing to deal with missing reference pictures.
+ for (size_t i = 0; i < ref_pic_list0_size; ++i) {
+ if (ref_pic_list0[i]) {
+ int idx = GetRefPicIndex(ref_pic_list0[i]->pic_order_cnt_val_);
+ if (idx == kInvalidRefPicIndex) {
+ DLOG(ERROR)
+ << "Error, slice reference picture is not in reference list";
+ return DecodeStatus::kFail;
+ }
+ slice_param_.RefPicList[0][i] = idx;
+ }
+ }
+ for (size_t i = 0; i < ref_pic_list1_size; ++i) {
+ if (ref_pic_list1[i]) {
+ int idx = GetRefPicIndex(ref_pic_list1[i]->pic_order_cnt_val_);
+ if (idx == kInvalidRefPicIndex) {
+ DLOG(ERROR)
+ << "Error, slice reference picture is not in reference list";
+ return DecodeStatus::kFail;
+ }
+ slice_param_.RefPicList[1][i] = idx;
+ }
+ }
+
+ SHDR_TO_SP_LSF(dependent_slice_segment_flag);
+ SHDR_TO_SP_LSF(slice_type);
+ SHDR_TO_SP_LSF2(color_plane_id, colour_plane_id);
+ SHDR_TO_SP_LSF(slice_sao_luma_flag);
+ SHDR_TO_SP_LSF(slice_sao_chroma_flag);
+ SHDR_TO_SP_LSF(mvd_l1_zero_flag);
+ SHDR_TO_SP_LSF(cabac_init_flag);
+ SHDR_TO_SP_LSF(slice_temporal_mvp_enabled_flag);
+ SHDR_TO_SP_LSF(slice_deblocking_filter_disabled_flag);
+ SHDR_TO_SP_LSF(collocated_from_l0_flag);
+ SHDR_TO_SP_LSF(slice_loop_filter_across_slices_enabled_flag);
+ if (!slice_hdr->slice_temporal_mvp_enabled_flag)
+ slice_param_.collocated_ref_idx = kVaInvalidRefPicIndex;
+ else
+ SHDR_TO_SP(collocated_ref_idx);
+
+ slice_param_.num_ref_idx_l0_active_minus1 =
+ ref_pic_list0_size ? (ref_pic_list0_size - 1) : 0;
+ slice_param_.num_ref_idx_l1_active_minus1 =
+ ref_pic_list1_size ? (ref_pic_list1_size - 1) : 0;
+ SHDR_TO_SP(slice_qp_delta);
+ SHDR_TO_SP(slice_cb_qp_offset);
+ SHDR_TO_SP(slice_cr_qp_offset);
+ SHDR_TO_SP(slice_beta_offset_div2);
+ SHDR_TO_SP(slice_tc_offset_div2);
+ SHDR_TO_SP2(pred_weight_table.luma_log2_weight_denom, luma_log2_weight_denom);
+ SHDR_TO_SP2(pred_weight_table.delta_chroma_log2_weight_denom,
+ delta_chroma_log2_weight_denom);
+ for (int i = 0; i < kMaxRefIdxActive; ++i) {
+ SHDR_TO_SP2(pred_weight_table.delta_luma_weight_l0[i],
+ delta_luma_weight_l0[i]);
+ SHDR_TO_SP2(pred_weight_table.luma_offset_l0[i], luma_offset_l0[i]);
+ if (slice_hdr->IsBSlice()) {
+ SHDR_TO_SP2(pred_weight_table.delta_luma_weight_l1[i],
+ delta_luma_weight_l1[i]);
+ SHDR_TO_SP2(pred_weight_table.luma_offset_l1[i], luma_offset_l1[i]);
+ }
+ for (int j = 0; j < 2; ++j) {
+ SHDR_TO_SP2(pred_weight_table.delta_chroma_weight_l0[i][j],
+ delta_chroma_weight_l0[i][j]);
+ int chroma_weight_l0 =
+ (1 << slice_hdr->pred_weight_table.chroma_log2_weight_denom) +
+ slice_hdr->pred_weight_table.delta_chroma_weight_l0[i][j];
+ slice_param_.ChromaOffsetL0[i][j] =
+ Clip3(-sps->wp_offset_half_range_c, sps->wp_offset_half_range_c - 1,
+ (sps->wp_offset_half_range_c +
+ slice_hdr->pred_weight_table.delta_chroma_offset_l0[i][j] -
+ ((sps->wp_offset_half_range_c * chroma_weight_l0) >>
+ slice_hdr->pred_weight_table.chroma_log2_weight_denom)));
+ if (slice_hdr->IsBSlice()) {
+ SHDR_TO_SP2(pred_weight_table.delta_chroma_weight_l1[i][j],
+ delta_chroma_weight_l1[i][j]);
+ int chroma_weight_l1 =
+ (1 << slice_hdr->pred_weight_table.chroma_log2_weight_denom) +
+ slice_hdr->pred_weight_table.delta_chroma_weight_l1[i][j];
+ slice_param_.ChromaOffsetL1[i][j] =
+ Clip3(-sps->wp_offset_half_range_c, sps->wp_offset_half_range_c - 1,
+ (sps->wp_offset_half_range_c +
+ slice_hdr->pred_weight_table.delta_chroma_offset_l1[i][j] -
+ ((sps->wp_offset_half_range_c * chroma_weight_l1) >>
+ slice_hdr->pred_weight_table.chroma_log2_weight_denom)));
+ }
+ }
+ }
+ SHDR_TO_SP(five_minus_max_num_merge_cand);
+
+ // TODO(jkardatzke): Remove this guard once Chrome has libva uprev'd to 2.6.0.
+#if BUILDFLAG(IS_CHROMEOS_ASH)
+ slice_param_.slice_data_num_emu_prevn_bytes =
+ slice_hdr->header_emulation_prevention_bytes;
+#endif
+
+ last_slice_data_ = data;
+ last_slice_size_ = size;
+ return DecodeStatus::kOk;
+}
+
+DecodeStatus H265VaapiVideoDecoderDelegate::SubmitDecode(
+ scoped_refptr<H265Picture> pic) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+
+ if (!SubmitPriorSliceDataIfPresent(true)) {
+ DLOG(ERROR) << "Failure submitting prior slice data";
+ return DecodeStatus::kFail;
+ }
+
+ ref_pic_list_pocs_.clear();
+ return vaapi_wrapper_->ExecuteAndDestroyPendingBuffers(
+ pic->AsVaapiH265Picture()->va_surface()->id())
+ ? DecodeStatus::kOk
+ : DecodeStatus::kFail;
+}
+
+bool H265VaapiVideoDecoderDelegate::OutputPicture(
+ scoped_refptr<H265Picture> pic) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+
+ const VaapiH265Picture* vaapi_pic = pic->AsVaapiH265Picture();
+ vaapi_dec_->SurfaceReady(vaapi_pic->va_surface(), vaapi_pic->bitstream_id(),
+ vaapi_pic->visible_rect(),
+ vaapi_pic->get_colorspace());
+ return true;
+}
+
+void H265VaapiVideoDecoderDelegate::Reset() {
+ DETACH_FROM_SEQUENCE(sequence_checker_);
+ vaapi_wrapper_->DestroyPendingBuffers();
+ ref_pic_list_pocs_.clear();
+ last_slice_data_ = nullptr;
+}
+
+void H265VaapiVideoDecoderDelegate::FillVAPicture(
+ VAPictureHEVC* va_pic,
+ scoped_refptr<H265Picture> pic) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ va_pic->picture_id = pic->AsVaapiH265Picture()->va_surface()->id();
+ va_pic->pic_order_cnt = pic->pic_order_cnt_val_;
+ va_pic->flags = 0;
+
+ switch (pic->ref_) {
+ case H265Picture::kShortTermCurrBefore:
+ va_pic->flags |= VA_PICTURE_HEVC_RPS_ST_CURR_BEFORE;
+ break;
+ case H265Picture::kShortTermCurrAfter:
+ va_pic->flags |= VA_PICTURE_HEVC_RPS_ST_CURR_AFTER;
+ break;
+ case H265Picture::kLongTermCurr:
+ va_pic->flags |= VA_PICTURE_HEVC_RPS_LT_CURR;
+ break;
+ default: // We don't flag the other ref pic types.
+ break;
+ }
+
+ if (pic->IsLongTermRef())
+ va_pic->flags |= VA_PICTURE_HEVC_LONG_TERM_REFERENCE;
+}
+
+void H265VaapiVideoDecoderDelegate::FillVARefFramesFromRefList(
+ const H265Picture::Vector& ref_pic_list,
+ VAPictureHEVC* va_pics) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ ref_pic_list_pocs_.clear();
+ for (auto& it : ref_pic_list) {
+ if (!it->IsUnused()) {
+ FillVAPicture(&va_pics[ref_pic_list_pocs_.size()], it);
+ ref_pic_list_pocs_.push_back(it->pic_order_cnt_val_);
+ }
+ }
+}
+
+int H265VaapiVideoDecoderDelegate::GetRefPicIndex(int poc) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ for (size_t i = 0; i < ref_pic_list_pocs_.size(); ++i) {
+ if (ref_pic_list_pocs_[i] == poc)
+ return static_cast<int>(i);
+ }
+ return kInvalidRefPicIndex;
+}
+
+bool H265VaapiVideoDecoderDelegate::SubmitPriorSliceDataIfPresent(
+ bool last_slice) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ if (!last_slice_data_) {
+ // No prior slice data to submit.
+ return true;
+ }
+ if (last_slice)
+ slice_param_.LongSliceFlags.fields.LastSliceOfPic = 1;
+
+ const bool success = vaapi_wrapper_->SubmitBuffers(
+ {{VASliceParameterBufferType, sizeof(slice_param_), &slice_param_},
+ {VASliceDataBufferType, last_slice_size_, last_slice_data_}});
+ last_slice_data_ = nullptr;
+ return success;
+}
+
+} // namespace media
diff --git a/chromium/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.h b/chromium/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.h
new file mode 100644
index 00000000000..6fd06e5e72e
--- /dev/null
+++ b/chromium/media/gpu/vaapi/h265_vaapi_video_decoder_delegate.h
@@ -0,0 +1,87 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_GPU_VAAPI_H265_VAAPI_VIDEO_DECODER_DELEGATE_H_
+#define MEDIA_GPU_VAAPI_H265_VAAPI_VIDEO_DECODER_DELEGATE_H_
+
+#include <va/va.h>
+
+#include "base/memory/scoped_refptr.h"
+#include "media/gpu/h265_decoder.h"
+#include "media/gpu/h265_dpb.h"
+#include "media/gpu/vaapi/vaapi_video_decoder_delegate.h"
+#include "media/video/h265_parser.h"
+
+// Verbatim from va/va.h, where typedef is used.
+typedef struct _VAPictureHEVC VAPictureHEVC;
+
+namespace media {
+
+class H265Picture;
+
+class H265VaapiVideoDecoderDelegate : public H265Decoder::H265Accelerator,
+ public VaapiVideoDecoderDelegate {
+ public:
+ H265VaapiVideoDecoderDelegate(DecodeSurfaceHandler<VASurface>* vaapi_dec,
+ scoped_refptr<VaapiWrapper> vaapi_wrapper);
+
+ H265VaapiVideoDecoderDelegate(const H265VaapiVideoDecoderDelegate&) = delete;
+ H265VaapiVideoDecoderDelegate& operator=(
+ const H265VaapiVideoDecoderDelegate&) = delete;
+
+ ~H265VaapiVideoDecoderDelegate() override;
+
+ // H265Decoder::H265Accelerator implementation.
+ scoped_refptr<H265Picture> CreateH265Picture() override;
+ Status SubmitFrameMetadata(const H265SPS* sps,
+ const H265PPS* pps,
+ const H265SliceHeader* slice_hdr,
+ const H265Picture::Vector& ref_pic_list,
+ scoped_refptr<H265Picture> pic) override;
+ Status SubmitSlice(const H265SPS* sps,
+ const H265PPS* pps,
+ const H265SliceHeader* slice_hdr,
+ const H265Picture::Vector& ref_pic_list0,
+ const H265Picture::Vector& ref_pic_list1,
+ scoped_refptr<H265Picture> pic,
+ const uint8_t* data,
+ size_t size,
+ const std::vector<SubsampleEntry>& subsamples) override;
+ Status SubmitDecode(scoped_refptr<H265Picture> pic) override;
+ bool OutputPicture(scoped_refptr<H265Picture> pic) override;
+ void Reset() override;
+
+ private:
+ void FillVAPicture(VAPictureHEVC* va_pic, scoped_refptr<H265Picture> pic);
+ void FillVARefFramesFromRefList(const H265Picture::Vector& ref_pic_list,
+ VAPictureHEVC* va_pics);
+
+ // Returns |kInvalidRefPicIndex| if it cannot find a picture.
+ int GetRefPicIndex(int poc);
+
+ // Submits the slice data to the decoder for the prior slice that was just
+ // submitted to us. This allows us to handle multi-slice pictures properly.
+ // |last_slice| is set to true when submitting the last slice, false
+ // otherwise.
+ bool SubmitPriorSliceDataIfPresent(bool last_slice);
+
+ // Stores the POCs (picture order counts) in the ReferenceFrames submitted as
+ // the frame metadata so we can determine the indices for the reference frames
+ // in the slice metadata.
+ std::vector<int> ref_pic_list_pocs_;
+
+ // Data from the prior/current slice for handling multi slice so we can
+ // properly set the flag for the last slice.
+ VASliceParameterBufferHEVC slice_param_;
+ // We can hold onto the slice data pointer because we process all frames as
+ // one DecoderBuffer, so the memory will still be accessible until the frame
+ // is done. |last_slice_data_| being non-null indicates we have a valid
+ // |slice_param_| filled.
+ const uint8_t* last_slice_data_{nullptr};
+ size_t last_slice_size_{0};
+};
+
+} // namespace media
+
+#endif // MEDIA_GPU_VAAPI_H265_VAAPI_VIDEO_DECODER_DELEGATE_H_
diff --git a/chromium/media/gpu/vaapi/vaapi_common.cc b/chromium/media/gpu/vaapi/vaapi_common.cc
index af89a6add14..415f84acf89 100644
--- a/chromium/media/gpu/vaapi/vaapi_common.cc
+++ b/chromium/media/gpu/vaapi/vaapi_common.cc
@@ -9,16 +9,27 @@ namespace media {
VaapiH264Picture::VaapiH264Picture(scoped_refptr<VASurface> va_surface)
: va_surface_(va_surface) {}
-VaapiH264Picture::~VaapiH264Picture() {}
+VaapiH264Picture::~VaapiH264Picture() = default;
VaapiH264Picture* VaapiH264Picture::AsVaapiH264Picture() {
return this;
}
+#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
+VaapiH265Picture::VaapiH265Picture(scoped_refptr<VASurface> va_surface)
+ : va_surface_(va_surface) {}
+
+VaapiH265Picture::~VaapiH265Picture() = default;
+
+VaapiH265Picture* VaapiH265Picture::AsVaapiH265Picture() {
+ return this;
+}
+#endif // BUILDFLAG(ENABLE_PLATFORM_HEVC)
+
VaapiVP8Picture::VaapiVP8Picture(scoped_refptr<VASurface> va_surface)
: va_surface_(va_surface) {}
-VaapiVP8Picture::~VaapiVP8Picture() {}
+VaapiVP8Picture::~VaapiVP8Picture() = default;
VaapiVP8Picture* VaapiVP8Picture::AsVaapiVP8Picture() {
return this;
@@ -27,7 +38,7 @@ VaapiVP8Picture* VaapiVP8Picture::AsVaapiVP8Picture() {
VaapiVP9Picture::VaapiVP9Picture(scoped_refptr<VASurface> va_surface)
: va_surface_(va_surface) {}
-VaapiVP9Picture::~VaapiVP9Picture() {}
+VaapiVP9Picture::~VaapiVP9Picture() = default;
VaapiVP9Picture* VaapiVP9Picture::AsVaapiVP9Picture() {
return this;
diff --git a/chromium/media/gpu/vaapi/vaapi_common.h b/chromium/media/gpu/vaapi/vaapi_common.h
index 588961ffc83..1c71ce6c813 100644
--- a/chromium/media/gpu/vaapi/vaapi_common.h
+++ b/chromium/media/gpu/vaapi/vaapi_common.h
@@ -8,6 +8,11 @@
#include "media/gpu/vaapi/va_surface.h"
#include "media/gpu/vp8_picture.h"
#include "media/gpu/vp9_picture.h"
+#include "media/media_buildflags.h"
+
+#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
+#include "media/gpu/h265_dpb.h"
+#endif
namespace media {
@@ -33,6 +38,27 @@ class VaapiH264Picture : public H264Picture {
DISALLOW_COPY_AND_ASSIGN(VaapiH264Picture);
};
+#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
+class VaapiH265Picture : public H265Picture {
+ public:
+ explicit VaapiH265Picture(scoped_refptr<VASurface> va_surface);
+
+ VaapiH265Picture(const VaapiH265Picture&) = delete;
+ VaapiH265Picture& operator=(const VaapiH265Picture&) = delete;
+
+ VaapiH265Picture* AsVaapiH265Picture() override;
+
+ scoped_refptr<VASurface> va_surface() const { return va_surface_; }
+ VASurfaceID GetVASurfaceID() const { return va_surface_->id(); }
+
+ protected:
+ ~VaapiH265Picture() override;
+
+ private:
+ scoped_refptr<VASurface> va_surface_;
+};
+#endif // BUILDFLAG(ENABLE_PLATFORM_HEVC)
+
class VaapiVP8Picture : public VP8Picture {
public:
explicit VaapiVP8Picture(scoped_refptr<VASurface> va_surface);
diff --git a/chromium/media/gpu/vaapi/vaapi_dmabuf_video_frame_mapper.cc b/chromium/media/gpu/vaapi/vaapi_dmabuf_video_frame_mapper.cc
index e23f1ff8801..6674b1e282f 100644
--- a/chromium/media/gpu/vaapi/vaapi_dmabuf_video_frame_mapper.cc
+++ b/chromium/media/gpu/vaapi/vaapi_dmabuf_video_frame_mapper.cc
@@ -5,7 +5,7 @@
#include "media/gpu/vaapi/vaapi_dmabuf_video_frame_mapper.h"
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/memory/ptr_util.h"
#include "build/build_config.h"
#include "media/base/color_plane_layout.h"
diff --git a/chromium/media/gpu/vaapi/vaapi_image_decode_accelerator_worker.cc b/chromium/media/gpu/vaapi/vaapi_image_decode_accelerator_worker.cc
index 093fc3fc6bf..644f04f58aa 100644
--- a/chromium/media/gpu/vaapi/vaapi_image_decode_accelerator_worker.cc
+++ b/chromium/media/gpu/vaapi/vaapi_image_decode_accelerator_worker.cc
@@ -113,8 +113,8 @@ VaapiImageDecodeAcceleratorWorker::Create() {
// Media.VaapiImageDecodeAcceleratorWorker.VAAPIError UMA to be able to record
// WebP and JPEG failures separately.
const auto uma_cb =
- base::Bind(&ReportVaapiErrorToUMA,
- "Media.VaapiImageDecodeAcceleratorWorker.VAAPIError");
+ base::BindRepeating(&ReportVaapiErrorToUMA,
+ "Media.VaapiImageDecodeAcceleratorWorker.VAAPIError");
VaapiImageDecoderVector decoders;
auto jpeg_decoder = std::make_unique<VaapiJpegDecoder>();
diff --git a/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc
index 7b18d2744a4..d94c070be88 100644
--- a/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_jpeg_encode_accelerator.cc
@@ -10,7 +10,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/memory/writable_shared_memory_region.h"
#include "base/metrics/histogram_functions.h"
@@ -500,8 +500,8 @@ VaapiJpegEncodeAccelerator::Initialize(
client_ = client;
scoped_refptr<VaapiWrapper> vaapi_wrapper = VaapiWrapper::Create(
VaapiWrapper::kEncode, VAProfileJPEGBaseline,
- base::Bind(&ReportVaapiErrorToUMA,
- "Media.VaapiJpegEncodeAccelerator.VAAPIError"));
+ base::BindRepeating(&ReportVaapiErrorToUMA,
+ "Media.VaapiJpegEncodeAccelerator.VAAPIError"));
if (!vaapi_wrapper) {
VLOGF(1) << "Failed initializing VAAPI";
@@ -510,8 +510,8 @@ VaapiJpegEncodeAccelerator::Initialize(
scoped_refptr<VaapiWrapper> vpp_vaapi_wrapper = VaapiWrapper::Create(
VaapiWrapper::kVideoProcess, VAProfileNone,
- base::Bind(&ReportVaapiErrorToUMA,
- "Media.VaapiJpegEncodeAccelerator.Vpp.VAAPIError"));
+ base::BindRepeating(&ReportVaapiErrorToUMA,
+ "Media.VaapiJpegEncodeAccelerator.Vpp.VAAPIError"));
if (!vpp_vaapi_wrapper) {
VLOGF(1) << "Failed initializing VAAPI wrapper for VPP";
return PLATFORM_FAILURE;
diff --git a/chromium/media/gpu/vaapi/vaapi_jpeg_encoder.cc b/chromium/media/gpu/vaapi/vaapi_jpeg_encoder.cc
index 664df42fb08..e6e71f10b2d 100644
--- a/chromium/media/gpu/vaapi/vaapi_jpeg_encoder.cc
+++ b/chromium/media/gpu/vaapi/vaapi_jpeg_encoder.cc
@@ -205,8 +205,16 @@ size_t FillJpegHeader(const gfx::Size& input_size,
const JpegQuantizationTable& quant_table = kDefaultQuantTable[i];
for (size_t j = 0; j < kDctSize; ++j) {
+ // The iHD media driver shifts the quantization values
+ // by 50 while encoding. We should add 50 here to
+ // ensure the correctness in the packed header that is
+ // directly stuffed into the bitstream as JPEG headers.
+ // GStreamer test cases show a psnr improvement in
+ // Y plane (41.27 to 48.31) with this quirk.
+ const static uint32_t shift =
+ VaapiWrapper::GetImplementationType() == VAImplementation::kIntelIHD ? 50 : 0;
uint32_t scaled_quant_value =
- (quant_table.value[kZigZag8x8[j]] * quality_normalized) / 100;
+ (quant_table.value[kZigZag8x8[j]] * quality_normalized + shift) / 100;
scaled_quant_value = base::ClampToRange(scaled_quant_value, 1u, 255u);
header[idx++] = static_cast<uint8_t>(scaled_quant_value);
}
diff --git a/chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.cc
index 3dc8a8016a5..3bbac0d7a98 100644
--- a/chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_mjpeg_decode_accelerator.cc
@@ -12,7 +12,6 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback_helpers.h"
#include "base/files/scoped_file.h"
#include "base/location.h"
@@ -134,16 +133,16 @@ bool VaapiMjpegDecodeAccelerator::Initialize(
client_ = client;
- if (!decoder_.Initialize(
- base::Bind(&ReportVaapiErrorToUMA,
- "Media.VaapiMjpegDecodeAccelerator.VAAPIError"))) {
+ if (!decoder_.Initialize(base::BindRepeating(
+ &ReportVaapiErrorToUMA,
+ "Media.VaapiMjpegDecodeAccelerator.VAAPIError"))) {
return false;
}
vpp_vaapi_wrapper_ = VaapiWrapper::Create(
VaapiWrapper::kVideoProcess, VAProfileNone,
- base::Bind(&ReportVaapiErrorToUMA,
- "Media.VaapiMjpegDecodeAccelerator.Vpp.VAAPIError"));
+ base::BindRepeating(&ReportVaapiErrorToUMA,
+ "Media.VaapiMjpegDecodeAccelerator.Vpp.VAAPIError"));
if (!vpp_vaapi_wrapper_) {
VLOGF(1) << "Failed initializing VAAPI for VPP";
return false;
diff --git a/chromium/media/gpu/vaapi/vaapi_picture_factory.cc b/chromium/media/gpu/vaapi/vaapi_picture_factory.cc
index 7d850cc1d1a..50d5f245f83 100644
--- a/chromium/media/gpu/vaapi/vaapi_picture_factory.cc
+++ b/chromium/media/gpu/vaapi/vaapi_picture_factory.cc
@@ -22,6 +22,25 @@
namespace media {
+namespace {
+
+template <typename PictureType>
+std::unique_ptr<VaapiPicture> CreateVaapiPictureNativeImpl(
+ scoped_refptr<VaapiWrapper> vaapi_wrapper,
+ const MakeGLContextCurrentCallback& make_context_current_cb,
+ const BindGLImageCallback& bind_image_cb,
+ const PictureBuffer& picture_buffer,
+ const gfx::Size& visible_size,
+ uint32_t client_texture_id,
+ uint32_t service_texture_id) {
+ return std::make_unique<PictureType>(
+ std::move(vaapi_wrapper), make_context_current_cb, bind_image_cb,
+ picture_buffer.id(), picture_buffer.size(), visible_size,
+ service_texture_id, client_texture_id, picture_buffer.texture_target());
+}
+
+} // namespace
+
VaapiPictureFactory::VaapiPictureFactory() {
vaapi_impl_pairs_.insert(
std::make_pair(gl::kGLImplementationEGLGLES2,
@@ -36,6 +55,8 @@ VaapiPictureFactory::VaapiPictureFactory() {
VaapiPictureFactory::kVaapiImplementationX11));
}
#endif
+
+ DeterminePictureCreationAndDownloadingMechanism();
}
VaapiPictureFactory::~VaapiPictureFactory() = default;
@@ -61,13 +82,6 @@ std::unique_ptr<VaapiPicture> VaapiPictureFactory::Create(
: 0;
// Select DRM(egl) / TFP(glx) at runtime with --use-gl=egl / --use-gl=desktop
-
-#if defined(USE_OZONE)
- if (features::IsUsingOzonePlatform())
- return CreateVaapiPictureNativeForOzone(
- vaapi_wrapper, make_context_current_cb, bind_image_cb, picture_buffer,
- visible_size, client_texture_id, service_texture_id);
-#endif
return CreateVaapiPictureNative(vaapi_wrapper, make_context_current_cb,
bind_image_cb, picture_buffer, visible_size,
client_texture_id, service_texture_id);
@@ -96,37 +110,62 @@ gfx::BufferFormat VaapiPictureFactory::GetBufferFormat() {
return gfx::BufferFormat::RGBX_8888;
}
-#if defined(USE_OZONE)
-std::unique_ptr<VaapiPicture>
-VaapiPictureFactory::CreateVaapiPictureNativeForOzone(
- scoped_refptr<VaapiWrapper> vaapi_wrapper,
- const MakeGLContextCurrentCallback& make_context_current_cb,
- const BindGLImageCallback& bind_image_cb,
- const PictureBuffer& picture_buffer,
- const gfx::Size& visible_size,
- uint32_t client_texture_id,
- uint32_t service_texture_id) {
- DCHECK(features::IsUsingOzonePlatform());
+void VaapiPictureFactory::DeterminePictureCreationAndDownloadingMechanism() {
switch (GetVaapiImplementation(gl::GetGLImplementation())) {
+#if defined(USE_OZONE)
// We can be called without GL initialized, which is valid if we use Ozone.
case kVaapiImplementationNone:
- FALLTHROUGH;
+ if (features::IsUsingOzonePlatform()) {
+ create_picture_cb_ = base::BindRepeating(
+ &CreateVaapiPictureNativeImpl<VaapiPictureNativePixmapOzone>);
+ needs_vpp_for_downloading_ = true;
+ }
+
+ // This is reached by unit tests which don't require create_picture_cb_
+ // to be initialized or called.
+ break;
+#endif // defined(USE_OZONE)
+#if defined(USE_X11)
+ case kVaapiImplementationX11:
+ DCHECK(!features::IsUsingOzonePlatform());
+ create_picture_cb_ =
+ base::BindRepeating(&CreateVaapiPictureNativeImpl<VaapiTFPPicture>);
+ // Neither VaapiTFPPicture or VaapiPictureNativePixmapAngle needs the VPP.
+ needs_vpp_for_downloading_ = false;
+ break;
+ case kVaapiImplementationAngle:
+ DCHECK(!features::IsUsingOzonePlatform());
+ create_picture_cb_ = base::BindRepeating(
+ &CreateVaapiPictureNativeImpl<VaapiPictureNativePixmapAngle>);
+ // Neither VaapiTFPPicture or VaapiPictureNativePixmapAngle needs the VPP.
+ needs_vpp_for_downloading_ = false;
+ break;
+#endif // defined(USE_X11)
case kVaapiImplementationDrm:
- return std::make_unique<VaapiPictureNativePixmapOzone>(
- std::move(vaapi_wrapper), make_context_current_cb, bind_image_cb,
- picture_buffer.id(), picture_buffer.size(), visible_size,
- service_texture_id, client_texture_id,
- picture_buffer.texture_target());
+#if defined(USE_OZONE)
+ if (features::IsUsingOzonePlatform()) {
+ create_picture_cb_ = base::BindRepeating(
+ &CreateVaapiPictureNativeImpl<VaapiPictureNativePixmapOzone>);
+ needs_vpp_for_downloading_ = true;
+ break;
+ }
+#endif // defined(USE_OZONE)
+#if defined(USE_EGL)
+ create_picture_cb_ = base::BindRepeating(
+ &CreateVaapiPictureNativeImpl<VaapiPictureNativePixmapEgl>);
+ needs_vpp_for_downloading_ = true;
break;
-
+#endif // defined(USE_EGL)
+ // ozone or egl must be used to use the DRM implementation.
+ NOTREACHED();
default:
NOTREACHED();
- return nullptr;
}
+}
- return nullptr;
+bool VaapiPictureFactory::NeedsProcessingPipelineForDownloading() const {
+ return needs_vpp_for_downloading_;
}
-#endif // USE_OZONE
std::unique_ptr<VaapiPicture> VaapiPictureFactory::CreateVaapiPictureNative(
scoped_refptr<VaapiWrapper> vaapi_wrapper,
@@ -136,40 +175,10 @@ std::unique_ptr<VaapiPicture> VaapiPictureFactory::CreateVaapiPictureNative(
const gfx::Size& visible_size,
uint32_t client_texture_id,
uint32_t service_texture_id) {
- switch (GetVaapiImplementation(gl::GetGLImplementation())) {
-#if defined(USE_EGL)
- case kVaapiImplementationDrm:
- return std::make_unique<VaapiPictureNativePixmapEgl>(
- std::move(vaapi_wrapper), make_context_current_cb, bind_image_cb,
- picture_buffer.id(), picture_buffer.size(), visible_size,
- service_texture_id, client_texture_id,
- picture_buffer.texture_target());
-#endif // USE_EGL
-
-#if defined(USE_X11)
- case kVaapiImplementationX11:
- DCHECK(!features::IsUsingOzonePlatform());
- return std::make_unique<VaapiTFPPicture>(
- std::move(vaapi_wrapper), make_context_current_cb, bind_image_cb,
- picture_buffer.id(), picture_buffer.size(), visible_size,
- service_texture_id, client_texture_id,
- picture_buffer.texture_target());
- break;
- case kVaapiImplementationAngle:
- return std::make_unique<VaapiPictureNativePixmapAngle>(
- std::move(vaapi_wrapper), make_context_current_cb, bind_image_cb,
- picture_buffer.id(), picture_buffer.size(), visible_size,
- service_texture_id, client_texture_id,
- picture_buffer.texture_target());
- break;
-#endif // USE_X11
-
- default:
- NOTREACHED();
- return nullptr;
- }
-
- return nullptr;
+ CHECK(create_picture_cb_);
+ return create_picture_cb_.Run(
+ std::move(vaapi_wrapper), make_context_current_cb, bind_image_cb,
+ picture_buffer, visible_size, client_texture_id, service_texture_id);
}
} // namespace media
diff --git a/chromium/media/gpu/vaapi/vaapi_picture_factory.h b/chromium/media/gpu/vaapi/vaapi_picture_factory.h
index 9bb34535e1e..e894581f35c 100644
--- a/chromium/media/gpu/vaapi/vaapi_picture_factory.h
+++ b/chromium/media/gpu/vaapi/vaapi_picture_factory.h
@@ -18,6 +18,15 @@ namespace media {
class PictureBuffer;
class VaapiWrapper;
+using CreatePictureCB = base::RepeatingCallback<std::unique_ptr<VaapiPicture>(
+ scoped_refptr<VaapiWrapper>,
+ const MakeGLContextCurrentCallback&,
+ const BindGLImageCallback&,
+ const PictureBuffer&,
+ const gfx::Size&,
+ uint32_t,
+ uint32_t)>;
+
// Factory of platform dependent VaapiPictures.
class MEDIA_GPU_EXPORT VaapiPictureFactory {
public:
@@ -44,6 +53,10 @@ class MEDIA_GPU_EXPORT VaapiPictureFactory {
// implementation.
VaapiImplementation GetVaapiImplementation(gl::GLImplementation gl_impl);
+ // Determines whether the DownloadFromSurface() method of the VaapiPictures
+ // created by this factory requires a processing pipeline VaapiWrapper.
+ bool NeedsProcessingPipelineForDownloading() const;
+
// Gets the texture target used to bind EGLImages (either GL_TEXTURE_2D on X11
// or GL_TEXTURE_EXTERNAL_OES on DRM).
uint32_t GetGLTextureTarget();
@@ -52,17 +65,6 @@ class MEDIA_GPU_EXPORT VaapiPictureFactory {
// the format decoded frames in VASurfaces are converted into.
gfx::BufferFormat GetBufferFormat();
-#if defined(USE_OZONE)
- std::unique_ptr<VaapiPicture> CreateVaapiPictureNativeForOzone(
- scoped_refptr<VaapiWrapper> vaapi_wrapper,
- const MakeGLContextCurrentCallback& make_context_current_cb,
- const BindGLImageCallback& bind_image_cb,
- const PictureBuffer& picture_buffer,
- const gfx::Size& visible_size,
- uint32_t client_texture_id,
- uint32_t service_texture_id);
-#endif
-
std::unique_ptr<VaapiPicture> CreateVaapiPictureNative(
scoped_refptr<VaapiWrapper> vaapi_wrapper,
const MakeGLContextCurrentCallback& make_context_current_cb,
@@ -76,6 +78,11 @@ class MEDIA_GPU_EXPORT VaapiPictureFactory {
vaapi_impl_pairs_;
private:
+ void DeterminePictureCreationAndDownloadingMechanism();
+
+ CreatePictureCB create_picture_cb_;
+ bool needs_vpp_for_downloading_ = false;
+
DISALLOW_COPY_AND_ASSIGN(VaapiPictureFactory);
};
diff --git a/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_angle.cc b/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_angle.cc
index af038950392..2f070bf61ed 100644
--- a/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_angle.cc
+++ b/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_angle.cc
@@ -17,10 +17,10 @@ namespace media {
namespace {
-inline ::Pixmap CreatePixmap(const gfx::Size& size) {
+x11::Pixmap CreatePixmap(const gfx::Size& size) {
auto* connection = x11::Connection::Get();
if (!connection->Ready())
- return base::strict_cast<::Pixmap>(x11::Pixmap::None);
+ return x11::Pixmap::None;
auto root = connection->default_root();
@@ -28,20 +28,20 @@ inline ::Pixmap CreatePixmap(const gfx::Size& size) {
if (auto reply = connection->GetGeometry({root}).Sync())
depth = reply->depth;
else
- return base::strict_cast<::Pixmap>(x11::Pixmap::None);
+ return x11::Pixmap::None;
// TODO(tmathmeyer) should we use the depth from libva instead of root window?
auto pixmap = connection->GenerateId<x11::Pixmap>();
uint16_t pixmap_width, pixmap_height;
if (!base::CheckedNumeric<int>(size.width()).AssignIfValid(&pixmap_width) ||
!base::CheckedNumeric<int>(size.height()).AssignIfValid(&pixmap_height)) {
- return base::strict_cast<::Pixmap>(x11::Pixmap::None);
+ return x11::Pixmap::None;
}
auto req = connection->CreatePixmap(
{depth, pixmap, root, pixmap_width, pixmap_height});
if (req.Sync().error)
pixmap = x11::Pixmap::None;
- return base::strict_cast<::Pixmap>(pixmap);
+ return pixmap;
}
} // namespace
@@ -79,8 +79,8 @@ VaapiPictureNativePixmapAngle::~VaapiPictureNativePixmapAngle() {
DCHECK_EQ(glGetError(), static_cast<GLenum>(GL_NO_ERROR));
}
- if (x_pixmap_)
- x11::Connection::Get()->FreePixmap({static_cast<x11::Pixmap>(x_pixmap_)});
+ if (x_pixmap_ != x11::Pixmap::None)
+ x11::Connection::Get()->FreePixmap({x_pixmap_});
}
Status VaapiPictureNativePixmapAngle::Allocate(gfx::BufferFormat format) {
@@ -98,7 +98,7 @@ Status VaapiPictureNativePixmapAngle::Allocate(gfx::BufferFormat format) {
return StatusCode::kVaapiNoImage;
x_pixmap_ = CreatePixmap(visible_size_);
- if (!x_pixmap_)
+ if (x_pixmap_ == x11::Pixmap::None)
return StatusCode::kVaapiNoPixmap;
if (!image->Initialize(x_pixmap_))
diff --git a/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_angle.h b/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_angle.h
index 41f52376dc9..f29068773e5 100644
--- a/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_angle.h
+++ b/chromium/media/gpu/vaapi/vaapi_picture_native_pixmap_angle.h
@@ -13,6 +13,7 @@
#include "media/gpu/vaapi/vaapi_picture_native_pixmap.h"
#include "ui/gfx/buffer_types.h"
#include "ui/gfx/geometry/size.h"
+#include "ui/gfx/x/xproto.h"
#include "ui/gl/gl_bindings.h"
namespace media {
@@ -46,7 +47,7 @@ class VaapiPictureNativePixmapAngle : public VaapiPictureNativePixmap {
VASurfaceID va_surface_id() const override;
private:
- ::Pixmap x_pixmap_ = 0;
+ x11::Pixmap x_pixmap_ = x11::Pixmap::None;
DISALLOW_COPY_AND_ASSIGN(VaapiPictureNativePixmapAngle);
};
diff --git a/chromium/media/gpu/vaapi/vaapi_picture_tfp.cc b/chromium/media/gpu/vaapi/vaapi_picture_tfp.cc
index a571b4b2b41..7b604f19f33 100644
--- a/chromium/media/gpu/vaapi/vaapi_picture_tfp.cc
+++ b/chromium/media/gpu/vaapi/vaapi_picture_tfp.cc
@@ -8,7 +8,6 @@
#include "media/gpu/vaapi/vaapi_wrapper.h"
#include "ui/base/ui_base_features.h"
#include "ui/gfx/x/connection.h"
-#include "ui/gfx/x/x11_types.h"
#include "ui/gl/gl_bindings.h"
#include "ui/gl/gl_image_glx.h"
#include "ui/gl/scoped_binders.h"
@@ -35,7 +34,7 @@ VaapiTFPPicture::VaapiTFPPicture(
client_texture_id,
texture_target),
connection_(x11::Connection::Get()),
- x_pixmap_(0) {
+ x_pixmap_(x11::Pixmap::None) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(!features::IsUsingOzonePlatform());
DCHECK(texture_id);
@@ -49,13 +48,13 @@ VaapiTFPPicture::~VaapiTFPPicture() {
DCHECK_EQ(glGetError(), static_cast<GLenum>(GL_NO_ERROR));
}
- if (x_pixmap_)
- connection_->FreePixmap({static_cast<x11::Pixmap>(x_pixmap_)});
+ if (x_pixmap_ != x11::Pixmap::None)
+ connection_->FreePixmap({x_pixmap_});
}
Status VaapiTFPPicture::Initialize() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
- DCHECK(x_pixmap_);
+ DCHECK_NE(x_pixmap_, x11::Pixmap::None);
if (make_context_current_cb_ && !make_context_current_cb_.Run())
return StatusCode::kVaapiBadContext;
@@ -111,7 +110,7 @@ Status VaapiTFPPicture::Allocate(gfx::BufferFormat format) {
DLOG(ERROR) << "Failed creating an X Pixmap for TFP";
return StatusCode::kVaapiNoPixmap;
} else {
- x_pixmap_ = base::strict_cast<::Pixmap>(pixmap);
+ x_pixmap_ = pixmap;
}
return Initialize();
diff --git a/chromium/media/gpu/vaapi/vaapi_picture_tfp.h b/chromium/media/gpu/vaapi/vaapi_picture_tfp.h
index 8da323717e6..53d6c53fd42 100644
--- a/chromium/media/gpu/vaapi/vaapi_picture_tfp.h
+++ b/chromium/media/gpu/vaapi/vaapi_picture_tfp.h
@@ -50,7 +50,7 @@ class VaapiTFPPicture : public VaapiPicture {
x11::Connection* const connection_;
- ::Pixmap x_pixmap_;
+ x11::Pixmap x_pixmap_;
scoped_refptr<gl::GLImageGLX> glx_image_;
DISALLOW_COPY_AND_ASSIGN(VaapiTFPPicture);
diff --git a/chromium/media/gpu/vaapi/vaapi_unittest.cc b/chromium/media/gpu/vaapi/vaapi_unittest.cc
index f1a2a6887b6..80ef0a94fbc 100644
--- a/chromium/media/gpu/vaapi/vaapi_unittest.cc
+++ b/chromium/media/gpu/vaapi/vaapi_unittest.cc
@@ -22,8 +22,10 @@
#include "base/strings/string_split.h"
#include "base/test/launcher/unit_test_launcher.h"
#include "base/test/test_suite.h"
+#include "build/chromeos_buildflags.h"
#include "gpu/config/gpu_driver_bug_workarounds.h"
#include "media/gpu/vaapi/vaapi_wrapper.h"
+#include "media/media_buildflags.h"
namespace media {
namespace {
@@ -31,13 +33,22 @@ namespace {
base::Optional<VAProfile> ConvertToVAProfile(VideoCodecProfile profile) {
// A map between VideoCodecProfile and VAProfile.
const std::map<VideoCodecProfile, VAProfile> kProfileMap = {
- // VAProfileH264Baseline is deprecated in <va/va.h> from libva 2.0.0.
- {H264PROFILE_BASELINE, VAProfileH264ConstrainedBaseline},
- {H264PROFILE_MAIN, VAProfileH264Main},
- {H264PROFILE_HIGH, VAProfileH264High},
- {VP8PROFILE_ANY, VAProfileVP8Version0_3},
- {VP9PROFILE_PROFILE0, VAProfileVP9Profile0},
- {VP9PROFILE_PROFILE2, VAProfileVP9Profile2},
+ // VAProfileH264Baseline is deprecated in <va/va.h> from libva 2.0.0.
+ {H264PROFILE_BASELINE, VAProfileH264ConstrainedBaseline},
+ {H264PROFILE_MAIN, VAProfileH264Main},
+ {H264PROFILE_HIGH, VAProfileH264High},
+ {VP8PROFILE_ANY, VAProfileVP8Version0_3},
+ {VP9PROFILE_PROFILE0, VAProfileVP9Profile0},
+ {VP9PROFILE_PROFILE2, VAProfileVP9Profile2},
+#if BUILDFLAG(IS_ASH)
+ // TODO(hiroh): Remove if-macro once libva for linux-chrome is upreved to
+ // 2.9.0 or newer.
+ // https://source.chromium.org/chromium/chromium/src/+/master:build/linux/sysroot_scripts/generated_package_lists/sid.amd64
+ {AV1PROFILE_PROFILE_MAIN, VAProfileAV1Profile0},
+#endif
+#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
+ {HEVCPROFILE_MAIN, VAProfileHEVCMain},
+#endif
};
auto it = kProfileMap.find(profile);
return it != kProfileMap.end() ? base::make_optional<VAProfile>(it->second)
@@ -47,17 +58,26 @@ base::Optional<VAProfile> ConvertToVAProfile(VideoCodecProfile profile) {
// Converts the given string to VAProfile
base::Optional<VAProfile> StringToVAProfile(const std::string& va_profile) {
const std::map<std::string, VAProfile> kStringToVAProfile = {
- {"VAProfileNone", VAProfileNone},
- {"VAProfileH264ConstrainedBaseline", VAProfileH264ConstrainedBaseline},
- // Even though it's deprecated, we leave VAProfileH264Baseline's
- // translation here to assert we never encounter it.
- {"VAProfileH264Baseline", VAProfileH264Baseline},
- {"VAProfileH264Main", VAProfileH264Main},
- {"VAProfileH264High", VAProfileH264High},
- {"VAProfileJPEGBaseline", VAProfileJPEGBaseline},
- {"VAProfileVP8Version0_3", VAProfileVP8Version0_3},
- {"VAProfileVP9Profile0", VAProfileVP9Profile0},
- {"VAProfileVP9Profile2", VAProfileVP9Profile2},
+ {"VAProfileNone", VAProfileNone},
+ {"VAProfileH264ConstrainedBaseline", VAProfileH264ConstrainedBaseline},
+ // Even though it's deprecated, we leave VAProfileH264Baseline's
+ // translation here to assert we never encounter it.
+ {"VAProfileH264Baseline", VAProfileH264Baseline},
+ {"VAProfileH264Main", VAProfileH264Main},
+ {"VAProfileH264High", VAProfileH264High},
+ {"VAProfileJPEGBaseline", VAProfileJPEGBaseline},
+ {"VAProfileVP8Version0_3", VAProfileVP8Version0_3},
+ {"VAProfileVP9Profile0", VAProfileVP9Profile0},
+ {"VAProfileVP9Profile2", VAProfileVP9Profile2},
+#if BUILDFLAG(IS_ASH)
+ // TODO(hiroh): Remove if-macro once libva for linux-chrome is upreved to
+ // 2.9.0 or newer.
+ // https://source.chromium.org/chromium/chromium/src/+/master:build/linux/sysroot_scripts/generated_package_lists/sid.amd64
+ {"VAProfileAV1Profile0", VAProfileAV1Profile0},
+#endif
+#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
+ {"VAProfileHEVCMain", VAProfileHEVCMain},
+#endif
};
auto it = kStringToVAProfile.find(va_profile);
diff --git a/chromium/media/gpu/vaapi/vaapi_utils.h b/chromium/media/gpu/vaapi/vaapi_utils.h
index 0db323e96da..7420256ef2c 100644
--- a/chromium/media/gpu/vaapi/vaapi_utils.h
+++ b/chromium/media/gpu/vaapi/vaapi_utils.h
@@ -7,8 +7,8 @@
#include <va/va.h>
-#include "base/bind_helpers.h"
#include "base/callback_forward.h"
+#include "base/callback_helpers.h"
#include "base/macros.h"
#include "base/thread_annotations.h"
#include "ui/gfx/geometry/size.h"
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc
index 3772ad5859e..49950ea388a 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator.cc
@@ -10,7 +10,7 @@
#include <memory>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/cpu.h"
#include "base/files/scoped_file.h"
#include "base/json/json_writer.h"
@@ -205,8 +205,8 @@ bool VaapiVideoDecodeAccelerator::Initialize(const Config& config,
vaapi_wrapper_ = VaapiWrapper::CreateForVideoCodec(
VaapiWrapper::kDecode, profile,
- base::Bind(&ReportVaapiErrorToUMA,
- "Media.VaapiVideoDecodeAccelerator.VAAPIError"));
+ base::BindRepeating(&ReportVaapiErrorToUMA,
+ "Media.VaapiVideoDecodeAccelerator.VAAPIError"));
UMA_HISTOGRAM_BOOLEAN("Media.VAVDA.VaapiWrapperCreationSuccess",
vaapi_wrapper_.get());
@@ -349,7 +349,7 @@ void VaapiVideoDecodeAccelerator::QueueInputBuffer(
auto input_buffer = std::make_unique<InputBuffer>(
bitstream_id, std::move(buffer),
BindToCurrentLoop(
- base::Bind(&Client::NotifyEndOfBitstreamBuffer, client_)));
+ base::BindOnce(&Client::NotifyEndOfBitstreamBuffer, client_)));
input_buffers_.push(std::move(input_buffer));
}
@@ -615,8 +615,8 @@ void VaapiVideoDecodeAccelerator::TryFinishSurfaceSetChange() {
profile_ = new_profile;
auto new_vaapi_wrapper = VaapiWrapper::CreateForVideoCodec(
VaapiWrapper::kDecode, profile_,
- base::Bind(&ReportVaapiErrorToUMA,
- "Media.VaapiVideoDecodeAccelerator.VAAPIError"));
+ base::BindRepeating(&ReportVaapiErrorToUMA,
+ "Media.VaapiVideoDecodeAccelerator.VAAPIError"));
RETURN_AND_NOTIFY_ON_FAILURE(new_vaapi_wrapper.get(),
"Failed creating VaapiWrapper",
INVALID_ARGUMENT, );
@@ -699,19 +699,19 @@ void VaapiVideoDecodeAccelerator::AssignPictureBuffers(
std::vector<VASurfaceID> va_surface_ids;
scoped_refptr<VaapiWrapper> vaapi_wrapper_for_picture = vaapi_wrapper_;
- // The X11/ANGLE implementation can use |vaapi_wrapper_| to copy from an
- // internal libva buffer into an X Pixmap without having to use a processing
- // wrapper.
-#if !defined(USE_X11)
- // If we aren't in BufferAllocationMode::kNone, we have to allocate a
- // |vpp_vaapi_wrapper_| for VaapiPicture to DownloadFromSurface() the VA's
- // internal decoded frame.
- if (buffer_allocation_mode_ != BufferAllocationMode::kNone) {
+ const bool requires_vpp =
+ vaapi_picture_factory_->NeedsProcessingPipelineForDownloading();
+ // If we aren't in BufferAllocationMode::kNone mode and the VaapiPicture
+ // implementation we get from |vaapi_picture_factory_| requires the video
+ // processing pipeline for downloading the decoded frame from the internal
+ // surface, we need to create a |vpp_vaapi_wrapper_|.
+ if (requires_vpp && buffer_allocation_mode_ != BufferAllocationMode::kNone) {
if (!vpp_vaapi_wrapper_) {
vpp_vaapi_wrapper_ = VaapiWrapper::Create(
VaapiWrapper::kVideoProcess, VAProfileNone,
- base::Bind(&ReportVaapiErrorToUMA,
- "Media.VaapiVideoDecodeAccelerator.Vpp.VAAPIError"));
+ base::BindRepeating(
+ &ReportVaapiErrorToUMA,
+ "Media.VaapiVideoDecodeAccelerator.Vpp.VAAPIError"));
RETURN_AND_NOTIFY_ON_FAILURE(vpp_vaapi_wrapper_,
"Failed to initialize VppVaapiWrapper",
PLATFORM_FAILURE, );
@@ -723,8 +723,6 @@ void VaapiVideoDecodeAccelerator::AssignPictureBuffers(
vaapi_wrapper_for_picture = vpp_vaapi_wrapper_;
}
-#endif // !defined(USE_X11)
-
for (size_t i = 0; i < buffers.size(); ++i) {
// TODO(b/139460315): Create with buffers[i] once the AMD driver issue is
// resolved.
@@ -1195,9 +1193,12 @@ VaapiVideoDecodeAccelerator::GetSupportedProfiles(
const gpu::GpuDriverBugWorkarounds& workarounds) {
VideoDecodeAccelerator::SupportedProfiles profiles =
VaapiWrapper::GetSupportedDecodeProfiles(workarounds);
- // VaVDA never supported VP9 Profile 2, but VaapiWrapper does. Filter it out.
+ // VaVDA never supported VP9 Profile 2 and AV1, but VaapiWrapper does. Filter
+ // them out.
base::EraseIf(profiles, [](const auto& profile) {
- return profile.profile == VP9PROFILE_PROFILE2;
+ return profile.profile == VP9PROFILE_PROFILE2 ||
+ VideoCodecProfileToVideoCodec(profile.profile) ==
+ VideoCodec::kCodecAV1;
});
return profiles;
}
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc
index 447185c120b..0b3517cdb27 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_decode_accelerator_unittest.cc
@@ -150,11 +150,12 @@ class VaapiVideoDecodeAcceleratorTest : public TestWithParam<TestParams>,
public VideoDecodeAccelerator::Client {
public:
VaapiVideoDecodeAcceleratorTest()
- : vda_(base::Bind([] { return true; }),
- base::Bind([](uint32_t client_texture_id,
- uint32_t texture_target,
- const scoped_refptr<gl::GLImage>& image,
- bool can_bind_to_sampler) { return true; })),
+ : vda_(
+ base::BindRepeating([] { return true; }),
+ base::BindRepeating([](uint32_t client_texture_id,
+ uint32_t texture_target,
+ const scoped_refptr<gl::GLImage>& image,
+ bool can_bind_to_sampler) { return true; })),
decoder_thread_("VaapiVideoDecodeAcceleratorTestThread"),
mock_decoder_(new ::testing::StrictMock<MockAcceleratedVideoDecoder>),
mock_vaapi_picture_factory_(new MockVaapiPictureFactory()),
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decoder.cc b/chromium/media/gpu/vaapi/vaapi_video_decoder.cc
index f4d735b0dbd..9672aaffed5 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decoder.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_decoder.cc
@@ -8,7 +8,7 @@
#include <vector>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/memory/ptr_util.h"
#include "base/metrics/histogram_macros.h"
#include "base/trace_event/trace_event.h"
@@ -26,6 +26,11 @@
#include "media/gpu/vaapi/vaapi_wrapper.h"
#include "media/gpu/vaapi/vp8_vaapi_video_decoder_delegate.h"
#include "media/gpu/vaapi/vp9_vaapi_video_decoder_delegate.h"
+#include "media/media_buildflags.h"
+
+#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
+#include "media/gpu/vaapi/h265_vaapi_video_decoder_delegate.h"
+#endif
namespace media {
@@ -76,7 +81,12 @@ std::unique_ptr<DecoderInterface> VaapiVideoDecoder::Create(
SupportedVideoDecoderConfigs VaapiVideoDecoder::GetSupportedConfigs(
const gpu::GpuDriverBugWorkarounds& workarounds) {
return ConvertFromSupportedProfiles(
- VaapiWrapper::GetSupportedDecodeProfiles(workarounds), false);
+ VaapiWrapper::GetSupportedDecodeProfiles(workarounds),
+#if BUILDFLAG(USE_CHROMEOS_PROTECTED_MEDIA)
+ true /* allow_encrypted */);
+#else
+ false /* allow_encrypted */);
+#endif
}
VaapiVideoDecoder::VaapiVideoDecoder(
@@ -121,6 +131,7 @@ VaapiVideoDecoder::~VaapiVideoDecoder() {
}
void VaapiVideoDecoder::Initialize(const VideoDecoderConfig& config,
+ CdmContext* cdm_context,
InitCB init_cb,
const OutputCB& output_cb) {
DVLOGF(2) << config.AsHumanReadableString();
@@ -136,6 +147,12 @@ void VaapiVideoDecoder::Initialize(const VideoDecoderConfig& config,
return;
}
+ if (cdm_context || config.is_encrypted()) {
+ VLOGF(1) << "Vaapi decoder does not support encrypted stream";
+ std::move(init_cb).Run(StatusCode::kEncryptedContentUnsupported);
+ return;
+ }
+
// We expect the decoder to have released all output buffers (by the client
// triggering a flush or reset), even if the
// DecoderInterface API doesn't explicitly specify this.
@@ -164,7 +181,8 @@ void VaapiVideoDecoder::Initialize(const VideoDecoderConfig& config,
const VideoCodecProfile profile = config.profile();
vaapi_wrapper_ = VaapiWrapper::CreateForVideoCodec(
VaapiWrapper::kDecode, profile,
- base::Bind(&ReportVaapiErrorToUMA, "Media.VaapiVideoDecoder.VAAPIError"));
+ base::BindRepeating(&ReportVaapiErrorToUMA,
+ "Media.VaapiVideoDecoder.VAAPIError"));
UMA_HISTOGRAM_BOOLEAN("Media.VaapiVideoDecoder.VaapiWrapperCreationSuccess",
vaapi_wrapper_.get());
if (!vaapi_wrapper_.get()) {
@@ -454,9 +472,11 @@ void VaapiVideoDecoder::ApplyResolutionChange() {
CHECK(format);
auto format_fourcc = Fourcc::FromVideoPixelFormat(*format);
CHECK(format_fourcc);
- if (!frame_pool_->Initialize(*format_fourcc, pic_size, visible_rect,
- natural_size,
- decoder_->GetRequiredNumOfPictures())) {
+ // TODO(jkardatzke): Pass true for the last argument when we are in protected
+ // mode.
+ if (!frame_pool_->Initialize(
+ *format_fourcc, pic_size, visible_rect, natural_size,
+ decoder_->GetRequiredNumOfPictures(), /*use_protected=*/false)) {
DLOG(WARNING) << "Failed Initialize()ing the frame pool.";
SetState(State::kError);
return;
@@ -477,8 +497,8 @@ void VaapiVideoDecoder::ApplyResolutionChange() {
profile_ = decoder_->GetProfile();
auto new_vaapi_wrapper = VaapiWrapper::CreateForVideoCodec(
VaapiWrapper::kDecode, profile_,
- base::Bind(&ReportVaapiErrorToUMA,
- "Media.VaapiVideoDecoder.VAAPIError"));
+ base::BindRepeating(&ReportVaapiErrorToUMA,
+ "Media.VaapiVideoDecoder.VAAPIError"));
if (!new_vaapi_wrapper.get()) {
DLOG(WARNING) << "Failed creating VaapiWrapper";
SetState(State::kError);
@@ -623,6 +643,15 @@ Status VaapiVideoDecoder::CreateAcceleratedVideoDecoder() {
decoder_.reset(
new VP9Decoder(std::move(accelerator), profile_, color_space_));
+#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
+ } else if (profile_ >= HEVCPROFILE_MIN && profile_ <= HEVCPROFILE_MAX) {
+ auto accelerator =
+ std::make_unique<H265VaapiVideoDecoderDelegate>(this, vaapi_wrapper_);
+ decoder_delegate_ = accelerator.get();
+
+ decoder_.reset(
+ new H265Decoder(std::move(accelerator), profile_, color_space_));
+#endif // BUILDFLAG(ENABLE_PLATFORM_HEVC)
} else {
return Status(StatusCode::kDecoderUnsupportedProfile)
.WithData("profile", profile_);
diff --git a/chromium/media/gpu/vaapi/vaapi_video_decoder.h b/chromium/media/gpu/vaapi/vaapi_video_decoder.h
index 6e71913711c..65b70882fab 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_decoder.h
+++ b/chromium/media/gpu/vaapi/vaapi_video_decoder.h
@@ -22,6 +22,7 @@
#include "base/optional.h"
#include "base/sequence_checker.h"
#include "base/time/time.h"
+#include "media/base/cdm_context.h"
#include "media/base/status.h"
#include "media/base/video_codecs.h"
#include "media/base/video_frame_layout.h"
@@ -57,6 +58,7 @@ class VaapiVideoDecoder : public DecoderInterface,
// DecoderInterface implementation.
void Initialize(const VideoDecoderConfig& config,
+ CdmContext* cdm_context,
InitCB init_cb,
const OutputCB& output_cb) override;
void Decode(scoped_refptr<DecoderBuffer> buffer, DecodeCB decode_cb) override;
diff --git a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc
index 9388b57a6ff..6dee785a05c 100644
--- a/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc
+++ b/chromium/media/gpu/vaapi/vaapi_video_encode_accelerator.cc
@@ -15,7 +15,6 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/bits.h"
#include "base/callback.h"
#include "base/callback_helpers.h"
@@ -271,7 +270,7 @@ bool VaapiVideoEncodeAccelerator::Initialize(const Config& config,
// VaapiVEA supports temporal layers for VP9 only, but we also allow VP8 to
// support VP8 simulcast.
if (config.HasSpatialLayer()) {
- VLOGF(1) << "Spatial layer encoding is supported";
+ VLOGF(1) << "Spatial layer encoding is not yet supported";
return false;
}
@@ -340,8 +339,8 @@ bool VaapiVideoEncodeAccelerator::Initialize(const Config& config,
: VaapiWrapper::kEncode;
vaapi_wrapper_ = VaapiWrapper::CreateForVideoCodec(
mode, config.output_profile,
- base::Bind(&ReportVaapiErrorToUMA,
- "Media.VaapiVideoEncodeAccelerator.VAAPIError"));
+ base::BindRepeating(&ReportVaapiErrorToUMA,
+ "Media.VaapiVideoEncodeAccelerator.VAAPIError"));
if (!vaapi_wrapper_) {
VLOGF(1) << "Failed initializing VAAPI for profile "
<< GetProfileName(config.output_profile);
@@ -732,8 +731,9 @@ std::unique_ptr<VaapiEncodeJob> VaapiVideoEncodeAccelerator::CreateEncodeJob(
if (!vpp_vaapi_wrapper_) {
vpp_vaapi_wrapper_ = VaapiWrapper::Create(
VaapiWrapper::kVideoProcess, VAProfileNone,
- base::Bind(&ReportVaapiErrorToUMA,
- "Media.VaapiVideoEncodeAccelerator.Vpp.VAAPIError"));
+ base::BindRepeating(
+ &ReportVaapiErrorToUMA,
+ "Media.VaapiVideoEncodeAccelerator.Vpp.VAAPIError"));
if (!vpp_vaapi_wrapper_) {
NOTIFY_ERROR(kPlatformFailureError,
"Failed to initialize VppVaapiWrapper");
diff --git a/chromium/media/gpu/vaapi/vaapi_wrapper.cc b/chromium/media/gpu/vaapi/vaapi_wrapper.cc
index 2751b144624..d31fae09b17 100644
--- a/chromium/media/gpu/vaapi/vaapi_wrapper.cc
+++ b/chromium/media/gpu/vaapi/vaapi_wrapper.cc
@@ -19,7 +19,6 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/bits.h"
#include "base/callback_helpers.h"
#include "base/cpu.h"
@@ -37,11 +36,14 @@
#include "base/system/sys_info.h"
#include "base/trace_event/trace_event.h"
#include "build/build_config.h"
+#include "build/chromeos_buildflags.h"
#include "ui/base/ui_base_features.h"
#include "media/base/media_switches.h"
#include "media/base/video_frame.h"
#include "media/base/video_types.h"
+#include "media/gpu/macros.h"
+#include "media/media_buildflags.h"
// Auto-generated for dlopen libva libraries
#include "media/gpu/vaapi/va_stubs.h"
@@ -58,13 +60,13 @@
#include "ui/gl/gl_implementation.h"
#if defined(USE_X11)
-#include "ui/gfx/x/x11_types.h" // nogncheck
-
typedef XID Drawable;
extern "C" {
#include "media/gpu/vaapi/va_x11.sigs"
}
+
+#include "ui/gfx/x/connection.h" // nogncheck
#endif
#if defined(USE_OZONE)
@@ -342,17 +344,28 @@ const ProfileCodecMap& GetProfileCodecMap() {
static const base::NoDestructor<ProfileCodecMap> kMediaToVAProfileMap({
// VAProfileH264Baseline is deprecated in <va/va.h> since libva 2.0.0.
{H264PROFILE_BASELINE, VAProfileH264ConstrainedBaseline},
- {H264PROFILE_MAIN, VAProfileH264Main},
- // TODO(posciak): See if we can/want to support other variants of
- // H264PROFILE_HIGH*.
- {H264PROFILE_HIGH, VAProfileH264High},
- {VP8PROFILE_ANY, VAProfileVP8Version0_3},
- {VP9PROFILE_PROFILE0, VAProfileVP9Profile0},
- // VaapiWrapper does not support VP9 Profile 1, see b/153680337.
- // {VP9PROFILE_PROFILE1, VAProfileVP9Profile1},
- {VP9PROFILE_PROFILE2, VAProfileVP9Profile2},
+ {H264PROFILE_MAIN, VAProfileH264Main},
+ // TODO(posciak): See if we can/want to support other variants of
+ // H264PROFILE_HIGH*.
+ {H264PROFILE_HIGH, VAProfileH264High},
+ {VP8PROFILE_ANY, VAProfileVP8Version0_3},
+ {VP9PROFILE_PROFILE0, VAProfileVP9Profile0},
+ // VaapiWrapper does not support VP9 Profile 1, see b/153680337.
+ // {VP9PROFILE_PROFILE1, VAProfileVP9Profile1},
+ {VP9PROFILE_PROFILE2, VAProfileVP9Profile2},
// VaapiWrapper does not support Profile 3.
//{VP9PROFILE_PROFILE3, VAProfileVP9Profile3},
+#if BUILDFLAG(IS_ASH)
+ // TODO(hiroh): Remove if-macro once libva for linux-chrome is upreved
+ // to 2.9.0 or newer.
+ // https://source.chromium.org/chromium/chromium/src/+/master:build/linux/sysroot_scripts/generated_package_lists/sid.amd64
+ {AV1PROFILE_PROFILE_MAIN, VAProfileAV1Profile0},
+#endif // BUILDFLAG(IS_ASH)
+ // VaapiWrapper does not support AV1 Profile 1.
+ // {AV1PROFILE_PROFILE_HIGH, VAProfileAV1Profile1},
+#if BUILDFLAG(ENABLE_PLATFORM_HEVC)
+ {HEVCPROFILE_MAIN, VAProfileHEVCMain},
+#endif
});
return *kMediaToVAProfileMap;
}
@@ -378,8 +391,15 @@ bool IsVAProfileSupported(VAProfile va_profile) {
}
bool IsBlockedDriver(VaapiWrapper::CodecMode mode, VAProfile va_profile) {
- if (!IsModeEncoding(mode))
+ if (!IsModeEncoding(mode)) {
+#if BUILDFLAG(IS_ASH)
+ if (va_profile == VAProfileAV1Profile0 &&
+ !base::FeatureList::IsEnabled(kVaapiAV1Decoder)) {
+ return true;
+ }
+#endif // BUILDFLAG(IS_ASH)
return false;
+ }
// TODO(posciak): Remove once VP8 encoding is to be enabled by default.
if (va_profile == VAProfileVP8Version0_3 &&
@@ -495,7 +515,7 @@ bool VADisplayState::InitializeVaDisplay_Locked() {
case gl::kGLImplementationDesktopGL:
#if defined(USE_X11)
if (!features::IsUsingOzonePlatform()) {
- va_display_ = vaGetDisplay(gfx::GetXDisplay());
+ va_display_ = vaGetDisplay(x11::Connection::Get()->GetXlibDisplay());
if (!vaDisplayIsValid(va_display_))
va_display_ = vaGetDisplayDRM(drm_fd_.get());
}
@@ -504,14 +524,14 @@ bool VADisplayState::InitializeVaDisplay_Locked() {
case gl::kGLImplementationEGLANGLE:
#if defined(USE_X11)
if (!features::IsUsingOzonePlatform())
- va_display_ = vaGetDisplay(gfx::GetXDisplay());
+ va_display_ = vaGetDisplay(x11::Connection::Get()->GetXlibDisplay());
#endif // USE_X11
break;
// Cannot infer platform from GL, try all available displays
case gl::kGLImplementationNone:
#if defined(USE_X11)
if (!features::IsUsingOzonePlatform()) {
- va_display_ = vaGetDisplay(gfx::GetXDisplay());
+ va_display_ = vaGetDisplay(x11::Connection::Get()->GetXlibDisplay());
if (vaDisplayIsValid(va_display_))
break;
}
@@ -538,7 +558,7 @@ bool VADisplayState::InitializeVaDriver_Locked() {
int major_version, minor_version;
VAStatus va_res = vaInitialize(va_display_, &major_version, &minor_version);
if (va_res != VA_STATUS_SUCCESS) {
- LOG(ERROR) << "vaInitialize failed: " << vaErrorStr(va_res);
+ VLOGF(1) << "vaInitialize failed: " << vaErrorStr(va_res);
return false;
}
const std::string va_vendor_string = vaQueryVendorString(va_display_);
@@ -558,9 +578,9 @@ bool VADisplayState::InitializeVaDriver_Locked() {
// guaranteed to be backward (and not forward) compatible.
if (VA_MAJOR_VERSION > major_version ||
(VA_MAJOR_VERSION == major_version && VA_MINOR_VERSION > minor_version)) {
- LOG(ERROR) << "The system version " << major_version << "." << minor_version
- << " should be greater than or equal to "
- << VA_MAJOR_VERSION << "." << VA_MINOR_VERSION;
+ VLOGF(1) << "The system version " << major_version << "." << minor_version
+ << " should be greater than or equal to " << VA_MAJOR_VERSION
+ << "." << VA_MINOR_VERSION;
return false;
}
return true;
@@ -1886,7 +1906,7 @@ bool VaapiWrapper::MapAndCopyAndExecute(
#if defined(USE_X11)
bool VaapiWrapper::PutSurfaceIntoPixmap(VASurfaceID va_surface_id,
- Pixmap x_pixmap,
+ x11::Pixmap x_pixmap,
gfx::Size dest_size) {
DCHECK(!features::IsUsingOzonePlatform());
base::AutoLock auto_lock(*va_lock_);
@@ -1895,12 +1915,10 @@ bool VaapiWrapper::PutSurfaceIntoPixmap(VASurfaceID va_surface_id,
VA_SUCCESS_OR_RETURN(va_res, VaapiFunctions::kVASyncSurface, false);
// Put the data into an X Pixmap.
- va_res = vaPutSurface(va_display_,
- va_surface_id,
- x_pixmap,
- 0, 0, dest_size.width(), dest_size.height(),
- 0, 0, dest_size.width(), dest_size.height(),
- NULL, 0, 0);
+ va_res =
+ vaPutSurface(va_display_, va_surface_id, static_cast<uint32_t>(x_pixmap),
+ 0, 0, dest_size.width(), dest_size.height(), 0, 0,
+ dest_size.width(), dest_size.height(), nullptr, 0, 0);
VA_SUCCESS_OR_RETURN(va_res, VaapiFunctions::kVAPutSurface, false);
return true;
}
@@ -1954,7 +1972,7 @@ bool VaapiWrapper::UploadVideoFrameToSurface(const VideoFrame& frame,
needs_va_put_image = true;
}
base::ScopedClosureRunner vaimage_deleter(
- base::Bind(&DestroyVAImage, va_display_, image));
+ base::BindOnce(&DestroyVAImage, va_display_, image));
if (image.format.fourcc != VA_FOURCC_NV12) {
LOG(ERROR) << "Unsupported image format: " << image.format.fourcc;
@@ -2256,7 +2274,7 @@ void VaapiWrapper::PreSandboxInitialization() {
static bool result = InitializeStubs(paths);
if (!result) {
static const char kErrorMsg[] = "Failed to initialize VAAPI libs";
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
// When Chrome runs on Linux with target_os="chromeos", do not log error
// message without VAAPI libraries.
LOG_IF(ERROR, base::SysInfo::IsRunningOnChromeOS()) << kErrorMsg;
diff --git a/chromium/media/gpu/vaapi/vaapi_wrapper.h b/chromium/media/gpu/vaapi/vaapi_wrapper.h
index 1871410e8ee..fd1fd823ee1 100644
--- a/chromium/media/gpu/vaapi/vaapi_wrapper.h
+++ b/chromium/media/gpu/vaapi/vaapi_wrapper.h
@@ -35,7 +35,7 @@
#include "ui/gfx/geometry/size.h"
#if defined(USE_X11)
-#include "ui/gfx/x/x11.h"
+#include "ui/gfx/x/xproto.h" // nogncheck
#endif // USE_X11
namespace gfx {
@@ -352,7 +352,7 @@ class MEDIA_GPU_EXPORT VaapiWrapper
// Put data from |va_surface_id| into |x_pixmap| of size
// |dest_size|, converting/scaling to it.
bool PutSurfaceIntoPixmap(VASurfaceID va_surface_id,
- Pixmap x_pixmap,
+ x11::Pixmap x_pixmap,
gfx::Size dest_size) WARN_UNUSED_RESULT;
#endif // USE_X11
diff --git a/chromium/media/gpu/vaapi/vp9_encoder_unittest.cc b/chromium/media/gpu/vaapi/vp9_encoder_unittest.cc
index 1a5fe128362..689506089bf 100644
--- a/chromium/media/gpu/vaapi/vp9_encoder_unittest.cc
+++ b/chromium/media/gpu/vaapi/vp9_encoder_unittest.cc
@@ -8,8 +8,8 @@
#include <numeric>
#include <tuple>
-#include "base/bind_helpers.h"
#include "base/callback.h"
+#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/numerics/safe_conversions.h"
#include "base/optional.h"
diff --git a/chromium/media/gpu/video_decode_accelerator_perf_tests.cc b/chromium/media/gpu/video_decode_accelerator_perf_tests.cc
index 9912778a8c8..1282d2cfe8e 100644
--- a/chromium/media/gpu/video_decode_accelerator_perf_tests.cc
+++ b/chromium/media/gpu/video_decode_accelerator_perf_tests.cc
@@ -49,10 +49,10 @@ constexpr const char* help_msg =
" will be stored in the current working directory.\n"
" --use_vd use the new VD-based video decoders, instead of\n"
" the default VDA-based video decoders.\n"
- " --use_vd_vda use the new VD-based video decoders with a wrapper"
- " that translates to the VDA interface, used to test"
- " interaction with older components expecting the VDA"
- " interface.\n"
+ " --use_vd_vda use the new VD-based video decoders with a\n"
+ " wrapper that translates to the VDA interface,\n"
+ " used to test interaction with older components\n"
+ " expecting the VDA interface.\n"
" --gtest_help display the gtest help and exit.\n"
" --help display this help and exit.\n";
diff --git a/chromium/media/gpu/video_decode_accelerator_tests.cc b/chromium/media/gpu/video_decode_accelerator_tests.cc
index 78c09f6fd00..b9e75cd2506 100644
--- a/chromium/media/gpu/video_decode_accelerator_tests.cc
+++ b/chromium/media/gpu/video_decode_accelerator_tests.cc
@@ -47,11 +47,10 @@ constexpr const char* help_msg =
" e.g. --vmodule=*media/gpu*=2.\n\n"
" --disable_validator disable frame validation.\n"
" --use_vd use the new VD-based video decoders, instead of\n"
- " the default VDA-based video decoders.\n\n"
- " --use_vd_vda use the new VD-based video decoders with a wrapper"
- " that translates to the VDA interface, used to test"
- " interaction with older components expecting the VDA"
- " interface.\n"
+ " the default VDA-based video decoders.\n"
+ " --use_vd_vda use the new VD-based video decoders with a\n"
+ " wrapper that translates to the VDA interface,\n"
+ " used to test interaction with older components\n"
" --output_frames write the selected video frames to disk, possible\n"
" values are \"all|corrupt\".\n"
" --output_format set the format of frames saved to disk, supported\n"
diff --git a/chromium/media/gpu/video_encode_accelerator_tests.cc b/chromium/media/gpu/video_encode_accelerator_tests.cc
index 56c0ec70a6a..4219a9b3685 100644
--- a/chromium/media/gpu/video_encode_accelerator_tests.cc
+++ b/chromium/media/gpu/video_encode_accelerator_tests.cc
@@ -12,6 +12,7 @@
#include "media/base/media_util.h"
#include "media/base/test_data_util.h"
#include "media/base/video_bitrate_allocation.h"
+#include "media/base/video_codecs.h"
#include "media/base/video_decoder_config.h"
#include "media/gpu/buildflags.h"
#include "media/gpu/test/video.h"
@@ -88,6 +89,10 @@ constexpr base::FilePath::CharType kDefaultTestVideoPath[] =
constexpr size_t kNumFramesToEncodeForBitrateCheck = 300;
// Tolerance factor for how encoded bitrate can differ from requested bitrate.
constexpr double kBitrateTolerance = 0.1;
+// The event timeout used in bitrate check tests because encoding 2160p and
+// validating |kNumFramesToEncodeBitrateCheck| frames take much time.
+constexpr base::TimeDelta kBitrateCheckEventTimeout =
+ base::TimeDelta::FromSeconds(180);
media::test::VideoEncoderTestEnvironment* g_env;
@@ -297,6 +302,9 @@ TEST_F(VideoEncoderTest, BitrateCheck) {
auto config = GetDefaultConfig();
config.num_frames_to_encode = kNumFramesToEncodeForBitrateCheck;
auto encoder = CreateVideoEncoder(g_env->Video(), config);
+ // Set longer event timeout than the default (30 sec) because encoding 2160p
+ // and validating the stream take much time.
+ encoder->SetEventWaitTimeout(kBitrateCheckEventTimeout);
encoder->Encode();
EXPECT_TRUE(encoder->WaitForFlushDone());
@@ -308,10 +316,13 @@ TEST_F(VideoEncoderTest, BitrateCheck) {
kBitrateTolerance * config.bitrate);
}
-TEST_F(VideoEncoderTest, DynamicBitrateChange) {
+TEST_F(VideoEncoderTest, BitrateCheck_DynamicBitrate) {
auto config = GetDefaultConfig();
config.num_frames_to_encode = kNumFramesToEncodeForBitrateCheck * 2;
auto encoder = CreateVideoEncoder(g_env->Video(), config);
+ // Set longer event timeout than the default (30 sec) because encoding 2160p
+ // and validating the stream take much time.
+ encoder->SetEventWaitTimeout(kBitrateCheckEventTimeout);
// Encode the video with the first bitrate.
const uint32_t first_bitrate = config.bitrate;
@@ -336,10 +347,13 @@ TEST_F(VideoEncoderTest, DynamicBitrateChange) {
EXPECT_TRUE(encoder->WaitForBitstreamProcessors());
}
-TEST_F(VideoEncoderTest, DynamicFramerateChange) {
+TEST_F(VideoEncoderTest, BitrateCheck_DynamicFramerate) {
auto config = GetDefaultConfig();
config.num_frames_to_encode = kNumFramesToEncodeForBitrateCheck * 2;
auto encoder = CreateVideoEncoder(g_env->Video(), config);
+ // Set longer event timeout than the default (30 sec) because encoding 2160p
+ // and validating the stream take much time.
+ encoder->SetEventWaitTimeout(kBitrateCheckEventTimeout);
// Encode the video with the first framerate.
const uint32_t first_framerate = config.framerate;
diff --git a/chromium/media/gpu/video_encode_accelerator_unittest.cc b/chromium/media/gpu/video_encode_accelerator_unittest.cc
index 8b9f4162095..177eb122c83 100644
--- a/chromium/media/gpu/video_encode_accelerator_unittest.cc
+++ b/chromium/media/gpu/video_encode_accelerator_unittest.cc
@@ -13,8 +13,8 @@
#include "base/at_exit.h"
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/bits.h"
+#include "base/callback_helpers.h"
#include "base/cancelable_callback.h"
#include "base/command_line.h"
#include "base/containers/queue.h"
@@ -44,6 +44,7 @@
#include "base/time/time.h"
#include "base/timer/timer.h"
#include "build/build_config.h"
+#include "build/chromeos_buildflags.h"
#include "gpu/config/gpu_driver_bug_workarounds.h"
#include "gpu/config/gpu_preferences.h"
#include "gpu/ipc/service/gpu_memory_buffer_factory.h"
@@ -313,7 +314,7 @@ static bool IsVP9(VideoCodecProfile profile) {
return profile >= VP9PROFILE_MIN && profile <= VP9PROFILE_MAX;
}
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
// Determine the test is known-to-fail and should be skipped.
bool ShouldSkipTest(VideoPixelFormat format) {
struct Pattern {
@@ -361,7 +362,7 @@ bool ShouldSkipTest(VideoPixelFormat format) {
return false;
}
-#endif // defined(OS_CHROMEOS)
+#endif // BUILDFLAG(IS_ASH)
// Helper functions to do string conversions.
static base::FilePath::StringType StringToFilePathStringType(
@@ -2792,10 +2793,10 @@ TEST_P(VideoEncodeAcceleratorTest, TestSimpleEncode) {
const bool force_level = std::get<8>(GetParam());
const bool scale = std::get<9>(GetParam());
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
if (ShouldSkipTest(g_env->test_streams_[0]->pixel_format))
GTEST_SKIP();
-#endif // defined(OS_CHROMEOS)
+#endif // BUILDFLAG(IS_ASH)
if (force_level) {
// Skip ForceLevel test if "--force_level=false".
@@ -2927,10 +2928,10 @@ TEST_P(VideoEncodeAcceleratorSimpleTest, TestSimpleEncode) {
const int test_type = GetParam();
ASSERT_LT(test_type, 2) << "Invalid test type=" << test_type;
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
if (ShouldSkipTest(g_env->test_streams_[0]->pixel_format))
GTEST_SKIP();
-#endif // defined(OS_CHROMEOS)
+#endif // BUILDFLAG(IS_ASH)
if (test_type == 0)
SimpleTestFunc<VEANoInputClient>();
@@ -3184,7 +3185,7 @@ class VEATestSuite : public base::TestSuite {
void Initialize() override {
base::TestSuite::Initialize();
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
task_environment_ = std::make_unique<base::test::TaskEnvironment>(
base::test::TaskEnvironment::MainThreadType::UI);
#else
@@ -3276,7 +3277,7 @@ int main(int argc, char** argv) {
}
if (it->first == "native_input") {
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
media::g_native_input = true;
#else
LOG(FATAL) << "Unsupported option";
diff --git a/chromium/media/gpu/vp9_decoder.cc b/chromium/media/gpu/vp9_decoder.cc
index a36e20438ba..29cd176fa7d 100644
--- a/chromium/media/gpu/vp9_decoder.cc
+++ b/chromium/media/gpu/vp9_decoder.cc
@@ -10,6 +10,7 @@
#include "base/feature_list.h"
#include "base/logging.h"
#include "build/build_config.h"
+#include "build/chromeos_buildflags.h"
#include "media/base/limits.h"
#include "media/base/media_switches.h"
#include "media/gpu/vp9_decoder.h"
@@ -19,7 +20,7 @@ namespace media {
namespace {
std::vector<uint32_t> GetSpatialLayerFrameSize(
const DecoderBuffer& decoder_buffer) {
-#if defined(ARCH_CPU_X86_FAMILY) && defined(OS_CHROMEOS)
+#if defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_ASH)
const uint32_t* cue_data =
reinterpret_cast<const uint32_t*>(decoder_buffer.side_data());
if (!cue_data) {
@@ -36,7 +37,7 @@ std::vector<uint32_t> GetSpatialLayerFrameSize(
return {};
}
return std::vector<uint32_t>(cue_data, cue_data + num_of_layers);
-#endif // defined(ARCH_CPU_X86_FAMILY) && defined(OS_CHROMEOS)
+#endif // defined(ARCH_CPU_X86_FAMILY) && BUILDFLAG(IS_ASH)
return {};
}
diff --git a/chromium/media/gpu/windows/d3d11_copying_texture_wrapper.cc b/chromium/media/gpu/windows/d3d11_copying_texture_wrapper.cc
index 6698af9f724..96e72747877 100644
--- a/chromium/media/gpu/windows/d3d11_copying_texture_wrapper.cc
+++ b/chromium/media/gpu/windows/d3d11_copying_texture_wrapper.cc
@@ -18,7 +18,7 @@ namespace media {
CopyingTexture2DWrapper::CopyingTexture2DWrapper(
const gfx::Size& size,
std::unique_ptr<Texture2DWrapper> output_wrapper,
- std::unique_ptr<VideoProcessorProxy> processor,
+ scoped_refptr<VideoProcessorProxy> processor,
ComD3D11Texture2D output_texture,
base::Optional<gfx::ColorSpace> output_color_space)
: size_(size),
@@ -107,7 +107,7 @@ Status CopyingTexture2DWrapper::Init(
}
void CopyingTexture2DWrapper::SetStreamHDRMetadata(
- const gl::HDRMetadata& stream_metadata) {
+ const gfx::HDRMetadata& stream_metadata) {
auto dxgi_stream_metadata =
gl::HDRMetadataHelperWin::HDRMetadataToDXGI(stream_metadata);
video_processor_->SetStreamHDRMetadata(dxgi_stream_metadata);
diff --git a/chromium/media/gpu/windows/d3d11_copying_texture_wrapper.h b/chromium/media/gpu/windows/d3d11_copying_texture_wrapper.h
index 7c46c5745d2..5fb2fffef9b 100644
--- a/chromium/media/gpu/windows/d3d11_copying_texture_wrapper.h
+++ b/chromium/media/gpu/windows/d3d11_copying_texture_wrapper.h
@@ -26,7 +26,7 @@ class MEDIA_GPU_EXPORT CopyingTexture2DWrapper : public Texture2DWrapper {
// be given to the swap chain directly, or video processed later).
CopyingTexture2DWrapper(const gfx::Size& size,
std::unique_ptr<Texture2DWrapper> output_wrapper,
- std::unique_ptr<VideoProcessorProxy> processor,
+ scoped_refptr<VideoProcessorProxy> processor,
ComD3D11Texture2D output_texture,
base::Optional<gfx::ColorSpace> output_color_space);
~CopyingTexture2DWrapper() override;
@@ -40,13 +40,13 @@ class MEDIA_GPU_EXPORT CopyingTexture2DWrapper : public Texture2DWrapper {
ComD3D11Texture2D texture,
size_t array_slice) override;
- void SetStreamHDRMetadata(const gl::HDRMetadata& stream_metadata) override;
+ void SetStreamHDRMetadata(const gfx::HDRMetadata& stream_metadata) override;
void SetDisplayHDRMetadata(
const DXGI_HDR_METADATA_HDR10& dxgi_display_metadata) override;
private:
gfx::Size size_;
- std::unique_ptr<VideoProcessorProxy> video_processor_;
+ scoped_refptr<VideoProcessorProxy> video_processor_;
std::unique_ptr<Texture2DWrapper> output_texture_wrapper_;
ComD3D11Texture2D output_texture_;
// If set, then this is the desired output color space for the copy.
diff --git a/chromium/media/gpu/windows/d3d11_copying_texture_wrapper_unittest.cc b/chromium/media/gpu/windows/d3d11_copying_texture_wrapper_unittest.cc
index 3c913dd17a9..a999fe50fe3 100644
--- a/chromium/media/gpu/windows/d3d11_copying_texture_wrapper_unittest.cc
+++ b/chromium/media/gpu/windows/d3d11_copying_texture_wrapper_unittest.cc
@@ -6,7 +6,7 @@
#include <utility>
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/test/task_environment.h"
#include "media/gpu/windows/d3d11_copying_texture_wrapper.h"
#include "media/gpu/windows/d3d11_texture_wrapper.h"
@@ -80,6 +80,9 @@ class MockVideoProcessorProxy : public VideoProcessorProxy {
base::Optional<gfx::ColorSpace> last_output_color_space_;
base::Optional<DXGI_HDR_METADATA_HDR10> last_stream_metadata_;
base::Optional<DXGI_HDR_METADATA_HDR10> last_display_metadata_;
+
+ private:
+ ~MockVideoProcessorProxy() override = default;
};
class MockTexture2DWrapper : public Texture2DWrapper {
@@ -106,7 +109,7 @@ class MockTexture2DWrapper : public Texture2DWrapper {
MOCK_METHOD0(MockInit, Status());
MOCK_METHOD0(MockProcessTexture, Status());
MOCK_METHOD1(SetStreamHDRMetadata,
- void(const gl::HDRMetadata& stream_metadata));
+ void(const gfx::HDRMetadata& stream_metadata));
MOCK_METHOD1(SetDisplayHDRMetadata,
void(const DXGI_HDR_METADATA_HDR10& dxgi_display_metadata));
@@ -136,8 +139,8 @@ class D3D11CopyingTexture2DWrapperTest
gpu_task_runner_ = task_environment_.GetMainThreadTaskRunner();
}
- std::unique_ptr<MockVideoProcessorProxy> ExpectProcessorProxy() {
- auto result = std::make_unique<MockVideoProcessorProxy>();
+ scoped_refptr<MockVideoProcessorProxy> ExpectProcessorProxy() {
+ auto result = base::MakeRefCounted<MockVideoProcessorProxy>();
ON_CALL(*result.get(), MockInit(_, _))
.WillByDefault(Return(GetProcessorProxyInit()
? StatusCode::kOk
@@ -215,8 +218,7 @@ TEST_P(D3D11CopyingTexture2DWrapperTest,
auto texture_wrapper = ExpectTextureWrapper();
MockTexture2DWrapper* texture_wrapper_raw = texture_wrapper.get();
auto wrapper = std::make_unique<CopyingTexture2DWrapper>(
- size, std::move(texture_wrapper), std::move(processor), nullptr,
- copy_color_space);
+ size, std::move(texture_wrapper), processor, nullptr, copy_color_space);
// TODO: check |gpu_task_runner_|.
@@ -256,15 +258,15 @@ TEST_P(D3D11CopyingTexture2DWrapperTest,
}
TEST_P(D3D11CopyingTexture2DWrapperTest, HDRMetadataIsSentToVideoProcessor) {
- gl::HDRMetadata metadata;
+ gfx::HDRMetadata metadata;
metadata.mastering_metadata.primary_r =
- gl::MasteringMetadata::Chromaticity(0.1, 0.2);
+ gfx::MasteringMetadata::Chromaticity(0.1, 0.2);
metadata.mastering_metadata.primary_g =
- gl::MasteringMetadata::Chromaticity(0.3, 0.4);
+ gfx::MasteringMetadata::Chromaticity(0.3, 0.4);
metadata.mastering_metadata.primary_b =
- gl::MasteringMetadata::Chromaticity(0.5, 0.6);
+ gfx::MasteringMetadata::Chromaticity(0.5, 0.6);
metadata.mastering_metadata.white_point =
- gl::MasteringMetadata::Chromaticity(0.7, 0.8);
+ gfx::MasteringMetadata::Chromaticity(0.7, 0.8);
metadata.mastering_metadata.luminance_max = 0.9;
metadata.mastering_metadata.luminance_min = 0.05;
metadata.max_content_light_level = 1000;
diff --git a/chromium/media/gpu/windows/d3d11_decoder_configurator.cc b/chromium/media/gpu/windows/d3d11_decoder_configurator.cc
index 215d240c58e..ae4d5b09709 100644
--- a/chromium/media/gpu/windows/d3d11_decoder_configurator.cc
+++ b/chromium/media/gpu/windows/d3d11_decoder_configurator.cc
@@ -73,7 +73,7 @@ bool D3D11DecoderConfigurator::SupportsDevice(
return false;
}
-ErrorOr<ComD3D11Texture2D> D3D11DecoderConfigurator::CreateOutputTexture(
+StatusOr<ComD3D11Texture2D> D3D11DecoderConfigurator::CreateOutputTexture(
ComD3D11Device device,
gfx::Size size,
uint32_t array_size) {
diff --git a/chromium/media/gpu/windows/d3d11_decoder_configurator.h b/chromium/media/gpu/windows/d3d11_decoder_configurator.h
index 716f0e3fda4..b1f3c7934a4 100644
--- a/chromium/media/gpu/windows/d3d11_decoder_configurator.h
+++ b/chromium/media/gpu/windows/d3d11_decoder_configurator.h
@@ -41,9 +41,9 @@ class MEDIA_GPU_EXPORT D3D11DecoderConfigurator {
bool SupportsDevice(ComD3D11VideoDevice video_device);
// Create the decoder's output texture.
- ErrorOr<ComD3D11Texture2D> CreateOutputTexture(ComD3D11Device device,
- gfx::Size size,
- uint32_t array_size);
+ StatusOr<ComD3D11Texture2D> CreateOutputTexture(ComD3D11Device device,
+ gfx::Size size,
+ uint32_t array_size);
const D3D11_VIDEO_DECODER_DESC* DecoderDescriptor() const {
return &decoder_desc_;
diff --git a/chromium/media/gpu/windows/d3d11_picture_buffer.h b/chromium/media/gpu/windows/d3d11_picture_buffer.h
index 70d48fc7ba2..8ceadaa3ed1 100644
--- a/chromium/media/gpu/windows/d3d11_picture_buffer.h
+++ b/chromium/media/gpu/windows/d3d11_picture_buffer.h
@@ -80,12 +80,19 @@ class MEDIA_GPU_EXPORT D3D11PictureBuffer
size_t picture_index() const { return picture_index_; }
// Is this PictureBuffer backing a VideoFrame right now?
- bool in_client_use() const { return in_client_use_; }
+ bool in_client_use() const { return in_client_use_ > 0; }
// Is this PictureBuffer holding an image that's in use by the decoder?
bool in_picture_use() const { return in_picture_use_; }
- void set_in_client_use(bool use) { in_client_use_ = use; }
+ void add_client_use() {
+ in_client_use_++;
+ DCHECK_GT(in_client_use_, 0);
+ }
+ void remove_client_use() {
+ DCHECK_GT(in_client_use_, 0);
+ in_client_use_--;
+ }
void set_in_picture_use(bool use) { in_picture_use_ = use; }
const ComD3D11VideoDecoderOutputView& output_view() const {
@@ -108,7 +115,7 @@ class MEDIA_GPU_EXPORT D3D11PictureBuffer
std::unique_ptr<Texture2DWrapper> texture_wrapper_;
gfx::Size size_;
bool in_picture_use_ = false;
- bool in_client_use_ = false;
+ int in_client_use_ = 0;
size_t picture_index_;
ComD3D11VideoDecoderOutputView output_view_;
diff --git a/chromium/media/gpu/windows/d3d11_picture_buffer_unittest.cc b/chromium/media/gpu/windows/d3d11_picture_buffer_unittest.cc
new file mode 100644
index 00000000000..546dee28ca3
--- /dev/null
+++ b/chromium/media/gpu/windows/d3d11_picture_buffer_unittest.cc
@@ -0,0 +1,48 @@
+// Copyright 2021 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <utility>
+
+#include "base/callback_helpers.h"
+#include "base/test/task_environment.h"
+#include "media/gpu/windows/d3d11_picture_buffer.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+
+class D3D11PictureBufferTest : public ::testing::Test {
+ public:
+ D3D11PictureBufferTest() {
+ picture_buffer_ = base::MakeRefCounted<D3D11PictureBuffer>(
+ task_environment_.GetMainThreadTaskRunner(), nullptr, 0, nullptr,
+ gfx::Size(), 0);
+ }
+
+ base::test::TaskEnvironment task_environment_;
+
+ scoped_refptr<D3D11PictureBuffer> picture_buffer_;
+};
+
+// The processor proxy wraps the VideoDevice/VideoContext and stores some of the
+// d3d11 types. Make sure that the arguments we give these methods are passed
+// through correctly.
+TEST_F(D3D11PictureBufferTest, InClientUse) {
+ EXPECT_FALSE(picture_buffer_->in_client_use());
+
+ // Add two client refs.
+ picture_buffer_->add_client_use();
+ EXPECT_TRUE(picture_buffer_->in_client_use());
+ picture_buffer_->add_client_use();
+ EXPECT_TRUE(picture_buffer_->in_client_use());
+
+ // Remove them. Should still be in use by the client until the second one has
+ // been removed.
+ picture_buffer_->remove_client_use();
+ EXPECT_TRUE(picture_buffer_->in_client_use());
+ picture_buffer_->remove_client_use();
+ EXPECT_FALSE(picture_buffer_->in_client_use());
+}
+
+} // namespace media \ No newline at end of file
diff --git a/chromium/media/gpu/windows/d3d11_texture_selector.cc b/chromium/media/gpu/windows/d3d11_texture_selector.cc
index 14348f8cc9a..422a69b91c6 100644
--- a/chromium/media/gpu/windows/d3d11_texture_selector.cc
+++ b/chromium/media/gpu/windows/d3d11_texture_selector.cc
@@ -16,8 +16,15 @@
namespace media {
TextureSelector::TextureSelector(VideoPixelFormat pixfmt,
- DXGI_FORMAT output_dxgifmt)
- : pixel_format_(pixfmt), output_dxgifmt_(output_dxgifmt) {}
+ DXGI_FORMAT output_dxgifmt,
+ ComD3D11VideoDevice video_device,
+ ComD3D11DeviceContext device_context)
+ : pixel_format_(pixfmt),
+ output_dxgifmt_(output_dxgifmt),
+ video_device_(std::move(video_device)),
+ device_context_(std::move(device_context)) {}
+
+TextureSelector::~TextureSelector() = default;
bool SupportsZeroCopy(const gpu::GpuPreferences& preferences,
const gpu::GpuDriverBugWorkarounds& workarounds) {
@@ -37,6 +44,8 @@ std::unique_ptr<TextureSelector> TextureSelector::Create(
DXGI_FORMAT decoder_output_format,
TextureSelector::HDRMode hdr_output_mode,
const FormatSupportChecker* format_checker,
+ ComD3D11VideoDevice video_device,
+ ComD3D11DeviceContext device_context,
MediaLog* media_log) {
VideoPixelFormat output_pixel_format;
DXGI_FORMAT output_dxgi_format;
@@ -119,18 +128,11 @@ std::unique_ptr<TextureSelector> TextureSelector::Create(
output_color_space.reset();
}
- // TODO(liberato): Handle HLG, if we can get the input color space.
- // The rough outline looks something like this:
-#if 0
- if (hlg) {
- video_context1->VideoProcessorSetStreamColorSpace1(
- d3d11_processor_.Get(), 0,
- DXGI_COLOR_SPACE_YCBCR_STUDIO_G2084_LEFT_P2020);
- video_context1->VideoProcessorSetOutputColorSpace1(
- d3d11_processor_.Get(), DXGI_COLOR_SPACE_RGB_FULL_G2084_NONE_P2020);
- dx11_converter_output_color_space_ = color_space.GetAsFullRangeRGB();
- }
-#endif
+ // TODO(liberato): Handle HLG, if we can get the input color space. The
+ // VideoProcessor doesn't support HLG, so we need to use it only for YUV
+ // -> RGB conversion by setting the input color space to PQ YUV and the
+ // output color space to PQ RGB. The texture should then be marked as
+ // full range HLG so that Chrome's color management can fix it up.
}
break;
}
@@ -158,21 +160,20 @@ std::unique_ptr<TextureSelector> TextureSelector::Create(
MEDIA_LOG(INFO, media_log) << "D3D11VideoDecoder is copying textures";
return std::make_unique<CopyTextureSelector>(
output_pixel_format, decoder_output_format, output_dxgi_format,
- output_color_space);
+ output_color_space, std::move(video_device), std::move(device_context));
} else {
MEDIA_LOG(INFO, media_log) << "D3D11VideoDecoder is binding textures";
// Binding can't change the color space. The consumer has to do it, if they
// want to.
DCHECK(!output_color_space);
- return std::make_unique<TextureSelector>(output_pixel_format,
- output_dxgi_format);
+ return std::make_unique<TextureSelector>(
+ output_pixel_format, output_dxgi_format, std::move(video_device),
+ std::move(device_context));
}
}
std::unique_ptr<Texture2DWrapper> TextureSelector::CreateTextureWrapper(
ComD3D11Device device,
- ComD3D11VideoDevice video_device,
- ComD3D11DeviceContext device_context,
gfx::Size size) {
// TODO(liberato): If the output format is rgb, then create a pbuffer wrapper.
return std::make_unique<DefaultTexture2DWrapper>(size, OutputDXGIFormat(),
@@ -187,16 +188,22 @@ CopyTextureSelector::CopyTextureSelector(
VideoPixelFormat pixfmt,
DXGI_FORMAT input_dxgifmt,
DXGI_FORMAT output_dxgifmt,
- base::Optional<gfx::ColorSpace> output_color_space)
- : TextureSelector(pixfmt, output_dxgifmt),
- output_color_space_(std::move(output_color_space)) {}
+ base::Optional<gfx::ColorSpace> output_color_space,
+ ComD3D11VideoDevice video_device,
+ ComD3D11DeviceContext device_context)
+ : TextureSelector(pixfmt,
+ output_dxgifmt,
+ std::move(video_device),
+ std::move(device_context)),
+ output_color_space_(std::move(output_color_space)),
+ video_processor_proxy_(
+ base::MakeRefCounted<VideoProcessorProxy>(this->video_device(),
+ this->device_context())) {}
CopyTextureSelector::~CopyTextureSelector() = default;
std::unique_ptr<Texture2DWrapper> CopyTextureSelector::CreateTextureWrapper(
ComD3D11Device device,
- ComD3D11VideoDevice video_device,
- ComD3D11DeviceContext device_context,
gfx::Size size) {
D3D11_TEXTURE2D_DESC texture_desc = {};
texture_desc.MipLevels = 1;
@@ -218,8 +225,7 @@ std::unique_ptr<Texture2DWrapper> CopyTextureSelector::CreateTextureWrapper(
size,
std::make_unique<DefaultTexture2DWrapper>(size, OutputDXGIFormat(),
PixelFormat()),
- std::make_unique<VideoProcessorProxy>(video_device, device_context),
- out_texture, output_color_space_);
+ video_processor_proxy_, out_texture, output_color_space_);
}
bool CopyTextureSelector::WillCopyForTesting() const {
diff --git a/chromium/media/gpu/windows/d3d11_texture_selector.h b/chromium/media/gpu/windows/d3d11_texture_selector.h
index 59dc981dfe6..7de0a4d891f 100644
--- a/chromium/media/gpu/windows/d3d11_texture_selector.h
+++ b/chromium/media/gpu/windows/d3d11_texture_selector.h
@@ -30,8 +30,11 @@ class MEDIA_GPU_EXPORT TextureSelector {
kSDROrHDR = 1,
};
- TextureSelector(VideoPixelFormat pixfmt, DXGI_FORMAT output_dxgifmt);
- virtual ~TextureSelector() = default;
+ TextureSelector(VideoPixelFormat pixfmt,
+ DXGI_FORMAT output_dxgifmt,
+ ComD3D11VideoDevice video_device,
+ ComD3D11DeviceContext d3d11_device_context);
+ virtual ~TextureSelector();
static std::unique_ptr<TextureSelector> Create(
const gpu::GpuPreferences& gpu_preferences,
@@ -39,12 +42,12 @@ class MEDIA_GPU_EXPORT TextureSelector {
DXGI_FORMAT decoder_output_format,
HDRMode hdr_output_mode,
const FormatSupportChecker* format_checker,
+ ComD3D11VideoDevice video_device,
+ ComD3D11DeviceContext device_context,
MediaLog* media_log);
virtual std::unique_ptr<Texture2DWrapper> CreateTextureWrapper(
ComD3D11Device device,
- ComD3D11VideoDevice video_device,
- ComD3D11DeviceContext,
gfx::Size size);
VideoPixelFormat PixelFormat() const { return pixel_format_; }
@@ -52,11 +55,21 @@ class MEDIA_GPU_EXPORT TextureSelector {
virtual bool WillCopyForTesting() const;
+ protected:
+ const ComD3D11VideoDevice& video_device() const { return video_device_; }
+
+ const ComD3D11DeviceContext& device_context() const {
+ return device_context_;
+ }
+
private:
friend class CopyTextureSelector;
const VideoPixelFormat pixel_format_;
const DXGI_FORMAT output_dxgifmt_;
+
+ ComD3D11VideoDevice video_device_;
+ ComD3D11DeviceContext device_context_;
};
class MEDIA_GPU_EXPORT CopyTextureSelector : public TextureSelector {
@@ -65,19 +78,20 @@ class MEDIA_GPU_EXPORT CopyTextureSelector : public TextureSelector {
CopyTextureSelector(VideoPixelFormat pixfmt,
DXGI_FORMAT input_dxgifmt,
DXGI_FORMAT output_dxgifmt,
- base::Optional<gfx::ColorSpace> output_color_space);
+ base::Optional<gfx::ColorSpace> output_color_space,
+ ComD3D11VideoDevice video_device,
+ ComD3D11DeviceContext d3d11_device_context);
~CopyTextureSelector() override;
std::unique_ptr<Texture2DWrapper> CreateTextureWrapper(
ComD3D11Device device,
- ComD3D11VideoDevice video_device,
- ComD3D11DeviceContext,
gfx::Size size) override;
bool WillCopyForTesting() const override;
private:
base::Optional<gfx::ColorSpace> output_color_space_;
+ scoped_refptr<VideoProcessorProxy> video_processor_proxy_;
};
} // namespace media
diff --git a/chromium/media/gpu/windows/d3d11_texture_selector_unittest.cc b/chromium/media/gpu/windows/d3d11_texture_selector_unittest.cc
index 09e70280713..1793fdfe5cd 100644
--- a/chromium/media/gpu/windows/d3d11_texture_selector_unittest.cc
+++ b/chromium/media/gpu/windows/d3d11_texture_selector_unittest.cc
@@ -60,7 +60,8 @@ class D3D11TextureSelectorUnittest : public ::testing::Test {
zero_copy_disabled_by_workaround == ZeroCopyDisabledByWorkaround::kTrue;
auto media_log = std::make_unique<NullMediaLog>();
return TextureSelector::Create(prefs, workarounds, decoder_output_format,
- hdr_mode, &format_checker_, media_log.get());
+ hdr_mode, &format_checker_, nullptr, nullptr,
+ media_log.get());
}
// Set the format checker to succeed any check, except for |disallowed|.
diff --git a/chromium/media/gpu/windows/d3d11_texture_wrapper.cc b/chromium/media/gpu/windows/d3d11_texture_wrapper.cc
index 358833b32fe..ad97d4c5695 100644
--- a/chromium/media/gpu/windows/d3d11_texture_wrapper.cc
+++ b/chromium/media/gpu/windows/d3d11_texture_wrapper.cc
@@ -179,7 +179,7 @@ void DefaultTexture2DWrapper::OnError(Status status) {
}
void DefaultTexture2DWrapper::SetStreamHDRMetadata(
- const gl::HDRMetadata& stream_metadata) {}
+ const gfx::HDRMetadata& stream_metadata) {}
void DefaultTexture2DWrapper::SetDisplayHDRMetadata(
const DXGI_HDR_METADATA_HDR10& dxgi_display_metadata) {}
diff --git a/chromium/media/gpu/windows/d3d11_texture_wrapper.h b/chromium/media/gpu/windows/d3d11_texture_wrapper.h
index 171d1ea4e11..5c097ec9cac 100644
--- a/chromium/media/gpu/windows/d3d11_texture_wrapper.h
+++ b/chromium/media/gpu/windows/d3d11_texture_wrapper.h
@@ -21,11 +21,11 @@
#include "media/gpu/media_gpu_export.h"
#include "media/gpu/windows/d3d11_com_defs.h"
#include "ui/gfx/color_space.h"
+#include "ui/gfx/hdr_metadata.h"
#include "ui/gl/gl_bindings.h"
#include "ui/gl/gl_context.h"
#include "ui/gl/gl_image_dxgi.h"
#include "ui/gl/gl_surface_egl.h"
-#include "ui/gl/hdr_metadata.h"
#include "ui/gl/scoped_binders.h"
namespace media {
@@ -57,7 +57,8 @@ class MEDIA_GPU_EXPORT Texture2DWrapper {
MailboxHolderArray* mailbox_dest_out,
gfx::ColorSpace* output_color_space) = 0;
- virtual void SetStreamHDRMetadata(const gl::HDRMetadata& stream_metadata) = 0;
+ virtual void SetStreamHDRMetadata(
+ const gfx::HDRMetadata& stream_metadata) = 0;
virtual void SetDisplayHDRMetadata(
const DXGI_HDR_METADATA_HDR10& dxgi_display_metadata) = 0;
};
@@ -87,7 +88,7 @@ class MEDIA_GPU_EXPORT DefaultTexture2DWrapper : public Texture2DWrapper {
MailboxHolderArray* mailbox_dest,
gfx::ColorSpace* output_color_space) override;
- void SetStreamHDRMetadata(const gl::HDRMetadata& stream_metadata) override;
+ void SetStreamHDRMetadata(const gfx::HDRMetadata& stream_metadata) override;
void SetDisplayHDRMetadata(
const DXGI_HDR_METADATA_HDR10& dxgi_display_metadata) override;
diff --git a/chromium/media/gpu/windows/d3d11_texture_wrapper_unittest.cc b/chromium/media/gpu/windows/d3d11_texture_wrapper_unittest.cc
index 836975be555..33e03d828e6 100644
--- a/chromium/media/gpu/windows/d3d11_texture_wrapper_unittest.cc
+++ b/chromium/media/gpu/windows/d3d11_texture_wrapper_unittest.cc
@@ -7,7 +7,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/single_thread_task_runner.h"
#include "base/test/task_environment.h"
#include "base/win/windows_version.h"
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder.cc b/chromium/media/gpu/windows/d3d11_video_decoder.cc
index 8351fe0e2a7..98bbf65a59d 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder.cc
+++ b/chromium/media/gpu/windows/d3d11_video_decoder.cc
@@ -9,8 +9,8 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback.h"
+#include "base/callback_helpers.h"
#include "base/debug/crash_logging.h"
#include "base/debug/dump_without_crashing.h"
#include "base/feature_list.h"
@@ -187,7 +187,7 @@ HRESULT D3D11VideoDecoder::InitializeAcceleratedDecoder(
return hr;
}
-ErrorOr<std::tuple<ComD3D11VideoDecoder>>
+StatusOr<std::tuple<ComD3D11VideoDecoder>>
D3D11VideoDecoder::CreateD3D11Decoder() {
HRESULT hr;
@@ -215,7 +215,7 @@ D3D11VideoDecoder::CreateD3D11Decoder() {
decoder_configurator_->TextureFormat(),
is_hdr_supported_ ? TextureSelector::HDRMode::kSDROrHDR
: TextureSelector::HDRMode::kSDROnly,
- &format_checker, media_log_.get());
+ &format_checker, video_device_, device_context_, media_log_.get());
if (!texture_selector_)
return StatusCode::kCreateTextureSelectorFailed;
@@ -453,7 +453,7 @@ void D3D11VideoDecoder::ReceivePictureBufferFromClient(
// We may decode into this buffer again.
// Note that |buffer| might no longer be in |picture_buffers_| if we've
// replaced them. That's okay.
- buffer->set_in_client_use(false);
+ buffer->remove_client_use();
// Also re-start decoding in case it was waiting for more pictures.
DoDecode();
@@ -595,26 +595,31 @@ void D3D11VideoDecoder::DoDecode() {
return;
CreatePictureBuffers();
} else if (result == media::AcceleratedVideoDecoder::kConfigChange) {
- // TODO(liberato): I think we support this now, as long as it's the same
- // decoder. Should update |config_| though.
- if (profile_ != accelerated_video_decoder_->GetProfile()) {
- // TODO(crbug.com/1022246): Handle profile change.
- LOG(ERROR) << "Profile change is not supported";
- NotifyError("Profile change is not supported");
- return;
- }
// Before the first frame, we get a config change that we should ignore.
// We only want to take action if this is a mid-stream config change. We
// could wait until now to allocate the first D3D11VideoDecoder, but we
// don't, so that init can fail rather than decoding if there's a problem
- // creating it. If there's a config change at the start of the stream,
- // then this might not work.
- if (!picture_buffers_.size())
+ // creating it. We could also unconditionally re-allocate the decoder,
+ // but we keep it if it's ready to go.
+ const auto new_profile = accelerated_video_decoder_->GetProfile();
+ const auto new_coded_size = accelerated_video_decoder_->GetPicSize();
+ if (new_profile == config_.profile() &&
+ new_coded_size == config_.coded_size()) {
continue;
+ }
// Update the config.
- const auto new_coded_size = accelerated_video_decoder_->GetPicSize();
+ MEDIA_LOG(INFO, media_log_)
+ << "D3D11VideoDecoder config change: profile: "
+ << static_cast<int>(new_profile) << " coded_size: ("
+ << new_coded_size.width() << ", " << new_coded_size.height() << ")";
+ profile_ = new_profile;
+ config_.set_profile(profile_);
config_.set_coded_size(new_coded_size);
+
+ // Replace the decoder, and clear any picture buffers we have. It's okay
+ // if we don't have any picture buffer yet; this might be before the
+ // accelerated decoder asked for any.
auto video_decoder_or_error = CreateD3D11Decoder();
if (video_decoder_or_error.has_error()) {
NotifyError(video_decoder_or_error.error());
@@ -688,7 +693,7 @@ void D3D11VideoDecoder::CreatePictureBuffers() {
DCHECK(texture_selector_);
gfx::Size size = accelerated_video_decoder_->GetPicSize();
- gl::HDRMetadata stream_metadata;
+ gfx::HDRMetadata stream_metadata;
if (config_.hdr_metadata())
stream_metadata = *config_.hdr_metadata();
// else leave |stream_metadata| default-initialized. We might use it anyway.
@@ -727,8 +732,7 @@ void D3D11VideoDecoder::CreatePictureBuffers() {
DCHECK(!!in_texture);
- auto tex_wrapper = texture_selector_->CreateTextureWrapper(
- device_, video_device_, device_context_, size);
+ auto tex_wrapper = texture_selector_->CreateTextureWrapper(device_, size);
if (!tex_wrapper) {
NotifyError(StatusCode::kAllocateTextureForCopyingWrapperFailed);
return;
@@ -780,13 +784,20 @@ D3D11PictureBuffer* D3D11VideoDecoder::GetPicture() {
return nullptr;
}
+void D3D11VideoDecoder::UpdateTimestamp(D3D11PictureBuffer* picture_buffer) {
+ // A picture is being reused with a different timestamp; since we've already
+ // generated a VideoFrame from the previous picture buffer, we can just stamp
+ // the new timestamp directly onto the buffer.
+ picture_buffer->timestamp_ = current_timestamp_;
+}
+
bool D3D11VideoDecoder::OutputResult(const CodecPicture* picture,
D3D11PictureBuffer* picture_buffer) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(texture_selector_);
TRACE_EVENT0("gpu", "D3D11VideoDecoder::OutputResult");
- picture_buffer->set_in_client_use(true);
+ picture_buffer->add_client_use();
// Note: The pixel format doesn't matter.
gfx::Rect visible_rect = picture->visible_rect();
@@ -913,25 +924,6 @@ D3D11VideoDecoder::GetSupportedVideoDecoderConfigs(
GetD3D11DeviceCB get_d3d11_device_cb) {
const std::string uma_name("Media.D3D11.WasVideoSupported");
- // This workaround accounts for almost half of all startup results, and it's
- // unclear that it's relevant here. If it's off, or if we're allowed to copy
- // pictures in case binding isn't allowed, then proceed with init.
- // NOTE: experimentation showed that, yes, it does actually matter.
- if (!base::FeatureList::IsEnabled(kD3D11VideoDecoderCopyPictures)) {
- // Must allow zero-copy of nv12 textures.
- if (!gpu_preferences.enable_zero_copy_dxgi_video) {
- UMA_HISTOGRAM_ENUMERATION(uma_name,
- NotSupportedReason::kZeroCopyNv12Required);
- return {};
- }
-
- if (gpu_workarounds.disable_dxgi_zero_copy_video) {
- UMA_HISTOGRAM_ENUMERATION(uma_name,
- NotSupportedReason::kZeroCopyVideoRequired);
- return {};
- }
- }
-
if (!base::FeatureList::IsEnabled(kD3D11VideoDecoderIgnoreWorkarounds)) {
// Allow all of d3d11 to be turned off by workaround.
if (gpu_workarounds.disable_d3d11_video_decoder) {
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder.h b/chromium/media/gpu/windows/d3d11_video_decoder.h
index 9ddf03bff1e..ba30775a443 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder.h
+++ b/chromium/media/gpu/windows/d3d11_video_decoder.h
@@ -82,6 +82,7 @@ class MEDIA_GPU_EXPORT D3D11VideoDecoder : public VideoDecoder,
// D3D11VideoDecoderClient implementation.
D3D11PictureBuffer* GetPicture() override;
+ void UpdateTimestamp(D3D11PictureBuffer* picture_buffer) override;
bool OutputResult(const CodecPicture* picture,
D3D11PictureBuffer* picture_buffer) override;
void SetDecoderCB(const SetAcceleratorDecoderCB&) override;
@@ -143,10 +144,10 @@ class MEDIA_GPU_EXPORT D3D11VideoDecoder : public VideoDecoder,
void CreatePictureBuffers();
// Create a D3D11VideoDecoder, if possible, based on the current config.
- // TODO(liberato): we use a tuple only because ErrorOr<ComD3D111VideoDecoder>
+ // TODO(liberato): we use a tuple only because StatusOr<ComD3D111VideoDecoder>
// doesn't work. Something about base::Optional trying to convert to void*,
// but the conversion is ambiguous.
- ErrorOr<std::tuple<ComD3D11VideoDecoder>> CreateD3D11Decoder();
+ StatusOr<std::tuple<ComD3D11VideoDecoder>> CreateD3D11Decoder();
enum class NotSupportedReason {
kVideoIsSupported = 0,
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder_client.h b/chromium/media/gpu/windows/d3d11_video_decoder_client.h
index 0286ad41ade..efd0d711d4a 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder_client.h
+++ b/chromium/media/gpu/windows/d3d11_video_decoder_client.h
@@ -22,6 +22,7 @@ class D3D11VideoDecoderClient {
base::RepeatingCallback<void(ComD3D11VideoDecoder)>;
virtual D3D11PictureBuffer* GetPicture() = 0;
+ virtual void UpdateTimestamp(D3D11PictureBuffer* picture_buffer) = 0;
virtual bool OutputResult(const CodecPicture* picture,
D3D11PictureBuffer* picture_buffer) = 0;
diff --git a/chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc b/chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc
index 0de40c89ba8..fc25f8e53f6 100644
--- a/chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc
+++ b/chromium/media/gpu/windows/d3d11_video_decoder_unittest.cc
@@ -9,7 +9,7 @@
#include <initguid.h>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/memory/ptr_util.h"
#include "base/optional.h"
#include "base/run_loop.h"
@@ -350,22 +350,6 @@ TEST_F(D3D11VideoDecoderTest, DoesNotSupportEncryptionWithoutFlag) {
StatusCode::kDecoderInitializeNeverCompleted);
}
-TEST_F(D3D11VideoDecoderTest, DoesNotSupportZeroCopyPreference) {
- gpu_preferences_.enable_zero_copy_dxgi_video = false;
- CreateDecoder();
- InitializeDecoder(
- TestVideoConfig::NormalCodecProfile(kCodecH264, H264PROFILE_MAIN),
- StatusCode::kDecoderInitializeNeverCompleted);
-}
-
-TEST_F(D3D11VideoDecoderTest, DoesNotSupportZeroCopyWorkaround) {
- gpu_workarounds_.disable_dxgi_zero_copy_video = true;
- CreateDecoder();
- InitializeDecoder(
- TestVideoConfig::NormalCodecProfile(kCodecH264, H264PROFILE_MAIN),
- StatusCode::kDecoderInitializeNeverCompleted);
-}
-
TEST_F(D3D11VideoDecoderTest, IgnoreWorkaroundsIgnoresWorkaround) {
// k...IgnoreWorkarounds should enable the decoder even if it's turned off
// for gpu workarounds.
diff --git a/chromium/media/gpu/windows/d3d11_video_processor_proxy.h b/chromium/media/gpu/windows/d3d11_video_processor_proxy.h
index 5e156110dc2..366695eb8cd 100644
--- a/chromium/media/gpu/windows/d3d11_video_processor_proxy.h
+++ b/chromium/media/gpu/windows/d3d11_video_processor_proxy.h
@@ -9,21 +9,22 @@
#include <wrl/client.h>
#include <cstdint>
+#include "base/memory/ref_counted.h"
#include "media/base/status.h"
#include "media/gpu/media_gpu_export.h"
#include "media/gpu/windows/d3d11_com_defs.h"
#include "ui/gfx/color_space.h"
-#include "ui/gl/hdr_metadata.h"
+#include "ui/gfx/hdr_metadata.h"
namespace media {
// Wrap ID3D11VideoProcessor to provide nicer methods for initialization,
// color space modification, and output/input view creation.
-class MEDIA_GPU_EXPORT VideoProcessorProxy {
+class MEDIA_GPU_EXPORT VideoProcessorProxy
+ : public base::RefCounted<VideoProcessorProxy> {
public:
VideoProcessorProxy(ComD3D11VideoDevice video_device,
ComD3D11DeviceContext d3d11_device_context);
- virtual ~VideoProcessorProxy();
virtual Status Init(uint32_t width, uint32_t height);
@@ -57,6 +58,10 @@ class MEDIA_GPU_EXPORT VideoProcessorProxy {
UINT stream_count,
D3D11_VIDEO_PROCESSOR_STREAM* streams);
+ protected:
+ virtual ~VideoProcessorProxy();
+ friend class base::RefCounted<VideoProcessorProxy>;
+
private:
ComD3D11VideoDevice video_device_;
ComD3D11VideoProcessorEnumerator processor_enumerator_;
diff --git a/chromium/media/gpu/windows/d3d11_video_processor_proxy_unittest.cc b/chromium/media/gpu/windows/d3d11_video_processor_proxy_unittest.cc
index d46a469e212..1ddc97ca178 100644
--- a/chromium/media/gpu/windows/d3d11_video_processor_proxy_unittest.cc
+++ b/chromium/media/gpu/windows/d3d11_video_processor_proxy_unittest.cc
@@ -5,7 +5,7 @@
#include <random>
#include <utility>
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "media/base/win/d3d11_mocks.h"
#include "media/gpu/windows/d3d11_copying_texture_wrapper.h"
#include "media/gpu/windows/d3d11_texture_wrapper.h"
@@ -35,7 +35,7 @@ class D3D11VideoProcessorProxyUnittest : public ::testing::Test {
MockD3D11VideoProcessorEnumerator enumerator_;
MockD3D11VideoProcessor proc_;
- std::unique_ptr<VideoProcessorProxy> CreateProxy() {
+ scoped_refptr<VideoProcessorProxy> CreateProxy() {
dev_ = MakeComPtr<D3D11VideoDeviceMock>();
ctx_ = MakeComPtr<D3D11DeviceContextMock>();
vctx_ = MakeComPtr<D3D11VideoContextMock>();
@@ -51,7 +51,7 @@ class D3D11VideoProcessorProxyUnittest : public ::testing::Test {
EXPECT_CALL(*ctx_.Get(), QueryInterface(_, _))
.WillOnce(SetComPointeeAndReturnOk<1>(vctx_.Get()));
- return std::make_unique<VideoProcessorProxy>(dev_, ctx_);
+ return base::MakeRefCounted<VideoProcessorProxy>(dev_, ctx_);
}
// Pull a random pointer off the stack, rather than relying on nullptrs.
diff --git a/chromium/media/gpu/windows/d3d11_vp9_accelerator.cc b/chromium/media/gpu/windows/d3d11_vp9_accelerator.cc
index 7fe0f7f7eca..9fbfcd255af 100644
--- a/chromium/media/gpu/windows/d3d11_vp9_accelerator.cc
+++ b/chromium/media/gpu/windows/d3d11_vp9_accelerator.cc
@@ -61,7 +61,7 @@ scoped_refptr<VP9Picture> D3D11VP9Accelerator::CreateVP9Picture() {
D3D11PictureBuffer* picture_buffer = client_->GetPicture();
if (!picture_buffer)
return nullptr;
- return base::MakeRefCounted<D3D11VP9Picture>(picture_buffer);
+ return base::MakeRefCounted<D3D11VP9Picture>(picture_buffer, client_);
}
bool D3D11VP9Accelerator::BeginFrame(const D3D11VP9Picture& pic) {
diff --git a/chromium/media/gpu/windows/d3d11_vp9_picture.cc b/chromium/media/gpu/windows/d3d11_vp9_picture.cc
index 5efa82b5be0..913fefec589 100644
--- a/chromium/media/gpu/windows/d3d11_vp9_picture.cc
+++ b/chromium/media/gpu/windows/d3d11_vp9_picture.cc
@@ -6,8 +6,10 @@
namespace media {
-D3D11VP9Picture::D3D11VP9Picture(D3D11PictureBuffer* picture_buffer)
+D3D11VP9Picture::D3D11VP9Picture(D3D11PictureBuffer* picture_buffer,
+ D3D11VideoDecoderClient* client)
: picture_buffer_(picture_buffer),
+ client_(client),
picture_index_(picture_buffer_->picture_index()) {
picture_buffer_->set_in_picture_use(true);
}
@@ -16,4 +18,11 @@ D3D11VP9Picture::~D3D11VP9Picture() {
picture_buffer_->set_in_picture_use(false);
}
+scoped_refptr<VP9Picture> D3D11VP9Picture::CreateDuplicate() {
+ // We've already sent off the base frame for rendering, so we can just stamp
+ // |picture_buffer_| with the updated timestamp.
+ client_->UpdateTimestamp(picture_buffer_);
+ return this;
+}
+
} // namespace media
diff --git a/chromium/media/gpu/windows/d3d11_vp9_picture.h b/chromium/media/gpu/windows/d3d11_vp9_picture.h
index 27b144402cc..68b29988f89 100644
--- a/chromium/media/gpu/windows/d3d11_vp9_picture.h
+++ b/chromium/media/gpu/windows/d3d11_vp9_picture.h
@@ -8,6 +8,7 @@
#include "media/gpu/vp9_picture.h"
#include "media/gpu/windows/d3d11_picture_buffer.h"
+#include "media/gpu/windows/d3d11_video_decoder_client.h"
namespace media {
@@ -15,7 +16,8 @@ class D3D11PictureBuffer;
class D3D11VP9Picture : public VP9Picture {
public:
- explicit D3D11VP9Picture(D3D11PictureBuffer* picture_buffer);
+ explicit D3D11VP9Picture(D3D11PictureBuffer* picture_buffer,
+ D3D11VideoDecoderClient* client);
D3D11PictureBuffer* picture_buffer() const { return picture_buffer_; }
@@ -24,8 +26,11 @@ class D3D11VP9Picture : public VP9Picture {
protected:
~D3D11VP9Picture() override;
+ scoped_refptr<VP9Picture> CreateDuplicate() override;
+
private:
D3D11PictureBuffer* picture_buffer_;
+ D3D11VideoDecoderClient* client_;
size_t picture_index_;
};
diff --git a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc
index f2cafa5780d..753b70533c2 100644
--- a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc
+++ b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.cc
@@ -23,7 +23,6 @@
#include "base/bind.h"
#include "base/callback.h"
#include "base/command_line.h"
-#include "base/debug/alias.h"
#include "base/file_version_info.h"
#include "base/files/file_path.h"
#include "base/location.h"
@@ -277,13 +276,18 @@ HRESULT CreateCOMObjectFromDll(HMODULE dll,
ConfigChangeDetector::~ConfigChangeDetector() {}
+bool ConfigChangeDetector::IsYUV420() const {
+ NOTIMPLEMENTED();
+ return false;
+}
+
// Provides functionality to detect H.264 stream configuration changes.
// TODO(ananta)
// Move this to a common place so that all VDA's can use this.
class H264ConfigChangeDetector : public ConfigChangeDetector {
public:
- H264ConfigChangeDetector();
- ~H264ConfigChangeDetector() override;
+ H264ConfigChangeDetector() {}
+ ~H264ConfigChangeDetector() override {}
// Detects stream configuration changes.
// Returns false on failure.
@@ -292,30 +296,26 @@ class H264ConfigChangeDetector : public ConfigChangeDetector {
const gfx::Rect& container_visible_rect) const override;
VideoColorSpace current_color_space(
const VideoColorSpace& container_color_space) const override;
+ bool IsYUV420() const override;
private:
// These fields are used to track the SPS/PPS in the H.264 bitstream and
// are eventually compared against the SPS/PPS in the bitstream to detect
// a change.
- int last_sps_id_;
+ int last_sps_id_ = 0;
std::vector<uint8_t> last_sps_;
- int last_pps_id_;
+ int last_pps_id_ = 0;
std::vector<uint8_t> last_pps_;
// We want to indicate configuration changes only after we see IDR slices.
// This flag tracks that we potentially have a configuration change which
// we want to honor after we see an IDR slice.
- bool pending_config_changed_;
+ bool pending_config_changed_ = false;
std::unique_ptr<H264Parser> parser_;
DISALLOW_COPY_AND_ASSIGN(H264ConfigChangeDetector);
};
-H264ConfigChangeDetector::H264ConfigChangeDetector()
- : last_sps_id_(0), last_pps_id_(0), pending_config_changed_(false) {}
-
-H264ConfigChangeDetector::~H264ConfigChangeDetector() {}
-
bool H264ConfigChangeDetector::DetectConfig(const uint8_t* stream,
unsigned int size) {
std::vector<uint8_t> sps;
@@ -440,6 +440,13 @@ VideoColorSpace H264ConfigChangeDetector::current_color_space(
return container_color_space;
}
+bool H264ConfigChangeDetector::IsYUV420() const {
+ if (!parser_)
+ return true;
+ const H264SPS* sps = parser_->GetSPS(last_sps_id_);
+ return !sps || sps->chroma_format_idc == 1;
+}
+
// Doesn't actually detect config changes, only stream metadata.
class VP9ConfigChangeDetector : public ConfigChangeDetector {
public:
@@ -577,7 +584,7 @@ DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator(
const gpu::GpuDriverBugWorkarounds& workarounds,
const gpu::GpuPreferences& gpu_preferences,
MediaLog* media_log)
- : client_(NULL),
+ : client_(nullptr),
dev_manager_reset_token_(0),
dx11_dev_manager_reset_token_(0),
egl_config_(NULL),
@@ -623,7 +630,7 @@ DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator(
}
DXVAVideoDecodeAccelerator::~DXVAVideoDecodeAccelerator() {
- client_ = NULL;
+ client_ = nullptr;
}
bool DXVAVideoDecodeAccelerator::Initialize(const Config& config,
@@ -674,10 +681,17 @@ bool DXVAVideoDecodeAccelerator::Initialize(const Config& config,
decoder_output_p010_or_p016_ = true;
}
- // Unfortunately, the profile is currently unreliable for
- // VP9 (https://crbug.com/592074) so also try to use fp16 if HDR is on.
- if (config.target_color_space.IsHDR()) {
+ // While we can rely on the profile to indicate HBD status for other codecs,
+ // AV1 may have both 8-bit SDR and 10-bit HDR in the same profile, so also
+ // check the color space to determine if HDR should be used. It's possible for
+ // HDR 8-bit content to be created too, it's just rare.
+ if (config.container_color_space.ToGfxColorSpace().IsHDR()) {
use_fp16_ = true;
+ if (config.profile == AV1PROFILE_PROFILE_PRO ||
+ config.profile == AV1PROFILE_PROFILE_MAIN ||
+ config.profile == AV1PROFILE_PROFILE_HIGH) {
+ decoder_output_p010_or_p016_ = true;
+ }
}
// Not all versions of Windows 7 and later include Media Foundation DLLs.
@@ -756,6 +770,7 @@ bool DXVAVideoDecodeAccelerator::Initialize(const Config& config,
if (codec_ == kCodecVP9)
config_change_detector_.reset(new VP9ConfigChangeDetector());
+ processing_config_changed_ = false;
SetState(kNormal);
UMA_HISTOGRAM_ENUMERATION("Media.DXVAVDA.PictureBufferMechanism",
@@ -1954,7 +1969,7 @@ void DXVAVideoDecodeAccelerator::StopOnError(
if (client_)
client_->NotifyError(error);
- client_ = NULL;
+ client_ = nullptr;
#ifdef _DEBUG
if (using_debug_device_) {
@@ -1990,16 +2005,34 @@ void DXVAVideoDecodeAccelerator::StopOnError(
}
}
-void DXVAVideoDecodeAccelerator::Invalidate() {
+void DXVAVideoDecodeAccelerator::Invalidate(bool for_config_change) {
+ DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
+
if (GetState() == kUninitialized)
return;
// Best effort to make the GL context current.
- make_context_current_cb_.Run();
+ if (!make_context_current_cb_.Run()) {
+ // TODO(crbug.com/1139489): This may not be the right fix.
+ for (auto& kv : output_picture_buffers_) {
+ if (auto* fence = kv.second->reuse_fence())
+ fence->Invalidate();
+ }
+ for (auto& kv : stale_output_picture_buffers_) {
+ if (auto* fence = kv.second->reuse_fence())
+ fence->Invalidate();
+ }
+
+ // Since this is called by StopOnError() we can't call it directly.
+ DLOG(ERROR) << "Failed to make context current.";
+ for_config_change = false;
+ if (client_) {
+ client_->NotifyError(PLATFORM_FAILURE);
+ client_ = nullptr;
+ }
+ }
StopDecoderThread();
- weak_this_factory_.InvalidateWeakPtrs();
- weak_ptr_ = weak_this_factory_.GetWeakPtr();
pending_output_samples_.clear();
decoder_.Reset();
config_change_detector_.reset();
@@ -2009,7 +2042,10 @@ void DXVAVideoDecodeAccelerator::Invalidate() {
// output picture buffers may need to be recreated in case the video
// resolution changes. We already handle that in the
// HandleResolutionChanged() function.
- if (GetState() != kConfigChange) {
+ if (!for_config_change) {
+ weak_this_factory_.InvalidateWeakPtrs();
+ weak_ptr_ = weak_this_factory_.GetWeakPtr();
+
output_picture_buffers_.clear();
stale_output_picture_buffers_.clear();
// We want to continue processing pending input after detecting a config
@@ -2039,29 +2075,6 @@ void DXVAVideoDecodeAccelerator::Invalidate() {
}
void DXVAVideoDecodeAccelerator::StopDecoderThread() {
- // Try to determine what, if any exception last happened before a hang. See
- // http://crbug.com/613701
- uint64_t last_process_output_time = g_last_process_output_time;
- HRESULT last_device_removed_reason = g_last_device_removed_reason;
- LARGE_INTEGER perf_frequency;
- ::QueryPerformanceFrequency(&perf_frequency);
- uint32_t output_array_size = output_array_size_;
- size_t sample_count;
- {
- base::AutoLock lock(decoder_lock_);
- sample_count = pending_output_samples_.size();
- }
- size_t stale_output_picture_buffers_size =
- stale_output_picture_buffers_.size();
- PictureBufferMechanism mechanism = GetPictureBufferMechanism();
-
- base::debug::Alias(&last_process_output_time);
- base::debug::Alias(&last_device_removed_reason);
- base::debug::Alias(&perf_frequency.QuadPart);
- base::debug::Alias(&output_array_size);
- base::debug::Alias(&sample_count);
- base::debug::Alias(&stale_output_picture_buffers_size);
- base::debug::Alias(&mechanism);
decoder_thread_.Stop();
}
@@ -2207,7 +2220,6 @@ void DXVAVideoDecodeAccelerator::FlushInternal() {
FROM_HERE, base::BindOnce(&DXVAVideoDecodeAccelerator::NotifyFlushDone,
weak_ptr_));
} else {
- processing_config_changed_ = false;
main_thread_task_runner_->PostTask(
FROM_HERE, base::BindOnce(&DXVAVideoDecodeAccelerator::ConfigChanged,
weak_ptr_, config_));
@@ -2224,7 +2236,8 @@ void DXVAVideoDecodeAccelerator::DecodeInternal(
if (GetState() == kUninitialized)
return;
- if (OutputSamplesPresent() || !pending_input_buffers_.empty()) {
+ if (OutputSamplesPresent() || !pending_input_buffers_.empty() ||
+ processing_config_changed_) {
pending_input_buffers_.push_back(sample);
return;
}
@@ -2237,6 +2250,11 @@ void DXVAVideoDecodeAccelerator::DecodeInternal(
RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to check video stream config",
PLATFORM_FAILURE, );
+ // https://crbug.com/1160623 -- non 4:2:0 content hangs the decoder.
+ RETURN_AND_NOTIFY_ON_FAILURE(
+ codec_ != kCodecH264 || config_change_detector_->IsYUV420(),
+ "Only 4:2:0 H.264 content is supported", PLATFORM_FAILURE, );
+
processing_config_changed_ = config_changed;
if (config_changed) {
@@ -2894,11 +2912,9 @@ bool DXVAVideoDecodeAccelerator::InitializeID3D11VideoProcessor(
// Since the video processor doesn't support HLG, lets just do the YUV->RGB
// conversion and let the output color space be HLG. This won't work well
// unless color management is on, but if color management is off we don't
- // support HLG anyways.
- if (color_space == gfx::ColorSpace(gfx::ColorSpace::PrimaryID::BT2020,
- gfx::ColorSpace::TransferID::ARIB_STD_B67,
- gfx::ColorSpace::MatrixID::BT709,
- gfx::ColorSpace::RangeID::LIMITED)) {
+ // support HLG anyways. See https://crbug.com/1144260#c6.
+ if (color_space.GetTransferID() ==
+ gfx::ColorSpace::TransferID::ARIB_STD_B67) {
video_context1->VideoProcessorSetStreamColorSpace1(
d3d11_processor_.Get(), 0,
DXGI_COLOR_SPACE_YCBCR_STUDIO_G2084_LEFT_P2020);
@@ -2953,7 +2969,7 @@ void DXVAVideoDecodeAccelerator::SetDX11ProcessorHDRMetadataIfNeeded() {
// stream metadata. For the Radeon 5700, at least, this seems to do
// something sane. Not setting the metadata crashes intermittently.
if (config_.hdr_metadata || use_empty_video_hdr_metadata_) {
- gl::HDRMetadata stream_metadata;
+ gfx::HDRMetadata stream_metadata;
if (config_.hdr_metadata)
stream_metadata = *config_.hdr_metadata;
@@ -3058,9 +3074,7 @@ HRESULT DXVAVideoDecodeAccelerator::CheckConfigChanged(IMFSample* sample,
void DXVAVideoDecodeAccelerator::ConfigChanged(const Config& config) {
DCHECK(main_thread_task_runner_->BelongsToCurrentThread());
-
- SetState(kConfigChange);
- Invalidate();
+ Invalidate(/*for_config_change=*/true);
Initialize(config_, client_);
decoder_thread_task_runner_->PostTask(
FROM_HERE,
diff --git a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h
index 430b01bb72b..0f5bfa44d04 100644
--- a/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h
+++ b/chromium/media/gpu/windows/dxva_video_decode_accelerator_win.h
@@ -65,6 +65,7 @@ class ConfigChangeDetector {
const gfx::Rect& container_visible_rect) const = 0;
virtual VideoColorSpace current_color_space(
const VideoColorSpace& container_color_space) const = 0;
+ virtual bool IsYUV420() const;
bool config_changed() const { return config_changed_; }
protected:
@@ -85,7 +86,6 @@ class MEDIA_GPU_EXPORT DXVAVideoDecodeAccelerator
kResetting, // upon received Reset(), before ResetDone()
kStopped, // upon output EOS received.
kFlushing, // upon flush request received.
- kConfigChange, // stream configuration change detected.
};
// Does not take ownership of |client| which must outlive |*this|.
@@ -226,7 +226,7 @@ class MEDIA_GPU_EXPORT DXVAVideoDecodeAccelerator
// Transitions the decoder to the uninitialized state. The decoder will stop
// accepting requests in this state.
- void Invalidate();
+ void Invalidate(bool for_config_change = false);
// Stop and join on the decoder thread.
void StopDecoderThread();
diff --git a/chromium/media/gpu/windows/supported_profile_helpers.cc b/chromium/media/gpu/windows/supported_profile_helpers.cc
index 7d8622286f1..76a557d1825 100644
--- a/chromium/media/gpu/windows/supported_profile_helpers.cc
+++ b/chromium/media/gpu/windows/supported_profile_helpers.cc
@@ -163,7 +163,8 @@ media::SupportedResolutionRange GetResolutionsForGUID(
ID3D11VideoDevice* video_device,
const GUID& decoder_guid,
const std::vector<gfx::Size>& resolutions_to_test,
- DXGI_FORMAT format = DXGI_FORMAT_NV12) {
+ DXGI_FORMAT format = DXGI_FORMAT_NV12,
+ const gfx::Size& min_resolution = kMinResolution) {
media::SupportedResolutionRange result;
// Verify input is in ascending order by height.
@@ -191,7 +192,7 @@ media::SupportedResolutionRange GetResolutionsForGUID(
}
if (!result.max_landscape_resolution.IsEmpty())
- result.min_resolution = kMinResolution;
+ result.min_resolution = min_resolution;
return result;
}
@@ -299,10 +300,15 @@ SupportedResolutionRangeMap GetSupportedD3D11VideoDecoderResolutions(
if (!workarounds.disable_accelerated_vp8_decode &&
profile_id == D3D11_DECODER_PROFILE_VP8_VLD &&
base::FeatureList::IsEnabled(kMediaFoundationVP8Decoding)) {
- supported_resolutions[VP8PROFILE_ANY] =
- GetResolutionsForGUID(video_device.Get(), profile_id,
- {gfx::Size(4096, 2160), gfx::Size(4096, 2304),
- gfx::Size(4096, 4096)});
+ // VP8 decoding is cheap on modern devices compared to other codecs, so
+ // much so that hardware decoding performance is actually worse at low
+ // resolutions than software decoding. See https://crbug.com/1136495.
+ constexpr gfx::Size kMinVp8Resolution = gfx::Size(640, 480);
+
+ supported_resolutions[VP8PROFILE_ANY] = GetResolutionsForGUID(
+ video_device.Get(), profile_id,
+ {gfx::Size(4096, 2160), gfx::Size(4096, 2304), gfx::Size(4096, 4096)},
+ DXGI_FORMAT_NV12, kMinVp8Resolution);
continue;
}
diff --git a/chromium/media/gpu/windows/supported_profile_helpers_unittest.cc b/chromium/media/gpu/windows/supported_profile_helpers_unittest.cc
index 70c9203d397..31a4fc6b08b 100644
--- a/chromium/media/gpu/windows/supported_profile_helpers_unittest.cc
+++ b/chromium/media/gpu/windows/supported_profile_helpers_unittest.cc
@@ -219,7 +219,19 @@ TEST_F(SupportedResolutionResolverTest, VP8Supports4k) {
base::test::ScopedFeatureList scoped_feature_list;
scoped_feature_list.InitAndEnableFeature(kMediaFoundationVP8Decoding);
- TestDecoderSupport(D3D11_DECODER_PROFILE_VP8_VLD, VP8PROFILE_ANY);
+
+ EnableDecoders({D3D11_DECODER_PROFILE_VP8_VLD});
+ SetMaxResolution(D3D11_DECODER_PROFILE_VP8_VLD, kSquare4k);
+
+ const auto supported_resolutions = GetSupportedD3D11VideoDecoderResolutions(
+ mock_d3d11_device_, gpu_workarounds_);
+ auto it = supported_resolutions.find(VP8PROFILE_ANY);
+ ASSERT_NE(it, supported_resolutions.end());
+ EXPECT_EQ(kSquare4k, it->second.max_landscape_resolution);
+ EXPECT_EQ(kSquare4k, it->second.max_portrait_resolution);
+
+ constexpr gfx::Size kMinVp8Resolution = gfx::Size(640, 480);
+ EXPECT_EQ(kMinVp8Resolution, it->second.min_resolution);
}
TEST_F(SupportedResolutionResolverTest, VP9Profile0Supports8k) {
diff --git a/chromium/media/learning/impl/learning_session_impl.cc b/chromium/media/learning/impl/learning_session_impl.cc
index 2680a881dd4..cfe5ababfe7 100644
--- a/chromium/media/learning/impl/learning_session_impl.cc
+++ b/chromium/media/learning/impl/learning_session_impl.cc
@@ -37,12 +37,11 @@ class WeakLearningTaskController : public LearningTaskController {
for (auto& id : outstanding_observations_) {
const base::Optional<TargetValue>& default_value = id.second;
if (default_value) {
- controller_->Post(FROM_HERE,
- &LearningTaskController::CompleteObservation,
- id.first, *default_value);
+ controller_->AsyncCall(&LearningTaskController::CompleteObservation)
+ .WithArgs(id.first, *default_value);
} else {
- controller_->Post(FROM_HERE, &LearningTaskController::CancelObservation,
- id.first);
+ controller_->AsyncCall(&LearningTaskController::CancelObservation)
+ .WithArgs(id.first);
}
}
}
@@ -59,8 +58,8 @@ class WeakLearningTaskController : public LearningTaskController {
// We don't send along the default value because LearningTaskControllerImpl
// doesn't support it. Since all client calls eventually come through us
// anyway, it seems okay to handle it here.
- controller_->Post(FROM_HERE, &LearningTaskController::BeginObservation, id,
- features, base::nullopt, source_id);
+ controller_->AsyncCall(&LearningTaskController::BeginObservation)
+ .WithArgs(id, features, base::nullopt, source_id);
}
void CompleteObservation(base::UnguessableToken id,
@@ -68,16 +67,16 @@ class WeakLearningTaskController : public LearningTaskController {
if (!weak_session_)
return;
outstanding_observations_.erase(id);
- controller_->Post(FROM_HERE, &LearningTaskController::CompleteObservation,
- id, completion);
+ controller_->AsyncCall(&LearningTaskController::CompleteObservation)
+ .WithArgs(id, completion);
}
void CancelObservation(base::UnguessableToken id) override {
if (!weak_session_)
return;
outstanding_observations_.erase(id);
- controller_->Post(FROM_HERE, &LearningTaskController::CancelObservation,
- id);
+ controller_->AsyncCall(&LearningTaskController::CancelObservation)
+ .WithArgs(id);
}
void UpdateDefaultTarget(
@@ -93,8 +92,10 @@ class WeakLearningTaskController : public LearningTaskController {
void PredictDistribution(const FeatureVector& features,
PredictionCB callback) override {
- controller_->Post(FROM_HERE, &LearningTaskController::PredictDistribution,
- features, std::move(callback));
+ if (!weak_session_)
+ return;
+ controller_->AsyncCall(&LearningTaskController::PredictDistribution)
+ .WithArgs(features, std::move(callback));
}
base::WeakPtr<LearningSessionImpl> weak_session_;
diff --git a/chromium/media/learning/impl/learning_session_impl_unittest.cc b/chromium/media/learning/impl/learning_session_impl_unittest.cc
index c4b5fd9fada..86df5620ca7 100644
--- a/chromium/media/learning/impl/learning_session_impl_unittest.cc
+++ b/chromium/media/learning/impl/learning_session_impl_unittest.cc
@@ -34,9 +34,8 @@ class LearningSessionImplTest : public testing::Test {
// As a complete hack, call the only public method on fp so that
// we can verify that it was given to us by the session.
if (!feature_provider_.is_null()) {
- feature_provider_.Post(FROM_HERE, &FeatureProvider::AddFeatures,
- FeatureVector(),
- FeatureProvider::FeatureVectorCB());
+ feature_provider_.AsyncCall(&FeatureProvider::AddFeatures)
+ .WithArgs(FeatureVector(), FeatureProvider::FeatureVectorCB());
}
}
diff --git a/chromium/media/learning/impl/learning_task_controller_helper.cc b/chromium/media/learning/impl/learning_task_controller_helper.cc
index 753dfb0940e..59dee3cbbe5 100644
--- a/chromium/media/learning/impl/learning_task_controller_helper.cc
+++ b/chromium/media/learning/impl/learning_task_controller_helper.cc
@@ -35,10 +35,12 @@ void LearningTaskControllerHelper::BeginObservation(
// Start feature prediction, so that we capture the current values.
if (!feature_provider_.is_null()) {
- feature_provider_.Post(
- FROM_HERE, &FeatureProvider::AddFeatures, std::move(features),
- base::BindOnce(&LearningTaskControllerHelper::OnFeaturesReadyTrampoline,
- task_runner_, AsWeakPtr(), id));
+ // TODO(dcheng): Convert this to use Then() helper.
+ feature_provider_.AsyncCall(&FeatureProvider::AddFeatures)
+ .WithArgs(std::move(features),
+ base::BindOnce(
+ &LearningTaskControllerHelper::OnFeaturesReadyTrampoline,
+ task_runner_, AsWeakPtr(), id));
} else {
pending_example.example.features = std::move(features);
pending_example.features_done = true;
diff --git a/chromium/media/learning/mojo/public/cpp/mojo_learning_task_controller_unittest.cc b/chromium/media/learning/mojo/public/cpp/mojo_learning_task_controller_unittest.cc
index 559cde7f1be..37232a6b313 100644
--- a/chromium/media/learning/mojo/public/cpp/mojo_learning_task_controller_unittest.cc
+++ b/chromium/media/learning/mojo/public/cpp/mojo_learning_task_controller_unittest.cc
@@ -6,7 +6,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/macros.h"
#include "base/memory/ptr_util.h"
#include "base/test/task_environment.h"
diff --git a/chromium/media/media_options.gni b/chromium/media/media_options.gni
index 81bcf0a451c..2137fa508ca 100644
--- a/chromium/media/media_options.gni
+++ b/chromium/media/media_options.gni
@@ -5,6 +5,7 @@
import("//build/config/chrome_build.gni")
import("//build/config/chromecast_build.gni")
import("//build/config/chromeos/args.gni")
+import("//build/config/chromeos/ui_mode.gni")
import("//build/config/features.gni")
import("//media/gpu/args.gni")
import("//testing/libfuzzer/fuzzer_test.gni")
@@ -79,8 +80,11 @@ declare_args() {
proprietary_codecs && (is_chromecast || is_fuchsia || use_fuzzing_engine)
# Enable HEVC/H265 demuxing. Actual decoding must be provided by the
- # platform. Enabled by default for Chromecast.
- enable_platform_hevc = proprietary_codecs && is_chromecast
+ # platform. Enabled by default for Chromecast, Chrome OS protected media and
+ # fuzzer builds.
+ enable_platform_hevc =
+ proprietary_codecs &&
+ (is_chromecast || use_chromeos_protected_media || use_fuzzing_engine)
# Enable Dolby Vision demuxing. Enable by default for Chromecast. Actual
# decoding must be provided by the platform. Note some Dolby Vision profiles
@@ -101,8 +105,7 @@ declare_args() {
}
declare_args() {
- enable_av1_decoder =
- enable_dav1d_decoder || enable_libaom || enable_libgav1_decoder
+ enable_av1_decoder = enable_dav1d_decoder || enable_libgav1_decoder
}
# enable_hls_sample_aes can only be true if enable_mse_mpeg2ts_stream_parser is.
@@ -174,8 +177,8 @@ declare_args() {
# Enable Storage ID which is used by CDMs. This is only available with chrome
# branding, but may be overridden by other embedders.
- enable_cdm_storage_id = enable_library_cdms && is_chrome_branded &&
- (is_win || is_mac || is_chromeos)
+ enable_cdm_storage_id =
+ enable_library_cdms && is_chrome_branded && (is_win || is_mac || is_ash)
# If |enable_cdm_storage_id| is set, then an implementation specific key
# must also be provided. It can be provided by defining CDM_STORAGE_ID_KEY
@@ -203,7 +206,7 @@ if (is_chromecast) {
"video_decoder",
]
_default_mojo_media_host = "gpu"
-} else if (is_chromeos || is_mac || is_win || (is_linux && use_vaapi)) {
+} else if (is_ash || is_mac || is_win || (is_linux && use_vaapi)) {
_default_mojo_media_services = [ "video_decoder" ]
_default_mojo_media_host = "gpu"
}
diff --git a/chromium/media/midi/BUILD.gn b/chromium/media/midi/BUILD.gn
index 6c34560f473..52329075c07 100644
--- a/chromium/media/midi/BUILD.gn
+++ b/chromium/media/midi/BUILD.gn
@@ -3,6 +3,7 @@
# found in the LICENSE file.
import("//build/config/android/config.gni")
+import("//build/config/chromeos/ui_mode.gni")
import("//build/config/features.gni")
import("//build/config/ui.gni")
import("//media/media_options.gni")
@@ -15,13 +16,6 @@ if (is_android) {
import("//build/config/android/rules.gni")
}
-# This file depends on the legacy global sources assignment filter. It should
-# be converted to check target platform before assigning source files to the
-# sources variable. Remove this import and set_sources_assignment_filter call
-# when the file has been converted. See https://crbug.com/1018739 for details.
-import("//build/config/deprecated_default_sources_assignment_filter.gni")
-set_sources_assignment_filter(deprecated_default_sources_assignment_filter)
-
# Common configuration for targets in the media/midi directory.
config("midi_config") {
if (use_alsa && use_udev) {
@@ -89,8 +83,6 @@ component("midi") {
"midi_export.h",
"midi_manager.cc",
"midi_manager.h",
- "midi_manager_mac.cc",
- "midi_manager_mac.h",
"midi_message_queue.cc",
"midi_message_queue.h",
"midi_service.cc",
@@ -137,6 +129,10 @@ component("midi") {
}
if (is_mac) {
+ sources += [
+ "midi_manager_mac.cc",
+ "midi_manager_mac.h",
+ ]
frameworks = [
"CoreAudio.framework",
"CoreFoundation.framework",
@@ -222,8 +218,11 @@ test("midi_unittests") {
sources += [ "midi_manager_alsa_unittest.cc" ]
}
- # This target should not require the Chrome executable to run.
- assert_no_deps = [ "//chrome" ]
+ # On LaCrOS, tests use ash-chrome as a window manager, thus the dependency.
+ # On other platforms, this target should not require the Chrome to run.
+ if (!is_lacros) {
+ assert_no_deps = [ "//chrome" ]
+ }
}
fuzzer_test("midi_webmidi_data_validator_fuzzer") {
diff --git a/chromium/media/midi/DIR_METADATA b/chromium/media/midi/DIR_METADATA
new file mode 100644
index 00000000000..d6aec82bd66
--- /dev/null
+++ b/chromium/media/midi/DIR_METADATA
@@ -0,0 +1,12 @@
+# Metadata information for this directory.
+#
+# For more information on DIR_METADATA files, see:
+# https://source.chromium.org/chromium/infra/infra/+/master:go/src/infra/tools/dirmd/README.md
+#
+# For the schema of this file, see Metadata message:
+# https://source.chromium.org/chromium/infra/infra/+/master:go/src/infra/tools/dirmd/proto/dir_metadata.proto
+
+monorail {
+ component: "Blink>WebMIDI"
+}
+team_email: "midi-dev@chromium.org" \ No newline at end of file
diff --git a/chromium/media/midi/OWNERS b/chromium/media/midi/OWNERS
index 66324a3b2c8..45e53d4129d 100644
--- a/chromium/media/midi/OWNERS
+++ b/chromium/media/midi/OWNERS
@@ -6,6 +6,3 @@ yhirano@chromium.org
per-file *.mojom=set noparent
per-file *.mojom=file://ipc/SECURITY_OWNERS
-
-# TEAM: midi-dev@chromium.org
-# COMPONENT: Blink>WebMIDI
diff --git a/chromium/media/midi/midi_manager_win.cc b/chromium/media/midi/midi_manager_win.cc
index bbea3c5180c..1b41d22d5f6 100644
--- a/chromium/media/midi/midi_manager_win.cc
+++ b/chromium/media/midi/midi_manager_win.cc
@@ -18,8 +18,8 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback.h"
+#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/optional.h"
#include "base/single_thread_task_runner.h"
diff --git a/chromium/media/midi/task_service_unittest.cc b/chromium/media/midi/task_service_unittest.cc
index 52960091fc8..f2bc8fb3cd8 100644
--- a/chromium/media/midi/task_service_unittest.cc
+++ b/chromium/media/midi/task_service_unittest.cc
@@ -7,8 +7,8 @@
#include <memory>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback.h"
+#include "base/callback_helpers.h"
#include "base/memory/ref_counted.h"
#include "base/run_loop.h"
#include "base/synchronization/lock.h"
diff --git a/chromium/media/mojo/clients/mojo_android_overlay_unittest.cc b/chromium/media/mojo/clients/mojo_android_overlay_unittest.cc
index 5de80338fce..ca09bfd567b 100644
--- a/chromium/media/mojo/clients/mojo_android_overlay_unittest.cc
+++ b/chromium/media/mojo/clients/mojo_android_overlay_unittest.cc
@@ -81,12 +81,12 @@ class MojoAndroidOverlayTest : public ::testing::Test {
void SetUp() override {
// Set up default config.
config_.rect = gfx::Rect(100, 200, 300, 400);
- config_.ready_cb = base::Bind(&MockClientCallbacks::OnReady,
- base::Unretained(&callbacks_));
- config_.failed_cb = base::Bind(&MockClientCallbacks::OnFailed,
- base::Unretained(&callbacks_));
- config_.power_cb = base::Bind(&MockClientCallbacks::OnPowerEfficient,
- base::Unretained(&callbacks_));
+ config_.ready_cb = base::BindOnce(&MockClientCallbacks::OnReady,
+ base::Unretained(&callbacks_));
+ config_.failed_cb = base::BindOnce(&MockClientCallbacks::OnFailed,
+ base::Unretained(&callbacks_));
+ config_.power_cb = base::BindRepeating(
+ &MockClientCallbacks::OnPowerEfficient, base::Unretained(&callbacks_));
// Make sure that we have an implementation of GpuSurfaceLookup.
gpu::GpuSurfaceTracker::Get();
@@ -139,7 +139,7 @@ class MojoAndroidOverlayTest : public ::testing::Test {
surface_ = gl::ScopedJavaSurface(surface_texture_.get());
surface_key_ = gpu::GpuSurfaceTracker::Get()->AddSurfaceForNativeWidget(
gpu::GpuSurfaceTracker::SurfaceRecord(
- gfx::kNullAcceleratedWidget, surface_.j_surface().obj(),
+ gfx::kNullAcceleratedWidget, surface_.j_surface(),
false /* can_be_used_with_surface_control */));
mock_provider_.client_->OnSurfaceReady(surface_key_);
diff --git a/chromium/media/mojo/clients/mojo_audio_decoder.cc b/chromium/media/mojo/clients/mojo_audio_decoder.cc
index fad002c4659..092c50fe4db 100644
--- a/chromium/media/mojo/clients/mojo_audio_decoder.cc
+++ b/chromium/media/mojo/clients/mojo_audio_decoder.cc
@@ -5,11 +5,10 @@
#include "media/mojo/clients/mojo_audio_decoder.h"
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback_helpers.h"
#include "base/location.h"
#include "base/logging.h"
-#include "base/single_thread_task_runner.h"
+#include "base/sequenced_task_runner.h"
#include "base/threading/thread_task_runner_handle.h"
#include "build/build_config.h"
#include "media/base/audio_buffer.h"
@@ -21,12 +20,13 @@
namespace media {
MojoAudioDecoder::MojoAudioDecoder(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
mojo::PendingRemote<mojom::AudioDecoder> remote_decoder)
: task_runner_(task_runner),
pending_remote_decoder_(std::move(remote_decoder)),
writer_capacity_(
GetDefaultDecoderBufferConverterCapacity(DemuxerStream::AUDIO)) {
+ DETACH_FROM_SEQUENCE(sequence_checker_);
DVLOG(1) << __func__;
}
@@ -62,7 +62,7 @@ void MojoAudioDecoder::Initialize(const AudioDecoderConfig& config,
const OutputCB& output_cb,
const WaitingCB& waiting_cb) {
DVLOG(1) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (!remote_decoder_.is_bound())
BindRemoteDecoder();
@@ -100,7 +100,7 @@ void MojoAudioDecoder::Initialize(const AudioDecoderConfig& config,
void MojoAudioDecoder::Decode(scoped_refptr<DecoderBuffer> media_buffer,
DecodeCB decode_cb) {
DVLOG(3) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (!remote_decoder_.is_connected()) {
task_runner_->PostTask(
@@ -128,7 +128,7 @@ void MojoAudioDecoder::Decode(scoped_refptr<DecoderBuffer> media_buffer,
void MojoAudioDecoder::Reset(base::OnceClosure closure) {
DVLOG(2) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (!remote_decoder_.is_connected()) {
if (decode_cb_) {
@@ -149,14 +149,14 @@ void MojoAudioDecoder::Reset(base::OnceClosure closure) {
bool MojoAudioDecoder::NeedsBitstreamConversion() const {
DVLOG(1) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
return needs_bitstream_conversion_;
}
void MojoAudioDecoder::BindRemoteDecoder() {
DVLOG(1) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
remote_decoder_.Bind(std::move(pending_remote_decoder_));
@@ -170,21 +170,21 @@ void MojoAudioDecoder::BindRemoteDecoder() {
void MojoAudioDecoder::OnBufferDecoded(mojom::AudioBufferPtr buffer) {
DVLOG(1) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
output_cb_.Run(buffer.To<scoped_refptr<AudioBuffer>>());
}
void MojoAudioDecoder::OnWaiting(WaitingReason reason) {
DVLOG(1) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
waiting_cb_.Run(reason);
}
void MojoAudioDecoder::OnConnectionError() {
DVLOG(1) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(!remote_decoder_.is_connected());
if (init_cb_) {
@@ -201,7 +201,7 @@ void MojoAudioDecoder::OnConnectionError() {
void MojoAudioDecoder::OnInitialized(const Status& status,
bool needs_bitstream_conversion) {
DVLOG(1) << __func__ << ": success:" << status.is_ok();
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
needs_bitstream_conversion_ = needs_bitstream_conversion;
@@ -219,7 +219,7 @@ void MojoAudioDecoder::OnInitialized(const Status& status,
void MojoAudioDecoder::OnDecodeStatus(const Status& status) {
DVLOG(1) << __func__ << ": status:" << status.code();
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(decode_cb_);
std::move(decode_cb_).Run(status);
@@ -227,7 +227,7 @@ void MojoAudioDecoder::OnDecodeStatus(const Status& status) {
void MojoAudioDecoder::OnResetDone() {
DVLOG(1) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
// For pending decodes OnDecodeStatus() should arrive before OnResetDone().
DCHECK(!decode_cb_);
diff --git a/chromium/media/mojo/clients/mojo_audio_decoder.h b/chromium/media/mojo/clients/mojo_audio_decoder.h
index b3b17b5528a..ded8cd2ef76 100644
--- a/chromium/media/mojo/clients/mojo_audio_decoder.h
+++ b/chromium/media/mojo/clients/mojo_audio_decoder.h
@@ -9,6 +9,7 @@
#include "base/macros.h"
#include "base/memory/ref_counted.h"
+#include "base/sequence_checker.h"
#include "media/base/audio_decoder.h"
#include "media/mojo/mojom/audio_decoder.mojom.h"
#include "media/mojo/mojom/media_types.mojom.h"
@@ -17,7 +18,7 @@
#include "mojo/public/cpp/bindings/remote.h"
namespace base {
-class SingleThreadTaskRunner;
+class SequencedTaskRunner;
}
namespace media {
@@ -25,9 +26,10 @@ namespace media {
class MojoDecoderBufferWriter;
// An AudioDecoder that proxies to a mojom::AudioDecoder.
-class MojoAudioDecoder : public AudioDecoder, public mojom::AudioDecoderClient {
+class MojoAudioDecoder final : public AudioDecoder,
+ public mojom::AudioDecoderClient {
public:
- MojoAudioDecoder(scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ MojoAudioDecoder(scoped_refptr<base::SequencedTaskRunner> task_runner,
mojo::PendingRemote<mojom::AudioDecoder> remote_decoder);
~MojoAudioDecoder() final;
@@ -72,7 +74,8 @@ class MojoAudioDecoder : public AudioDecoder, public mojom::AudioDecoderClient {
// called when |remote_decoder_| finished Reset() sequence.
void OnResetDone();
- scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
+ scoped_refptr<base::SequencedTaskRunner> task_runner_;
+ SEQUENCE_CHECKER(sequence_checker_);
// This class is constructed on one thread and used exclusively on another
// thread. This member is used to safely pass the
diff --git a/chromium/media/mojo/clients/mojo_audio_decoder_unittest.cc b/chromium/media/mojo/clients/mojo_audio_decoder_unittest.cc
index f699b14f0a9..daba06f1470 100644
--- a/chromium/media/mojo/clients/mojo_audio_decoder_unittest.cc
+++ b/chromium/media/mojo/clients/mojo_audio_decoder_unittest.cc
@@ -5,7 +5,7 @@
#include <memory>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/macros.h"
#include "base/run_loop.h"
#include "base/single_thread_task_runner.h"
diff --git a/chromium/media/mojo/clients/mojo_cdm.cc b/chromium/media/mojo/clients/mojo_cdm.cc
index e9181895a6d..5ed0040ef79 100644
--- a/chromium/media/mojo/clients/mojo_cdm.cc
+++ b/chromium/media/mojo/clients/mojo_cdm.cc
@@ -9,7 +9,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/location.h"
#include "base/metrics/histogram_macros.h"
#include "base/single_thread_task_runner.h"
diff --git a/chromium/media/mojo/clients/mojo_cdm.h b/chromium/media/mojo/clients/mojo_cdm.h
index 08745e3ab8e..830edc13cb0 100644
--- a/chromium/media/mojo/clients/mojo_cdm.h
+++ b/chromium/media/mojo/clients/mojo_cdm.h
@@ -37,9 +37,9 @@ class MojoDecryptor;
// A ContentDecryptionModule that proxies to a mojom::ContentDecryptionModule.
// That mojom::ContentDecryptionModule proxies back to the MojoCdm via the
// mojom::ContentDecryptionModuleClient interface.
-class MojoCdm : public ContentDecryptionModule,
- public CdmContext,
- public mojom::ContentDecryptionModuleClient {
+class MojoCdm final : public ContentDecryptionModule,
+ public CdmContext,
+ public mojom::ContentDecryptionModuleClient {
public:
using MessageType = CdmMessageType;
diff --git a/chromium/media/mojo/clients/mojo_cdm_factory.h b/chromium/media/mojo/clients/mojo_cdm_factory.h
index 0d987bc51dd..1ca54fec43f 100644
--- a/chromium/media/mojo/clients/mojo_cdm_factory.h
+++ b/chromium/media/mojo/clients/mojo_cdm_factory.h
@@ -14,7 +14,7 @@ namespace mojom {
class InterfaceFactory;
}
-class MojoCdmFactory : public CdmFactory {
+class MojoCdmFactory final : public CdmFactory {
public:
explicit MojoCdmFactory(media::mojom::InterfaceFactory* interface_factory);
~MojoCdmFactory() final;
diff --git a/chromium/media/mojo/clients/mojo_decoder_factory.cc b/chromium/media/mojo/clients/mojo_decoder_factory.cc
index b591fea60f7..c51f097f1ac 100644
--- a/chromium/media/mojo/clients/mojo_decoder_factory.cc
+++ b/chromium/media/mojo/clients/mojo_decoder_factory.cc
@@ -7,7 +7,7 @@
#include <memory>
#include "base/feature_list.h"
-#include "base/single_thread_task_runner.h"
+#include "base/sequenced_task_runner.h"
#include "build/build_config.h"
#include "media/base/media_switches.h"
#include "media/mojo/buildflags.h"
@@ -28,7 +28,7 @@ MojoDecoderFactory::MojoDecoderFactory(
MojoDecoderFactory::~MojoDecoderFactory() = default;
void MojoDecoderFactory::CreateAudioDecoders(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
MediaLog* media_log,
std::vector<std::unique_ptr<AudioDecoder>>* audio_decoders) {
#if BUILDFLAG(ENABLE_MOJO_AUDIO_DECODER)
@@ -42,7 +42,7 @@ void MojoDecoderFactory::CreateAudioDecoders(
}
void MojoDecoderFactory::CreateVideoDecoders(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
GpuVideoAcceleratorFactories* gpu_factories,
MediaLog* media_log,
RequestOverlayInfoCB request_overlay_info_cb,
diff --git a/chromium/media/mojo/clients/mojo_decoder_factory.h b/chromium/media/mojo/clients/mojo_decoder_factory.h
index 5e1c847fc2c..61283c60546 100644
--- a/chromium/media/mojo/clients/mojo_decoder_factory.h
+++ b/chromium/media/mojo/clients/mojo_decoder_factory.h
@@ -15,19 +15,19 @@ namespace mojom {
class InterfaceFactory;
}
-class MojoDecoderFactory : public DecoderFactory {
+class MojoDecoderFactory final : public DecoderFactory {
public:
explicit MojoDecoderFactory(
media::mojom::InterfaceFactory* interface_factory);
~MojoDecoderFactory() final;
void CreateAudioDecoders(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
MediaLog* media_log,
std::vector<std::unique_ptr<AudioDecoder>>* audio_decoders) final;
void CreateVideoDecoders(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
GpuVideoAcceleratorFactories* gpu_factories,
MediaLog* media_log,
RequestOverlayInfoCB request_overlay_info_cb,
diff --git a/chromium/media/mojo/clients/mojo_decryptor.h b/chromium/media/mojo/clients/mojo_decryptor.h
index e5cc543d83b..944c6005614 100644
--- a/chromium/media/mojo/clients/mojo_decryptor.h
+++ b/chromium/media/mojo/clients/mojo_decryptor.h
@@ -25,7 +25,7 @@ class MojoDecoderBufferWriter;
// This class is single threaded. The |remote_decryptor| is connected before
// being passed to MojoDecryptor, but it is bound to the thread MojoDecryptor
// lives on the first time it is used in this class.
-class MojoDecryptor : public Decryptor {
+class MojoDecryptor final : public Decryptor {
public:
// |writer_capacity| can be used for testing. If 0, default writer capacity
// will be used.
diff --git a/chromium/media/mojo/clients/mojo_decryptor_unittest.cc b/chromium/media/mojo/clients/mojo_decryptor_unittest.cc
index 484cd14044f..6de784b000c 100644
--- a/chromium/media/mojo/clients/mojo_decryptor_unittest.cc
+++ b/chromium/media/mojo/clients/mojo_decryptor_unittest.cc
@@ -147,8 +147,8 @@ TEST_F(MojoDecryptorTest, Reset_DuringDecryptAndDecode_Audio) {
scoped_refptr<DecoderBuffer> buffer(new DecoderBuffer(100));
mojo_decryptor_->DecryptAndDecodeAudio(
- std::move(buffer),
- base::Bind(&MojoDecryptorTest::AudioDecoded, base::Unretained(this)));
+ std::move(buffer), base::BindRepeating(&MojoDecryptorTest::AudioDecoded,
+ base::Unretained(this)));
mojo_decryptor_->ResetDecoder(Decryptor::kAudio);
base::RunLoop().RunUntilIdle();
}
@@ -170,8 +170,8 @@ TEST_F(MojoDecryptorTest, Reset_DuringDecryptAndDecode_Audio_ChunkedWrite) {
scoped_refptr<DecoderBuffer> buffer(new DecoderBuffer(100));
mojo_decryptor_->DecryptAndDecodeAudio(
- std::move(buffer),
- base::Bind(&MojoDecryptorTest::AudioDecoded, base::Unretained(this)));
+ std::move(buffer), base::BindRepeating(&MojoDecryptorTest::AudioDecoded,
+ base::Unretained(this)));
mojo_decryptor_->ResetDecoder(Decryptor::kAudio);
base::RunLoop().RunUntilIdle();
}
@@ -194,8 +194,8 @@ TEST_F(MojoDecryptorTest, Reset_DuringDecryptAndDecode_Video) {
scoped_refptr<DecoderBuffer> buffer(new DecoderBuffer(100));
mojo_decryptor_->DecryptAndDecodeVideo(
- std::move(buffer),
- base::Bind(&MojoDecryptorTest::VideoDecoded, base::Unretained(this)));
+ std::move(buffer), base::BindRepeating(&MojoDecryptorTest::VideoDecoded,
+ base::Unretained(this)));
mojo_decryptor_->ResetDecoder(Decryptor::kVideo);
base::RunLoop().RunUntilIdle();
}
@@ -219,8 +219,8 @@ TEST_F(MojoDecryptorTest, Reset_DuringDecryptAndDecode_Video_ChunkedWrite) {
scoped_refptr<DecoderBuffer> buffer(new DecoderBuffer(100));
mojo_decryptor_->DecryptAndDecodeVideo(
- std::move(buffer),
- base::Bind(&MojoDecryptorTest::VideoDecoded, base::Unretained(this)));
+ std::move(buffer), base::BindRepeating(&MojoDecryptorTest::VideoDecoded,
+ base::Unretained(this)));
mojo_decryptor_->ResetDecoder(Decryptor::kVideo);
base::RunLoop().RunUntilIdle();
}
@@ -258,11 +258,11 @@ TEST_F(MojoDecryptorTest, Reset_DuringDecryptAndDecode_AudioAndVideo) {
scoped_refptr<DecoderBuffer> buffer(new DecoderBuffer(100));
mojo_decryptor_->DecryptAndDecodeAudio(
- buffer,
- base::Bind(&MojoDecryptorTest::AudioDecoded, base::Unretained(this)));
+ buffer, base::BindRepeating(&MojoDecryptorTest::AudioDecoded,
+ base::Unretained(this)));
mojo_decryptor_->DecryptAndDecodeVideo(
- std::move(buffer),
- base::Bind(&MojoDecryptorTest::VideoDecoded, base::Unretained(this)));
+ std::move(buffer), base::BindRepeating(&MojoDecryptorTest::VideoDecoded,
+ base::Unretained(this)));
mojo_decryptor_->ResetDecoder(Decryptor::kAudio);
mojo_decryptor_->ResetDecoder(Decryptor::kVideo);
base::RunLoop().RunUntilIdle();
@@ -283,8 +283,8 @@ TEST_F(MojoDecryptorTest, VideoDecodeFreesBuffer) {
scoped_refptr<DecoderBuffer> buffer(new DecoderBuffer(100));
mojo_decryptor_->DecryptAndDecodeVideo(
- std::move(buffer),
- base::Bind(&MojoDecryptorTest::VideoDecoded, base::Unretained(this)));
+ std::move(buffer), base::BindRepeating(&MojoDecryptorTest::VideoDecoded,
+ base::Unretained(this)));
base::RunLoop().RunUntilIdle();
}
@@ -303,8 +303,8 @@ TEST_F(MojoDecryptorTest, VideoDecodeFreesMultipleBuffers) {
for (int i = 0; i < TIMES; ++i) {
scoped_refptr<DecoderBuffer> buffer(new DecoderBuffer(100));
mojo_decryptor_->DecryptAndDecodeVideo(
- std::move(buffer),
- base::Bind(&MojoDecryptorTest::VideoDecoded, base::Unretained(this)));
+ std::move(buffer), base::BindRepeating(&MojoDecryptorTest::VideoDecoded,
+ base::Unretained(this)));
}
base::RunLoop().RunUntilIdle();
}
@@ -327,8 +327,8 @@ TEST_F(MojoDecryptorTest, VideoDecodeHoldThenFreeBuffers) {
for (int i = 0; i < 2; ++i) {
scoped_refptr<DecoderBuffer> buffer(new DecoderBuffer(100));
mojo_decryptor_->DecryptAndDecodeVideo(
- std::move(buffer),
- base::Bind(&MojoDecryptorTest::VideoDecoded, base::Unretained(this)));
+ std::move(buffer), base::BindRepeating(&MojoDecryptorTest::VideoDecoded,
+ base::Unretained(this)));
base::RunLoop().RunUntilIdle();
}
@@ -354,8 +354,8 @@ TEST_F(MojoDecryptorTest, EOSBuffer) {
scoped_refptr<DecoderBuffer> buffer(new DecoderBuffer(100));
mojo_decryptor_->DecryptAndDecodeVideo(
- std::move(buffer),
- base::Bind(&MojoDecryptorTest::VideoDecoded, base::Unretained(this)));
+ std::move(buffer), base::BindRepeating(&MojoDecryptorTest::VideoDecoded,
+ base::Unretained(this)));
base::RunLoop().RunUntilIdle();
}
@@ -376,8 +376,8 @@ TEST_F(MojoDecryptorTest, DestroyService) {
scoped_refptr<DecoderBuffer> buffer(new DecoderBuffer(100));
mojo_decryptor_->DecryptAndDecodeVideo(
- std::move(buffer),
- base::Bind(&MojoDecryptorTest::VideoDecoded, base::Unretained(this)));
+ std::move(buffer), base::BindRepeating(&MojoDecryptorTest::VideoDecoded,
+ base::Unretained(this)));
base::RunLoop().RunUntilIdle();
}
diff --git a/chromium/media/mojo/clients/mojo_demuxer_stream_impl.h b/chromium/media/mojo/clients/mojo_demuxer_stream_impl.h
index 30fcaa67da2..f3d258b11a0 100644
--- a/chromium/media/mojo/clients/mojo_demuxer_stream_impl.h
+++ b/chromium/media/mojo/clients/mojo_demuxer_stream_impl.h
@@ -39,8 +39,8 @@ class MojoDemuxerStreamImpl : public mojom::DemuxerStream {
// Sets an error handler that will be called if a connection error occurs on
// the bound message pipe.
- void set_disconnect_handler(const base::Closure& error_handler) {
- receiver_.set_disconnect_handler(error_handler);
+ void set_disconnect_handler(base::OnceClosure error_handler) {
+ receiver_.set_disconnect_handler(std::move(error_handler));
}
private:
diff --git a/chromium/media/mojo/clients/mojo_media_log_service.h b/chromium/media/mojo/clients/mojo_media_log_service.h
index e49bb4eab4e..c6937073271 100644
--- a/chromium/media/mojo/clients/mojo_media_log_service.h
+++ b/chromium/media/mojo/clients/mojo_media_log_service.h
@@ -14,7 +14,7 @@
namespace media {
// Implementation of a mojom::MediaLog service which wraps a media::MediaLog.
-class MojoMediaLogService : public mojom::MediaLog {
+class MojoMediaLogService final : public mojom::MediaLog {
public:
explicit MojoMediaLogService(media::MediaLog* media_log);
~MojoMediaLogService() final;
diff --git a/chromium/media/mojo/clients/mojo_renderer.cc b/chromium/media/mojo/clients/mojo_renderer.cc
index 64de1854b7b..a1d3343e01c 100644
--- a/chromium/media/mojo/clients/mojo_renderer.cc
+++ b/chromium/media/mojo/clients/mojo_renderer.cc
@@ -86,8 +86,8 @@ void MojoRenderer::InitializeRendererFromStreams(
// Using base::Unretained(this) is safe because |this| owns |mojo_stream|,
// and the error handler can't be invoked once |mojo_stream| is destroyed.
mojo_stream->set_disconnect_handler(
- base::Bind(&MojoRenderer::OnDemuxerStreamConnectionError,
- base::Unretained(this), mojo_stream.get()));
+ base::BindOnce(&MojoRenderer::OnDemuxerStreamConnectionError,
+ base::Unretained(this), mojo_stream.get()));
streams_.push_back(std::move(mojo_stream));
stream_proxies.push_back(std::move(stream_proxy));
diff --git a/chromium/media/mojo/clients/mojo_renderer_factory.h b/chromium/media/mojo/clients/mojo_renderer_factory.h
index da93dc4eef3..2459433e5ee 100644
--- a/chromium/media/mojo/clients/mojo_renderer_factory.h
+++ b/chromium/media/mojo/clients/mojo_renderer_factory.h
@@ -30,7 +30,7 @@ class MojoRenderer;
// wrapper factories that use MRF, rather than creating derived MojoRenderer
// types, or extending MRF. See DecryptingRendererFactory and
// MediaPlayerRendererClientFactory for examples of small wrappers around MRF.
-class MojoRendererFactory : public RendererFactory {
+class MojoRendererFactory final : public RendererFactory {
public:
explicit MojoRendererFactory(
media::mojom::InterfaceFactory* interface_factory);
diff --git a/chromium/media/mojo/clients/mojo_video_decoder.cc b/chromium/media/mojo/clients/mojo_video_decoder.cc
index 524f979934d..78da171d12a 100644
--- a/chromium/media/mojo/clients/mojo_video_decoder.cc
+++ b/chromium/media/mojo/clients/mojo_video_decoder.cc
@@ -7,7 +7,6 @@
#include <atomic>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback_helpers.h"
#include "base/feature_list.h"
#include "base/location.h"
@@ -16,7 +15,7 @@
#include "base/memory/scoped_refptr.h"
#include "base/metrics/histogram_macros.h"
#include "base/no_destructor.h"
-#include "base/single_thread_task_runner.h"
+#include "base/sequenced_task_runner.h"
#include "base/unguessable_token.h"
#include "build/build_config.h"
#include "media/base/bind_to_current_loop.h"
@@ -25,6 +24,7 @@
#include "media/base/media_switches.h"
#include "media/base/overlay_info.h"
#include "media/base/video_frame.h"
+#include "media/media_buildflags.h"
#include "media/mojo/common/media_type_converters.h"
#include "media/mojo/common/mojo_decoder_buffer_converter.h"
#include "media/mojo/mojom/media_types.mojom.h"
@@ -61,7 +61,7 @@ class MojoVideoFrameHandleReleaser
MojoVideoFrameHandleReleaser(
mojo::PendingRemote<mojom::VideoFrameHandleReleaser>
video_frame_handle_releaser_remote,
- scoped_refptr<base::SingleThreadTaskRunner> task_runner) {
+ scoped_refptr<base::SequencedTaskRunner> task_runner) {
// Connection errors are not handled because we wouldn't do anything
// differently. ("If a tree falls in a forest...")
video_frame_handle_releaser_ =
@@ -98,7 +98,7 @@ class MojoVideoFrameHandleReleaser
};
MojoVideoDecoder::MojoVideoDecoder(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
GpuVideoAcceleratorFactories* gpu_factories,
MediaLog* media_log,
mojo::PendingRemote<mojom::VideoDecoder> pending_remote_decoder,
@@ -117,6 +117,7 @@ MojoVideoDecoder::MojoVideoDecoder(
target_color_space_(target_color_space),
video_decoder_implementation_(implementation) {
DVLOG(1) << __func__;
+ DETACH_FROM_SEQUENCE(sequence_checker_);
weak_this_ = weak_factory_.GetWeakPtr();
}
@@ -133,8 +134,9 @@ bool MojoVideoDecoder::IsPlatformDecoder() const {
}
bool MojoVideoDecoder::SupportsDecryption() const {
- // Currently only the android backends support decryption
-#if defined(OS_ANDROID)
+ // Currently only the Android backends and specific ChromeOS configurations
+ // support decryption.
+#if defined(OS_ANDROID) || BUILDFLAG(USE_CHROMEOS_PROTECTED_MEDIA)
return true;
#else
return false;
@@ -157,7 +159,7 @@ void MojoVideoDecoder::Initialize(const VideoDecoderConfig& config,
const OutputCB& output_cb,
const WaitingCB& waiting_cb) {
DVLOG(1) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
// Fail immediately if we know that the remote side cannot support |config|.
if (gpu_factories_ && gpu_factories_->IsDecoderConfigSupported(
@@ -207,7 +209,7 @@ void MojoVideoDecoder::OnInitializeDone(const Status& status,
bool needs_bitstream_conversion,
int32_t max_decode_requests) {
DVLOG(1) << __func__ << ": status = " << std::hex << status.code();
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
initialized_ = status.is_ok();
needs_bitstream_conversion_ = needs_bitstream_conversion;
max_decode_requests_ = max_decode_requests;
@@ -217,7 +219,7 @@ void MojoVideoDecoder::OnInitializeDone(const Status& status,
void MojoVideoDecoder::Decode(scoped_refptr<DecoderBuffer> buffer,
DecodeCB decode_cb) {
DVLOG(3) << __func__ << ": " << buffer->AsHumanReadableString();
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (has_connection_error_) {
task_runner_->PostTask(
@@ -253,7 +255,7 @@ void MojoVideoDecoder::OnVideoFrameDecoded(
bool can_read_without_stalling,
const base::Optional<base::UnguessableToken>& release_token) {
DVLOG(3) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
// TODO(sandersd): Prove that all paths read this value again after running
// |output_cb_|. In practice this isn't very important, since all decoders
@@ -296,7 +298,7 @@ void MojoVideoDecoder::OnVideoFrameDecoded(
void MojoVideoDecoder::OnDecodeDone(uint64_t decode_id, const Status& status) {
DVLOG(3) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
auto it = pending_decodes_.find(decode_id);
if (it == pending_decodes_.end()) {
@@ -315,7 +317,7 @@ void MojoVideoDecoder::OnDecodeDone(uint64_t decode_id, const Status& status) {
void MojoVideoDecoder::Reset(base::OnceClosure reset_cb) {
DVLOG(2) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (has_connection_error_) {
task_runner_->PostTask(FROM_HERE, std::move(reset_cb));
@@ -329,7 +331,7 @@ void MojoVideoDecoder::Reset(base::OnceClosure reset_cb) {
void MojoVideoDecoder::OnResetDone() {
DVLOG(2) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
std::move(reset_cb_).Run();
}
@@ -352,7 +354,7 @@ int MojoVideoDecoder::GetMaxDecodeRequests() const {
void MojoVideoDecoder::BindRemoteDecoder() {
DVLOG(3) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(!remote_decoder_bound_);
remote_decoder_.Bind(std::move(pending_remote_decoder_));
@@ -398,14 +400,14 @@ void MojoVideoDecoder::BindRemoteDecoder() {
void MojoVideoDecoder::OnWaiting(WaitingReason reason) {
DVLOG(2) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
waiting_cb_.Run(reason);
}
void MojoVideoDecoder::RequestOverlayInfo(bool restart_for_transitions) {
DVLOG(2) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(request_overlay_info_cb_);
overlay_info_requested_ = true;
@@ -417,7 +419,7 @@ void MojoVideoDecoder::RequestOverlayInfo(bool restart_for_transitions) {
void MojoVideoDecoder::OnOverlayInfoChanged(const OverlayInfo& overlay_info) {
DVLOG(2) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (has_connection_error_)
return;
@@ -426,7 +428,7 @@ void MojoVideoDecoder::OnOverlayInfoChanged(const OverlayInfo& overlay_info) {
void MojoVideoDecoder::Stop() {
DVLOG(2) << __func__;
- DCHECK(task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
has_connection_error_ = true;
ReportInitialPlaybackErrorUMA();
diff --git a/chromium/media/mojo/clients/mojo_video_decoder.h b/chromium/media/mojo/clients/mojo_video_decoder.h
index 6d7fc9b38f6..201b4ecdd74 100644
--- a/chromium/media/mojo/clients/mojo_video_decoder.h
+++ b/chromium/media/mojo/clients/mojo_video_decoder.h
@@ -9,6 +9,7 @@
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
+#include "base/sequence_checker.h"
#include "media/base/status.h"
#include "media/base/video_decoder.h"
#include "media/base/video_frame.h"
@@ -21,7 +22,7 @@
#include "ui/gfx/color_space.h"
namespace base {
-class SingleThreadTaskRunner;
+class SequencedTaskRunner;
}
namespace media {
@@ -47,7 +48,7 @@ class MojoVideoDecoder final : public VideoDecoder,
public mojom::VideoDecoderClient {
public:
MojoVideoDecoder(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
GpuVideoAcceleratorFactories* gpu_factories,
MediaLog* media_log,
mojo::PendingRemote<mojom::VideoDecoder> pending_remote_decoder,
@@ -105,7 +106,8 @@ class MojoVideoDecoder final : public VideoDecoder,
void ReportInitialPlaybackErrorUMA();
// Task runner that the decoder runs on (media thread).
- scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
+ scoped_refptr<base::SequencedTaskRunner> task_runner_;
+ SEQUENCE_CHECKER(sequence_checker_);
// Used to pass the remote decoder from the constructor (on the main thread)
// to Initialize() (on the media thread).
diff --git a/chromium/media/mojo/clients/mojo_video_encode_accelerator.cc b/chromium/media/mojo/clients/mojo_video_encode_accelerator.cc
index 57e6015cc81..2fdfdef3ba4 100644
--- a/chromium/media/mojo/clients/mojo_video_encode_accelerator.cc
+++ b/chromium/media/mojo/clients/mojo_video_encode_accelerator.cc
@@ -7,7 +7,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/logging.h"
#include "build/build_config.h"
#include "gpu/ipc/client/gpu_channel_host.h"
diff --git a/chromium/media/mojo/common/mojo_decoder_buffer_converter.cc b/chromium/media/mojo/common/mojo_decoder_buffer_converter.cc
index 4754c08932b..9d8e66858be 100644
--- a/chromium/media/mojo/common/mojo_decoder_buffer_converter.cc
+++ b/chromium/media/mojo/common/mojo_decoder_buffer_converter.cc
@@ -66,11 +66,11 @@ MojoDecoderBufferReader::MojoDecoderBufferReader(
bytes_read_(0) {
DVLOG(1) << __func__;
- MojoResult result =
- pipe_watcher_.Watch(consumer_handle_.get(), MOJO_HANDLE_SIGNAL_READABLE,
- MOJO_WATCH_CONDITION_SATISFIED,
- base::Bind(&MojoDecoderBufferReader::OnPipeReadable,
- base::Unretained(this)));
+ MojoResult result = pipe_watcher_.Watch(
+ consumer_handle_.get(), MOJO_HANDLE_SIGNAL_READABLE,
+ MOJO_WATCH_CONDITION_SATISFIED,
+ base::BindRepeating(&MojoDecoderBufferReader::OnPipeReadable,
+ base::Unretained(this)));
if (result != MOJO_RESULT_OK) {
DVLOG(1) << __func__
<< ": Failed to start watching the pipe. result=" << result;
diff --git a/chromium/media/mojo/mojom/media_types.mojom b/chromium/media/mojo/mojom/media_types.mojom
index 82d38b23ae2..e54857cddec 100644
--- a/chromium/media/mojo/mojom/media_types.mojom
+++ b/chromium/media/mojo/mojom/media_types.mojom
@@ -12,7 +12,7 @@ import "mojo/public/mojom/base/unguessable_token.mojom";
import "ui/gfx/geometry/mojom/geometry.mojom";
import "ui/gfx/mojom/buffer_types.mojom";
import "ui/gfx/mojom/color_space.mojom";
-import "ui/gl/mojom/hdr_metadata.mojom";
+import "ui/gfx/mojom/hdr_metadata.mojom";
// See media/base/audio_codecs.h for descriptions.
[Native]
@@ -163,7 +163,7 @@ struct VideoDecoderConfig {
array<uint8> extra_data;
EncryptionScheme encryption_scheme;
VideoColorSpace color_space_info;
- gl.mojom.HDRMetadata? hdr_metadata;
+ gfx.mojom.HDRMetadata? hdr_metadata;
};
// Native struct media::SubsampleEntry;
@@ -347,7 +347,7 @@ struct VideoFrame {
VideoFrameMetadata metadata;
gfx.mojom.ColorSpace color_space;
- gl.mojom.HDRMetadata? hdr_metadata;
+ gfx.mojom.HDRMetadata? hdr_metadata;
};
// Possible choices for storing VideoFrame data.
diff --git a/chromium/media/mojo/mojom/provision_fetcher.mojom b/chromium/media/mojo/mojom/provision_fetcher.mojom
index 74d6b85be92..ec0c04fa6de 100644
--- a/chromium/media/mojo/mojom/provision_fetcher.mojom
+++ b/chromium/media/mojo/mojom/provision_fetcher.mojom
@@ -4,6 +4,8 @@
module media.mojom;
+import "url/mojom/url.mojom";
+
// An interface to retrieve provision information for CDM. This includes Android
// MediaDrm. See Android documentation about MediaDrm provisioning:
// https://developer.android.com/reference/android/media/MediaDrm.ProvisionRequest.html
@@ -13,7 +15,6 @@ interface ProvisionFetcher {
// and returns |result| and the |response|. On Android, the input parameters
// |default_url| and |request_data| corresponds to Java class
// MediaDrm.ProvisionRequest. |response| will be empty iff |result| is false.
- // TODO(slan): Pass |default_url| as a url.mojom.Url (crbug.com/662752).
- Retrieve(string default_url, string request_data)
+ Retrieve(url.mojom.Url default_url, string request_data)
=> (bool result, string response);
};
diff --git a/chromium/media/mojo/mojom/speech_recognition_service.mojom b/chromium/media/mojo/mojom/speech_recognition_service.mojom
index 270aecb9049..f85ef0e4b3f 100644
--- a/chromium/media/mojo/mojom/speech_recognition_service.mojom
+++ b/chromium/media/mojo/mojom/speech_recognition_service.mojom
@@ -78,3 +78,21 @@ struct SpeechRecognitionResult {
// final result.
bool is_final;
};
+
+// The interface used to notify the speech recognition client of events
+// triggered by the browser. The remote lives in the browser process and the
+// receiver lives in the renderer process.
+interface SpeechRecognitionAvailabilityObserver {
+ // Notify the speech recognition client when speech recognition availability
+ // changes.
+ SpeechRecognitionAvailabilityChanged(bool is_speech_recognition_available);
+};
+
+// This interface between the speech recognition client and the browser.
+// The remote lives in the renderer process and the receiver lives in the
+// browser process.
+interface SpeechRecognitionClientBrowserInterface {
+ // Bind the speech recognition availability observer.
+ BindSpeechRecognitionAvailabilityObserver(
+ pending_remote<SpeechRecognitionAvailabilityObserver> observer);
+};
diff --git a/chromium/media/mojo/mojom/video_decoder_config_mojom_traits.cc b/chromium/media/mojo/mojom/video_decoder_config_mojom_traits.cc
index 393ff92b119..4699902207f 100644
--- a/chromium/media/mojo/mojom/video_decoder_config_mojom_traits.cc
+++ b/chromium/media/mojo/mojom/video_decoder_config_mojom_traits.cc
@@ -47,7 +47,7 @@ bool StructTraits<media::mojom::VideoDecoderConfigDataView,
if (!input.ReadColorSpaceInfo(&color_space))
return false;
- base::Optional<gl::HDRMetadata> hdr_metadata;
+ base::Optional<gfx::HDRMetadata> hdr_metadata;
if (!input.ReadHdrMetadata(&hdr_metadata))
return false;
diff --git a/chromium/media/mojo/mojom/video_decoder_config_mojom_traits.h b/chromium/media/mojo/mojom/video_decoder_config_mojom_traits.h
index 664aa1da182..f89cfcc2f87 100644
--- a/chromium/media/mojo/mojom/video_decoder_config_mojom_traits.h
+++ b/chromium/media/mojo/mojom/video_decoder_config_mojom_traits.h
@@ -11,7 +11,7 @@
#include "media/mojo/mojom/video_color_space_mojom_traits.h"
#include "media/mojo/mojom/video_transformation_mojom_traits.h"
#include "ui/gfx/geometry/mojom/geometry_mojom_traits.h"
-#include "ui/gl/mojom/hdr_metadata_mojom_traits.h"
+#include "ui/gfx/mojom/hdr_metadata_mojom_traits.h"
namespace mojo {
@@ -64,7 +64,7 @@ struct StructTraits<media::mojom::VideoDecoderConfigDataView,
return input.video_transformation();
}
- static const base::Optional<gl::HDRMetadata>& hdr_metadata(
+ static const base::Optional<gfx::HDRMetadata>& hdr_metadata(
const media::VideoDecoderConfig& input) {
return input.hdr_metadata();
}
diff --git a/chromium/media/mojo/mojom/video_decoder_config_mojom_traits_unittest.cc b/chromium/media/mojo/mojom/video_decoder_config_mojom_traits_unittest.cc
index 5c9f0feb0f0..318834f1463 100644
--- a/chromium/media/mojo/mojom/video_decoder_config_mojom_traits_unittest.cc
+++ b/chromium/media/mojo/mojom/video_decoder_config_mojom_traits_unittest.cc
@@ -88,7 +88,7 @@ TEST(VideoDecoderConfigStructTraitsTest,
kCodecVP8, VP8PROFILE_ANY, VideoDecoderConfig::AlphaMode::kIsOpaque,
VideoColorSpace(), kNoTransformation, kCodedSize, kVisibleRect,
kNaturalSize, EmptyExtraData(), EncryptionScheme::kUnencrypted);
- gl::HDRMetadata hdr_metadata;
+ gfx::HDRMetadata hdr_metadata;
hdr_metadata.max_frame_average_light_level = 123;
hdr_metadata.max_content_light_level = 456;
hdr_metadata.mastering_metadata.primary_r.set_x(0.1f);
diff --git a/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits.cc b/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits.cc
index 33c7655e3a0..552167c0490 100644
--- a/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits.cc
+++ b/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits.cc
@@ -6,7 +6,7 @@
#include <utility>
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/logging.h"
#include "build/build_config.h"
#include "mojo/public/cpp/base/time_mojom_traits.h"
diff --git a/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits_unittest.cc b/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits_unittest.cc
index 66ebc412d70..38f528440b3 100644
--- a/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits_unittest.cc
+++ b/chromium/media/mojo/mojom/video_frame_metadata_mojom_traits_unittest.cc
@@ -4,7 +4,7 @@
#include "media/mojo/mojom/video_frame_metadata_mojom_traits.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "base/test/task_environment.h"
diff --git a/chromium/media/mojo/mojom/video_frame_mojom_traits.cc b/chromium/media/mojo/mojom/video_frame_mojom_traits.cc
index 1f50120daf1..4f7bae7a300 100644
--- a/chromium/media/mojo/mojom/video_frame_mojom_traits.cc
+++ b/chromium/media/mojo/mojom/video_frame_mojom_traits.cc
@@ -7,7 +7,7 @@
#include <utility>
#include <vector>
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/logging.h"
#include "build/build_config.h"
#include "gpu/ipc/common/gpu_memory_buffer_support.h"
@@ -19,7 +19,7 @@
#include "mojo/public/cpp/system/handle.h"
#include "ui/gfx/mojom/buffer_types_mojom_traits.h"
#include "ui/gfx/mojom/color_space_mojom_traits.h"
-#include "ui/gl/mojom/hdr_metadata_mojom_traits.h"
+#include "ui/gfx/mojom/hdr_metadata_mojom_traits.h"
#if defined(OS_LINUX) || defined(OS_CHROMEOS)
#include "base/posix/eintr_wrapper.h"
@@ -286,7 +286,7 @@ bool StructTraits<media::mojom::VideoFrameDataView,
return false;
frame->set_color_space(color_space);
- base::Optional<gl::HDRMetadata> hdr_metadata;
+ base::Optional<gfx::HDRMetadata> hdr_metadata;
if (!input.ReadHdrMetadata(&hdr_metadata))
return false;
frame->set_hdr_metadata(std::move(hdr_metadata));
diff --git a/chromium/media/mojo/mojom/video_frame_mojom_traits.h b/chromium/media/mojo/mojom/video_frame_mojom_traits.h
index deb78ed4118..3323ef2ee2f 100644
--- a/chromium/media/mojo/mojom/video_frame_mojom_traits.h
+++ b/chromium/media/mojo/mojom/video_frame_mojom_traits.h
@@ -62,7 +62,7 @@ struct StructTraits<media::mojom::VideoFrameDataView,
return input->ColorSpace();
}
- static const base::Optional<gl::HDRMetadata>& hdr_metadata(
+ static const base::Optional<gfx::HDRMetadata>& hdr_metadata(
const scoped_refptr<media::VideoFrame>& input) {
return input->hdr_metadata();
}
diff --git a/chromium/media/mojo/mojom/video_frame_mojom_traits_unittest.cc b/chromium/media/mojo/mojom/video_frame_mojom_traits_unittest.cc
index 802e40e2294..3c6a2c3afb6 100644
--- a/chromium/media/mojo/mojom/video_frame_mojom_traits_unittest.cc
+++ b/chromium/media/mojo/mojom/video_frame_mojom_traits_unittest.cc
@@ -4,7 +4,7 @@
#include "media/mojo/mojom/video_frame_mojom_traits.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "base/test/task_environment.h"
diff --git a/chromium/media/mojo/services/BUILD.gn b/chromium/media/mojo/services/BUILD.gn
index f14406826ba..afdf5766a53 100644
--- a/chromium/media/mojo/services/BUILD.gn
+++ b/chromium/media/mojo/services/BUILD.gn
@@ -3,6 +3,7 @@
# found in the LICENSE file.
import("//build/config/chromecast_build.gni")
+import("//build/config/chromeos/ui_mode.gni")
import("//media/media_options.gni")
import("//testing/test.gni")
@@ -86,6 +87,7 @@ component("services") {
]
deps = [
+ "//build:chromeos_buildflags",
"//gpu/ipc/service",
"//media",
"//media:shared_memory_support",
@@ -139,7 +141,7 @@ component("services") {
deps += [ "//sandbox" ]
}
}
- if (is_chromeos) {
+ if (is_ash) {
deps +=
[ "//chromeos/components/cdm_factory_daemon:cdm_factory_daemon_gpu" ]
}
@@ -196,7 +198,7 @@ source_set("unit_tests") {
deps += [ "//media/cdm:cdm_api" ]
}
- if (is_chromeos) {
+ if (is_ash) {
deps += [
"//components/chromeos_camera:mjpeg_decode_accelerator_service_unittest",
]
diff --git a/chromium/media/mojo/services/cdm_service.cc b/chromium/media/mojo/services/cdm_service.cc
index 9db466ab699..025dffbded5 100644
--- a/chromium/media/mojo/services/cdm_service.cc
+++ b/chromium/media/mojo/services/cdm_service.cc
@@ -42,7 +42,7 @@ namespace {
// CDMs too early (e.g. during page navigation) which could cause errors
// (session closed) on the client side. See https://crbug.com/821171 for
// details.
-class CdmFactoryImpl : public DeferredDestroy<mojom::CdmFactory> {
+class CdmFactoryImpl final : public DeferredDestroy<mojom::CdmFactory> {
public:
CdmFactoryImpl(CdmService::Client* client,
mojo::PendingRemote<mojom::FrameInterfaceFactory> interfaces)
diff --git a/chromium/media/mojo/services/cdm_service.h b/chromium/media/mojo/services/cdm_service.h
index adc1d3f74ff..a99cf60a81f 100644
--- a/chromium/media/mojo/services/cdm_service.h
+++ b/chromium/media/mojo/services/cdm_service.h
@@ -27,7 +27,7 @@ namespace media {
class CdmFactory;
-class MEDIA_MOJO_EXPORT CdmService : public mojom::CdmService {
+class MEDIA_MOJO_EXPORT CdmService final : public mojom::CdmService {
public:
class Client {
public:
diff --git a/chromium/media/mojo/services/gpu_mojo_media_client.cc b/chromium/media/mojo/services/gpu_mojo_media_client.cc
index b1f553ce767..88ecd898dd6 100644
--- a/chromium/media/mojo/services/gpu_mojo_media_client.cc
+++ b/chromium/media/mojo/services/gpu_mojo_media_client.cc
@@ -9,6 +9,7 @@
#include "base/bind.h"
#include "base/feature_list.h"
#include "base/memory/ptr_util.h"
+#include "build/chromeos_buildflags.h"
#include "gpu/ipc/service/gpu_channel.h"
#include "media/base/audio_decoder.h"
#include "media/base/cdm_factory.h"
@@ -58,9 +59,9 @@ using media::android_mojo_util::CreateProvisionFetcher;
using media::android_mojo_util::CreateMediaDrmStorage;
#endif // defined(OS_ANDROID)
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
#include "chromeos/components/cdm_factory_daemon/chromeos_cdm_factory.h"
-#endif // defined(OS_CHROMEOS)
+#endif // BUILDFLAG(IS_ASH)
namespace media {
@@ -110,7 +111,7 @@ D3D11VideoDecoder::GetD3D11DeviceCB GetD3D11DeviceCallback() {
// otherwise it returns false.
bool ShouldUseChromeOSDirectVideoDecoder(
const gpu::GpuPreferences& gpu_preferences) {
-#if defined(OS_CHROMEOS)
+#if BUILDFLAG(IS_ASH)
const bool should_use_direct_video_decoder =
!gpu_preferences.platform_disallows_chromeos_direct_video_decoder &&
base::FeatureList::IsEnabled(kUseChromeOSDirectVideoDecoder);
@@ -244,7 +245,7 @@ std::unique_ptr<VideoDecoder> GpuMojoMediaClient::CreateVideoDecoder(
switch (implementation) {
case VideoDecoderImplementation::kDefault: {
#if defined(OS_ANDROID)
- auto get_stub_cb = base::Bind(
+ auto get_stub_cb = base::BindRepeating(
&GetCommandBufferStub, gpu_task_runner_, media_gpu_channel_manager_,
command_buffer_id->channel_token, command_buffer_id->route_id);
std::unique_ptr<SharedImageVideoProvider> image_provider;
@@ -346,7 +347,7 @@ std::unique_ptr<CdmFactory> GpuMojoMediaClient::CreateCdmFactory(
return std::make_unique<AndroidCdmFactory>(
base::BindRepeating(&CreateProvisionFetcher, frame_interfaces),
base::BindRepeating(&CreateMediaDrmStorage, frame_interfaces));
-#elif defined(OS_CHROMEOS)
+#elif BUILDFLAG(IS_ASH)
return std::make_unique<chromeos::ChromeOsCdmFactory>(frame_interfaces);
#else
return nullptr;
diff --git a/chromium/media/mojo/services/gpu_mojo_media_client.h b/chromium/media/mojo/services/gpu_mojo_media_client.h
index e62511df65a..27dbb020c99 100644
--- a/chromium/media/mojo/services/gpu_mojo_media_client.h
+++ b/chromium/media/mojo/services/gpu_mojo_media_client.h
@@ -29,7 +29,7 @@ namespace media {
class MediaGpuChannelManager;
-class GpuMojoMediaClient : public MojoMediaClient {
+class GpuMojoMediaClient final : public MojoMediaClient {
public:
// |media_gpu_channel_manager| must only be used on |gpu_task_runner|, which
// is expected to be the GPU main thread task runner.
diff --git a/chromium/media/mojo/services/interface_factory_impl.cc b/chromium/media/mojo/services/interface_factory_impl.cc
index 637718e5200..bc04bef061e 100644
--- a/chromium/media/mojo/services/interface_factory_impl.cc
+++ b/chromium/media/mojo/services/interface_factory_impl.cc
@@ -24,7 +24,7 @@
#endif // BUILDFLAG(ENABLE_MOJO_VIDEO_DECODER)
#if BUILDFLAG(ENABLE_MOJO_RENDERER) || BUILDFLAG(ENABLE_CAST_RENDERER)
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "media/base/renderer.h"
#include "media/mojo/services/mojo_renderer_service.h"
#endif // BUILDFLAG(ENABLE_MOJO_RENDERER) || BUILDFLAG(ENABLE_CAST_RENDERER)
@@ -108,7 +108,7 @@ void InterfaceFactoryImpl::CreateDefaultRenderer(
// base::Unretained() is safe because the callback will be fired by
// |mojo_renderer_service|, which is owned by |renderer_receivers_|.
- mojo_renderer_service_ptr->set_bad_message_cb(base::Bind(
+ mojo_renderer_service_ptr->set_bad_message_cb(base::BindRepeating(
base::IgnoreResult(&mojo::UniqueReceiverSet<mojom::Renderer>::Remove),
base::Unretained(&renderer_receivers_), receiver_id));
#endif // BUILDFLAG(ENABLE_MOJO_RENDERER)
diff --git a/chromium/media/mojo/services/interface_factory_impl.h b/chromium/media/mojo/services/interface_factory_impl.h
index 5b3a4d07c41..c8341e6a35b 100644
--- a/chromium/media/mojo/services/interface_factory_impl.h
+++ b/chromium/media/mojo/services/interface_factory_impl.h
@@ -34,7 +34,8 @@ namespace media {
class CdmFactory;
class MojoMediaClient;
-class InterfaceFactoryImpl : public DeferredDestroy<mojom::InterfaceFactory> {
+class InterfaceFactoryImpl final
+ : public DeferredDestroy<mojom::InterfaceFactory> {
public:
InterfaceFactoryImpl(
mojo::PendingRemote<mojom::FrameInterfaceFactory> frame_interfaces,
diff --git a/chromium/media/mojo/services/media_metrics_provider_unittest.cc b/chromium/media/mojo/services/media_metrics_provider_unittest.cc
index d68ae39f71d..af54242353c 100644
--- a/chromium/media/mojo/services/media_metrics_provider_unittest.cc
+++ b/chromium/media/mojo/services/media_metrics_provider_unittest.cc
@@ -6,7 +6,7 @@
#include <memory>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/run_loop.h"
#include "base/test/metrics/histogram_tester.h"
#include "base/test/test_message_loop.h"
diff --git a/chromium/media/mojo/services/media_resource_shim.cc b/chromium/media/mojo/services/media_resource_shim.cc
index 452da3f3879..2523f28b52d 100644
--- a/chromium/media/mojo/services/media_resource_shim.cc
+++ b/chromium/media/mojo/services/media_resource_shim.cc
@@ -7,7 +7,6 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback_helpers.h"
namespace media {
@@ -21,8 +20,8 @@ MediaResourceShim::MediaResourceShim(
for (auto& s : streams) {
streams_.emplace_back(new MojoDemuxerStreamAdapter(
- std::move(s), base::Bind(&MediaResourceShim::OnStreamReady,
- weak_factory_.GetWeakPtr())));
+ std::move(s), base::BindOnce(&MediaResourceShim::OnStreamReady,
+ weak_factory_.GetWeakPtr())));
}
}
diff --git a/chromium/media/mojo/services/media_service.h b/chromium/media/mojo/services/media_service.h
index 01f72aec1eb..c1b9dddc270 100644
--- a/chromium/media/mojo/services/media_service.h
+++ b/chromium/media/mojo/services/media_service.h
@@ -22,7 +22,7 @@ namespace media {
class MojoMediaClient;
-class MEDIA_MOJO_EXPORT MediaService : public mojom::MediaService {
+class MEDIA_MOJO_EXPORT MediaService final : public mojom::MediaService {
public:
MediaService(std::unique_ptr<MojoMediaClient> mojo_media_client,
mojo::PendingReceiver<mojom::MediaService> receiver);
diff --git a/chromium/media/mojo/services/mojo_audio_decoder_service.cc b/chromium/media/mojo/services/mojo_audio_decoder_service.cc
index 66157cd1431..f73a6f00d2d 100644
--- a/chromium/media/mojo/services/mojo_audio_decoder_service.cc
+++ b/chromium/media/mojo/services/mojo_audio_decoder_service.cc
@@ -7,7 +7,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/logging.h"
#include "media/base/content_decryption_module.h"
#include "media/mojo/common/media_type_converters.h"
diff --git a/chromium/media/mojo/services/mojo_audio_decoder_service.h b/chromium/media/mojo/services/mojo_audio_decoder_service.h
index d827174e87f..3b0b340196a 100644
--- a/chromium/media/mojo/services/mojo_audio_decoder_service.h
+++ b/chromium/media/mojo/services/mojo_audio_decoder_service.h
@@ -24,7 +24,8 @@ namespace media {
class MojoCdmServiceContext;
class MojoDecoderBufferReader;
-class MEDIA_MOJO_EXPORT MojoAudioDecoderService : public mojom::AudioDecoder {
+class MEDIA_MOJO_EXPORT MojoAudioDecoderService final
+ : public mojom::AudioDecoder {
public:
MojoAudioDecoderService(MojoCdmServiceContext* mojo_cdm_service_context,
std::unique_ptr<media::AudioDecoder> decoder);
diff --git a/chromium/media/mojo/services/mojo_audio_output_stream_provider_unittest.cc b/chromium/media/mojo/services/mojo_audio_output_stream_provider_unittest.cc
index 4bc9f34f0a3..b3bca797469 100644
--- a/chromium/media/mojo/services/mojo_audio_output_stream_provider_unittest.cc
+++ b/chromium/media/mojo/services/mojo_audio_output_stream_provider_unittest.cc
@@ -7,7 +7,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/run_loop.h"
#include "base/test/mock_callback.h"
#include "base/test/task_environment.h"
diff --git a/chromium/media/mojo/services/mojo_cdm_allocator.cc b/chromium/media/mojo/services/mojo_cdm_allocator.cc
index afdd0a196cc..f0b6d97c91a 100644
--- a/chromium/media/mojo/services/mojo_cdm_allocator.cc
+++ b/chromium/media/mojo/services/mojo_cdm_allocator.cc
@@ -26,7 +26,7 @@ namespace {
// cdm::Buffer implementation that provides access to mojo shared memory.
// It owns the memory until Destroy() is called.
-class MojoCdmBuffer : public cdm::Buffer {
+class MojoCdmBuffer final : public cdm::Buffer {
public:
static MojoCdmBuffer* Create(
mojo::ScopedSharedBufferHandle buffer,
@@ -108,7 +108,7 @@ class MojoCdmBuffer : public cdm::Buffer {
// VideoFrameImpl that is able to create a MojoSharedBufferVideoFrame
// out of the data.
-class MojoCdmVideoFrame : public VideoFrameImpl {
+class MojoCdmVideoFrame final : public VideoFrameImpl {
public:
explicit MojoCdmVideoFrame(MojoSharedBufferVideoFrame::MojoSharedBufferDoneCB
mojo_shared_buffer_done_cb)
diff --git a/chromium/media/mojo/services/mojo_cdm_allocator.h b/chromium/media/mojo/services/mojo_cdm_allocator.h
index 37061ac704b..1a362245b3e 100644
--- a/chromium/media/mojo/services/mojo_cdm_allocator.h
+++ b/chromium/media/mojo/services/mojo_cdm_allocator.h
@@ -21,7 +21,7 @@
namespace media {
// This is a CdmAllocator that creates buffers using mojo shared memory.
-class MEDIA_MOJO_EXPORT MojoCdmAllocator : public CdmAllocator {
+class MEDIA_MOJO_EXPORT MojoCdmAllocator final : public CdmAllocator {
public:
MojoCdmAllocator();
~MojoCdmAllocator() final;
diff --git a/chromium/media/mojo/services/mojo_cdm_promise.h b/chromium/media/mojo/services/mojo_cdm_promise.h
index 38b79780759..b6c0988eb97 100644
--- a/chromium/media/mojo/services/mojo_cdm_promise.h
+++ b/chromium/media/mojo/services/mojo_cdm_promise.h
@@ -19,7 +19,7 @@ namespace media {
// callback can be passed in by value or as const-refs. Find a better solution
// to handle this.
template <typename F, typename... T>
-class MojoCdmPromise : public CdmPromiseTemplate<T...> {
+class MojoCdmPromise final : public CdmPromiseTemplate<T...> {
public:
using CallbackType = base::OnceCallback<F>;
diff --git a/chromium/media/mojo/services/mojo_cdm_service.h b/chromium/media/mojo/services/mojo_cdm_service.h
index 3afadec7671..ec27caa040d 100644
--- a/chromium/media/mojo/services/mojo_cdm_service.h
+++ b/chromium/media/mojo/services/mojo_cdm_service.h
@@ -32,7 +32,8 @@ class CdmFactory;
// A mojom::ContentDecryptionModule implementation backed by a
// media::ContentDecryptionModule.
-class MEDIA_MOJO_EXPORT MojoCdmService : public mojom::ContentDecryptionModule {
+class MEDIA_MOJO_EXPORT MojoCdmService final
+ : public mojom::ContentDecryptionModule {
public:
using CdmServiceCreatedCB =
base::OnceCallback<void(std::unique_ptr<MojoCdmService> mojo_cdm_service,
diff --git a/chromium/media/mojo/services/mojo_decryptor_service.h b/chromium/media/mojo/services/mojo_decryptor_service.h
index d5cf9866b34..5665be4c30b 100644
--- a/chromium/media/mojo/services/mojo_decryptor_service.h
+++ b/chromium/media/mojo/services/mojo_decryptor_service.h
@@ -26,7 +26,7 @@ class MojoDecoderBufferWriter;
// A mojom::Decryptor implementation that proxies decryptor calls to a
// media::Decryptor.
-class MEDIA_MOJO_EXPORT MojoDecryptorService : public mojom::Decryptor {
+class MEDIA_MOJO_EXPORT MojoDecryptorService final : public mojom::Decryptor {
public:
using StreamType = media::Decryptor::StreamType;
using Status = media::Decryptor::Status;
diff --git a/chromium/media/mojo/services/mojo_demuxer_stream_adapter.cc b/chromium/media/mojo/services/mojo_demuxer_stream_adapter.cc
index e958289ee7b..accafab8e28 100644
--- a/chromium/media/mojo/services/mojo_demuxer_stream_adapter.cc
+++ b/chromium/media/mojo/services/mojo_demuxer_stream_adapter.cc
@@ -19,9 +19,9 @@ namespace media {
MojoDemuxerStreamAdapter::MojoDemuxerStreamAdapter(
mojo::PendingRemote<mojom::DemuxerStream> demuxer_stream,
- const base::Closure& stream_ready_cb)
+ base::OnceClosure stream_ready_cb)
: demuxer_stream_(std::move(demuxer_stream)),
- stream_ready_cb_(stream_ready_cb),
+ stream_ready_cb_(std::move(stream_ready_cb)),
type_(UNKNOWN) {
DVLOG(1) << __func__;
demuxer_stream_->Initialize(base::BindOnce(
@@ -81,7 +81,7 @@ void MojoDemuxerStreamAdapter::OnStreamReady(
UpdateConfig(std::move(audio_config), std::move(video_config));
- stream_ready_cb_.Run();
+ std::move(stream_ready_cb_).Run();
}
void MojoDemuxerStreamAdapter::OnBufferReady(
diff --git a/chromium/media/mojo/services/mojo_demuxer_stream_adapter.h b/chromium/media/mojo/services/mojo_demuxer_stream_adapter.h
index 513a7d559ac..1cb22c8ea3f 100644
--- a/chromium/media/mojo/services/mojo_demuxer_stream_adapter.h
+++ b/chromium/media/mojo/services/mojo_demuxer_stream_adapter.h
@@ -35,7 +35,7 @@ class MojoDemuxerStreamAdapter : public DemuxerStream {
// NOTE: Illegal to call any methods until |stream_ready_cb| is invoked.
MojoDemuxerStreamAdapter(
mojo::PendingRemote<mojom::DemuxerStream> demuxer_stream,
- const base::Closure& stream_ready_cb);
+ base::OnceClosure stream_ready_cb);
~MojoDemuxerStreamAdapter() override;
// DemuxerStream implementation.
@@ -66,7 +66,7 @@ class MojoDemuxerStreamAdapter : public DemuxerStream {
// See constructor for descriptions.
mojo::Remote<mojom::DemuxerStream> demuxer_stream_;
- base::Closure stream_ready_cb_;
+ base::OnceClosure stream_ready_cb_;
// The last ReadCB received through a call to Read().
// Used to store the results of OnBufferReady() in the event it is called
diff --git a/chromium/media/mojo/services/mojo_media_drm_storage.cc b/chromium/media/mojo/services/mojo_media_drm_storage.cc
index 577cc013f9f..113535f14ff 100644
--- a/chromium/media/mojo/services/mojo_media_drm_storage.cc
+++ b/chromium/media/mojo/services/mojo_media_drm_storage.cc
@@ -8,7 +8,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/optional.h"
#include "base/unguessable_token.h"
#include "media/mojo/mojom/media_drm_storage.mojom.h"
diff --git a/chromium/media/mojo/services/mojo_provision_fetcher.cc b/chromium/media/mojo/services/mojo_provision_fetcher.cc
index 849804f3eff..e4560640223 100644
--- a/chromium/media/mojo/services/mojo_provision_fetcher.cc
+++ b/chromium/media/mojo/services/mojo_provision_fetcher.cc
@@ -17,7 +17,7 @@ MojoProvisionFetcher::MojoProvisionFetcher(
MojoProvisionFetcher::~MojoProvisionFetcher() = default;
// ProvisionFetcher implementation:
-void MojoProvisionFetcher::Retrieve(const std::string& default_url,
+void MojoProvisionFetcher::Retrieve(const GURL& default_url,
const std::string& request_data,
ResponseCB response_cb) {
DVLOG(1) << __func__;
diff --git a/chromium/media/mojo/services/mojo_provision_fetcher.h b/chromium/media/mojo/services/mojo_provision_fetcher.h
index 268dd30e8f4..b1b0cfe8899 100644
--- a/chromium/media/mojo/services/mojo_provision_fetcher.h
+++ b/chromium/media/mojo/services/mojo_provision_fetcher.h
@@ -17,14 +17,14 @@
namespace media {
// A ProvisionFetcher that proxies to a Remote<mojom::ProvisionFetcher>.
-class MEDIA_MOJO_EXPORT MojoProvisionFetcher : public ProvisionFetcher {
+class MEDIA_MOJO_EXPORT MojoProvisionFetcher final : public ProvisionFetcher {
public:
explicit MojoProvisionFetcher(
mojo::PendingRemote<mojom::ProvisionFetcher> provision_fetcher);
~MojoProvisionFetcher() final;
// ProvisionFetcher implementation:
- void Retrieve(const std::string& default_url,
+ void Retrieve(const GURL& default_url,
const std::string& request_data,
ResponseCB response_cb) final;
diff --git a/chromium/media/mojo/services/mojo_renderer_service.cc b/chromium/media/mojo/services/mojo_renderer_service.cc
index 294f3bd898f..4de989317ba 100644
--- a/chromium/media/mojo/services/mojo_renderer_service.cc
+++ b/chromium/media/mojo/services/mojo_renderer_service.cc
@@ -45,7 +45,7 @@ mojo::SelfOwnedReceiverRef<mojom::Renderer> MojoRendererService::Create(
std::move(receiver));
service->set_bad_message_cb(
- base::Bind(&CloseReceiverOnBadMessage, self_owned_receiver));
+ base::BindRepeating(&CloseReceiverOnBadMessage, self_owned_receiver));
return self_owned_receiver;
}
diff --git a/chromium/media/mojo/services/mojo_renderer_service.h b/chromium/media/mojo/services/mojo_renderer_service.h
index 1bc2d12f008..6bff14670e6 100644
--- a/chromium/media/mojo/services/mojo_renderer_service.h
+++ b/chromium/media/mojo/services/mojo_renderer_service.h
@@ -36,8 +36,8 @@ class Renderer;
// A mojom::Renderer implementation that use a media::Renderer to render
// media streams.
-class MEDIA_MOJO_EXPORT MojoRendererService : public mojom::Renderer,
- public RendererClient {
+class MEDIA_MOJO_EXPORT MojoRendererService final : public mojom::Renderer,
+ public RendererClient {
public:
// Helper function to bind MojoRendererService with a SelfOwendReceiver,
// which is safely accessible via the returned SelfOwnedReceiverRef.
diff --git a/chromium/media/mojo/services/mojo_video_decoder_service.cc b/chromium/media/mojo/services/mojo_video_decoder_service.cc
index c0d97292b1a..15a1f7249a5 100644
--- a/chromium/media/mojo/services/mojo_video_decoder_service.cc
+++ b/chromium/media/mojo/services/mojo_video_decoder_service.cc
@@ -7,7 +7,7 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/macros.h"
#include "base/metrics/histogram_macros.h"
diff --git a/chromium/media/mojo/services/mojo_video_encode_accelerator_service_unittest.cc b/chromium/media/mojo/services/mojo_video_encode_accelerator_service_unittest.cc
index ad08c0ad503..1ba08b485ec 100644
--- a/chromium/media/mojo/services/mojo_video_encode_accelerator_service_unittest.cc
+++ b/chromium/media/mojo/services/mojo_video_encode_accelerator_service_unittest.cc
@@ -5,7 +5,7 @@
#include <stddef.h>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/memory/ptr_util.h"
#include "base/run_loop.h"
#include "base/test/task_environment.h"
diff --git a/chromium/media/mojo/services/playback_events_recorder.h b/chromium/media/mojo/services/playback_events_recorder.h
index 25b5e7a3c51..39f248e03f7 100644
--- a/chromium/media/mojo/services/playback_events_recorder.h
+++ b/chromium/media/mojo/services/playback_events_recorder.h
@@ -12,7 +12,8 @@
namespace media {
-class MEDIA_MOJO_EXPORT PlaybackEventsRecorder : public mojom::PlaybackEventsRecorder {
+class MEDIA_MOJO_EXPORT PlaybackEventsRecorder final
+ : public mojom::PlaybackEventsRecorder {
public:
static void Create(
mojo::PendingReceiver<mojom::PlaybackEventsRecorder> receiver);
diff --git a/chromium/media/mojo/services/test_mojo_media_client.h b/chromium/media/mojo/services/test_mojo_media_client.h
index c8899c05404..495da4c8536 100644
--- a/chromium/media/mojo/services/test_mojo_media_client.h
+++ b/chromium/media/mojo/services/test_mojo_media_client.h
@@ -22,7 +22,7 @@ class RendererFactory;
class VideoRendererSink;
// Test MojoMediaClient for MediaService.
-class TestMojoMediaClient : public MojoMediaClient {
+class TestMojoMediaClient final : public MojoMediaClient {
public:
TestMojoMediaClient();
~TestMojoMediaClient() final;
diff --git a/chromium/media/mojo/services/video_decode_perf_history_unittest.cc b/chromium/media/mojo/services/video_decode_perf_history_unittest.cc
index d6760e76553..a475f85b295 100644
--- a/chromium/media/mojo/services/video_decode_perf_history_unittest.cc
+++ b/chromium/media/mojo/services/video_decode_perf_history_unittest.cc
@@ -11,7 +11,7 @@
#include "base/strings/string_number_conversions.h"
#include "base/strings/stringprintf.h"
#include "base/task/post_task.h"
-#include "base/test/bind_test_util.h"
+#include "base/test/bind.h"
#include "base/test/scoped_feature_list.h"
#include "base/test/task_environment.h"
#include "components/ukm/test_ukm_recorder.h"
diff --git a/chromium/media/mojo/services/watch_time_recorder_unittest.cc b/chromium/media/mojo/services/watch_time_recorder_unittest.cc
index 17bcf4f932c..bf1e96c8cbe 100644
--- a/chromium/media/mojo/services/watch_time_recorder_unittest.cc
+++ b/chromium/media/mojo/services/watch_time_recorder_unittest.cc
@@ -10,7 +10,7 @@
#include <vector>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/hash/hash.h"
#include "base/run_loop.h"
#include "base/strings/string_number_conversions.h"
diff --git a/chromium/media/muxers/DIR_METADATA b/chromium/media/muxers/DIR_METADATA
new file mode 100644
index 00000000000..af1480ef95e
--- /dev/null
+++ b/chromium/media/muxers/DIR_METADATA
@@ -0,0 +1,11 @@
+# Metadata information for this directory.
+#
+# For more information on DIR_METADATA files, see:
+# https://source.chromium.org/chromium/infra/infra/+/master:go/src/infra/tools/dirmd/README.md
+#
+# For the schema of this file, see Metadata message:
+# https://source.chromium.org/chromium/infra/infra/+/master:go/src/infra/tools/dirmd/proto/dir_metadata.proto
+
+monorail {
+ component: "Blink>MediaRecording"
+} \ No newline at end of file
diff --git a/chromium/media/muxers/OWNERS b/chromium/media/muxers/OWNERS
index 4c73cbda665..ded4b7ed050 100644
--- a/chromium/media/muxers/OWNERS
+++ b/chromium/media/muxers/OWNERS
@@ -1,4 +1,2 @@
# Original (legacy) owner.
mcasas@chromium.org
-
-# COMPONENT: Blink>MediaRecording
diff --git a/chromium/media/muxers/webm_muxer.cc b/chromium/media/muxers/webm_muxer.cc
index e0a01a03f93..f4e9be0754a 100644
--- a/chromium/media/muxers/webm_muxer.cc
+++ b/chromium/media/muxers/webm_muxer.cc
@@ -9,6 +9,7 @@
#include "base/bind.h"
#include "base/logging.h"
+#include "base/sequence_checker.h"
#include "media/base/audio_parameters.h"
#include "media/base/limits.h"
#include "media/base/video_frame.h"
@@ -182,16 +183,13 @@ WebmMuxer::WebmMuxer(AudioCodec audio_codec,
info->set_writing_app("Chrome");
info->set_muxing_app("Chrome");
- // Creation is done on a different thread than main activities.
- thread_checker_.DetachFromThread();
+ // Creation can be done on a different sequence than main activities.
+ DETACH_FROM_SEQUENCE(sequence_checker_);
}
WebmMuxer::~WebmMuxer() {
- // No need to segment_.Finalize() since is not Seekable(), i.e. a live
- // stream, but is a good practice.
- DCHECK(thread_checker_.CalledOnValidThread());
- FlushQueues();
- segment_.Finalize();
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ Flush();
}
bool WebmMuxer::OnEncodedVideo(const VideoParameters& params,
@@ -200,7 +198,7 @@ bool WebmMuxer::OnEncodedVideo(const VideoParameters& params,
base::TimeTicks timestamp,
bool is_key_frame) {
DVLOG(1) << __func__ << " - " << encoded_data.size() << "B";
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(params.codec == kCodecVP8 || params.codec == kCodecVP9 ||
params.codec == kCodecH264)
<< " Unsupported video codec: " << GetCodecName(params.codec);
@@ -244,7 +242,7 @@ bool WebmMuxer::OnEncodedAudio(const media::AudioParameters& params,
std::string encoded_data,
base::TimeTicks timestamp) {
DVLOG(2) << __func__ << " - " << encoded_data.size() << "B";
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (!audio_track_index_) {
AddAudioTrack(params);
@@ -266,25 +264,33 @@ bool WebmMuxer::OnEncodedAudio(const media::AudioParameters& params,
void WebmMuxer::Pause() {
DVLOG(1) << __func__;
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (!elapsed_time_in_pause_)
elapsed_time_in_pause_.reset(new base::ElapsedTimer());
}
void WebmMuxer::Resume() {
DVLOG(1) << __func__;
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
if (elapsed_time_in_pause_) {
total_time_in_pause_ += elapsed_time_in_pause_->Elapsed();
elapsed_time_in_pause_.reset();
}
}
+bool WebmMuxer::Flush() {
+ // No need to segment_.Finalize() since is not Seekable(), i.e. a live
+ // stream, but is a good practice.
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
+ FlushQueues();
+ return segment_.Finalize();
+}
+
void WebmMuxer::AddVideoTrack(
const gfx::Size& frame_size,
double frame_rate,
const base::Optional<gfx::ColorSpace>& color_space) {
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK_EQ(0u, video_track_index_)
<< "WebmMuxer can only be initialized once.";
@@ -329,7 +335,7 @@ void WebmMuxer::AddVideoTrack(
void WebmMuxer::AddAudioTrack(const media::AudioParameters& params) {
DVLOG(1) << __func__ << " " << params.AsHumanReadableString();
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK_EQ(0u, audio_track_index_)
<< "WebmMuxer audio can only be initialised once.";
@@ -370,7 +376,7 @@ void WebmMuxer::AddAudioTrack(const media::AudioParameters& params) {
}
mkvmuxer::int32 WebmMuxer::Write(const void* buf, mkvmuxer::uint32 len) {
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(buf);
write_data_callback_.Run(
base::StringPiece(reinterpret_cast<const char*>(buf), len));
@@ -399,14 +405,14 @@ void WebmMuxer::ElementStartNotify(mkvmuxer::uint64 element_id,
}
void WebmMuxer::FlushQueues() {
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
while ((!video_frames_.empty() || !audio_frames_.empty()) &&
FlushNextFrame()) {
}
}
bool WebmMuxer::PartiallyFlushQueues() {
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
bool result = true;
while (!(has_video_ && video_frames_.empty()) &&
!(has_audio_ && audio_frames_.empty()) && result) {
@@ -416,7 +422,7 @@ bool WebmMuxer::PartiallyFlushQueues() {
}
bool WebmMuxer::FlushNextFrame() {
- DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
base::TimeDelta min_timestamp = base::TimeDelta::Max();
base::circular_deque<EncodedFrame>* queue = &video_frames_;
uint8_t track_index = video_track_index_;
diff --git a/chromium/media/muxers/webm_muxer.h b/chromium/media/muxers/webm_muxer.h
index 59c628f2af7..f22f3c12438 100644
--- a/chromium/media/muxers/webm_muxer.h
+++ b/chromium/media/muxers/webm_muxer.h
@@ -13,8 +13,8 @@
#include "base/containers/circular_deque.h"
#include "base/macros.h"
#include "base/numerics/safe_math.h"
+#include "base/sequence_checker.h"
#include "base/strings/string_piece.h"
-#include "base/threading/thread_checker.h"
#include "base/time/time.h"
#include "base/timer/elapsed_timer.h"
#include "media/base/audio_codecs.h"
@@ -89,6 +89,10 @@ class MEDIA_EXPORT WebmMuxer : public mkvmuxer::IMkvWriter {
void Pause();
void Resume();
+ // Drains and writes out all buffered frames and finalizes the segment.
+ // Returns true on success, false otherwise.
+ bool Flush();
+
void ForceOneLibWebmErrorForTesting() { force_one_libwebm_error_ = true; }
private:
@@ -137,9 +141,6 @@ class MEDIA_EXPORT WebmMuxer : public mkvmuxer::IMkvWriter {
base::TimeTicks timestamp,
base::TimeTicks* last_timestamp);
- // Used to DCHECK that we are called on the correct thread.
- base::ThreadChecker thread_checker_;
-
// Audio codec configured on construction. Video codec is taken from first
// received frame.
const AudioCodec audio_codec_;
@@ -192,6 +193,8 @@ class MEDIA_EXPORT WebmMuxer : public mkvmuxer::IMkvWriter {
// frame appears.
base::circular_deque<EncodedFrame> video_frames_;
+ SEQUENCE_CHECKER(sequence_checker_);
+
DISALLOW_COPY_AND_ASSIGN(WebmMuxer);
};
diff --git a/chromium/media/remoting/DIR_METADATA b/chromium/media/remoting/DIR_METADATA
new file mode 100644
index 00000000000..9e255559a4f
--- /dev/null
+++ b/chromium/media/remoting/DIR_METADATA
@@ -0,0 +1,11 @@
+# Metadata information for this directory.
+#
+# For more information on DIR_METADATA files, see:
+# https://source.chromium.org/chromium/infra/infra/+/master:go/src/infra/tools/dirmd/README.md
+#
+# For the schema of this file, see Metadata message:
+# https://source.chromium.org/chromium/infra/infra/+/master:go/src/infra/tools/dirmd/proto/dir_metadata.proto
+
+monorail {
+ component: "Internals>Cast>Streaming"
+} \ No newline at end of file
diff --git a/chromium/media/remoting/OWNERS b/chromium/media/remoting/OWNERS
index 84ce14cb44d..124883ef14a 100644
--- a/chromium/media/remoting/OWNERS
+++ b/chromium/media/remoting/OWNERS
@@ -1,5 +1,3 @@
erickung@chromium.org
miu@chromium.org
mfoltz@chromium.org
-
-# COMPONENT: Internals>Cast>Streaming
diff --git a/chromium/media/remoting/courier_renderer.cc b/chromium/media/remoting/courier_renderer.cc
index d7ec647a809..c7d32b9cd5a 100644
--- a/chromium/media/remoting/courier_renderer.cc
+++ b/chromium/media/remoting/courier_renderer.cc
@@ -9,7 +9,6 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback_helpers.h"
#include "base/memory/ptr_util.h"
#include "base/numerics/safe_math.h"
diff --git a/chromium/media/remoting/courier_renderer.h b/chromium/media/remoting/courier_renderer.h
index d7befc5fb33..5a2a9ae00a6 100644
--- a/chromium/media/remoting/courier_renderer.h
+++ b/chromium/media/remoting/courier_renderer.h
@@ -39,7 +39,7 @@ class RendererController;
// A media::Renderer implementation that proxies all operations to a remote
// renderer via RPCs. The CourierRenderer is instantiated by
// AdaptiveRendererFactory when media remoting is meant to take place.
-class CourierRenderer : public Renderer {
+class CourierRenderer final : public Renderer {
public:
// The whole class except for constructor and GetMediaTime() runs on
// |media_task_runner|. The constructor and GetMediaTime() run on render main
diff --git a/chromium/media/remoting/end2end_test_renderer.cc b/chromium/media/remoting/end2end_test_renderer.cc
index 932e0ab2fdc..9d75b130868 100644
--- a/chromium/media/remoting/end2end_test_renderer.cc
+++ b/chromium/media/remoting/end2end_test_renderer.cc
@@ -5,8 +5,8 @@
#include "media/remoting/end2end_test_renderer.h"
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback.h"
+#include "base/callback_helpers.h"
#include "base/check.h"
#include "base/notreached.h"
#include "base/threading/thread_task_runner_handle.h"
diff --git a/chromium/media/remoting/fake_remoter.cc b/chromium/media/remoting/fake_remoter.cc
index 109945eefe3..096b0bf6945 100644
--- a/chromium/media/remoting/fake_remoter.cc
+++ b/chromium/media/remoting/fake_remoter.cc
@@ -7,8 +7,8 @@
#include <memory>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback.h"
+#include "base/callback_helpers.h"
#include "base/threading/thread_task_runner_handle.h"
#include "build/buildflag.h"
#include "media/media_buildflags.h"
diff --git a/chromium/media/renderers/audio_renderer_impl.cc b/chromium/media/renderers/audio_renderer_impl.cc
index cef105e2bd8..bf96c3f98d8 100644
--- a/chromium/media/renderers/audio_renderer_impl.cc
+++ b/chromium/media/renderers/audio_renderer_impl.cc
@@ -389,9 +389,9 @@ void AudioRendererImpl::Initialize(DemuxerStream* stream,
#if !defined(OS_ANDROID)
if (speech_recognition_client_) {
- speech_recognition_client_->SetOnReadyCallback(
+ speech_recognition_client_->SetOnReadyCallback(BindToCurrentLoop(
base::BindOnce(&AudioRendererImpl::EnableSpeechRecognition,
- weak_factory_.GetWeakPtr()));
+ weak_factory_.GetWeakPtr())));
}
#endif
}
@@ -959,7 +959,7 @@ bool AudioRendererImpl::HandleDecodedBuffer_Locked(
first_packet_timestamp_ = buffer->timestamp();
#if !defined(OS_ANDROID)
- if (transcribe_audio_callback_)
+ if (transcribe_audio_callback_ && volume_ > 0)
transcribe_audio_callback_.Run(buffer);
#endif
diff --git a/chromium/media/renderers/decrypting_renderer.cc b/chromium/media/renderers/decrypting_renderer.cc
index b231ec7a40a..eb0ca87f077 100644
--- a/chromium/media/renderers/decrypting_renderer.cc
+++ b/chromium/media/renderers/decrypting_renderer.cc
@@ -5,7 +5,7 @@
#include "media/renderers/decrypting_renderer.h"
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "media/base/cdm_context.h"
#include "media/base/demuxer_stream.h"
#include "media/base/media_log.h"
diff --git a/chromium/media/renderers/decrypting_renderer_factory.h b/chromium/media/renderers/decrypting_renderer_factory.h
index 8c30f5c856e..6b0e12cd578 100644
--- a/chromium/media/renderers/decrypting_renderer_factory.h
+++ b/chromium/media/renderers/decrypting_renderer_factory.h
@@ -21,7 +21,7 @@ class MediaLog;
//
// The caller must guarantee that the returned DecryptingRenderer will never
// be initialized with a |media_resource| of type MediaResource::Type::URL.
-class MEDIA_EXPORT DecryptingRendererFactory : public RendererFactory {
+class MEDIA_EXPORT DecryptingRendererFactory final : public RendererFactory {
public:
DecryptingRendererFactory(
MediaLog* media_log,
diff --git a/chromium/media/renderers/decrypting_renderer_unittest.cc b/chromium/media/renderers/decrypting_renderer_unittest.cc
index 3e6f88d7a85..c773d534b99 100644
--- a/chromium/media/renderers/decrypting_renderer_unittest.cc
+++ b/chromium/media/renderers/decrypting_renderer_unittest.cc
@@ -5,7 +5,7 @@
#include "media/renderers/decrypting_renderer.h"
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/single_thread_task_runner.h"
#include "base/test/gmock_callback_support.h"
#include "base/test/mock_callback.h"
diff --git a/chromium/media/renderers/default_decoder_factory.cc b/chromium/media/renderers/default_decoder_factory.cc
index 452d6ad33ab..6e96a691b34 100644
--- a/chromium/media/renderers/default_decoder_factory.cc
+++ b/chromium/media/renderers/default_decoder_factory.cc
@@ -8,7 +8,7 @@
#include "base/command_line.h"
#include "base/feature_list.h"
-#include "base/single_thread_task_runner.h"
+#include "base/sequenced_task_runner.h"
#include "build/build_config.h"
#include "build/buildflag.h"
#include "components/viz/common/gpu/raster_context_provider.h"
@@ -30,10 +30,6 @@
#include "media/filters/fuchsia/fuchsia_video_decoder.h"
#endif
-#if BUILDFLAG(ENABLE_LIBAOM)
-#include "media/filters/aom_video_decoder.h"
-#endif
-
#if BUILDFLAG(ENABLE_DAV1D_DECODER)
#include "media/filters/dav1d_video_decoder.h"
#endif
@@ -63,7 +59,7 @@ DefaultDecoderFactory::DefaultDecoderFactory(
DefaultDecoderFactory::~DefaultDecoderFactory() = default;
void DefaultDecoderFactory::CreateAudioDecoders(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
MediaLog* media_log,
std::vector<std::unique_ptr<AudioDecoder>>* audio_decoders) {
base::AutoLock auto_lock(shutdown_lock_);
@@ -91,7 +87,7 @@ void DefaultDecoderFactory::CreateAudioDecoders(
}
void DefaultDecoderFactory::CreateVideoDecoders(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
GpuVideoAcceleratorFactories* gpu_factories,
MediaLog* media_log,
RequestOverlayInfoCB request_overlay_info_cb,
@@ -133,9 +129,7 @@ void DefaultDecoderFactory::CreateVideoDecoders(
//
// TODO(crbug.com/580386): Handle context loss properly.
if (context_provider) {
- video_decoders->push_back(
- CreateFuchsiaVideoDecoder(gpu_factories->SharedImageInterface(),
- context_provider->ContextSupport()));
+ video_decoders->push_back(CreateFuchsiaVideoDecoder(context_provider));
} else {
DLOG(ERROR)
<< "Can't create FuchsiaVideoDecoder due to GPU context loss.";
@@ -163,8 +157,6 @@ void DefaultDecoderFactory::CreateVideoDecoders(
#if BUILDFLAG(ENABLE_DAV1D_DECODER)
video_decoders->push_back(
std::make_unique<OffloadingDav1dVideoDecoder>(media_log));
-#elif BUILDFLAG(ENABLE_LIBAOM)
- video_decoders->push_back(std::make_unique<AomVideoDecoder>(media_log));
#endif
}
diff --git a/chromium/media/renderers/default_decoder_factory.h b/chromium/media/renderers/default_decoder_factory.h
index e2a0575d50e..966a81dbfbc 100644
--- a/chromium/media/renderers/default_decoder_factory.h
+++ b/chromium/media/renderers/default_decoder_factory.h
@@ -13,7 +13,7 @@
namespace media {
-class MEDIA_EXPORT DefaultDecoderFactory : public DecoderFactory {
+class MEDIA_EXPORT DefaultDecoderFactory final : public DecoderFactory {
public:
// |external_decoder_factory| is optional decoder factory that provides
// additional decoders.
@@ -22,12 +22,12 @@ class MEDIA_EXPORT DefaultDecoderFactory : public DecoderFactory {
~DefaultDecoderFactory() final;
void CreateAudioDecoders(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
MediaLog* media_log,
std::vector<std::unique_ptr<AudioDecoder>>* audio_decoders) final;
void CreateVideoDecoders(
- scoped_refptr<base::SingleThreadTaskRunner> task_runner,
+ scoped_refptr<base::SequencedTaskRunner> task_runner,
GpuVideoAcceleratorFactories* gpu_factories,
MediaLog* media_log,
RequestOverlayInfoCB request_overlay_info_cb,
diff --git a/chromium/media/renderers/default_renderer_factory.h b/chromium/media/renderers/default_renderer_factory.h
index 455ce1bb782..b6486df45d3 100644
--- a/chromium/media/renderers/default_renderer_factory.h
+++ b/chromium/media/renderers/default_renderer_factory.h
@@ -35,7 +35,7 @@ using CreateVideoDecodersCB =
base::RepeatingCallback<std::vector<std::unique_ptr<VideoDecoder>>()>;
// The default factory class for creating RendererImpl.
-class MEDIA_EXPORT DefaultRendererFactory : public RendererFactory {
+class MEDIA_EXPORT DefaultRendererFactory final : public RendererFactory {
public:
using GetGpuFactoriesCB =
base::RepeatingCallback<GpuVideoAcceleratorFactories*()>;
diff --git a/chromium/media/renderers/paint_canvas_video_renderer.cc b/chromium/media/renderers/paint_canvas_video_renderer.cc
index fe038908108..21133386d89 100644
--- a/chromium/media/renderers/paint_canvas_video_renderer.cc
+++ b/chromium/media/renderers/paint_canvas_video_renderer.cc
@@ -54,7 +54,7 @@
#define LIBYUV_I422_TO_ARGB libyuv::I422ToARGB
#define LIBYUV_I444_TO_ARGB libyuv::I444ToARGB
-#define LIBYUV_I420ALPHA_TO_ARGB libyuv::I420AlphaToARGB
+#define LIBYUV_I420ALPHA_TO_ARGB_MATRIX libyuv::I420AlphaToARGBMatrix
#define LIBYUV_J400_TO_ARGB libyuv::J400ToARGB
#define LIBYUV_J420_TO_ARGB libyuv::J420ToARGB
@@ -93,7 +93,7 @@
#define LIBYUV_I422_TO_ARGB libyuv::I422ToABGR
#define LIBYUV_I444_TO_ARGB libyuv::I444ToABGR
-#define LIBYUV_I420ALPHA_TO_ARGB libyuv::I420AlphaToABGR
+#define LIBYUV_I420ALPHA_TO_ARGB_MATRIX libyuv::I420AlphaToABGRMatrix
#define LIBYUV_J400_TO_ARGB libyuv::J400ToARGB
#define LIBYUV_J420_TO_ARGB libyuv::J420ToABGR
@@ -470,15 +470,58 @@ void ConvertVideoFrameToRGBPixelsTask(const VideoFrame* video_frame,
break;
case PIXEL_FORMAT_I420A:
- LIBYUV_I420ALPHA_TO_ARGB(plane_meta[VideoFrame::kYPlane].data,
- plane_meta[VideoFrame::kYPlane].stride,
- plane_meta[VideoFrame::kUPlane].data,
- plane_meta[VideoFrame::kUPlane].stride,
- plane_meta[VideoFrame::kVPlane].data,
- plane_meta[VideoFrame::kVPlane].stride,
- plane_meta[VideoFrame::kAPlane].data,
- plane_meta[VideoFrame::kAPlane].stride, pixels,
- row_bytes, width, rows, premultiply_alpha);
+ switch (color_space) {
+ case kJPEG_SkYUVColorSpace:
+ LIBYUV_I420ALPHA_TO_ARGB_MATRIX(
+ plane_meta[VideoFrame::kYPlane].data,
+ plane_meta[VideoFrame::kYPlane].stride,
+ plane_meta[VideoFrame::kUPlane].data,
+ plane_meta[VideoFrame::kUPlane].stride,
+ plane_meta[VideoFrame::kVPlane].data,
+ plane_meta[VideoFrame::kVPlane].stride,
+ plane_meta[VideoFrame::kAPlane].data,
+ plane_meta[VideoFrame::kAPlane].stride, pixels, row_bytes,
+ &libyuv::kYuvJPEGConstants, width, rows, premultiply_alpha);
+ break;
+ case kRec709_SkYUVColorSpace:
+ LIBYUV_I420ALPHA_TO_ARGB_MATRIX(
+ plane_meta[VideoFrame::kYPlane].data,
+ plane_meta[VideoFrame::kYPlane].stride,
+ plane_meta[VideoFrame::kUPlane].data,
+ plane_meta[VideoFrame::kUPlane].stride,
+ plane_meta[VideoFrame::kVPlane].data,
+ plane_meta[VideoFrame::kVPlane].stride,
+ plane_meta[VideoFrame::kAPlane].data,
+ plane_meta[VideoFrame::kAPlane].stride, pixels, row_bytes,
+ &libyuv::kYuvH709Constants, width, rows, premultiply_alpha);
+ break;
+ case kRec601_SkYUVColorSpace:
+ LIBYUV_I420ALPHA_TO_ARGB_MATRIX(
+ plane_meta[VideoFrame::kYPlane].data,
+ plane_meta[VideoFrame::kYPlane].stride,
+ plane_meta[VideoFrame::kUPlane].data,
+ plane_meta[VideoFrame::kUPlane].stride,
+ plane_meta[VideoFrame::kVPlane].data,
+ plane_meta[VideoFrame::kVPlane].stride,
+ plane_meta[VideoFrame::kAPlane].data,
+ plane_meta[VideoFrame::kAPlane].stride, pixels, row_bytes,
+ &libyuv::kYuvI601Constants, width, rows, premultiply_alpha);
+ break;
+ case kBT2020_SkYUVColorSpace:
+ LIBYUV_I420ALPHA_TO_ARGB_MATRIX(
+ plane_meta[VideoFrame::kYPlane].data,
+ plane_meta[VideoFrame::kYPlane].stride,
+ plane_meta[VideoFrame::kUPlane].data,
+ plane_meta[VideoFrame::kUPlane].stride,
+ plane_meta[VideoFrame::kVPlane].data,
+ plane_meta[VideoFrame::kVPlane].stride,
+ plane_meta[VideoFrame::kAPlane].data,
+ plane_meta[VideoFrame::kAPlane].stride, pixels, row_bytes,
+ &libyuv::kYuv2020Constants, width, rows, premultiply_alpha);
+ break;
+ default:
+ NOTREACHED();
+ }
break;
case PIXEL_FORMAT_I444:
@@ -1256,7 +1299,7 @@ bool PaintCanvasVideoRenderer::CopyVideoFrameTexturesToGLTexture(
video_frame->metadata()->read_lock_fences_enabled) {
if (!raster_context_provider)
return false;
- GrContext* gr_context = raster_context_provider->GrContext();
+ GrDirectContext* gr_context = raster_context_provider->GrContext();
if (!gr_context)
return false;
// TODO(crbug.com/1108154): Expand this uploading path to macOS, linux
diff --git a/chromium/media/renderers/renderer_impl.cc b/chromium/media/renderers/renderer_impl.cc
index 118db2d8234..d69595d7718 100644
--- a/chromium/media/renderers/renderer_impl.cc
+++ b/chromium/media/renderers/renderer_impl.cc
@@ -7,7 +7,6 @@
#include <utility>
#include "base/bind.h"
-#include "base/bind_helpers.h"
#include "base/callback.h"
#include "base/callback_helpers.h"
#include "base/command_line.h"
diff --git a/chromium/media/renderers/renderer_impl.h b/chromium/media/renderers/renderer_impl.h
index ae226a7d2da..c33d290bfc9 100644
--- a/chromium/media/renderers/renderer_impl.h
+++ b/chromium/media/renderers/renderer_impl.h
@@ -41,7 +41,7 @@ class TimeSource;
class VideoRenderer;
class WallClockTimeSource;
-class MEDIA_EXPORT RendererImpl : public Renderer {
+class MEDIA_EXPORT RendererImpl final : public Renderer {
public:
// Renders audio/video streams using |audio_renderer| and |video_renderer|
// provided. All methods except for GetMediaTime() run on the |task_runner|.
diff --git a/chromium/media/renderers/renderer_impl_unittest.cc b/chromium/media/renderers/renderer_impl_unittest.cc
index 169394e8fab..4ac4867400a 100644
--- a/chromium/media/renderers/renderer_impl_unittest.cc
+++ b/chromium/media/renderers/renderer_impl_unittest.cc
@@ -8,7 +8,7 @@
#include <vector>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/macros.h"
#include "base/optional.h"
#include "base/run_loop.h"
diff --git a/chromium/media/renderers/video_frame_yuv_converter.cc b/chromium/media/renderers/video_frame_yuv_converter.cc
index 261552c88bb..f9197b6ed73 100644
--- a/chromium/media/renderers/video_frame_yuv_converter.cc
+++ b/chromium/media/renderers/video_frame_yuv_converter.cc
@@ -16,8 +16,9 @@
#include "third_party/skia/include/core/SkImage.h"
#include "third_party/skia/include/core/SkRefCnt.h"
#include "third_party/skia/include/core/SkSurface.h"
-#include "third_party/skia/include/core/SkYUVAIndex.h"
+#include "third_party/skia/include/core/SkYUVAInfo.h"
#include "third_party/skia/include/gpu/GrDirectContext.h"
+#include "third_party/skia/include/gpu/GrYUVABackendTextures.h"
#include "third_party/skia/include/gpu/gl/GrGLTypes.h"
namespace media {
@@ -39,21 +40,25 @@ sk_sp<SkImage> YUVGrBackendTexturesToSkImage(
VideoPixelFormat video_format,
GrBackendTexture* yuv_textures,
const GrBackendTexture& result_texture) {
- SkYUVColorSpace color_space = ColorSpaceToSkYUVColorSpace(video_color_space);
-
+ SkYUVAInfo::PlanarConfig planar_config;
switch (video_format) {
case PIXEL_FORMAT_NV12:
- return SkImage::MakeFromNV12TexturesCopyWithExternalBackend(
- gr_context, color_space, yuv_textures, kTopLeft_GrSurfaceOrigin,
- result_texture);
+ planar_config = SkYUVAInfo::PlanarConfig::kY_UV_420;
+ break;
case PIXEL_FORMAT_I420:
- return SkImage::MakeFromYUVTexturesCopyWithExternalBackend(
- gr_context, color_space, yuv_textures, kTopLeft_GrSurfaceOrigin,
- result_texture);
+ planar_config = SkYUVAInfo::PlanarConfig::kY_U_V_420;
+ break;
default:
NOTREACHED();
return nullptr;
}
+ SkYUVColorSpace color_space = ColorSpaceToSkYUVColorSpace(video_color_space);
+ SkYUVAInfo yuva_info(result_texture.dimensions(), planar_config, color_space);
+ GrYUVABackendTextures yuva_backend_textures(yuva_info, yuv_textures,
+ kTopLeft_GrSurfaceOrigin);
+ return SkImage::MakeFromYUVATexturesCopyToExternal(
+ gr_context, yuva_backend_textures, result_texture,
+ kRGBA_8888_SkColorType);
}
gfx::Size GetVideoYSize(const VideoFrame* video_frame) {
@@ -66,20 +71,6 @@ gfx::Size GetVideoUVSize(const VideoFrame* video_frame) {
return gfx::Size((y_size.width() + 1) / 2, (y_size.height() + 1) / 2);
}
-// Some YUVA factories infer the YUVAIndices. This helper identifies the channel
-// to use for single channel textures.
-SkColorChannel GetSingleChannel(const GrBackendTexture& tex) {
- switch (tex.getBackendFormat().channelMask()) {
- case kGray_SkColorChannelFlag: // Gray can be read as any of kR, kG, kB.
- case kRed_SkColorChannelFlag:
- return SkColorChannel::kR;
- case kAlpha_SkColorChannelFlag:
- return SkColorChannel::kA;
- default: // multiple channels in the texture. Guess kR.
- return SkColorChannel::kR;
- }
-}
-
SkColorType GetCompatibleSurfaceColorType(GrGLenum format) {
switch (format) {
case GL_RGBA8:
@@ -124,37 +115,25 @@ bool YUVGrBackendTexturesToSkSurface(GrDirectContext* gr_context,
sk_sp<SkSurface> surface,
bool flip_y,
bool use_visible_rect) {
- SkYUVAIndex indices[4];
-
+ SkYUVAInfo::PlanarConfig planar_config;
switch (video_frame->format()) {
case PIXEL_FORMAT_NV12:
- indices[SkYUVAIndex::kY_Index] = {
- 0, GetSingleChannel(yuv_textures[0])}; // the first backend texture
- indices[SkYUVAIndex::kU_Index] = {
- 1, SkColorChannel::kR}; // the second backend texture
- indices[SkYUVAIndex::kV_Index] = {1, SkColorChannel::kG};
- indices[SkYUVAIndex::kA_Index] = {-1,
- SkColorChannel::kA}; // no alpha plane
+ planar_config = SkYUVAInfo::PlanarConfig::kY_UV_420;
break;
case PIXEL_FORMAT_I420:
- indices[SkYUVAIndex::kY_Index] = {
- 0, GetSingleChannel(yuv_textures[0])}; // the first backend texture
- indices[SkYUVAIndex::kU_Index] = {
- 1, GetSingleChannel(yuv_textures[1])}; // the second backend texture
- indices[SkYUVAIndex::kV_Index] = {2, GetSingleChannel(yuv_textures[2])};
- indices[SkYUVAIndex::kA_Index] = {-1,
- SkColorChannel::kA}; // no alpha plane
+ planar_config = SkYUVAInfo::PlanarConfig::kY_U_V_420;
break;
default:
NOTREACHED();
return false;
}
-
- auto image = SkImage::MakeFromYUVATextures(
- gr_context, ColorSpaceToSkYUVColorSpace(video_frame->ColorSpace()),
- yuv_textures, indices,
+ SkYUVAInfo yuva_info(
{video_frame->coded_size().width(), video_frame->coded_size().height()},
- kTopLeft_GrSurfaceOrigin, SkColorSpace::MakeSRGB());
+ planar_config, ColorSpaceToSkYUVColorSpace(video_frame->ColorSpace()));
+ auto image = SkImage::MakeFromYUVATextures(
+ gr_context,
+ GrYUVABackendTextures(yuva_info, yuv_textures, kTopLeft_GrSurfaceOrigin),
+ SkColorSpace::MakeSRGB());
if (!image) {
return false;
diff --git a/chromium/media/renderers/video_resource_updater.cc b/chromium/media/renderers/video_resource_updater.cc
index da01bca3e48..2197db9298f 100644
--- a/chromium/media/renderers/video_resource_updater.cc
+++ b/chromium/media/renderers/video_resource_updater.cc
@@ -123,10 +123,15 @@ VideoFrameResourceType ExternalResourceTypeForHardwarePlanes(
return VideoFrameResourceType::YUV;
case PIXEL_FORMAT_P016LE:
- DCHECK_EQ(num_textures, 1);
+ if (num_textures == 1) {
+ // Single-texture multi-planar frames can be sampled as RGB.
+ buffer_formats[0] = gfx::BufferFormat::P010;
+ return VideoFrameResourceType::RGB;
+ }
// TODO(mcasas): Support other formats such as e.g. P012.
- buffer_formats[0] = gfx::BufferFormat::P010;
- return VideoFrameResourceType::RGB;
+ buffer_formats[0] = gfx::BufferFormat::R_16;
+ buffer_formats[1] = gfx::BufferFormat::RG_88;
+ return VideoFrameResourceType::YUV;
case PIXEL_FORMAT_UYVY:
NOTREACHED();
@@ -528,23 +533,24 @@ void VideoResourceUpdater::ReleaseFrameResources() {
frame_resources_.clear();
}
-void VideoResourceUpdater::AppendQuads(viz::CompositorRenderPass* render_pass,
- scoped_refptr<VideoFrame> frame,
- gfx::Transform transform,
- gfx::Rect quad_rect,
- gfx::Rect visible_quad_rect,
- const gfx::RRectF& rounded_corner_bounds,
- gfx::Rect clip_rect,
- bool is_clipped,
- bool contents_opaque,
- float draw_opacity,
- int sorting_context_id) {
+void VideoResourceUpdater::AppendQuads(
+ viz::CompositorRenderPass* render_pass,
+ scoped_refptr<VideoFrame> frame,
+ gfx::Transform transform,
+ gfx::Rect quad_rect,
+ gfx::Rect visible_quad_rect,
+ const gfx::MaskFilterInfo& mask_filter_info,
+ gfx::Rect clip_rect,
+ bool is_clipped,
+ bool contents_opaque,
+ float draw_opacity,
+ int sorting_context_id) {
DCHECK(frame.get());
viz::SharedQuadState* shared_quad_state =
render_pass->CreateAndAppendSharedQuadState();
shared_quad_state->SetAll(transform, quad_rect, visible_quad_rect,
- rounded_corner_bounds, clip_rect, is_clipped,
+ mask_filter_info, clip_rect, is_clipped,
contents_opaque, draw_opacity,
SkBlendMode::kSrcOver, sorting_context_id);
@@ -582,6 +588,7 @@ void VideoResourceUpdater::AppendQuads(viz::CompositorRenderPass* render_pass,
VideoFrame::NumPlanes(frame->format()));
if (frame->HasTextures()) {
DCHECK(frame->format() == PIXEL_FORMAT_NV12 ||
+ frame->format() == PIXEL_FORMAT_P016LE ||
frame->format() == PIXEL_FORMAT_I420);
}
diff --git a/chromium/media/renderers/video_resource_updater.h b/chromium/media/renderers/video_resource_updater.h
index ec732002ea8..ab49f3c2fa5 100644
--- a/chromium/media/renderers/video_resource_updater.h
+++ b/chromium/media/renderers/video_resource_updater.h
@@ -29,7 +29,6 @@
namespace gfx {
class Rect;
-class RRectF;
class Transform;
} // namespace gfx
@@ -45,6 +44,10 @@ class CompositorRenderPass;
class SharedBitmapReporter;
} // namespace viz
+namespace gfx {
+class MaskFilterInfo;
+}
+
namespace media {
class PaintCanvasVideoRenderer;
class VideoFrame;
@@ -120,7 +123,7 @@ class MEDIA_EXPORT VideoResourceUpdater
gfx::Transform transform,
gfx::Rect quad_rect,
gfx::Rect visible_quad_rect,
- const gfx::RRectF& rounded_corner_bounds,
+ const gfx::MaskFilterInfo& mask_filter_info,
gfx::Rect clip_rect,
bool is_clipped,
bool context_opaque,
diff --git a/chromium/media/renderers/win/media_foundation_renderer.cc b/chromium/media/renderers/win/media_foundation_renderer.cc
index cebe1e01cca..97d50caf4dc 100644
--- a/chromium/media/renderers/win/media_foundation_renderer.cc
+++ b/chromium/media/renderers/win/media_foundation_renderer.cc
@@ -9,7 +9,6 @@
#include <memory>
#include <string>
-#include "base/bind_helpers.h"
#include "base/callback_helpers.h"
#include "base/guid.h"
#include "base/strings/string16.h"
diff --git a/chromium/media/renderers/win/media_foundation_renderer_unittest.cc b/chromium/media/renderers/win/media_foundation_renderer_unittest.cc
index 2bdcab9c43e..9f74d065f30 100644
--- a/chromium/media/renderers/win/media_foundation_renderer_unittest.cc
+++ b/chromium/media/renderers/win/media_foundation_renderer_unittest.cc
@@ -7,7 +7,7 @@
#include <windows.media.protection.h>
#include "base/bind.h"
-#include "base/bind_helpers.h"
+#include "base/callback_helpers.h"
#include "base/single_thread_task_runner.h"
#include "base/test/mock_callback.h"
#include "base/test/task_environment.h"
diff --git a/chromium/media/video/BUILD.gn b/chromium/media/video/BUILD.gn
index f7f80f3b731..744cc45c4c3 100644
--- a/chromium/media/video/BUILD.gn
+++ b/chromium/media/video/BUILD.gn
@@ -154,3 +154,13 @@ fuzzer_test("media_h264_parser_fuzzer") {
"//ui/gfx/geometry",
]
}
+
+if (proprietary_codecs && enable_platform_hevc) {
+ fuzzer_test("media_h265_parser_fuzzer") {
+ sources = [ "h265_parser_fuzzertest.cc" ]
+ deps = [
+ "//base",
+ "//media",
+ ]
+ }
+}
diff --git a/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc b/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc
index cff66515d78..bf5fe2a0d43 100644
--- a/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc
+++ b/chromium/media/video/gpu_memory_buffer_video_frame_pool.cc
@@ -24,6 +24,7 @@
#include "base/macros.h"
#include "base/metrics/histogram_macros.h"
#include "base/strings/stringprintf.h"
+#include "base/sys_byteorder.h"
#include "base/time/default_tick_clock.h"
#include "base/trace_event/memory_dump_manager.h"
#include "base/trace_event/memory_dump_provider.h"
@@ -239,6 +240,9 @@ gfx::BufferFormat GpuMemoryBufferFormat(
case GpuVideoAcceleratorFactories::OutputFormat::I420:
DCHECK_LE(plane, 2u);
return gfx::BufferFormat::R_8;
+ case GpuVideoAcceleratorFactories::OutputFormat::P010:
+ DCHECK_LE(plane, 1u);
+ return gfx::BufferFormat::P010;
case GpuVideoAcceleratorFactories::OutputFormat::NV12_SINGLE_GMB:
DCHECK_LE(plane, 1u);
return gfx::BufferFormat::YUV_420_BIPLANAR;
@@ -273,6 +277,7 @@ size_t PlanesPerCopy(GpuVideoAcceleratorFactories::OutputFormat format) {
return 1;
case GpuVideoAcceleratorFactories::OutputFormat::NV12_DUAL_GMB:
case GpuVideoAcceleratorFactories::OutputFormat::NV12_SINGLE_GMB:
+ case GpuVideoAcceleratorFactories::OutputFormat::P010:
return 2;
case GpuVideoAcceleratorFactories::OutputFormat::XR30:
case GpuVideoAcceleratorFactories::OutputFormat::XB30:
@@ -292,6 +297,8 @@ VideoPixelFormat VideoFormat(
case GpuVideoAcceleratorFactories::OutputFormat::NV12_SINGLE_GMB:
case GpuVideoAcceleratorFactories::OutputFormat::NV12_DUAL_GMB:
return PIXEL_FORMAT_NV12;
+ case GpuVideoAcceleratorFactories::OutputFormat::P010:
+ return PIXEL_FORMAT_P016LE;
case GpuVideoAcceleratorFactories::OutputFormat::BGRA:
return PIXEL_FORMAT_ARGB;
case GpuVideoAcceleratorFactories::OutputFormat::RGBA:
@@ -312,6 +319,8 @@ size_t NumGpuMemoryBuffers(GpuVideoAcceleratorFactories::OutputFormat format) {
switch (format) {
case GpuVideoAcceleratorFactories::OutputFormat::I420:
return 3;
+ case GpuVideoAcceleratorFactories::OutputFormat::P010:
+ return 1;
case GpuVideoAcceleratorFactories::OutputFormat::NV12_SINGLE_GMB:
return 1;
case GpuVideoAcceleratorFactories::OutputFormat::NV12_DUAL_GMB:
@@ -375,6 +384,56 @@ void CopyRowsToI420Buffer(int first_row,
}
}
+void CopyRowsToP010Buffer(int first_row,
+ int rows,
+ int bytes_per_row,
+ const VideoFrame* source_frame,
+ uint8_t* dest_y,
+ int dest_stride_y,
+ uint8_t* dest_uv,
+ int dest_stride_uv,
+ base::OnceClosure done) {
+ base::ScopedClosureRunner done_runner(std::move(done));
+ TRACE_EVENT2("media", "CopyRowsToP010Buffer", "bytes_per_row", bytes_per_row,
+ "rows", rows);
+
+ if (!dest_y || !dest_uv)
+ return;
+
+ DCHECK_NE(dest_stride_y, 0);
+ DCHECK_NE(dest_stride_uv, 0);
+ DCHECK_LE(bytes_per_row, std::abs(dest_stride_y));
+ DCHECK_LE(bytes_per_row, std::abs(dest_stride_uv));
+ DCHECK_EQ(0, first_row % 2);
+ DCHECK_EQ(source_frame->format(), PIXEL_FORMAT_YUV420P10);
+ DCHECK_LE(bytes_per_row, source_frame->stride(VideoFrame::kYPlane));
+
+ // TODO(crbug.com/libyuv/873): Replace this with a libyuv optimized path or at
+ // least add a SIMD variant.
+ for (int r = first_row; r < first_row + rows; ++r) {
+ const uint16_t* src = reinterpret_cast<const uint16_t*>(
+ source_frame->visible_data(VideoFrame::kYPlane) +
+ r * source_frame->stride(VideoFrame::kYPlane));
+ uint16_t* dest = reinterpret_cast<uint16_t*>(dest_y + r * dest_stride_y);
+ for (int c = 0; c < bytes_per_row / 2; ++c)
+ *dest++ = *src++ << 6;
+ }
+
+ for (int r = first_row / 2; r < (first_row + rows) / 2; ++r) {
+ const uint16_t* u_src = reinterpret_cast<const uint16_t*>(
+ source_frame->visible_data(VideoFrame::kUPlane) +
+ r * source_frame->stride(VideoFrame::kUPlane));
+ const uint16_t* v_src = reinterpret_cast<const uint16_t*>(
+ source_frame->visible_data(VideoFrame::kVPlane) +
+ r * source_frame->stride(VideoFrame::kVPlane));
+ uint16_t* dest = reinterpret_cast<uint16_t*>(dest_uv + r * dest_stride_uv);
+ for (int c = 0; c < bytes_per_row / 4; ++c) {
+ *dest++ = *u_src++ << 6;
+ *dest++ = *v_src++ << 6;
+ }
+ }
+}
+
void CopyRowsToNV12Buffer(int first_row,
int rows,
int bytes_per_row,
@@ -530,6 +589,7 @@ gfx::Size CodedSize(const VideoFrame* video_frame,
gfx::Size output;
switch (output_format) {
case GpuVideoAcceleratorFactories::OutputFormat::I420:
+ case GpuVideoAcceleratorFactories::OutputFormat::P010:
case GpuVideoAcceleratorFactories::OutputFormat::NV12_SINGLE_GMB:
case GpuVideoAcceleratorFactories::OutputFormat::NV12_DUAL_GMB:
DCHECK((video_frame->visible_rect().y() & 1) == 0);
@@ -588,9 +648,7 @@ void GpuMemoryBufferVideoFramePool::PoolImpl::CreateHardwareFrame(
// Supported cases.
case PIXEL_FORMAT_YV12:
case PIXEL_FORMAT_I420:
- case PIXEL_FORMAT_YUV420P9:
case PIXEL_FORMAT_YUV420P10:
- case PIXEL_FORMAT_YUV420P12:
case PIXEL_FORMAT_I420A:
break;
// Unsupported cases.
@@ -606,9 +664,11 @@ void GpuMemoryBufferVideoFramePool::PoolImpl::CreateHardwareFrame(
case PIXEL_FORMAT_RGB24:
case PIXEL_FORMAT_MJPEG:
case PIXEL_FORMAT_YUV422P9:
+ case PIXEL_FORMAT_YUV420P9:
case PIXEL_FORMAT_YUV444P9:
case PIXEL_FORMAT_YUV422P10:
case PIXEL_FORMAT_YUV444P10:
+ case PIXEL_FORMAT_YUV420P12:
case PIXEL_FORMAT_YUV422P12:
case PIXEL_FORMAT_YUV444P12:
case PIXEL_FORMAT_Y16:
@@ -796,6 +856,18 @@ void GpuMemoryBufferVideoFramePool::PoolImpl::CopyVideoFrameToGpuMemoryBuffers(
buffer->stride(0), barrier));
break;
}
+ case GpuVideoAcceleratorFactories::OutputFormat::P010:
+ // Using base::Unretained(video_frame) here is safe because |barrier|
+ // keeps refptr of |video_frame| until all copy tasks are done.
+ worker_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &CopyRowsToP010Buffer, row, rows_to_copy,
+ coded_size.width() * 2, base::Unretained(video_frame.get()),
+ static_cast<uint8_t*>(buffer->memory(0)), buffer->stride(0),
+ static_cast<uint8_t*>(buffer->memory(1)), buffer->stride(1),
+ barrier));
+ break;
case GpuVideoAcceleratorFactories::OutputFormat::NV12_SINGLE_GMB:
// Using base::Unretained(video_frame) here is safe because |barrier|
// keeps refptr of |video_frame| until all copy tasks are done.
@@ -936,6 +1008,7 @@ void GpuMemoryBufferVideoFramePool::PoolImpl::
case GpuVideoAcceleratorFactories::OutputFormat::I420:
allow_overlay = video_frame->metadata()->allow_overlay;
break;
+ case GpuVideoAcceleratorFactories::OutputFormat::P010:
case GpuVideoAcceleratorFactories::OutputFormat::NV12_SINGLE_GMB:
allow_overlay = true;
break;
diff --git a/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc b/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc
index c624fee627e..e9821951132 100644
--- a/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc
+++ b/chromium/media/video/gpu_memory_buffer_video_frame_pool_unittest.cc
@@ -65,6 +65,7 @@ class GpuMemoryBufferVideoFramePoolTest : public ::testing::Test {
const VideoPixelFormat format =
(bit_depth > 8) ? PIXEL_FORMAT_YUV420P10 : PIXEL_FORMAT_I420;
+ const int multiplier = format == PIXEL_FORMAT_YUV420P10 ? 2 : 1;
DCHECK_LE(dimension, kDimension);
const gfx::Size size(dimension, dimension);
@@ -75,9 +76,9 @@ class GpuMemoryBufferVideoFramePoolTest : public ::testing::Test {
size.width() - visible_rect_crop,
size.height() - visible_rect_crop), // visible_rect
size, // natural_size
- size.width(), // y_stride
- size.width() / 2, // u_stride
- size.width() / 2, // v_stride
+ size.width() * multiplier, // y_stride
+ size.width() * multiplier / 2, // u_stride
+ size.width() * multiplier / 2, // v_stride
y_data, // y_data
u_data, // u_data
v_data, // v_data
@@ -326,6 +327,37 @@ TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareXR30Frame) {
EXPECT_EQ(as_xr30(0, 311, 0), *static_cast<uint32_t*>(memory));
}
+TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareP010Frame) {
+ scoped_refptr<VideoFrame> software_frame = CreateTestYUVVideoFrame(10, 10);
+ scoped_refptr<VideoFrame> frame;
+ mock_gpu_factories_->SetVideoFrameOutputFormat(
+ media::GpuVideoAcceleratorFactories::OutputFormat::P010);
+ gpu_memory_buffer_pool_->MaybeCreateHardwareFrame(
+ software_frame, base::BindOnce(MaybeCreateHardwareFrameCallback, &frame));
+
+ RunUntilIdle();
+
+ EXPECT_NE(software_frame.get(), frame.get());
+ EXPECT_EQ(PIXEL_FORMAT_P016LE, frame->format());
+ EXPECT_EQ(1u, frame->NumTextures());
+ EXPECT_EQ(1u, sii_->shared_image_count());
+ EXPECT_TRUE(frame->metadata()->read_lock_fences_enabled);
+
+ EXPECT_EQ(1u, mock_gpu_factories_->created_memory_buffers().size());
+ mock_gpu_factories_->created_memory_buffers()[0]->Map();
+
+ const uint16_t* y_memory = reinterpret_cast<uint16_t*>(
+ mock_gpu_factories_->created_memory_buffers()[0]->memory(0));
+ EXPECT_EQ(software_frame->visible_data(VideoFrame::kYPlane)[0] << 6,
+ y_memory[0]);
+ const uint16_t* uv_memory = reinterpret_cast<uint16_t*>(
+ mock_gpu_factories_->created_memory_buffers()[0]->memory(1));
+ EXPECT_EQ(software_frame->visible_data(VideoFrame::kUPlane)[0] << 6,
+ uv_memory[0]);
+ EXPECT_EQ(software_frame->visible_data(VideoFrame::kVPlane)[0] << 6,
+ uv_memory[1]);
+}
+
TEST_F(GpuMemoryBufferVideoFramePoolTest, CreateOneHardwareXR30FrameBT709) {
scoped_refptr<VideoFrame> software_frame = CreateTestYUVVideoFrame(10, 10);
software_frame->set_color_space(gfx::ColorSpace::CreateREC709());
diff --git a/chromium/media/video/gpu_video_accelerator_factories.h b/chromium/media/video/gpu_video_accelerator_factories.h
index a12b5fe12ba..d8aa533af24 100644
--- a/chromium/media/video/gpu_video_accelerator_factories.h
+++ b/chromium/media/video/gpu_video_accelerator_factories.h
@@ -27,7 +27,7 @@
#include "ui/gfx/gpu_memory_buffer.h"
namespace base {
-class SingleThreadTaskRunner;
+class SequencedTaskRunner;
} // namespace base
namespace gfx {
@@ -67,6 +67,7 @@ class MEDIA_EXPORT GpuVideoAcceleratorFactories {
XB30, // 10:10:10:2 RGBX in one GMB
RGBA, // One 8:8:8:8 RGBA
BGRA, // One 8:8:8:8 BGRA (Usually Mac)
+ P010, // One P010 GMB.
};
enum class Supported {
@@ -184,7 +185,7 @@ class MEDIA_EXPORT GpuVideoAcceleratorFactories {
size_t size) = 0;
// Returns the task runner the video accelerator runs on.
- virtual scoped_refptr<base::SingleThreadTaskRunner> GetTaskRunner() = 0;
+ virtual scoped_refptr<base::SequencedTaskRunner> GetTaskRunner() = 0;
virtual viz::RasterContextProvider* GetMediaContextProvider() = 0;
diff --git a/chromium/media/video/h265_parser.cc b/chromium/media/video/h265_parser.cc
index 77e83491ce7..7d46b7029fb 100644
--- a/chromium/media/video/h265_parser.cc
+++ b/chromium/media/video/h265_parser.cc
@@ -6,12 +6,115 @@
#include <stddef.h>
+#include <algorithm>
+#include <cmath>
+
+#include "base/bits.h"
#include "base/logging.h"
+#include "base/numerics/safe_conversions.h"
#include "base/stl_util.h"
#include "media/base/decrypt_config.h"
+#include "media/base/video_codecs.h"
+#include "ui/gfx/geometry/rect.h"
+#include "ui/gfx/geometry/size.h"
namespace media {
+namespace {
+
+// From Table 7-6.
+constexpr int kDefaultScalingListSize1To3Matrix0To2[] = {
+ 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 17, 16, 17, 16, 17, 18,
+ 17, 18, 18, 17, 18, 21, 19, 20, 21, 20, 19, 21, 24, 22, 22, 24,
+ 24, 22, 22, 24, 25, 25, 27, 30, 27, 25, 25, 29, 31, 35, 35, 31,
+ 29, 36, 41, 44, 41, 36, 47, 54, 54, 47, 65, 70, 65, 88, 88, 115,
+};
+constexpr int kDefaultScalingListSize1To3Matrix3To5[] = {
+ 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 17, 18,
+ 18, 18, 18, 18, 18, 20, 20, 20, 20, 20, 20, 20, 24, 24, 24, 24,
+ 24, 24, 24, 24, 25, 25, 25, 25, 25, 25, 25, 28, 28, 28, 28, 28,
+ 28, 33, 33, 33, 33, 33, 41, 41, 41, 41, 54, 54, 54, 71, 71, 91,
+};
+
+// VUI parameters: Table E-1 "Interpretation of sample aspect ratio indicator"
+constexpr int kTableSarWidth[] = {0, 1, 12, 10, 16, 40, 24, 20, 32,
+ 80, 18, 15, 64, 160, 4, 3, 2};
+constexpr int kTableSarHeight[] = {0, 1, 11, 11, 11, 33, 11, 11, 11,
+ 33, 11, 11, 33, 99, 3, 2, 1};
+static_assert(base::size(kTableSarWidth) == base::size(kTableSarHeight),
+ "sar tables must have the same size");
+
+// Converts [|start|, |end|) range with |encrypted_ranges| into a vector of
+// SubsampleEntry. |encrypted_ranges| must be with in the range defined by
+// |start| and |end|.
+// It is OK to pass in empty |encrypted_ranges|; this will return a vector
+// with single SubsampleEntry with clear_bytes set to the size of the buffer.
+std::vector<SubsampleEntry> EncryptedRangesToSubsampleEntry(
+ const uint8_t* start,
+ const uint8_t* end,
+ const Ranges<const uint8_t*>& encrypted_ranges) {
+ std::vector<SubsampleEntry> subsamples(encrypted_ranges.size());
+ const uint8_t* cur = start;
+ for (size_t i = 0; i < encrypted_ranges.size(); ++i) {
+ const uint8_t* encrypted_start = encrypted_ranges.start(i);
+ DCHECK_GE(encrypted_start, cur)
+ << "Encrypted range started before the current buffer pointer.";
+ subsamples[i].clear_bytes = encrypted_start - cur;
+
+ const uint8_t* encrypted_end = encrypted_ranges.end(i);
+ subsamples[i].cypher_bytes = encrypted_end - encrypted_start;
+
+ cur = encrypted_end;
+ DCHECK_LE(cur, end) << "Encrypted range is outside the buffer range.";
+ }
+
+ // If there is more data in the buffer but not covered by encrypted_ranges,
+ // then it must be in the clear.
+ if (cur < end)
+ subsamples.emplace_back(end - cur, 0);
+
+ return subsamples;
+}
+
+void FillInDefaultScalingListData(H265ScalingListData* scaling_list_data,
+ int size_id,
+ int matrix_id) {
+ if (size_id == 0) {
+ std::fill_n(scaling_list_data->scaling_list_4x4[matrix_id],
+ H265ScalingListData::kScalingListSizeId0Count,
+ H265ScalingListData::kDefaultScalingListSize0Values);
+ return;
+ }
+
+ int* dst;
+ switch (size_id) {
+ case 1:
+ dst = scaling_list_data->scaling_list_8x8[matrix_id];
+ break;
+ case 2:
+ dst = scaling_list_data->scaling_list_16x16[matrix_id];
+ break;
+ case 3:
+ dst = scaling_list_data->scaling_list_32x32[matrix_id];
+ break;
+ }
+ const int* src;
+ if (matrix_id < 3)
+ src = kDefaultScalingListSize1To3Matrix0To2;
+ else
+ src = kDefaultScalingListSize1To3Matrix3To5;
+ memcpy(dst, src,
+ H265ScalingListData::kScalingListSizeId1To3Count * sizeof(*src));
+
+ // These are sixteen because the default for the minus8 values is 8.
+ if (size_id == 2)
+ scaling_list_data->scaling_list_dc_coef_16x16[matrix_id] = 16;
+ else if (size_id == 3)
+ scaling_list_data->scaling_list_dc_coef_32x32[matrix_id] = 16;
+}
+
+} // namespace
+
#define READ_BITS_OR_RETURN(num_bits, out) \
do { \
int _out; \
@@ -23,6 +126,56 @@ namespace media {
*out = _out; \
} while (0)
+#define SKIP_BITS_OR_RETURN(num_bits) \
+ do { \
+ int bits_left = num_bits; \
+ int data; \
+ while (bits_left > 0) { \
+ if (!br_.ReadBits(bits_left > 16 ? 16 : bits_left, &data)) { \
+ DVLOG(1) << "Error in stream: unexpected EOS while trying to skip"; \
+ return kInvalidStream; \
+ } \
+ bits_left -= 16; \
+ } \
+ } while (0)
+
+#define READ_BOOL_OR_RETURN(out) \
+ do { \
+ int _out; \
+ if (!br_.ReadBits(1, &_out)) { \
+ DVLOG(1) \
+ << "Error in stream: unexpected EOS while trying to read " #out; \
+ return kInvalidStream; \
+ } \
+ *out = _out != 0; \
+ } while (0)
+
+#define READ_UE_OR_RETURN(out) \
+ do { \
+ if (ReadUE(out) != kOk) { \
+ DVLOG(1) << "Error in stream: invalid value while trying to read " #out; \
+ return kInvalidStream; \
+ } \
+ } while (0)
+
+#define READ_SE_OR_RETURN(out) \
+ do { \
+ if (ReadSE(out) != kOk) { \
+ DVLOG(1) << "Error in stream: invalid value while trying to read " #out; \
+ return kInvalidStream; \
+ } \
+ } while (0)
+
+#define IN_RANGE_OR_RETURN(val, min, max) \
+ do { \
+ if ((val) < (min) || (val) > (max)) { \
+ DVLOG(1) << "Error in stream: invalid value, expected " #val " to be" \
+ << " in range [" << (min) << ":" << (max) << "]" \
+ << " found " << (val) << " instead"; \
+ return kInvalidStream; \
+ } \
+ } while (0)
+
#define TRUE_OR_RETURN(a) \
do { \
if (!(a)) { \
@@ -32,7 +185,43 @@ namespace media {
} while (0)
H265NALU::H265NALU() {
- memset(this, 0, sizeof(*this));
+ memset(reinterpret_cast<void*>(this), 0, sizeof(*this));
+}
+
+H265ScalingListData::H265ScalingListData() {
+ memset(reinterpret_cast<void*>(this), 0, sizeof(*this));
+}
+
+H265StRefPicSet::H265StRefPicSet() {
+ memset(reinterpret_cast<void*>(this), 0, sizeof(*this));
+}
+
+H265SPS::H265SPS() {
+ memset(reinterpret_cast<void*>(this), 0, sizeof(*this));
+}
+
+H265ProfileTierLevel::H265ProfileTierLevel() {
+ memset(reinterpret_cast<void*>(this), 0, sizeof(*this));
+}
+
+H265VUIParameters::H265VUIParameters() {
+ memset(reinterpret_cast<void*>(this), 0, sizeof(*this));
+}
+
+H265PPS::H265PPS() {
+ memset(reinterpret_cast<void*>(this), 0, sizeof(*this));
+}
+
+H265RefPicListsModifications::H265RefPicListsModifications() {
+ memset(reinterpret_cast<void*>(this), 0, sizeof(*this));
+}
+
+H265PredWeightTable::H265PredWeightTable() {
+ memset(reinterpret_cast<void*>(this), 0, sizeof(*this));
+}
+
+H265SliceHeader::H265SliceHeader() {
+ memset(reinterpret_cast<void*>(this), 0, sizeof(*this));
}
H265Parser::H265Parser() {
@@ -41,10 +230,87 @@ H265Parser::H265Parser() {
H265Parser::~H265Parser() {}
+int H265ProfileTierLevel::GetMaxLumaPs() const {
+ // From Table A.8 - General tier and level limits.
+ // |general_level_idc| is 30x the actual level.
+ if (general_level_idc <= 30) // level 1
+ return 36864;
+ if (general_level_idc <= 60) // level 2
+ return 122880;
+ if (general_level_idc <= 63) // level 2.1
+ return 245760;
+ if (general_level_idc <= 90) // level 3
+ return 552960;
+ if (general_level_idc <= 93) // level 3.1
+ return 983040;
+ if (general_level_idc <= 123) // level 4, 4.1
+ return 2228224;
+ if (general_level_idc <= 156) // level 5, 5.1, 5.2
+ return 8912896;
+ // level 6, 6.1, 6.2 - beyond that there's no actual limit.
+ return 35651584;
+}
+
+size_t H265ProfileTierLevel::GetDpbMaxPicBuf() const {
+ // From A.4.2 - Profile-specific level limits for the video profiles.
+ // If sps_curr_pic_ref_enabled_flag is required to be zero, then this is 6
+ // otherwise it is 7.
+ return (general_profile_idc >= kProfileIdcMain &&
+ general_profile_idc <= kProfileIdcHighThroughput)
+ ? 6
+ : 7;
+}
+
+gfx::Size H265SPS::GetCodedSize() const {
+ return gfx::Size(pic_width_in_luma_samples, pic_height_in_luma_samples);
+}
+
+gfx::Rect H265SPS::GetVisibleRect() const {
+ // 7.4.3.2.1
+ // These are verified in the parser that they won't overflow.
+ int left = (conf_win_left_offset + vui_parameters.def_disp_win_left_offset) *
+ sub_width_c;
+ int top = (conf_win_top_offset + vui_parameters.def_disp_win_top_offset) *
+ sub_height_c;
+ int right =
+ (conf_win_right_offset + vui_parameters.def_disp_win_right_offset) *
+ sub_width_c;
+ int bottom =
+ (conf_win_bottom_offset + vui_parameters.def_disp_win_bottom_offset) *
+ sub_height_c;
+ return gfx::Rect(left, top, pic_width_in_luma_samples - left - right,
+ pic_height_in_luma_samples - top - bottom);
+}
+
+// From E.3.1 VUI parameters semantics
+VideoColorSpace H265SPS::GetColorSpace() const {
+ if (!vui_parameters.colour_description_present_flag)
+ return VideoColorSpace();
+
+ return VideoColorSpace(
+ vui_parameters.colour_primaries, vui_parameters.transfer_characteristics,
+ vui_parameters.matrix_coeffs,
+ vui_parameters.video_full_range_flag ? gfx::ColorSpace::RangeID::FULL
+ : gfx::ColorSpace::RangeID::LIMITED);
+}
+
+bool H265SliceHeader::IsISlice() const {
+ return slice_type == kSliceTypeI;
+}
+
+bool H265SliceHeader::IsPSlice() const {
+ return slice_type == kSliceTypeP;
+}
+
+bool H265SliceHeader::IsBSlice() const {
+ return slice_type == kSliceTypeB;
+}
+
void H265Parser::Reset() {
stream_ = NULL;
bytes_left_ = 0;
encrypted_ranges_.clear();
+ previous_nalu_range_.clear();
}
void H265Parser::SetStream(const uint8_t* stream, off_t stream_size) {
@@ -61,6 +327,7 @@ void H265Parser::SetEncryptedStream(
stream_ = stream;
bytes_left_ = stream_size;
+ previous_nalu_range_.clear();
encrypted_ranges_.clear();
const uint8_t* start = stream;
@@ -116,6 +383,54 @@ bool H265Parser::LocateNALU(off_t* nalu_size, off_t* start_code_size) {
return true;
}
+H265Parser::Result H265Parser::ReadUE(int* val) {
+ // Count the number of contiguous zero bits.
+ int bit;
+ int num_bits = -1;
+ do {
+ READ_BITS_OR_RETURN(1, &bit);
+ num_bits++;
+ } while (bit == 0);
+
+ if (num_bits > 31)
+ return kInvalidStream;
+
+ // Calculate exp-Golomb code value of size num_bits.
+ // Special case for |num_bits| == 31 to avoid integer overflow. The only
+ // valid representation as an int is 2^31 - 1, so the remaining bits must
+ // be 0 or else the number is too large.
+ *val = (1u << num_bits) - 1u;
+
+ int rest;
+ if (num_bits == 31) {
+ READ_BITS_OR_RETURN(num_bits, &rest);
+ return (rest == 0) ? kOk : kInvalidStream;
+ }
+
+ if (num_bits > 0) {
+ READ_BITS_OR_RETURN(num_bits, &rest);
+ *val += rest;
+ }
+
+ return kOk;
+}
+
+H265Parser::Result H265Parser::ReadSE(int* val) {
+ // See Chapter 9 in the spec.
+ int ue;
+ Result res;
+ res = ReadUE(&ue);
+ if (res != kOk)
+ return res;
+
+ if (ue % 2 == 0)
+ *val = -(ue / 2);
+ else
+ *val = ue / 2 + 1;
+
+ return kOk;
+}
+
H265Parser::Result H265Parser::AdvanceToNextNALU(H265NALU* nalu) {
off_t start_code_size;
off_t nalu_size_with_start_code;
@@ -125,6 +440,7 @@ H265Parser::Result H265Parser::AdvanceToNextNALU(H265NALU* nalu) {
return kEOStream;
}
+ DCHECK(nalu);
nalu->data = stream_ + start_code_size;
nalu->size = nalu_size_with_start_code - start_code_size;
DVLOG(4) << "NALU found: size=" << nalu_size_with_start_code;
@@ -153,6 +469,1369 @@ H265Parser::Result H265Parser::AdvanceToNextNALU(H265NALU* nalu) {
<< " at: " << reinterpret_cast<const void*>(nalu->data)
<< " size: " << nalu->size;
+ previous_nalu_range_.clear();
+ previous_nalu_range_.Add(nalu->data, nalu->data + nalu->size);
+ return kOk;
+}
+
+H265Parser::Result H265Parser::ParseSPS(int* sps_id) {
+ // 7.4.3.2
+ DVLOG(4) << "Parsing SPS";
+ Result res = kOk;
+
+ DCHECK(sps_id);
+ *sps_id = -1;
+
+ std::unique_ptr<H265SPS> sps = std::make_unique<H265SPS>();
+ SKIP_BITS_OR_RETURN(4); // sps_video_parameter_set_id
+ READ_BITS_OR_RETURN(3, &sps->sps_max_sub_layers_minus1);
+ IN_RANGE_OR_RETURN(sps->sps_max_sub_layers_minus1, 0, 6);
+ SKIP_BITS_OR_RETURN(1); // sps_temporal_id_nesting_flag
+
+ res = ParseProfileTierLevel(true, sps->sps_max_sub_layers_minus1,
+ &sps->profile_tier_level);
+ if (res != kOk)
+ return res;
+
+ READ_UE_OR_RETURN(&sps->sps_seq_parameter_set_id);
+ IN_RANGE_OR_RETURN(sps->sps_seq_parameter_set_id, 0, 15);
+ READ_UE_OR_RETURN(&sps->chroma_format_idc);
+ IN_RANGE_OR_RETURN(sps->chroma_format_idc, 0, 3);
+ if (sps->chroma_format_idc == 3) {
+ READ_BOOL_OR_RETURN(&sps->separate_colour_plane_flag);
+ }
+ sps->chroma_array_type =
+ sps->separate_colour_plane_flag ? 0 : sps->chroma_format_idc;
+ // Table 6-1.
+ if (sps->chroma_format_idc == 1) {
+ sps->sub_width_c = sps->sub_height_c = 2;
+ } else if (sps->chroma_format_idc == 2) {
+ sps->sub_width_c = 2;
+ sps->sub_height_c = 1;
+ } else {
+ sps->sub_width_c = sps->sub_height_c = 1;
+ }
+ READ_UE_OR_RETURN(&sps->pic_width_in_luma_samples);
+ READ_UE_OR_RETURN(&sps->pic_height_in_luma_samples);
+ TRUE_OR_RETURN(sps->pic_width_in_luma_samples != 0);
+ TRUE_OR_RETURN(sps->pic_height_in_luma_samples != 0);
+
+ // Equation A-2: Calculate max_dpb_size.
+ int max_luma_ps = sps->profile_tier_level.GetMaxLumaPs();
+ base::CheckedNumeric<int> pic_size = sps->pic_height_in_luma_samples;
+ pic_size *= sps->pic_width_in_luma_samples;
+ if (!pic_size.IsValid())
+ return kInvalidStream;
+ int pic_size_in_samples_y = pic_size.ValueOrDefault(0);
+ size_t max_dpb_pic_buf = sps->profile_tier_level.GetDpbMaxPicBuf();
+ if (pic_size_in_samples_y <= (max_luma_ps >> 2))
+ sps->max_dpb_size = std::min(4 * max_dpb_pic_buf, size_t{16});
+ else if (pic_size_in_samples_y <= (max_luma_ps >> 1))
+ sps->max_dpb_size = std::min(2 * max_dpb_pic_buf, size_t{16});
+ else if (pic_size_in_samples_y <= ((3 * max_luma_ps) >> 2))
+ sps->max_dpb_size = std::min((4 * max_dpb_pic_buf) / 3, size_t{16});
+ else
+ sps->max_dpb_size = max_dpb_pic_buf;
+
+ bool conformance_window_flag;
+ READ_BOOL_OR_RETURN(&conformance_window_flag);
+ if (conformance_window_flag) {
+ READ_UE_OR_RETURN(&sps->conf_win_left_offset);
+ READ_UE_OR_RETURN(&sps->conf_win_right_offset);
+ READ_UE_OR_RETURN(&sps->conf_win_top_offset);
+ READ_UE_OR_RETURN(&sps->conf_win_bottom_offset);
+ base::CheckedNumeric<int> width_crop = sps->conf_win_left_offset;
+ width_crop += sps->conf_win_right_offset;
+ width_crop *= sps->sub_width_c;
+ if (!width_crop.IsValid())
+ return kInvalidStream;
+ TRUE_OR_RETURN(width_crop.ValueOrDefault(0) <
+ sps->pic_width_in_luma_samples);
+ base::CheckedNumeric<int> height_crop = sps->conf_win_top_offset;
+ height_crop += sps->conf_win_bottom_offset;
+ height_crop *= sps->sub_height_c;
+ if (!height_crop.IsValid())
+ return kInvalidStream;
+ TRUE_OR_RETURN(height_crop.ValueOrDefault(0) <
+ sps->pic_height_in_luma_samples);
+ }
+ READ_UE_OR_RETURN(&sps->bit_depth_luma_minus8);
+ IN_RANGE_OR_RETURN(sps->bit_depth_luma_minus8, 0, 8);
+ sps->bit_depth_y = sps->bit_depth_luma_minus8 + 8;
+ READ_UE_OR_RETURN(&sps->bit_depth_chroma_minus8);
+ IN_RANGE_OR_RETURN(sps->bit_depth_chroma_minus8, 0, 8);
+ sps->bit_depth_c = sps->bit_depth_chroma_minus8 + 8;
+ READ_UE_OR_RETURN(&sps->log2_max_pic_order_cnt_lsb_minus4);
+ IN_RANGE_OR_RETURN(sps->log2_max_pic_order_cnt_lsb_minus4, 0, 12);
+ sps->max_pic_order_cnt_lsb =
+ std::pow(2, sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
+ bool sps_sub_layer_ordering_info_present_flag;
+ READ_BOOL_OR_RETURN(&sps_sub_layer_ordering_info_present_flag);
+ for (int i = sps_sub_layer_ordering_info_present_flag
+ ? 0
+ : sps->sps_max_sub_layers_minus1;
+ i <= sps->sps_max_sub_layers_minus1; ++i) {
+ READ_UE_OR_RETURN(&sps->sps_max_dec_pic_buffering_minus1[i]);
+ IN_RANGE_OR_RETURN(sps->sps_max_dec_pic_buffering_minus1[i], 0,
+ static_cast<int>(sps->max_dpb_size) - 1);
+ READ_UE_OR_RETURN(&sps->sps_max_num_reorder_pics[i]);
+ IN_RANGE_OR_RETURN(sps->sps_max_num_reorder_pics[i], 0,
+ sps->sps_max_dec_pic_buffering_minus1[i]);
+ if (i > 0) {
+ TRUE_OR_RETURN(sps->sps_max_dec_pic_buffering_minus1[i] >=
+ sps->sps_max_dec_pic_buffering_minus1[i - 1]);
+ TRUE_OR_RETURN(sps->sps_max_num_reorder_pics[i] >=
+ sps->sps_max_num_reorder_pics[i - 1]);
+ }
+ READ_UE_OR_RETURN(&sps->sps_max_latency_increase_plus1[i]);
+ sps->sps_max_latency_pictures[i] = sps->sps_max_num_reorder_pics[i] +
+ sps->sps_max_latency_increase_plus1[i] -
+ 1;
+ }
+ if (!sps_sub_layer_ordering_info_present_flag) {
+ // Fill in the default values for the other sublayers.
+ for (int i = 0; i < sps->sps_max_sub_layers_minus1; ++i) {
+ sps->sps_max_dec_pic_buffering_minus1[i] =
+ sps->sps_max_dec_pic_buffering_minus1[sps->sps_max_sub_layers_minus1];
+ sps->sps_max_num_reorder_pics[i] =
+ sps->sps_max_num_reorder_pics[sps->sps_max_sub_layers_minus1];
+ sps->sps_max_latency_increase_plus1[i] =
+ sps->sps_max_latency_increase_plus1[sps->sps_max_sub_layers_minus1];
+ sps->sps_max_latency_pictures[i] =
+ sps->sps_max_num_reorder_pics[i] +
+ sps->sps_max_latency_increase_plus1[i] - 1;
+ }
+ }
+ READ_UE_OR_RETURN(&sps->log2_min_luma_coding_block_size_minus3);
+ READ_UE_OR_RETURN(&sps->log2_diff_max_min_luma_coding_block_size);
+
+ int min_cb_log2_size_y = sps->log2_min_luma_coding_block_size_minus3 + 3;
+ sps->ctb_log2_size_y =
+ min_cb_log2_size_y + sps->log2_diff_max_min_luma_coding_block_size;
+ TRUE_OR_RETURN(min_cb_log2_size_y <= 31 && sps->ctb_log2_size_y <= 31);
+ int min_cb_size_y = 1 << min_cb_log2_size_y;
+ int ctb_size_y = 1 << sps->ctb_log2_size_y;
+ sps->pic_width_in_ctbs_y = base::ClampCeil(
+ static_cast<float>(sps->pic_width_in_luma_samples) / ctb_size_y);
+ sps->pic_height_in_ctbs_y = base::ClampCeil(
+ static_cast<float>(sps->pic_height_in_luma_samples) / ctb_size_y);
+ sps->pic_size_in_ctbs_y =
+ sps->pic_width_in_ctbs_y * sps->pic_height_in_ctbs_y;
+
+ TRUE_OR_RETURN(sps->pic_width_in_luma_samples % min_cb_size_y == 0);
+ TRUE_OR_RETURN(sps->pic_height_in_luma_samples % min_cb_size_y == 0);
+ READ_UE_OR_RETURN(&sps->log2_min_luma_transform_block_size_minus2);
+ int min_tb_log2_size_y = sps->log2_min_luma_transform_block_size_minus2 + 2;
+ TRUE_OR_RETURN(min_tb_log2_size_y < min_cb_log2_size_y);
+ READ_UE_OR_RETURN(&sps->log2_diff_max_min_luma_transform_block_size);
+ sps->max_tb_log2_size_y =
+ min_tb_log2_size_y + sps->log2_diff_max_min_luma_transform_block_size;
+ TRUE_OR_RETURN(sps->max_tb_log2_size_y <= std::min(sps->ctb_log2_size_y, 5));
+ READ_UE_OR_RETURN(&sps->max_transform_hierarchy_depth_inter);
+ IN_RANGE_OR_RETURN(sps->max_transform_hierarchy_depth_inter, 0,
+ sps->ctb_log2_size_y - min_tb_log2_size_y);
+ READ_UE_OR_RETURN(&sps->max_transform_hierarchy_depth_intra);
+ IN_RANGE_OR_RETURN(sps->max_transform_hierarchy_depth_intra, 0,
+ sps->ctb_log2_size_y - min_tb_log2_size_y);
+ READ_BOOL_OR_RETURN(&sps->scaling_list_enabled_flag);
+ if (sps->scaling_list_enabled_flag) {
+ READ_BOOL_OR_RETURN(&sps->sps_scaling_list_data_present_flag);
+ res = ParseScalingListData(&sps->scaling_list_data);
+ if (res != kOk)
+ return res;
+ } else {
+ // Fill it in with the default values.
+ for (int size_id = 0; size_id < 4; ++size_id) {
+ for (int matrix_id = 0; matrix_id < 6;
+ matrix_id += (size_id == 3) ? 3 : 1) {
+ FillInDefaultScalingListData(&sps->scaling_list_data, size_id,
+ matrix_id);
+ }
+ }
+ }
+ READ_BOOL_OR_RETURN(&sps->amp_enabled_flag);
+ READ_BOOL_OR_RETURN(&sps->sample_adaptive_offset_enabled_flag);
+ READ_BOOL_OR_RETURN(&sps->pcm_enabled_flag);
+ if (sps->pcm_enabled_flag) {
+ READ_BITS_OR_RETURN(4, &sps->pcm_sample_bit_depth_luma_minus1);
+ TRUE_OR_RETURN(sps->pcm_sample_bit_depth_luma_minus1 + 1 <=
+ sps->bit_depth_y);
+ READ_BITS_OR_RETURN(4, &sps->pcm_sample_bit_depth_chroma_minus1);
+ TRUE_OR_RETURN(sps->pcm_sample_bit_depth_chroma_minus1 + 1 <=
+ sps->bit_depth_c);
+ READ_UE_OR_RETURN(&sps->log2_min_pcm_luma_coding_block_size_minus3);
+ int log2_min_ipcm_cb_size_y =
+ sps->log2_min_pcm_luma_coding_block_size_minus3 + 3;
+ IN_RANGE_OR_RETURN(log2_min_ipcm_cb_size_y, std::min(min_cb_log2_size_y, 5),
+ std::min(sps->ctb_log2_size_y, 5));
+ READ_UE_OR_RETURN(&sps->log2_diff_max_min_pcm_luma_coding_block_size);
+ int log2_max_ipcm_cb_size_y =
+ log2_min_ipcm_cb_size_y +
+ sps->log2_diff_max_min_pcm_luma_coding_block_size;
+ TRUE_OR_RETURN(log2_max_ipcm_cb_size_y <=
+ std::min(sps->ctb_log2_size_y, 5));
+ READ_BOOL_OR_RETURN(&sps->pcm_loop_filter_disabled_flag);
+ }
+ READ_UE_OR_RETURN(&sps->num_short_term_ref_pic_sets);
+ IN_RANGE_OR_RETURN(sps->num_short_term_ref_pic_sets, 0,
+ kMaxShortTermRefPicSets);
+ for (int i = 0; i < sps->num_short_term_ref_pic_sets; ++i) {
+ res = ParseStRefPicSet(i, *sps, &sps->st_ref_pic_set[i]);
+ if (res != kOk)
+ return res;
+ }
+ READ_BOOL_OR_RETURN(&sps->long_term_ref_pics_present_flag);
+ if (sps->long_term_ref_pics_present_flag) {
+ READ_UE_OR_RETURN(&sps->num_long_term_ref_pics_sps);
+ IN_RANGE_OR_RETURN(sps->num_long_term_ref_pics_sps, 0,
+ kMaxLongTermRefPicSets);
+ for (int i = 0; i < sps->num_long_term_ref_pics_sps; ++i) {
+ READ_BITS_OR_RETURN(sps->log2_max_pic_order_cnt_lsb_minus4 + 4,
+ &sps->lt_ref_pic_poc_lsb_sps[i]);
+ READ_BOOL_OR_RETURN(&sps->used_by_curr_pic_lt_sps_flag[i]);
+ }
+ }
+ READ_BOOL_OR_RETURN(&sps->sps_temporal_mvp_enabled_flag);
+ READ_BOOL_OR_RETURN(&sps->strong_intra_smoothing_enabled_flag);
+ bool vui_parameters_present_flag;
+ READ_BOOL_OR_RETURN(&vui_parameters_present_flag);
+ if (vui_parameters_present_flag) {
+ res = ParseVuiParameters(*sps, &sps->vui_parameters);
+ if (res != kOk)
+ return res;
+ // Verify cropping parameters. We already verified the conformance window
+ // ranges previously.
+ base::CheckedNumeric<int> width_crop =
+ sps->conf_win_left_offset + sps->conf_win_right_offset;
+ width_crop += sps->vui_parameters.def_disp_win_left_offset;
+ width_crop += sps->vui_parameters.def_disp_win_right_offset;
+ width_crop *= sps->sub_width_c;
+ if (!width_crop.IsValid())
+ return kInvalidStream;
+ TRUE_OR_RETURN(width_crop.ValueOrDefault(0) <
+ sps->pic_width_in_luma_samples);
+ base::CheckedNumeric<int> height_crop =
+ sps->conf_win_top_offset + sps->conf_win_bottom_offset;
+ height_crop += sps->vui_parameters.def_disp_win_top_offset;
+ height_crop += sps->vui_parameters.def_disp_win_bottom_offset;
+ height_crop *= sps->sub_height_c;
+ if (!height_crop.IsValid())
+ return kInvalidStream;
+ TRUE_OR_RETURN(height_crop.ValueOrDefault(0) <
+ sps->pic_height_in_luma_samples);
+ }
+
+ bool sps_extension_present_flag;
+ bool sps_range_extension_flag = false;
+ bool sps_multilayer_extension_flag = false;
+ bool sps_3d_extension_flag = false;
+ bool sps_scc_extension_flag = false;
+ READ_BOOL_OR_RETURN(&sps_extension_present_flag);
+ if (sps_extension_present_flag) {
+ READ_BOOL_OR_RETURN(&sps_range_extension_flag);
+ READ_BOOL_OR_RETURN(&sps_multilayer_extension_flag);
+ READ_BOOL_OR_RETURN(&sps_3d_extension_flag);
+ READ_BOOL_OR_RETURN(&sps_scc_extension_flag);
+ SKIP_BITS_OR_RETURN(4); // sps_extension_4bits
+ }
+ if (sps_range_extension_flag) {
+ DVLOG(1) << "HEVC range extension not supported";
+ return kInvalidStream;
+ }
+ if (sps_multilayer_extension_flag) {
+ DVLOG(1) << "HEVC multilayer extension not supported";
+ return kInvalidStream;
+ }
+ if (sps_3d_extension_flag) {
+ DVLOG(1) << "HEVC 3D extension not supported";
+ return kInvalidStream;
+ }
+ if (sps_scc_extension_flag) {
+ DVLOG(1) << "HEVC SCC extension not supported";
+ return kInvalidStream;
+ }
+
+ // NOTE: The below 2 values are dependent upon the range extension if that is
+ // ever implemented.
+ sps->wp_offset_half_range_y = 1 << 7;
+ sps->wp_offset_half_range_c = 1 << 7;
+
+ // If an SPS with the same id already exists, replace it.
+ *sps_id = sps->sps_seq_parameter_set_id;
+ active_sps_[*sps_id] = std::move(sps);
+
+ return res;
+}
+
+H265Parser::Result H265Parser::ParsePPS(const H265NALU& nalu, int* pps_id) {
+ // 7.4.3.3
+ DVLOG(4) << "Parsing PPS";
+ Result res = kOk;
+
+ DCHECK(pps_id);
+ *pps_id = -1;
+ std::unique_ptr<H265PPS> pps = std::make_unique<H265PPS>();
+
+ pps->temporal_id = nalu.nuh_temporal_id_plus1 - 1;
+
+ // Set these defaults if they are not present here.
+ pps->loop_filter_across_tiles_enabled_flag = 1;
+
+ // 7.4.3.3.1
+ READ_UE_OR_RETURN(&pps->pps_pic_parameter_set_id);
+ IN_RANGE_OR_RETURN(pps->pps_pic_parameter_set_id, 0, 63);
+ READ_UE_OR_RETURN(&pps->pps_seq_parameter_set_id);
+ IN_RANGE_OR_RETURN(pps->pps_seq_parameter_set_id, 0, 15);
+ const H265SPS* sps = GetSPS(pps->pps_seq_parameter_set_id);
+ if (!sps) {
+ return kMissingParameterSet;
+ }
+ READ_BOOL_OR_RETURN(&pps->dependent_slice_segments_enabled_flag);
+ READ_BOOL_OR_RETURN(&pps->output_flag_present_flag);
+ READ_BITS_OR_RETURN(3, &pps->num_extra_slice_header_bits);
+ READ_BOOL_OR_RETURN(&pps->sign_data_hiding_enabled_flag);
+ READ_BOOL_OR_RETURN(&pps->cabac_init_present_flag);
+ READ_UE_OR_RETURN(&pps->num_ref_idx_l0_default_active_minus1);
+ IN_RANGE_OR_RETURN(pps->num_ref_idx_l0_default_active_minus1, 0,
+ kMaxRefIdxActive - 1);
+ READ_UE_OR_RETURN(&pps->num_ref_idx_l1_default_active_minus1);
+ IN_RANGE_OR_RETURN(pps->num_ref_idx_l1_default_active_minus1, 0,
+ kMaxRefIdxActive - 1);
+ READ_SE_OR_RETURN(&pps->init_qp_minus26);
+ pps->qp_bd_offset_y = 6 * sps->bit_depth_luma_minus8;
+ IN_RANGE_OR_RETURN(pps->init_qp_minus26, -(26 + pps->qp_bd_offset_y), 25);
+ READ_BOOL_OR_RETURN(&pps->constrained_intra_pred_flag);
+ READ_BOOL_OR_RETURN(&pps->transform_skip_enabled_flag);
+ READ_BOOL_OR_RETURN(&pps->cu_qp_delta_enabled_flag);
+ if (pps->cu_qp_delta_enabled_flag) {
+ READ_UE_OR_RETURN(&pps->diff_cu_qp_delta_depth);
+ IN_RANGE_OR_RETURN(pps->diff_cu_qp_delta_depth, 0,
+ sps->log2_diff_max_min_luma_coding_block_size);
+ }
+ READ_SE_OR_RETURN(&pps->pps_cb_qp_offset);
+ IN_RANGE_OR_RETURN(pps->pps_cb_qp_offset, -12, 12);
+ READ_SE_OR_RETURN(&pps->pps_cr_qp_offset);
+ IN_RANGE_OR_RETURN(pps->pps_cr_qp_offset, -12, 12);
+ READ_BOOL_OR_RETURN(&pps->pps_slice_chroma_qp_offsets_present_flag);
+ READ_BOOL_OR_RETURN(&pps->weighted_pred_flag);
+ READ_BOOL_OR_RETURN(&pps->weighted_bipred_flag);
+ READ_BOOL_OR_RETURN(&pps->transquant_bypass_enabled_flag);
+ READ_BOOL_OR_RETURN(&pps->tiles_enabled_flag);
+ READ_BOOL_OR_RETURN(&pps->entropy_coding_sync_enabled_flag);
+ if (pps->tiles_enabled_flag) {
+ READ_UE_OR_RETURN(&pps->num_tile_columns_minus1);
+ IN_RANGE_OR_RETURN(pps->num_tile_columns_minus1, 0,
+ sps->pic_width_in_ctbs_y - 1);
+ TRUE_OR_RETURN(pps->num_tile_columns_minus1 <
+ H265PPS::kMaxNumTileColumnWidth);
+ READ_UE_OR_RETURN(&pps->num_tile_rows_minus1);
+ IN_RANGE_OR_RETURN(pps->num_tile_rows_minus1, 0,
+ sps->pic_height_in_ctbs_y - 1);
+ TRUE_OR_RETURN((pps->num_tile_columns_minus1 != 0) ||
+ (pps->num_tile_rows_minus1 != 0));
+ TRUE_OR_RETURN(pps->num_tile_rows_minus1 < H265PPS::kMaxNumTileRowHeight);
+ READ_BOOL_OR_RETURN(&pps->uniform_spacing_flag);
+ if (!pps->uniform_spacing_flag) {
+ pps->column_width_minus1[pps->num_tile_columns_minus1] =
+ sps->pic_width_in_ctbs_y - 1;
+ for (int i = 0; i < pps->num_tile_columns_minus1; ++i) {
+ READ_UE_OR_RETURN(&pps->column_width_minus1[i]);
+ pps->column_width_minus1[pps->num_tile_columns_minus1] -=
+ pps->column_width_minus1[i] + 1;
+ }
+ pps->row_height_minus1[pps->num_tile_rows_minus1] =
+ sps->pic_height_in_ctbs_y - 1;
+ for (int i = 0; i < pps->num_tile_rows_minus1; ++i) {
+ READ_UE_OR_RETURN(&pps->row_height_minus1[i]);
+ pps->row_height_minus1[pps->num_tile_rows_minus1] -=
+ pps->row_height_minus1[i] + 1;
+ }
+ }
+ READ_BOOL_OR_RETURN(&pps->loop_filter_across_tiles_enabled_flag);
+ }
+ READ_BOOL_OR_RETURN(&pps->pps_loop_filter_across_slices_enabled_flag);
+ bool deblocking_filter_control_present_flag;
+ READ_BOOL_OR_RETURN(&deblocking_filter_control_present_flag);
+ if (deblocking_filter_control_present_flag) {
+ READ_BOOL_OR_RETURN(&pps->deblocking_filter_override_enabled_flag);
+ READ_BOOL_OR_RETURN(&pps->pps_deblocking_filter_disabled_flag);
+ if (!pps->pps_deblocking_filter_disabled_flag) {
+ READ_SE_OR_RETURN(&pps->pps_beta_offset_div2);
+ IN_RANGE_OR_RETURN(pps->pps_beta_offset_div2, -6, 6);
+ READ_SE_OR_RETURN(&pps->pps_tc_offset_div2);
+ IN_RANGE_OR_RETURN(pps->pps_tc_offset_div2, -6, 6);
+ }
+ }
+ READ_BOOL_OR_RETURN(&pps->pps_scaling_list_data_present_flag);
+ if (pps->pps_scaling_list_data_present_flag) {
+ res = ParseScalingListData(&pps->scaling_list_data);
+ if (res != kOk)
+ return res;
+ }
+ READ_BOOL_OR_RETURN(&pps->lists_modification_present_flag);
+ READ_UE_OR_RETURN(&pps->log2_parallel_merge_level_minus2);
+ IN_RANGE_OR_RETURN(pps->log2_parallel_merge_level_minus2, 0,
+ sps->ctb_log2_size_y - 2);
+ READ_BOOL_OR_RETURN(&pps->slice_segment_header_extension_present_flag);
+ bool pps_extension_present_flag;
+ READ_BOOL_OR_RETURN(&pps_extension_present_flag);
+ bool pps_range_extension_flag = false;
+ bool pps_multilayer_extension_flag = false;
+ bool pps_3d_extension_flag = false;
+ bool pps_scc_extension_flag = false;
+ if (pps_extension_present_flag) {
+ READ_BOOL_OR_RETURN(&pps_range_extension_flag);
+ READ_BOOL_OR_RETURN(&pps_multilayer_extension_flag);
+ READ_BOOL_OR_RETURN(&pps_3d_extension_flag);
+ READ_BOOL_OR_RETURN(&pps_scc_extension_flag);
+ SKIP_BITS_OR_RETURN(4); // pps_extension_4bits
+ }
+
+ if (pps_range_extension_flag) {
+ DVLOG(1) << "HEVC range extension not supported";
+ return kInvalidStream;
+ }
+ if (pps_multilayer_extension_flag) {
+ DVLOG(1) << "HEVC multilayer extension not supported";
+ return kInvalidStream;
+ }
+ if (pps_3d_extension_flag) {
+ DVLOG(1) << "HEVC 3D extension not supported";
+ return kInvalidStream;
+ }
+ if (pps_scc_extension_flag) {
+ DVLOG(1) << "HEVC SCC extension not supported";
+ return kInvalidStream;
+ }
+
+ // If a PPS with the same id already exists, replace it.
+ *pps_id = pps->pps_pic_parameter_set_id;
+ active_pps_[*pps_id] = std::move(pps);
+
+ return res;
+}
+
+const H265SPS* H265Parser::GetSPS(int sps_id) const {
+ auto it = active_sps_.find(sps_id);
+ if (it == active_sps_.end()) {
+ DVLOG(1) << "Requested a nonexistent SPS id " << sps_id;
+ return nullptr;
+ }
+
+ return it->second.get();
+}
+
+const H265PPS* H265Parser::GetPPS(int pps_id) const {
+ auto it = active_pps_.find(pps_id);
+ if (it == active_pps_.end()) {
+ DVLOG(1) << "Requested a nonexistent PPS id " << pps_id;
+ return nullptr;
+ }
+
+ return it->second.get();
+}
+
+H265Parser::Result H265Parser::ParseSliceHeader(const H265NALU& nalu,
+ H265SliceHeader* shdr) {
+ // 7.4.7 Slice segment header
+ DVLOG(4) << "Parsing slice header";
+ Result res = kOk;
+ const H265SPS* sps;
+ const H265PPS* pps;
+
+ DCHECK(shdr);
+ shdr->nal_unit_type = nalu.nal_unit_type;
+ shdr->nalu_data = nalu.data;
+ shdr->nalu_size = nalu.size;
+
+ READ_BOOL_OR_RETURN(&shdr->first_slice_segment_in_pic_flag);
+ shdr->irap_pic = (shdr->nal_unit_type >= H265NALU::BLA_W_LP &&
+ shdr->nal_unit_type <= H265NALU::RSV_IRAP_VCL23);
+ if (shdr->irap_pic) {
+ READ_BOOL_OR_RETURN(&shdr->no_output_of_prior_pics_flag);
+ }
+ READ_UE_OR_RETURN(&shdr->slice_pic_parameter_set_id);
+ IN_RANGE_OR_RETURN(shdr->slice_pic_parameter_set_id, 0, 63);
+ pps = GetPPS(shdr->slice_pic_parameter_set_id);
+ if (!pps) {
+ return kMissingParameterSet;
+ }
+ sps = GetSPS(pps->pps_seq_parameter_set_id);
+ DCHECK(sps); // We already validated this when we parsed the PPS.
+
+ // Set these defaults if they are not present here.
+ shdr->pic_output_flag = 1;
+ shdr->num_ref_idx_l0_active_minus1 =
+ pps->num_ref_idx_l0_default_active_minus1;
+ shdr->num_ref_idx_l1_active_minus1 =
+ pps->num_ref_idx_l1_default_active_minus1;
+ shdr->collocated_from_l0_flag = 1;
+ shdr->slice_deblocking_filter_disabled_flag =
+ pps->pps_deblocking_filter_disabled_flag;
+ shdr->slice_beta_offset_div2 = pps->pps_beta_offset_div2;
+ shdr->slice_tc_offset_div2 = pps->pps_tc_offset_div2;
+ shdr->slice_loop_filter_across_slices_enabled_flag =
+ pps->pps_loop_filter_across_slices_enabled_flag;
+
+ if (!shdr->first_slice_segment_in_pic_flag) {
+ if (pps->dependent_slice_segments_enabled_flag)
+ READ_BOOL_OR_RETURN(&shdr->dependent_slice_segment_flag);
+ READ_BITS_OR_RETURN(base::bits::Log2Ceiling(sps->pic_size_in_ctbs_y),
+ &shdr->slice_segment_address);
+ IN_RANGE_OR_RETURN(shdr->slice_segment_address, 0,
+ sps->pic_size_in_ctbs_y - 1);
+ }
+ shdr->curr_rps_idx = sps->num_short_term_ref_pic_sets;
+ if (!shdr->dependent_slice_segment_flag) {
+ // slice_reserved_flag
+ SKIP_BITS_OR_RETURN(pps->num_extra_slice_header_bits);
+ READ_UE_OR_RETURN(&shdr->slice_type);
+ if ((shdr->irap_pic ||
+ sps->sps_max_dec_pic_buffering_minus1[pps->temporal_id] == 0) &&
+ nalu.nuh_layer_id == 0) {
+ TRUE_OR_RETURN(shdr->slice_type == 2);
+ }
+ if (pps->output_flag_present_flag)
+ READ_BOOL_OR_RETURN(&shdr->pic_output_flag);
+ if (sps->separate_colour_plane_flag) {
+ READ_BITS_OR_RETURN(2, &shdr->colour_plane_id);
+ IN_RANGE_OR_RETURN(shdr->colour_plane_id, 0, 2);
+ }
+ if (shdr->nal_unit_type != H265NALU::IDR_W_RADL &&
+ shdr->nal_unit_type != H265NALU::IDR_N_LP) {
+ READ_BITS_OR_RETURN(sps->log2_max_pic_order_cnt_lsb_minus4 + 4,
+ &shdr->slice_pic_order_cnt_lsb);
+ IN_RANGE_OR_RETURN(shdr->slice_pic_order_cnt_lsb, 0,
+ sps->max_pic_order_cnt_lsb - 1);
+ READ_BOOL_OR_RETURN(&shdr->short_term_ref_pic_set_sps_flag);
+ if (!shdr->short_term_ref_pic_set_sps_flag) {
+ off_t bits_left_prior = br_.NumBitsLeft();
+ size_t num_epb_prior = br_.NumEmulationPreventionBytesRead();
+ res = ParseStRefPicSet(sps->num_short_term_ref_pic_sets, *sps,
+ &shdr->st_ref_pic_set);
+ if (res != kOk)
+ return res;
+ shdr->st_rps_bits =
+ (bits_left_prior - br_.NumBitsLeft()) -
+ 8 * (br_.NumEmulationPreventionBytesRead() - num_epb_prior);
+ } else if (sps->num_short_term_ref_pic_sets > 1) {
+ READ_BITS_OR_RETURN(
+ base::bits::Log2Ceiling(sps->num_short_term_ref_pic_sets),
+ &shdr->short_term_ref_pic_set_idx);
+ IN_RANGE_OR_RETURN(shdr->short_term_ref_pic_set_idx, 0,
+ sps->num_short_term_ref_pic_sets - 1);
+ }
+
+ if (shdr->short_term_ref_pic_set_sps_flag)
+ shdr->curr_rps_idx = shdr->short_term_ref_pic_set_idx;
+
+ if (sps->long_term_ref_pics_present_flag) {
+ if (sps->num_long_term_ref_pics_sps > 0) {
+ READ_UE_OR_RETURN(&shdr->num_long_term_sps);
+ IN_RANGE_OR_RETURN(shdr->num_long_term_sps, 0,
+ sps->num_long_term_ref_pics_sps);
+ }
+ READ_UE_OR_RETURN(&shdr->num_long_term_pics);
+ if (nalu.nuh_layer_id == 0) {
+ TRUE_OR_RETURN(
+ shdr->num_long_term_pics <=
+ (sps->sps_max_dec_pic_buffering_minus1[pps->temporal_id] -
+ shdr->GetStRefPicSet(sps).num_negative_pics -
+ shdr->GetStRefPicSet(sps).num_positive_pics -
+ shdr->num_long_term_sps));
+ }
+ IN_RANGE_OR_RETURN(shdr->num_long_term_sps + shdr->num_long_term_pics,
+ 0, kMaxLongTermRefPicSets);
+ for (int i = 0; i < shdr->num_long_term_sps + shdr->num_long_term_pics;
+ ++i) {
+ if (i < shdr->num_long_term_sps) {
+ int lt_idx_sps = 0;
+ if (sps->num_long_term_ref_pics_sps > 1) {
+ READ_BITS_OR_RETURN(
+ base::bits::Log2Ceiling(sps->num_long_term_ref_pics_sps),
+ &lt_idx_sps);
+ IN_RANGE_OR_RETURN(lt_idx_sps, 0,
+ sps->num_long_term_ref_pics_sps - 1);
+ }
+ shdr->poc_lsb_lt[i] = sps->lt_ref_pic_poc_lsb_sps[lt_idx_sps];
+ shdr->used_by_curr_pic_lt[i] =
+ sps->used_by_curr_pic_lt_sps_flag[lt_idx_sps];
+ } else {
+ READ_BITS_OR_RETURN(sps->log2_max_pic_order_cnt_lsb_minus4 + 4,
+ &shdr->poc_lsb_lt[i]);
+ READ_BOOL_OR_RETURN(&shdr->used_by_curr_pic_lt[i]);
+ }
+ READ_BOOL_OR_RETURN(&shdr->delta_poc_msb_present_flag[i]);
+ if (shdr->delta_poc_msb_present_flag[i]) {
+ READ_UE_OR_RETURN(&shdr->delta_poc_msb_cycle_lt[i]);
+ IN_RANGE_OR_RETURN(
+ shdr->delta_poc_msb_cycle_lt[i], 0,
+ std::pow(2, 32 - sps->log2_max_pic_order_cnt_lsb_minus4 - 4));
+ // Equation 7-52.
+ if (i != 0 && i != shdr->num_long_term_sps) {
+ shdr->delta_poc_msb_cycle_lt[i] =
+ shdr->delta_poc_msb_cycle_lt[i] +
+ shdr->delta_poc_msb_cycle_lt[i - 1];
+ }
+ }
+ }
+ }
+ if (sps->sps_temporal_mvp_enabled_flag)
+ READ_BOOL_OR_RETURN(&shdr->slice_temporal_mvp_enabled_flag);
+ }
+ if (sps->sample_adaptive_offset_enabled_flag) {
+ READ_BOOL_OR_RETURN(&shdr->slice_sao_luma_flag);
+ if (sps->chroma_array_type != 0)
+ READ_BOOL_OR_RETURN(&shdr->slice_sao_chroma_flag);
+ }
+ if (shdr->IsPSlice() || shdr->IsBSlice()) {
+ READ_BOOL_OR_RETURN(&shdr->num_ref_idx_active_override_flag);
+ if (shdr->num_ref_idx_active_override_flag) {
+ READ_UE_OR_RETURN(&shdr->num_ref_idx_l0_active_minus1);
+ IN_RANGE_OR_RETURN(shdr->num_ref_idx_l0_active_minus1, 0,
+ kMaxRefIdxActive - 1);
+ if (shdr->IsBSlice()) {
+ READ_UE_OR_RETURN(&shdr->num_ref_idx_l1_active_minus1);
+ IN_RANGE_OR_RETURN(shdr->num_ref_idx_l1_active_minus1, 0,
+ kMaxRefIdxActive - 1);
+ }
+ }
+
+ shdr->num_pic_total_curr = 0;
+ const H265StRefPicSet& st_ref_pic = shdr->GetStRefPicSet(sps);
+ for (int i = 0; i < st_ref_pic.num_negative_pics; ++i) {
+ if (st_ref_pic.used_by_curr_pic_s0[i])
+ shdr->num_pic_total_curr++;
+ }
+ for (int i = 0; i < st_ref_pic.num_positive_pics; ++i) {
+ if (st_ref_pic.used_by_curr_pic_s1[i])
+ shdr->num_pic_total_curr++;
+ }
+ for (int i = 0; i < shdr->num_long_term_sps + shdr->num_long_term_pics;
+ ++i) {
+ if (shdr->used_by_curr_pic_lt[i])
+ shdr->num_pic_total_curr++;
+ }
+
+ if (pps->lists_modification_present_flag &&
+ shdr->num_pic_total_curr > 1) {
+ res = ParseRefPicListsModifications(*shdr,
+ &shdr->ref_pic_lists_modification);
+ if (res != kOk)
+ return res;
+ }
+ if (shdr->IsBSlice())
+ READ_BOOL_OR_RETURN(&shdr->mvd_l1_zero_flag);
+ if (pps->cabac_init_present_flag)
+ READ_BOOL_OR_RETURN(&shdr->cabac_init_flag);
+ if (shdr->slice_temporal_mvp_enabled_flag) {
+ if (shdr->IsBSlice())
+ READ_BOOL_OR_RETURN(&shdr->collocated_from_l0_flag);
+ if ((shdr->collocated_from_l0_flag &&
+ shdr->num_ref_idx_l0_active_minus1 > 0) ||
+ (!shdr->collocated_from_l0_flag &&
+ shdr->num_ref_idx_l1_active_minus1 > 0)) {
+ READ_UE_OR_RETURN(&shdr->collocated_ref_idx);
+ if ((shdr->IsPSlice() || shdr->IsBSlice()) &&
+ shdr->collocated_from_l0_flag) {
+ IN_RANGE_OR_RETURN(shdr->collocated_ref_idx, 0,
+ shdr->num_ref_idx_l0_active_minus1);
+ }
+ if (shdr->IsBSlice() && !shdr->collocated_from_l0_flag) {
+ IN_RANGE_OR_RETURN(shdr->collocated_ref_idx, 0,
+ shdr->num_ref_idx_l1_active_minus1);
+ }
+ }
+ }
+
+ if ((pps->weighted_pred_flag && shdr->IsPSlice()) ||
+ (pps->weighted_bipred_flag && shdr->IsBSlice())) {
+ res = ParsePredWeightTable(*sps, *shdr, &shdr->pred_weight_table);
+ if (res != kOk)
+ return res;
+ }
+ READ_UE_OR_RETURN(&shdr->five_minus_max_num_merge_cand);
+ IN_RANGE_OR_RETURN(5 - shdr->five_minus_max_num_merge_cand, 1, 5);
+ }
+ READ_SE_OR_RETURN(&shdr->slice_qp_delta);
+ IN_RANGE_OR_RETURN(26 + pps->init_qp_minus26 + shdr->slice_qp_delta,
+ -pps->qp_bd_offset_y, 51);
+
+ if (pps->pps_slice_chroma_qp_offsets_present_flag) {
+ READ_SE_OR_RETURN(&shdr->slice_cb_qp_offset);
+ IN_RANGE_OR_RETURN(shdr->slice_cb_qp_offset, -12, 12);
+ IN_RANGE_OR_RETURN(pps->pps_cb_qp_offset + shdr->slice_cb_qp_offset, -12,
+ 12);
+ READ_SE_OR_RETURN(&shdr->slice_cr_qp_offset);
+ IN_RANGE_OR_RETURN(shdr->slice_cr_qp_offset, -12, 12);
+ IN_RANGE_OR_RETURN(pps->pps_cr_qp_offset + shdr->slice_cr_qp_offset, -12,
+ 12);
+ }
+
+  // pps_slice_act_qp_offsets_present_flag is zero; we don't support SCC ext.
+
+  // chroma_qp_offset_list_enabled_flag is zero; we don't support range ext.
+
+ bool deblocking_filter_override_flag = false;
+ if (pps->deblocking_filter_override_enabled_flag)
+ READ_BOOL_OR_RETURN(&deblocking_filter_override_flag);
+ if (deblocking_filter_override_flag) {
+ READ_BOOL_OR_RETURN(&shdr->slice_deblocking_filter_disabled_flag);
+ if (!shdr->slice_deblocking_filter_disabled_flag) {
+ READ_SE_OR_RETURN(&shdr->slice_beta_offset_div2);
+ IN_RANGE_OR_RETURN(shdr->slice_beta_offset_div2, -6, 6);
+ READ_SE_OR_RETURN(&shdr->slice_tc_offset_div2);
+ IN_RANGE_OR_RETURN(shdr->slice_tc_offset_div2, -6, 6);
+ }
+ }
+ if (pps->pps_loop_filter_across_slices_enabled_flag &&
+ (shdr->slice_sao_luma_flag || shdr->slice_sao_chroma_flag ||
+ !shdr->slice_deblocking_filter_disabled_flag)) {
+ READ_BOOL_OR_RETURN(&shdr->slice_loop_filter_across_slices_enabled_flag);
+ }
+ }
+
+ if (pps->tiles_enabled_flag || pps->entropy_coding_sync_enabled_flag) {
+ int num_entry_point_offsets;
+ READ_UE_OR_RETURN(&num_entry_point_offsets);
+ if (!pps->tiles_enabled_flag) {
+ IN_RANGE_OR_RETURN(num_entry_point_offsets, 0,
+ sps->pic_height_in_ctbs_y - 1);
+ } else if (!pps->entropy_coding_sync_enabled_flag) {
+ IN_RANGE_OR_RETURN(
+ num_entry_point_offsets, 0,
+ (pps->num_tile_columns_minus1 + 1) * (pps->num_tile_rows_minus1 + 1) -
+ 1);
+ } else { // both are true
+ IN_RANGE_OR_RETURN(
+ num_entry_point_offsets, 0,
+ (pps->num_tile_columns_minus1 + 1) * sps->pic_height_in_ctbs_y - 1);
+ }
+ if (num_entry_point_offsets > 0) {
+ int offset_len_minus1;
+ READ_UE_OR_RETURN(&offset_len_minus1);
+ IN_RANGE_OR_RETURN(offset_len_minus1, 0, 31);
+ SKIP_BITS_OR_RETURN(num_entry_point_offsets * (offset_len_minus1 + 1));
+ }
+ }
+
+ if (pps->slice_segment_header_extension_present_flag) {
+ int slice_segment_header_extension_length;
+ READ_UE_OR_RETURN(&slice_segment_header_extension_length);
+ IN_RANGE_OR_RETURN(slice_segment_header_extension_length, 0, 256);
+ SKIP_BITS_OR_RETURN(slice_segment_header_extension_length * 8);
+ }
+
+ // byte_alignment()
+ SKIP_BITS_OR_RETURN(1); // alignment bit
+ int bits_left_to_align = br_.NumBitsLeft() % 8;
+ if (bits_left_to_align)
+ SKIP_BITS_OR_RETURN(bits_left_to_align);
+
+ shdr->header_emulation_prevention_bytes =
+ br_.NumEmulationPreventionBytesRead();
+ shdr->header_size = shdr->nalu_size -
+ shdr->header_emulation_prevention_bytes -
+ br_.NumBitsLeft() / 8;
+ return res;
+}
+
+// static
+VideoCodecProfile H265Parser::ProfileIDCToVideoCodecProfile(int profile_idc) {
+ switch (profile_idc) {
+ case H265ProfileTierLevel::kProfileIdcMain:
+ return HEVCPROFILE_MAIN;
+ case H265ProfileTierLevel::kProfileIdcMain10:
+ return HEVCPROFILE_MAIN10;
+ case H265ProfileTierLevel::kProfileIdcMainStill:
+ return HEVCPROFILE_MAIN_STILL_PICTURE;
+ default:
+ DVLOG(1) << "unknown video profile: " << profile_idc;
+ return VIDEO_CODEC_PROFILE_UNKNOWN;
+ }
+}
+
+std::vector<SubsampleEntry> H265Parser::GetCurrentSubsamples() {
+ DCHECK_EQ(previous_nalu_range_.size(), 1u)
+ << "This should only be called after a "
+ "successful call to AdvanceToNextNalu()";
+
+ auto intersection = encrypted_ranges_.IntersectionWith(previous_nalu_range_);
+ return EncryptedRangesToSubsampleEntry(
+ previous_nalu_range_.start(0), previous_nalu_range_.end(0), intersection);
+}
+
+H265Parser::Result H265Parser::ParseProfileTierLevel(
+ bool profile_present,
+ int max_num_sub_layers_minus1,
+ H265ProfileTierLevel* profile_tier_level) {
+ // 7.4.4
+ DVLOG(4) << "Parsing profile_tier_level";
+ if (profile_present) {
+ int general_profile_space;
+ READ_BITS_OR_RETURN(2, &general_profile_space);
+ TRUE_OR_RETURN(general_profile_space == 0);
+ SKIP_BITS_OR_RETURN(1); // general_tier_flag
+ READ_BITS_OR_RETURN(5, &profile_tier_level->general_profile_idc);
+ IN_RANGE_OR_RETURN(profile_tier_level->general_profile_idc, 0, 11);
+ bool general_profile_compatibility_flag[32];
+ for (int j = 0; j < 32; ++j) {
+ READ_BOOL_OR_RETURN(&general_profile_compatibility_flag[j]);
+ }
+ bool general_progressive_source_flag;
+ bool general_interlaced_source_flag;
+ READ_BOOL_OR_RETURN(&general_progressive_source_flag);
+ READ_BOOL_OR_RETURN(&general_interlaced_source_flag);
+ if (!general_progressive_source_flag && general_interlaced_source_flag) {
+ DVLOG(1) << "Interlaced streams not supported";
+ return kUnsupportedStream;
+ }
+ SKIP_BITS_OR_RETURN(2); // general_{non_packed,frame_only}_constraint_flag
+ // Skip the compatibility flags, they are always 43 bits.
+ SKIP_BITS_OR_RETURN(43);
+ SKIP_BITS_OR_RETURN(1); // general_inbld_flag
+ }
+ READ_BITS_OR_RETURN(8, &profile_tier_level->general_level_idc);
+ bool sub_layer_profile_present_flag[8];
+ bool sub_layer_level_present_flag[8];
+ for (int i = 0; i < max_num_sub_layers_minus1; ++i) {
+ READ_BOOL_OR_RETURN(&sub_layer_profile_present_flag[i]);
+ READ_BOOL_OR_RETURN(&sub_layer_level_present_flag[i]);
+ }
+ if (max_num_sub_layers_minus1 > 0) {
+ for (int i = max_num_sub_layers_minus1; i < 8; i++) {
+ SKIP_BITS_OR_RETURN(2);
+ }
+ }
+ for (int i = 0; i < max_num_sub_layers_minus1; i++) {
+ if (sub_layer_profile_present_flag[i]) {
+ SKIP_BITS_OR_RETURN(2); // sub_layer_profile_space
+ SKIP_BITS_OR_RETURN(1); // sub_layer_tier_flag
+ SKIP_BITS_OR_RETURN(5); // sub_layer_profile_idc
+ SKIP_BITS_OR_RETURN(32); // sub_layer_profile_compatibility_flag
+ SKIP_BITS_OR_RETURN(2); // sub_layer_{progressive,interlaced}_source_flag
+ // Ignore sub_layer_non_packed_constraint_flag and
+ // sub_layer_frame_only_constraint_flag.
+ SKIP_BITS_OR_RETURN(2);
+ // Skip the compatibility flags, they are always 43 bits.
+ SKIP_BITS_OR_RETURN(43);
+ SKIP_BITS_OR_RETURN(1); // sub_layer_inbld_flag
+ }
+ if (sub_layer_level_present_flag[i]) {
+ SKIP_BITS_OR_RETURN(8); // sub_layer_level_idc
+ }
+ }
+
+ return kOk;
+}
+
+H265Parser::Result H265Parser::ParseScalingListData(
+ H265ScalingListData* scaling_list_data) {
+ for (int size_id = 0; size_id < 4; ++size_id) {
+ for (int matrix_id = 0; matrix_id < 6;
+ matrix_id += (size_id == 3) ? 3 : 1) {
+ bool scaling_list_pred_mode_flag;
+ READ_BOOL_OR_RETURN(&scaling_list_pred_mode_flag);
+ if (!scaling_list_pred_mode_flag) {
+ int scaling_list_pred_matrix_id_delta;
+ READ_UE_OR_RETURN(&scaling_list_pred_matrix_id_delta);
+ if (size_id <= 2) {
+ IN_RANGE_OR_RETURN(scaling_list_pred_matrix_id_delta, 0, matrix_id);
+ } else { // size_id == 3
+ IN_RANGE_OR_RETURN(scaling_list_pred_matrix_id_delta, 0,
+ matrix_id / 3);
+ }
+ if (scaling_list_pred_matrix_id_delta == 0) {
+ FillInDefaultScalingListData(scaling_list_data, size_id, matrix_id);
+ } else {
+ int ref_matrix_id = matrix_id - scaling_list_pred_matrix_id_delta *
+ (size_id == 3 ? 3 : 1);
+ int* dst;
+ int* src;
+ int count = H265ScalingListData::kScalingListSizeId1To3Count;
+ switch (size_id) {
+ case 0:
+ src = scaling_list_data->scaling_list_4x4[ref_matrix_id];
+ dst = scaling_list_data->scaling_list_4x4[matrix_id];
+ count = H265ScalingListData::kScalingListSizeId0Count;
+ break;
+ case 1:
+ src = scaling_list_data->scaling_list_8x8[ref_matrix_id];
+ dst = scaling_list_data->scaling_list_8x8[matrix_id];
+ break;
+ case 2:
+ src = scaling_list_data->scaling_list_16x16[ref_matrix_id];
+ dst = scaling_list_data->scaling_list_16x16[matrix_id];
+ break;
+ case 3:
+ src = scaling_list_data->scaling_list_32x32[ref_matrix_id];
+ dst = scaling_list_data->scaling_list_32x32[matrix_id];
+ break;
+ }
+ memcpy(dst, src, count * sizeof(*src));
+
+ if (size_id == 2) {
+ scaling_list_data->scaling_list_dc_coef_16x16[matrix_id] =
+ scaling_list_data->scaling_list_dc_coef_16x16[ref_matrix_id];
+ } else if (size_id == 3) {
+ scaling_list_data->scaling_list_dc_coef_32x32[matrix_id] =
+ scaling_list_data->scaling_list_dc_coef_32x32[ref_matrix_id];
+ }
+ }
+ } else {
+ int next_coef = 8;
+ int coef_num = std::min(64, (1 << (4 + (size_id << 1))));
+ if (size_id > 1) {
+ if (size_id == 2) {
+ READ_SE_OR_RETURN(
+ &scaling_list_data->scaling_list_dc_coef_16x16[matrix_id]);
+ IN_RANGE_OR_RETURN(
+ scaling_list_data->scaling_list_dc_coef_16x16[matrix_id], -7,
+ 247);
+          // Parsed as minus8 in the bitstream; add 8 to recover the value.
+ scaling_list_data->scaling_list_dc_coef_16x16[matrix_id] += 8;
+ next_coef =
+ scaling_list_data->scaling_list_dc_coef_16x16[matrix_id];
+ } else { // size_id == 3
+ READ_SE_OR_RETURN(
+ &scaling_list_data->scaling_list_dc_coef_32x32[matrix_id]);
+ IN_RANGE_OR_RETURN(
+ scaling_list_data->scaling_list_dc_coef_32x32[matrix_id], -7,
+ 247);
+          // Parsed as minus8 in the bitstream; add 8 to recover the value.
+ scaling_list_data->scaling_list_dc_coef_32x32[matrix_id] += 8;
+ next_coef =
+ scaling_list_data->scaling_list_dc_coef_32x32[matrix_id];
+ }
+ }
+ for (int i = 0; i < coef_num; ++i) {
+ int scaling_list_delta_coef;
+ READ_SE_OR_RETURN(&scaling_list_delta_coef);
+ IN_RANGE_OR_RETURN(scaling_list_delta_coef, -128, 127);
+ next_coef = (next_coef + scaling_list_delta_coef + 256) % 256;
+ switch (size_id) {
+ case 0:
+ scaling_list_data->scaling_list_4x4[matrix_id][i] = next_coef;
+ break;
+ case 1:
+ scaling_list_data->scaling_list_8x8[matrix_id][i] = next_coef;
+ break;
+ case 2:
+ scaling_list_data->scaling_list_16x16[matrix_id][i] = next_coef;
+ break;
+ case 3:
+ scaling_list_data->scaling_list_32x32[matrix_id][i] = next_coef;
+ break;
+ }
+ }
+ }
+ }
+ }
+ return kOk;
+}
+
+H265Parser::Result H265Parser::ParseStRefPicSet(
+ int st_rps_idx,
+ const H265SPS& sps,
+ H265StRefPicSet* st_ref_pic_set) {
+ // 7.4.8
+ bool inter_ref_pic_set_prediction_flag = false;
+ if (st_rps_idx != 0) {
+ READ_BOOL_OR_RETURN(&inter_ref_pic_set_prediction_flag);
+ }
+ if (inter_ref_pic_set_prediction_flag) {
+ int delta_idx_minus1 = 0;
+ if (st_rps_idx == sps.num_short_term_ref_pic_sets) {
+ READ_UE_OR_RETURN(&delta_idx_minus1);
+ IN_RANGE_OR_RETURN(delta_idx_minus1, 0, st_rps_idx - 1);
+ }
+ int ref_rps_idx = st_rps_idx - (delta_idx_minus1 + 1);
+ int delta_rps_sign;
+ int abs_delta_rps_minus1;
+ READ_BOOL_OR_RETURN(&delta_rps_sign);
+ READ_UE_OR_RETURN(&abs_delta_rps_minus1);
+ int delta_rps = (1 - 2 * delta_rps_sign) * (abs_delta_rps_minus1 + 1);
+ const H265StRefPicSet& ref_set = sps.st_ref_pic_set[ref_rps_idx];
+ bool used_by_curr_pic_flag[kMaxShortTermRefPicSets];
+ bool use_delta_flag[kMaxShortTermRefPicSets];
+ // 7.4.8 - use_delta_flag defaults to 1 if not present.
+ std::fill_n(use_delta_flag, kMaxShortTermRefPicSets, true);
+
+ for (int j = 0; j <= ref_set.num_delta_pocs; j++) {
+ READ_BOOL_OR_RETURN(&used_by_curr_pic_flag[j]);
+ if (!used_by_curr_pic_flag[j]) {
+ READ_BOOL_OR_RETURN(&use_delta_flag[j]);
+ }
+ }
+ // Calculate delta_poc_s{0,1}, used_by_curr_pic_s{0,1}, num_negative_pics
+ // and num_positive_pics.
+ // Equation 7-61
+ int i = 0;
+ for (int j = ref_set.num_positive_pics - 1; j >= 0; --j) {
+ int d_poc = ref_set.delta_poc_s1[j] + delta_rps;
+ if (d_poc < 0 && use_delta_flag[ref_set.num_negative_pics + j]) {
+ st_ref_pic_set->delta_poc_s0[i] = d_poc;
+ st_ref_pic_set->used_by_curr_pic_s0[i++] =
+ used_by_curr_pic_flag[ref_set.num_negative_pics + j];
+ }
+ }
+ if (delta_rps < 0 && use_delta_flag[ref_set.num_delta_pocs]) {
+ st_ref_pic_set->delta_poc_s0[i] = delta_rps;
+ st_ref_pic_set->used_by_curr_pic_s0[i++] =
+ used_by_curr_pic_flag[ref_set.num_delta_pocs];
+ }
+ for (int j = 0; j < ref_set.num_negative_pics; ++j) {
+ int d_poc = ref_set.delta_poc_s0[j] + delta_rps;
+ if (d_poc < 0 && use_delta_flag[j]) {
+ st_ref_pic_set->delta_poc_s0[i] = d_poc;
+ st_ref_pic_set->used_by_curr_pic_s0[i++] = used_by_curr_pic_flag[j];
+ }
+ }
+ st_ref_pic_set->num_negative_pics = i;
+ // Equation 7-62
+ i = 0;
+ for (int j = ref_set.num_negative_pics - 1; j >= 0; --j) {
+ int d_poc = ref_set.delta_poc_s0[j] + delta_rps;
+ if (d_poc > 0 && use_delta_flag[j]) {
+ st_ref_pic_set->delta_poc_s1[i] = d_poc;
+ st_ref_pic_set->used_by_curr_pic_s1[i++] = used_by_curr_pic_flag[j];
+ }
+ }
+ if (delta_rps > 0 && use_delta_flag[ref_set.num_delta_pocs]) {
+ st_ref_pic_set->delta_poc_s1[i] = delta_rps;
+ st_ref_pic_set->used_by_curr_pic_s1[i++] =
+ used_by_curr_pic_flag[ref_set.num_delta_pocs];
+ }
+ for (int j = 0; j < ref_set.num_positive_pics; ++j) {
+ int d_poc = ref_set.delta_poc_s1[j] + delta_rps;
+ if (d_poc > 0 && use_delta_flag[ref_set.num_negative_pics + j]) {
+ st_ref_pic_set->delta_poc_s1[i] = d_poc;
+ st_ref_pic_set->used_by_curr_pic_s1[i++] =
+ used_by_curr_pic_flag[ref_set.num_negative_pics + j];
+ }
+ }
+ st_ref_pic_set->num_positive_pics = i;
+ } else {
+ READ_UE_OR_RETURN(&st_ref_pic_set->num_negative_pics);
+ READ_UE_OR_RETURN(&st_ref_pic_set->num_positive_pics);
+ IN_RANGE_OR_RETURN(
+ st_ref_pic_set->num_negative_pics, 0,
+ sps.sps_max_dec_pic_buffering_minus1[sps.sps_max_sub_layers_minus1]);
+ IN_RANGE_OR_RETURN(
+ st_ref_pic_set->num_positive_pics, 0,
+ sps.sps_max_dec_pic_buffering_minus1[sps.sps_max_sub_layers_minus1] -
+ st_ref_pic_set->num_negative_pics);
+ for (int i = 0; i < st_ref_pic_set->num_negative_pics; ++i) {
+ int delta_poc_s0_minus1;
+ READ_UE_OR_RETURN(&delta_poc_s0_minus1);
+ if (i == 0) {
+ st_ref_pic_set->delta_poc_s0[i] = -(delta_poc_s0_minus1 + 1);
+ } else {
+ st_ref_pic_set->delta_poc_s0[i] =
+ st_ref_pic_set->delta_poc_s0[i - 1] - (delta_poc_s0_minus1 + 1);
+ }
+ READ_BOOL_OR_RETURN(&st_ref_pic_set->used_by_curr_pic_s0[i]);
+ }
+ for (int i = 0; i < st_ref_pic_set->num_positive_pics; ++i) {
+ int delta_poc_s1_minus1;
+ READ_UE_OR_RETURN(&delta_poc_s1_minus1);
+ if (i == 0) {
+ st_ref_pic_set->delta_poc_s1[i] = delta_poc_s1_minus1 + 1;
+ } else {
+ st_ref_pic_set->delta_poc_s1[i] =
+ st_ref_pic_set->delta_poc_s1[i - 1] + delta_poc_s1_minus1 + 1;
+ }
+ READ_BOOL_OR_RETURN(&st_ref_pic_set->used_by_curr_pic_s1[i]);
+ }
+ }
+ // Calculate num_delta_pocs.
+ st_ref_pic_set->num_delta_pocs =
+ st_ref_pic_set->num_negative_pics + st_ref_pic_set->num_positive_pics;
+ return kOk;
+}
+
+H265Parser::Result H265Parser::ParseVuiParameters(const H265SPS& sps,
+ H265VUIParameters* vui) {
+ Result res = kOk;
+ bool aspect_ratio_info_present_flag;
+ READ_BOOL_OR_RETURN(&aspect_ratio_info_present_flag);
+ if (aspect_ratio_info_present_flag) {
+ int aspect_ratio_idc;
+ READ_BITS_OR_RETURN(8, &aspect_ratio_idc);
+ constexpr int kExtendedSar = 255;
+ if (aspect_ratio_idc == kExtendedSar) {
+ READ_BITS_OR_RETURN(16, &vui->sar_width);
+ READ_BITS_OR_RETURN(16, &vui->sar_height);
+ } else {
+ const int max_aspect_ratio_idc = base::size(kTableSarWidth) - 1;
+ IN_RANGE_OR_RETURN(aspect_ratio_idc, 0, max_aspect_ratio_idc);
+ vui->sar_width = kTableSarWidth[aspect_ratio_idc];
+ vui->sar_height = kTableSarHeight[aspect_ratio_idc];
+ }
+ }
+
+ int data;
+ // Read and ignore overscan info.
+ READ_BOOL_OR_RETURN(&data); // overscan_info_present_flag
+ if (data)
+ SKIP_BITS_OR_RETURN(1); // overscan_appropriate_flag
+
+ bool video_signal_type_present_flag;
+ READ_BOOL_OR_RETURN(&video_signal_type_present_flag);
+ if (video_signal_type_present_flag) {
+ SKIP_BITS_OR_RETURN(3); // video_format
+ READ_BOOL_OR_RETURN(&vui->video_full_range_flag);
+ READ_BOOL_OR_RETURN(&vui->colour_description_present_flag);
+ if (vui->colour_description_present_flag) {
+ // color description syntax elements
+ READ_BITS_OR_RETURN(8, &vui->colour_primaries);
+ READ_BITS_OR_RETURN(8, &vui->transfer_characteristics);
+ READ_BITS_OR_RETURN(8, &vui->matrix_coeffs);
+ }
+ }
+
+ READ_BOOL_OR_RETURN(&data); // chroma_loc_info_present_flag
+ if (data) {
+ READ_UE_OR_RETURN(&data); // chroma_sample_loc_type_top_field
+ READ_UE_OR_RETURN(&data); // chroma_sample_loc_type_bottom_field
+ }
+
+ // Ignore neutral_chroma_indication_flag, field_seq_flag and
+ // frame_field_info_present_flag.
+ SKIP_BITS_OR_RETURN(3);
+
+ bool default_display_window_flag;
+ READ_BOOL_OR_RETURN(&default_display_window_flag);
+ if (default_display_window_flag) {
+ READ_UE_OR_RETURN(&vui->def_disp_win_left_offset);
+ READ_UE_OR_RETURN(&vui->def_disp_win_right_offset);
+ READ_UE_OR_RETURN(&vui->def_disp_win_top_offset);
+ READ_UE_OR_RETURN(&vui->def_disp_win_bottom_offset);
+ }
+
+ // Read and ignore timing info.
+ READ_BOOL_OR_RETURN(&data); // timing_info_present_flag
+ if (data) {
+ SKIP_BITS_OR_RETURN(32); // vui_num_units_in_tick
+ SKIP_BITS_OR_RETURN(32); // vui_time_scale
+ READ_BOOL_OR_RETURN(&data); // vui_poc_proportional_to_timing_flag
+ if (data)
+ READ_UE_OR_RETURN(&data); // vui_num_ticks_poc_diff_one_minus1
+ res = ParseAndIgnoreHrdParameters(true, sps.sps_max_sub_layers_minus1);
+ if (res != kOk)
+ return res;
+ }
+
+ bool bitstream_restriction_flag;
+ READ_BOOL_OR_RETURN(&bitstream_restriction_flag);
+ if (bitstream_restriction_flag) {
+ // Skip tiles_fixed_structure_flag, motion_vectors_over_pic_boundaries_flag
+ // and restricted_ref_pic_lists_flag.
+ SKIP_BITS_OR_RETURN(3);
+ READ_UE_OR_RETURN(&data); // min_spatial_segmentation_idc
+ READ_UE_OR_RETURN(&data); // max_bytes_per_pic_denom
+ READ_UE_OR_RETURN(&data); // max_bits_per_min_cu_denom
+ READ_UE_OR_RETURN(&data); // log2_max_mv_length_horizontal
+ READ_UE_OR_RETURN(&data); // log2_max_mv_length_vertical
+ }
+
+ return kOk;
+}
+
+H265Parser::Result H265Parser::ParseAndIgnoreHrdParameters(
+ bool common_inf_present_flag,
+ int max_num_sub_layers_minus1) {
+ Result res = kOk;
+ int data;
+ READ_BOOL_OR_RETURN(&data); // present_flag
+ if (!data)
+ return res;
+
+ bool nal_hrd_parameters_present_flag = false;
+ bool vcl_hrd_parameters_present_flag = false;
+ bool sub_pic_hrd_params_present_flag = false;
+ if (common_inf_present_flag) {
+ READ_BOOL_OR_RETURN(&nal_hrd_parameters_present_flag);
+ READ_BOOL_OR_RETURN(&vcl_hrd_parameters_present_flag);
+ if (nal_hrd_parameters_present_flag || vcl_hrd_parameters_present_flag) {
+ READ_BOOL_OR_RETURN(&sub_pic_hrd_params_present_flag);
+ if (sub_pic_hrd_params_present_flag) {
+ SKIP_BITS_OR_RETURN(8); // tick_divisor_minus2
+ SKIP_BITS_OR_RETURN(5); // du_cpb_removal_delay_increment_length_minus1
+ SKIP_BITS_OR_RETURN(1); // sub_pic_cpb_params_in_pic_timing_sei_flag
+ SKIP_BITS_OR_RETURN(5); // dpb_output_delay_du_length_minus1
+ }
+        SKIP_BITS_OR_RETURN(4); // bit_rate_scale
+        SKIP_BITS_OR_RETURN(4); // cpb_size_scale
+ if (sub_pic_hrd_params_present_flag)
+ SKIP_BITS_OR_RETURN(4); // cpb_size_du_scale
+ SKIP_BITS_OR_RETURN(5); // initial_cpb_removal_delay_length_minus1
+ SKIP_BITS_OR_RETURN(5); // au_cpb_removal_delay_length_minus1
+ SKIP_BITS_OR_RETURN(5); // dpb_output_delay_length_minus1
+ }
+ }
+ for (int i = 0; i <= max_num_sub_layers_minus1; ++i) {
+ bool fixed_pic_rate_flag;
+ READ_BOOL_OR_RETURN(&fixed_pic_rate_flag); // general
+ if (!fixed_pic_rate_flag)
+ READ_BOOL_OR_RETURN(&fixed_pic_rate_flag); // within_cvs
+ bool low_delay_hrd_flag = false;
+ if (fixed_pic_rate_flag)
+ READ_UE_OR_RETURN(&data); // elemental_duration_in_tc_minus1
+ else
+ READ_BOOL_OR_RETURN(&low_delay_hrd_flag);
+ int cpb_cnt = 1;
+ if (!low_delay_hrd_flag) {
+ READ_UE_OR_RETURN(&cpb_cnt);
+ cpb_cnt += 1; // parsed as minus1
+ }
+ if (nal_hrd_parameters_present_flag) {
+ res = ParseAndIgnoreSubLayerHrdParameters(
+ cpb_cnt, sub_pic_hrd_params_present_flag);
+ if (res != kOk)
+ return res;
+ }
+ if (vcl_hrd_parameters_present_flag) {
+ res = ParseAndIgnoreSubLayerHrdParameters(
+ cpb_cnt, sub_pic_hrd_params_present_flag);
+ if (res != kOk)
+ return res;
+ }
+ }
+ return res;
+}
+
+H265Parser::Result H265Parser::ParseAndIgnoreSubLayerHrdParameters(
+ int cpb_cnt,
+ bool sub_pic_hrd_params_present_flag) {
+ int data;
+ for (int i = 0; i < cpb_cnt; ++i) {
+ READ_UE_OR_RETURN(&data); // bit_rate_value_minus1[i]
+ READ_UE_OR_RETURN(&data); // cpb_size_value_minus1[i]
+ if (sub_pic_hrd_params_present_flag) {
+ READ_UE_OR_RETURN(&data); // cpb_size_du_value_minus1[i]
+ READ_UE_OR_RETURN(&data); // bit_rate_du_value_minus1[i]
+ }
+ SKIP_BITS_OR_RETURN(1); // cbr_flag[i]
+ }
+ return kOk;
+}
+
+H265Parser::Result H265Parser::ParseRefPicListsModifications(
+ const H265SliceHeader& shdr,
+ H265RefPicListsModifications* rpl_mod) {
+ READ_BOOL_OR_RETURN(&rpl_mod->ref_pic_list_modification_flag_l0);
+ if (rpl_mod->ref_pic_list_modification_flag_l0) {
+ for (int i = 0; i <= shdr.num_ref_idx_l0_active_minus1; ++i) {
+ READ_BITS_OR_RETURN(base::bits::Log2Ceiling(shdr.num_pic_total_curr),
+ &rpl_mod->list_entry_l0[i]);
+ IN_RANGE_OR_RETURN(rpl_mod->list_entry_l0[i], 0,
+ shdr.num_pic_total_curr - 1);
+ }
+ }
+ if (shdr.IsBSlice()) {
+ READ_BOOL_OR_RETURN(&rpl_mod->ref_pic_list_modification_flag_l1);
+ if (rpl_mod->ref_pic_list_modification_flag_l1) {
+ for (int i = 0; i <= shdr.num_ref_idx_l1_active_minus1; ++i) {
+ READ_BITS_OR_RETURN(base::bits::Log2Ceiling(shdr.num_pic_total_curr),
+ &rpl_mod->list_entry_l1[i]);
+ IN_RANGE_OR_RETURN(rpl_mod->list_entry_l1[i], 0,
+ shdr.num_pic_total_curr - 1);
+ }
+ }
+ }
+ return kOk;
+}
+
+H265Parser::Result H265Parser::ParsePredWeightTable(
+ const H265SPS& sps,
+ const H265SliceHeader& shdr,
+ H265PredWeightTable* pred_weight_table) {
+ // 7.4.6.3 Weighted prediction parameters semantics
+ READ_UE_OR_RETURN(&pred_weight_table->luma_log2_weight_denom);
+ IN_RANGE_OR_RETURN(pred_weight_table->luma_log2_weight_denom, 0, 7);
+ if (sps.chroma_array_type) {
+ READ_SE_OR_RETURN(&pred_weight_table->delta_chroma_log2_weight_denom);
+ pred_weight_table->chroma_log2_weight_denom =
+ pred_weight_table->delta_chroma_log2_weight_denom +
+ pred_weight_table->luma_log2_weight_denom;
+ IN_RANGE_OR_RETURN(pred_weight_table->chroma_log2_weight_denom, 0, 7);
+ }
+ bool luma_weight_flag[kMaxRefIdxActive];
+ bool chroma_weight_flag[kMaxRefIdxActive];
+ memset(chroma_weight_flag, 0, sizeof(chroma_weight_flag));
+ for (int i = 0; i <= shdr.num_ref_idx_l0_active_minus1; ++i) {
+ READ_BOOL_OR_RETURN(&luma_weight_flag[i]);
+ }
+ if (sps.chroma_array_type) {
+ for (int i = 0; i <= shdr.num_ref_idx_l0_active_minus1; ++i) {
+ READ_BOOL_OR_RETURN(&chroma_weight_flag[i]);
+ }
+ }
+ int sum_weight_l0_flags = 0;
+ for (int i = 0; i <= shdr.num_ref_idx_l0_active_minus1; ++i) {
+ if (luma_weight_flag[i]) {
+ sum_weight_l0_flags++;
+ READ_SE_OR_RETURN(&pred_weight_table->delta_luma_weight_l0[i]);
+ IN_RANGE_OR_RETURN(pred_weight_table->delta_luma_weight_l0[i], -128, 127);
+ READ_SE_OR_RETURN(&pred_weight_table->luma_offset_l0[i]);
+ IN_RANGE_OR_RETURN(pred_weight_table->luma_offset_l0[i],
+ -sps.wp_offset_half_range_y,
+ sps.wp_offset_half_range_y - 1);
+ }
+ if (chroma_weight_flag[i]) {
+ sum_weight_l0_flags += 2;
+ for (int j = 0; j < 2; ++j) {
+ READ_SE_OR_RETURN(&pred_weight_table->delta_chroma_weight_l0[i][j]);
+ IN_RANGE_OR_RETURN(pred_weight_table->delta_chroma_weight_l0[i][j],
+ -128, 127);
+ READ_SE_OR_RETURN(&pred_weight_table->delta_chroma_offset_l0[i][j]);
+ IN_RANGE_OR_RETURN(pred_weight_table->delta_chroma_offset_l0[i][j],
+ -4 * sps.wp_offset_half_range_c,
+ 4 * sps.wp_offset_half_range_c - 1);
+ }
+ }
+ }
+ if (shdr.IsPSlice())
+ TRUE_OR_RETURN(sum_weight_l0_flags <= 24);
+ if (shdr.IsBSlice()) {
+ memset(chroma_weight_flag, 0, sizeof(chroma_weight_flag));
+ int sum_weight_l1_flags = 0;
+ for (int i = 0; i <= shdr.num_ref_idx_l1_active_minus1; ++i) {
+ READ_BOOL_OR_RETURN(&luma_weight_flag[i]);
+ }
+ if (sps.chroma_array_type) {
+ for (int i = 0; i <= shdr.num_ref_idx_l1_active_minus1; ++i) {
+ READ_BOOL_OR_RETURN(&chroma_weight_flag[i]);
+ }
+ }
+ for (int i = 0; i <= shdr.num_ref_idx_l1_active_minus1; ++i) {
+ if (luma_weight_flag[i]) {
+ sum_weight_l1_flags++;
+ READ_SE_OR_RETURN(&pred_weight_table->delta_luma_weight_l1[i]);
+ IN_RANGE_OR_RETURN(pred_weight_table->delta_luma_weight_l1[i], -128,
+ 127);
+ READ_SE_OR_RETURN(&pred_weight_table->luma_offset_l1[i]);
+ IN_RANGE_OR_RETURN(pred_weight_table->luma_offset_l1[i],
+ -sps.wp_offset_half_range_y,
+ sps.wp_offset_half_range_y - 1);
+ }
+ if (chroma_weight_flag[i]) {
+ sum_weight_l1_flags += 2;
+ for (int j = 0; j < 2; ++j) {
+ READ_SE_OR_RETURN(&pred_weight_table->delta_chroma_weight_l1[i][j]);
+ IN_RANGE_OR_RETURN(pred_weight_table->delta_chroma_weight_l1[i][j],
+ -128, 127);
+ READ_SE_OR_RETURN(&pred_weight_table->delta_chroma_offset_l1[i][j]);
+ IN_RANGE_OR_RETURN(pred_weight_table->delta_chroma_offset_l1[i][j],
+ -4 * sps.wp_offset_half_range_c,
+ 4 * sps.wp_offset_half_range_c - 1);
+ }
+ }
+ }
+ TRUE_OR_RETURN(sum_weight_l0_flags + sum_weight_l1_flags <= 24);
+ }
+
return kOk;
}
diff --git a/chromium/media/video/h265_parser.h b/chromium/media/video/h265_parser.h
index cca0b7940eb..e922e82f432 100644
--- a/chromium/media/video/h265_parser.h
+++ b/chromium/media/video/h265_parser.h
@@ -10,12 +10,13 @@
#include <stdint.h>
#include <sys/types.h>
-#include <map>
#include <vector>
+#include "base/containers/flat_map.h"
#include "base/macros.h"
#include "media/base/media_export.h"
#include "media/base/ranges.h"
+#include "media/base/video_color_space.h"
#include "media/video/h264_bit_reader.h"
#include "media/video/h264_parser.h"
@@ -107,14 +108,329 @@ struct MEDIA_EXPORT H265NALU {
int nuh_temporal_id_plus1;
};
+enum {
+ kMaxLongTermRefPicSets = 32, // 7.4.3.2.1
+ kMaxShortTermRefPicSets = 64, // 7.4.3.2.1
+ kMaxSubLayers = 7, // 7.4.3.1 & 7.4.3.2.1 [v|s]ps_max_sub_layers_minus1 + 1
+ kMaxDpbSize = 16, // A.4.2
+ kMaxRefIdxActive = 15, // 7.4.7.1 num_ref_idx_l{0,1}_active_minus1 + 1
+};
+
+struct MEDIA_EXPORT H265ProfileTierLevel {
+ H265ProfileTierLevel();
+
+ // From Annex A.3.
+ enum H264ProfileIdc {
+ kProfileIdcMain = 1,
+ kProfileIdcMain10 = 2,
+ kProfileIdcMainStill = 3,
+ kProfileIdcRangeExtensions = 4,
+ kProfileIdcHighThroughput = 5,
+ kProfileIdcScreenContentCoding = 9,
+ kProfileIdcHighThroughputScreenContentCoding = 11,
+ };
+
+ // Syntax elements.
+ int general_profile_idc;
+ int general_level_idc; // 30x the actual level.
+
+ // From Table A.8 - General tier and level limits.
+ int GetMaxLumaPs() const;
+ // From A.4.2 - Profile-specific level limits for the video profiles.
+ size_t GetDpbMaxPicBuf() const;
+};
+
+struct MEDIA_EXPORT H265ScalingListData {
+ H265ScalingListData();
+
+ enum {
+ kDefaultScalingListSize0Values = 16, // Table 7-5, all values are 16
+ kScalingListSizeId0Count = 16, // 7.4.5
+ kScalingListSizeId1To3Count = 64, // 7.4.5
+ kNumScalingListMatrices = 6,
+ };
+
+ // TODO(jkardatzke): Optimize storage of the 32x32 since only indices 0 and 3
+ // are actually used. Also change it in the accelerator delegate if that is
+ // done.
+ // Syntax elements.
+ int scaling_list_dc_coef_16x16[kNumScalingListMatrices];
+ int scaling_list_dc_coef_32x32[kNumScalingListMatrices];
+ int scaling_list_4x4[kNumScalingListMatrices][kScalingListSizeId0Count];
+ int scaling_list_8x8[kNumScalingListMatrices][kScalingListSizeId1To3Count];
+ int scaling_list_16x16[kNumScalingListMatrices][kScalingListSizeId1To3Count];
+ int scaling_list_32x32[kNumScalingListMatrices][kScalingListSizeId1To3Count];
+};
+
+struct MEDIA_EXPORT H265StRefPicSet {
+ H265StRefPicSet();
+
+ // Syntax elements.
+ int num_negative_pics;
+ int num_positive_pics;
+ int delta_poc_s0[kMaxShortTermRefPicSets];
+ int used_by_curr_pic_s0[kMaxShortTermRefPicSets];
+ int delta_poc_s1[kMaxShortTermRefPicSets];
+ int used_by_curr_pic_s1[kMaxShortTermRefPicSets];
+
+ // Calculated fields.
+ int num_delta_pocs;
+};
+
+struct MEDIA_EXPORT H265VUIParameters {
+ H265VUIParameters();
+
+ // Syntax elements.
+ int sar_width;
+ int sar_height;
+ bool video_full_range_flag;
+ bool colour_description_present_flag;
+ int colour_primaries;
+ int transfer_characteristics;
+ int matrix_coeffs;
+ int def_disp_win_left_offset;
+ int def_disp_win_right_offset;
+ int def_disp_win_top_offset;
+ int def_disp_win_bottom_offset;
+};
+
+struct MEDIA_EXPORT H265SPS {
+ H265SPS();
+
+ // Syntax elements.
+ int sps_max_sub_layers_minus1;
+ H265ProfileTierLevel profile_tier_level;
+ int sps_seq_parameter_set_id;
+ int chroma_format_idc;
+ bool separate_colour_plane_flag;
+ int pic_width_in_luma_samples;
+ int pic_height_in_luma_samples;
+ int conf_win_left_offset;
+ int conf_win_right_offset;
+ int conf_win_top_offset;
+ int conf_win_bottom_offset;
+ int bit_depth_luma_minus8;
+ int bit_depth_chroma_minus8;
+ int log2_max_pic_order_cnt_lsb_minus4;
+ int sps_max_dec_pic_buffering_minus1[kMaxSubLayers];
+ int sps_max_num_reorder_pics[kMaxSubLayers];
+ int sps_max_latency_increase_plus1[kMaxSubLayers];
+ int log2_min_luma_coding_block_size_minus3;
+ int log2_diff_max_min_luma_coding_block_size;
+ int log2_min_luma_transform_block_size_minus2;
+ int log2_diff_max_min_luma_transform_block_size;
+ int max_transform_hierarchy_depth_inter;
+ int max_transform_hierarchy_depth_intra;
+ bool scaling_list_enabled_flag;
+ bool sps_scaling_list_data_present_flag;
+ H265ScalingListData scaling_list_data;
+ bool amp_enabled_flag;
+ bool sample_adaptive_offset_enabled_flag;
+ bool pcm_enabled_flag;
+ int pcm_sample_bit_depth_luma_minus1;
+ int pcm_sample_bit_depth_chroma_minus1;
+ int log2_min_pcm_luma_coding_block_size_minus3;
+ int log2_diff_max_min_pcm_luma_coding_block_size;
+ bool pcm_loop_filter_disabled_flag;
+ int num_short_term_ref_pic_sets;
+ H265StRefPicSet st_ref_pic_set[kMaxShortTermRefPicSets];
+ bool long_term_ref_pics_present_flag;
+ int num_long_term_ref_pics_sps;
+ int lt_ref_pic_poc_lsb_sps[kMaxLongTermRefPicSets];
+ bool used_by_curr_pic_lt_sps_flag[kMaxLongTermRefPicSets];
+ bool sps_temporal_mvp_enabled_flag;
+ bool strong_intra_smoothing_enabled_flag;
+ H265VUIParameters vui_parameters;
+
+ // Calculated fields.
+ int chroma_array_type;
+ int sub_width_c;
+ int sub_height_c;
+ size_t max_dpb_size;
+ int bit_depth_y;
+ int bit_depth_c;
+ int max_pic_order_cnt_lsb;
+ int sps_max_latency_pictures[kMaxSubLayers];
+ int ctb_log2_size_y;
+ int pic_width_in_ctbs_y;
+ int pic_height_in_ctbs_y;
+ int pic_size_in_ctbs_y;
+ int max_tb_log2_size_y;
+ int wp_offset_half_range_y;
+ int wp_offset_half_range_c;
+
+ // Helpers to compute frequently-used values. They do not verify that the
+ // results are in-spec for the given profile or level.
+ gfx::Size GetCodedSize() const;
+ gfx::Rect GetVisibleRect() const;
+ VideoColorSpace GetColorSpace() const;
+};
+
+struct MEDIA_EXPORT H265PPS {
+ H265PPS();
+
+ enum {
+ kMaxNumTileColumnWidth = 19, // From VAAPI.
+ kMaxNumTileRowHeight = 21, // From VAAPI.
+ };
+
+ int temporal_id; // calculated from NALU
+
+ // Syntax elements.
+ int pps_pic_parameter_set_id;
+ int pps_seq_parameter_set_id;
+ bool dependent_slice_segments_enabled_flag;
+ bool output_flag_present_flag;
+ int num_extra_slice_header_bits;
+ bool sign_data_hiding_enabled_flag;
+ bool cabac_init_present_flag;
+ int num_ref_idx_l0_default_active_minus1;
+ int num_ref_idx_l1_default_active_minus1;
+ int init_qp_minus26;
+ bool constrained_intra_pred_flag;
+ bool transform_skip_enabled_flag;
+ bool cu_qp_delta_enabled_flag;
+ int diff_cu_qp_delta_depth;
+ int pps_cb_qp_offset;
+ int pps_cr_qp_offset;
+ bool pps_slice_chroma_qp_offsets_present_flag;
+ bool weighted_pred_flag;
+ bool weighted_bipred_flag;
+ bool transquant_bypass_enabled_flag;
+ bool tiles_enabled_flag;
+ bool entropy_coding_sync_enabled_flag;
+ int num_tile_columns_minus1;
+ int num_tile_rows_minus1;
+ bool uniform_spacing_flag;
+ int column_width_minus1[kMaxNumTileColumnWidth];
+ int row_height_minus1[kMaxNumTileRowHeight];
+ bool loop_filter_across_tiles_enabled_flag;
+ bool pps_loop_filter_across_slices_enabled_flag;
+ bool deblocking_filter_override_enabled_flag;
+ bool pps_deblocking_filter_disabled_flag;
+ int pps_beta_offset_div2;
+ int pps_tc_offset_div2;
+ bool pps_scaling_list_data_present_flag;
+ H265ScalingListData scaling_list_data;
+ bool lists_modification_present_flag;
+ int log2_parallel_merge_level_minus2;
+ bool slice_segment_header_extension_present_flag;
+
+ // Calculated fields.
+ int qp_bd_offset_y;
+};
+
+struct MEDIA_EXPORT H265RefPicListsModifications {
+ H265RefPicListsModifications();
+
+ // Syntax elements.
+ bool ref_pic_list_modification_flag_l0;
+ int list_entry_l0[kMaxRefIdxActive];
+ bool ref_pic_list_modification_flag_l1;
+ int list_entry_l1[kMaxRefIdxActive];
+};
+
+struct MEDIA_EXPORT H265PredWeightTable {
+ H265PredWeightTable();
+
+ // Syntax elements.
+ int luma_log2_weight_denom;
+ int delta_chroma_log2_weight_denom;
+ int chroma_log2_weight_denom;
+ int delta_luma_weight_l0[kMaxRefIdxActive];
+ int luma_offset_l0[kMaxRefIdxActive];
+ int delta_chroma_weight_l0[kMaxRefIdxActive][2];
+ int delta_chroma_offset_l0[kMaxRefIdxActive][2];
+ int delta_luma_weight_l1[kMaxRefIdxActive];
+ int luma_offset_l1[kMaxRefIdxActive];
+ int delta_chroma_weight_l1[kMaxRefIdxActive][2];
+ int delta_chroma_offset_l1[kMaxRefIdxActive][2];
+};
+
+struct MEDIA_EXPORT H265SliceHeader {
+ H265SliceHeader();
+
+ enum {
+ kSliceTypeB = 0, // Table 7-7
+ kSliceTypeP = 1, // Table 7-7
+ kSliceTypeI = 2, // Table 7-7
+ };
+
+ int nal_unit_type; // from NAL header
+ const uint8_t* nalu_data; // from NAL header
+ size_t nalu_size; // from NAL header
+ size_t header_size; // calculated, not including emulation prevention bytes
+ size_t header_emulation_prevention_bytes;
+
+ // Syntax elements.
+ bool first_slice_segment_in_pic_flag;
+ bool no_output_of_prior_pics_flag;
+ int slice_pic_parameter_set_id;
+ bool dependent_slice_segment_flag;
+ int slice_segment_address;
+ int slice_type;
+ bool pic_output_flag;
+ int colour_plane_id;
+ int slice_pic_order_cnt_lsb;
+ bool short_term_ref_pic_set_sps_flag;
+ H265StRefPicSet st_ref_pic_set;
+ int short_term_ref_pic_set_idx;
+ int num_long_term_sps;
+ int num_long_term_pics;
+ int poc_lsb_lt[kMaxLongTermRefPicSets];
+ bool used_by_curr_pic_lt[kMaxLongTermRefPicSets];
+ bool delta_poc_msb_present_flag[kMaxLongTermRefPicSets];
+ int delta_poc_msb_cycle_lt[kMaxLongTermRefPicSets];
+ bool slice_temporal_mvp_enabled_flag;
+ bool slice_sao_luma_flag;
+ bool slice_sao_chroma_flag;
+ bool num_ref_idx_active_override_flag;
+ int num_ref_idx_l0_active_minus1;
+ int num_ref_idx_l1_active_minus1;
+ H265RefPicListsModifications ref_pic_lists_modification;
+ bool mvd_l1_zero_flag;
+ bool cabac_init_flag;
+ bool collocated_from_l0_flag;
+ int collocated_ref_idx;
+ H265PredWeightTable pred_weight_table;
+ int five_minus_max_num_merge_cand;
+ int slice_qp_delta;
+ int slice_cb_qp_offset;
+ int slice_cr_qp_offset;
+ bool slice_deblocking_filter_disabled_flag;
+ int slice_beta_offset_div2;
+ int slice_tc_offset_div2;
+ bool slice_loop_filter_across_slices_enabled_flag;
+
+ // Calculated.
+ int curr_rps_idx;
+ int num_pic_total_curr;
+ bool irap_pic;
+ // Number of bits st_ref_pic_set takes after removing emulation prevention
+ // bytes.
+ int st_rps_bits;
+
+ bool IsISlice() const;
+ bool IsPSlice() const;
+ bool IsBSlice() const;
+
+ const H265StRefPicSet& GetStRefPicSet(const H265SPS* sps) const {
+ if (curr_rps_idx == sps->num_short_term_ref_pic_sets)
+ return st_ref_pic_set;
+
+ return sps->st_ref_pic_set[curr_rps_idx];
+ }
+};
+
// Class to parse an Annex-B H.265 stream.
class MEDIA_EXPORT H265Parser {
public:
enum Result {
kOk,
- kInvalidStream, // error in stream
- kUnsupportedStream, // stream not supported by the parser
- kEOStream, // end of stream
+ kInvalidStream, // error in stream
+ kUnsupportedStream, // stream not supported by the parser
+ kMissingParameterSet, // missing PPS/SPS from what was parsed
+ kEOStream, // end of stream
};
H265Parser();
@@ -138,6 +454,41 @@ class MEDIA_EXPORT H265Parser {
// again, instead of any NALU-type specific parse functions below.
Result AdvanceToNextNALU(H265NALU* nalu);
+ // NALU-specific parsing functions.
+ // These should be called after AdvanceToNextNALU().
+
+ // SPSes and PPSes are owned by the parser class and the memory for their
+ // structures is managed here, not by the caller, as they are reused across
+ // NALUs.
+ //
+ // Parse an SPS/PPS NALU and save their data in the parser, returning id
+ // of the parsed structure in |*pps_id|/|*sps_id|. To get a pointer to a given
+ // SPS/PPS structure, use GetSPS()/GetPPS(), passing the returned
+ // |*sps_id|/|*pps_id| as parameter.
+ Result ParseSPS(int* sps_id);
+ Result ParsePPS(const H265NALU& nalu, int* pps_id);
+
+ // Return a pointer to SPS/PPS with given |sps_id|/|pps_id| or null if not
+ // present.
+ const H265SPS* GetSPS(int sps_id) const;
+ const H265PPS* GetPPS(int pps_id) const;
+
+ // Slice headers and are not used across NALUs by the parser and can be
+ // discarded after current NALU, so the parser does not store them, nor does
+ // it manage their memory. The caller has to provide and manage it instead.
+
+ // Parse a slice header, returning it in |*shdr|. |*nalu| must be set to
+ // the NALU returned from AdvanceToNextNALU() and corresponding to |*shdr|.
+ Result ParseSliceHeader(const H265NALU& nalu, H265SliceHeader* shdr);
+
+ static VideoCodecProfile ProfileIDCToVideoCodecProfile(int profile_idc);
+
+ // The return value of this method changes for every successful call to
+ // AdvanceToNextNALU().
+ // This returns the subsample information for the last NALU that was output
+ // from AdvanceToNextNALU().
+ std::vector<SubsampleEntry> GetCurrentSubsamples();
+
private:
// Move the stream pointer to the beginning of the next NALU,
// i.e. pointing at the next start code.
@@ -148,6 +499,32 @@ class MEDIA_EXPORT H265Parser {
// - the size in bytes of the start code is returned in |*start_code_size|.
bool LocateNALU(off_t* nalu_size, off_t* start_code_size);
+ // Exp-Golomb code parsing as specified in chapter 9.2 of the spec.
+ // Read one unsigned exp-Golomb code from the stream and return in |*val|.
+ Result ReadUE(int* val);
+
+ // Read one signed exp-Golomb code from the stream and return in |*val|.
+ Result ReadSE(int* val);
+
+ Result ParseProfileTierLevel(bool profile_present,
+ int max_num_sub_layers_minus1,
+ H265ProfileTierLevel* profile_tier_level);
+ Result ParseScalingListData(H265ScalingListData* scaling_list_data);
+ Result ParseStRefPicSet(int st_rps_idx,
+ const H265SPS& sps,
+ H265StRefPicSet* st_ref_pic_set);
+ Result ParseVuiParameters(const H265SPS& sps, H265VUIParameters* vui);
+ Result ParseAndIgnoreHrdParameters(bool common_inf_present_flag,
+ int max_num_sub_layers_minus1);
+ Result ParseAndIgnoreSubLayerHrdParameters(
+ int cpb_cnt,
+ bool sub_pic_hrd_params_present_flag);
+ Result ParseRefPicListsModifications(const H265SliceHeader& shdr,
+ H265RefPicListsModifications* rpl_mod);
+ Result ParsePredWeightTable(const H265SPS& sps,
+ const H265SliceHeader& shdr,
+ H265PredWeightTable* pred_weight_table);
+
// Pointer to the current NALU in the stream.
const uint8_t* stream_;
@@ -156,10 +533,17 @@ class MEDIA_EXPORT H265Parser {
H264BitReader br_;
- // Ranges of encrypted bytes in the buffer passed to
- // SetEncryptedStream().
+ // PPSes and SPSes stored for future reference.
+ base::flat_map<int, std::unique_ptr<H265SPS>> active_sps_;
+ base::flat_map<int, std::unique_ptr<H265PPS>> active_pps_;
+
+ // Ranges of encrypted bytes in the buffer passed to SetEncryptedStream().
Ranges<const uint8_t*> encrypted_ranges_;
+ // This contains the range of the previous NALU found in
+ // AdvanceToNextNalu(). Holds exactly one range.
+ Ranges<const uint8_t*> previous_nalu_range_;
+
DISALLOW_COPY_AND_ASSIGN(H265Parser);
};
diff --git a/chromium/media/video/h265_parser_fuzzertest.cc b/chromium/media/video/h265_parser_fuzzertest.cc
new file mode 100644
index 00000000000..8d0186f6acd
--- /dev/null
+++ b/chromium/media/video/h265_parser_fuzzertest.cc
@@ -0,0 +1,63 @@
+// Copyright 2020 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stddef.h>
+
+#include "base/numerics/safe_conversions.h"
+#include "media/video/h265_parser.h"
+
+// Entry point for LibFuzzer.
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ if (!size)
+ return 0;
+
+ media::H265Parser parser;
+ parser.SetStream(data, base::checked_cast<off_t>(size));
+
+ // Parse until the end of stream/unsupported stream/error in stream is
+ // found.
+ while (true) {
+ media::H265NALU nalu;
+ media::H265Parser::Result res = parser.AdvanceToNextNALU(&nalu);
+ if (res != media::H265Parser::kOk)
+ break;
+
+ media::H265SliceHeader shdr;
+ switch (nalu.nal_unit_type) {
+ case media::H265NALU::SPS_NUT:
+ int sps_id;
+ res = parser.ParseSPS(&sps_id);
+ break;
+ case media::H265NALU::PPS_NUT:
+ int pps_id;
+ res = parser.ParsePPS(nalu, &pps_id);
+ break;
+ case media::H265NALU::TRAIL_N:
+ case media::H265NALU::TRAIL_R:
+ case media::H265NALU::TSA_N:
+ case media::H265NALU::TSA_R:
+ case media::H265NALU::STSA_N:
+ case media::H265NALU::STSA_R:
+ case media::H265NALU::RADL_N:
+ case media::H265NALU::RADL_R:
+ case media::H265NALU::RASL_N:
+ case media::H265NALU::RASL_R:
+ case media::H265NALU::BLA_W_LP:
+ case media::H265NALU::BLA_W_RADL:
+ case media::H265NALU::BLA_N_LP:
+ case media::H265NALU::IDR_W_RADL:
+ case media::H265NALU::IDR_N_LP:
+ case media::H265NALU::CRA_NUT: // fallthrough
+ res = parser.ParseSliceHeader(nalu, &shdr);
+ break;
+ default:
+ // Skip any other NALU.
+ break;
+ }
+ if (res != media::H265Parser::kOk)
+ break;
+ }
+
+ return 0;
+}
diff --git a/chromium/media/video/h265_parser_unittest.cc b/chromium/media/video/h265_parser_unittest.cc
index bcf25a066d9..7acece54eb0 100644
--- a/chromium/media/video/h265_parser_unittest.cc
+++ b/chromium/media/video/h265_parser_unittest.cc
@@ -2,42 +2,388 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "media/video/h265_parser.h"
+#include <memory>
+#include <string>
+
+#include "base/files/file_path.h"
#include "base/files/memory_mapped_file.h"
#include "base/logging.h"
+#include "media/base/subsample_entry.h"
#include "media/base/test_data_util.h"
+#include "media/video/h265_parser.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace media {
-TEST(H265ParserTest, RawHevcStreamFileParsing) {
- base::FilePath file_path = GetTestDataFilePath("bear.hevc");
+namespace {
+struct HevcTestData {
+ std::string file_name;
// Number of NALUs in the test stream to be parsed.
- const int num_nalus = 35;
+ int num_nalus;
+};
- base::MemoryMappedFile stream;
- ASSERT_TRUE(stream.Initialize(file_path))
- << "Couldn't open stream file: " << file_path.MaybeAsASCII();
+} // namespace
- H265Parser parser;
- parser.SetStream(stream.data(), stream.length());
-
- // Parse until the end of stream/unsupported stream/error in stream is found.
- int num_parsed_nalus = 0;
- while (true) {
- H265NALU nalu;
- H265Parser::Result res = parser.AdvanceToNextNALU(&nalu);
- if (res == H265Parser::kEOStream) {
- DVLOG(1) << "Number of successfully parsed NALUs before EOS: "
- << num_parsed_nalus;
- ASSERT_EQ(num_nalus, num_parsed_nalus);
- return;
+class H265ParserTest : public ::testing::Test {
+ protected:
+ void LoadParserFile(std::string file_name) {
+ parser_.Reset();
+ base::FilePath file_path = GetTestDataFilePath(file_name);
+
+ stream_ = std::make_unique<base::MemoryMappedFile>();
+ ASSERT_TRUE(stream_->Initialize(file_path))
+ << "Couldn't open stream file: " << file_path.MaybeAsASCII();
+
+ parser_.SetStream(stream_->data(), stream_->length());
+ }
+
+ bool ParseNalusUntilNut(H265NALU* target_nalu, H265NALU::Type nalu_type) {
+ while (true) {
+ H265Parser::Result res = parser_.AdvanceToNextNALU(target_nalu);
+ if (res == H265Parser::kEOStream) {
+ return false;
+ }
+ EXPECT_EQ(res, H265Parser::kOk);
+ if (target_nalu->nal_unit_type == nalu_type)
+ return true;
+ }
+ }
+
+ H265Parser parser_;
+ std::unique_ptr<base::MemoryMappedFile> stream_;
+};
+
+TEST_F(H265ParserTest, RawHevcStreamFileParsing) {
+ HevcTestData test_data[] = {
+ {"bear.hevc", 35},
+ {"bbb.hevc", 64},
+ };
+
+ for (const auto& data : test_data) {
+ LoadParserFile(data.file_name);
+ // Parse until the end of stream/unsupported stream/error in stream is
+ // found.
+ int num_parsed_nalus = 0;
+ while (true) {
+ H265NALU nalu;
+ H265Parser::Result res = parser_.AdvanceToNextNALU(&nalu);
+ if (res == H265Parser::kEOStream) {
+ DVLOG(1) << "Number of successfully parsed NALUs before EOS: "
+ << num_parsed_nalus;
+ EXPECT_EQ(data.num_nalus, num_parsed_nalus);
+ break;
+ }
+ EXPECT_EQ(res, H265Parser::kOk);
+
+ ++num_parsed_nalus;
+ DVLOG(4) << "Found NALU " << nalu.nal_unit_type;
+
+ H265SliceHeader shdr;
+ switch (nalu.nal_unit_type) {
+ case H265NALU::SPS_NUT:
+ int sps_id;
+ res = parser_.ParseSPS(&sps_id);
+ EXPECT_TRUE(!!parser_.GetSPS(sps_id));
+ break;
+ case H265NALU::PPS_NUT:
+ int pps_id;
+ res = parser_.ParsePPS(nalu, &pps_id);
+ EXPECT_TRUE(!!parser_.GetPPS(pps_id));
+ break;
+ case H265NALU::TRAIL_N:
+ case H265NALU::TRAIL_R:
+ case H265NALU::TSA_N:
+ case H265NALU::TSA_R:
+ case H265NALU::STSA_N:
+ case H265NALU::STSA_R:
+ case H265NALU::RADL_N:
+ case H265NALU::RADL_R:
+ case H265NALU::RASL_N:
+ case H265NALU::RASL_R:
+ case H265NALU::BLA_W_LP:
+ case H265NALU::BLA_W_RADL:
+ case H265NALU::BLA_N_LP:
+ case H265NALU::IDR_W_RADL:
+ case H265NALU::IDR_N_LP:
+ case H265NALU::CRA_NUT: // fallthrough
+ res = parser_.ParseSliceHeader(nalu, &shdr);
+ break;
+ default:
+ break;
+ }
+ EXPECT_EQ(res, H265Parser::kOk);
}
- ASSERT_EQ(res, H265Parser::kOk);
+ }
+}
- ++num_parsed_nalus;
- DVLOG(4) << "Found NALU " << nalu.nal_unit_type;
+TEST_F(H265ParserTest, SpsParsing) {
+ LoadParserFile("bear.hevc");
+ H265NALU target_nalu;
+ EXPECT_TRUE(ParseNalusUntilNut(&target_nalu, H265NALU::SPS_NUT));
+ int sps_id;
+ EXPECT_EQ(H265Parser::kOk, parser_.ParseSPS(&sps_id));
+ const H265SPS* sps = parser_.GetSPS(sps_id);
+ EXPECT_TRUE(!!sps);
+ EXPECT_EQ(sps->sps_max_sub_layers_minus1, 0);
+ EXPECT_EQ(sps->profile_tier_level.general_profile_idc, 1);
+ EXPECT_EQ(sps->profile_tier_level.general_level_idc, 60);
+ EXPECT_EQ(sps->sps_seq_parameter_set_id, 0);
+ EXPECT_EQ(sps->chroma_format_idc, 1);
+ EXPECT_FALSE(sps->separate_colour_plane_flag);
+ EXPECT_EQ(sps->pic_width_in_luma_samples, 320);
+ EXPECT_EQ(sps->pic_height_in_luma_samples, 184);
+ EXPECT_EQ(sps->conf_win_left_offset, 0);
+ EXPECT_EQ(sps->conf_win_right_offset, 0);
+ EXPECT_EQ(sps->conf_win_top_offset, 0);
+ EXPECT_EQ(sps->conf_win_bottom_offset, 2);
+ EXPECT_EQ(sps->bit_depth_luma_minus8, 0);
+ EXPECT_EQ(sps->bit_depth_chroma_minus8, 0);
+ EXPECT_EQ(sps->log2_max_pic_order_cnt_lsb_minus4, 4);
+ EXPECT_EQ(sps->sps_max_dec_pic_buffering_minus1[0], 4);
+ EXPECT_EQ(sps->sps_max_num_reorder_pics[0], 2);
+ EXPECT_EQ(sps->sps_max_latency_increase_plus1[0], 0);
+ for (int i = 1; i < kMaxSubLayers; ++i) {
+ EXPECT_EQ(sps->sps_max_dec_pic_buffering_minus1[i], 0);
+ EXPECT_EQ(sps->sps_max_num_reorder_pics[i], 0);
+ EXPECT_EQ(sps->sps_max_latency_increase_plus1[i], 0);
}
+ EXPECT_EQ(sps->log2_min_luma_coding_block_size_minus3, 0);
+ EXPECT_EQ(sps->log2_diff_max_min_luma_coding_block_size, 3);
+ EXPECT_EQ(sps->log2_min_luma_transform_block_size_minus2, 0);
+ EXPECT_EQ(sps->log2_diff_max_min_luma_transform_block_size, 3);
+ EXPECT_EQ(sps->max_transform_hierarchy_depth_inter, 0);
+ EXPECT_EQ(sps->max_transform_hierarchy_depth_intra, 0);
+ EXPECT_FALSE(sps->scaling_list_enabled_flag);
+ EXPECT_FALSE(sps->sps_scaling_list_data_present_flag);
+ EXPECT_FALSE(sps->amp_enabled_flag);
+ EXPECT_TRUE(sps->sample_adaptive_offset_enabled_flag);
+ EXPECT_FALSE(sps->pcm_enabled_flag);
+ EXPECT_EQ(sps->pcm_sample_bit_depth_luma_minus1, 0);
+ EXPECT_EQ(sps->pcm_sample_bit_depth_chroma_minus1, 0);
+ EXPECT_EQ(sps->log2_min_pcm_luma_coding_block_size_minus3, 0);
+ EXPECT_EQ(sps->log2_diff_max_min_pcm_luma_coding_block_size, 0);
+ EXPECT_FALSE(sps->pcm_loop_filter_disabled_flag);
+ EXPECT_EQ(sps->num_short_term_ref_pic_sets, 0);
+ EXPECT_FALSE(sps->long_term_ref_pics_present_flag);
+ EXPECT_EQ(sps->num_long_term_ref_pics_sps, 0);
+ EXPECT_TRUE(sps->sps_temporal_mvp_enabled_flag);
+ EXPECT_TRUE(sps->strong_intra_smoothing_enabled_flag);
+ EXPECT_EQ(sps->vui_parameters.sar_width, 0);
+ EXPECT_EQ(sps->vui_parameters.sar_height, 0);
+ EXPECT_EQ(sps->vui_parameters.video_full_range_flag, 0);
+ EXPECT_EQ(sps->vui_parameters.colour_description_present_flag, 0);
+ EXPECT_EQ(sps->vui_parameters.colour_primaries, 0);
+ EXPECT_EQ(sps->vui_parameters.transfer_characteristics, 0);
+ EXPECT_EQ(sps->vui_parameters.matrix_coeffs, 0);
+ EXPECT_EQ(sps->vui_parameters.def_disp_win_left_offset, 0);
+ EXPECT_EQ(sps->vui_parameters.def_disp_win_right_offset, 0);
+ EXPECT_EQ(sps->vui_parameters.def_disp_win_top_offset, 0);
+ EXPECT_EQ(sps->vui_parameters.def_disp_win_bottom_offset, 0);
+}
+
+TEST_F(H265ParserTest, PpsParsing) {
+ LoadParserFile("bear.hevc");
+ H265NALU target_nalu;
+ EXPECT_TRUE(ParseNalusUntilNut(&target_nalu, H265NALU::SPS_NUT));
+ int sps_id;
+ // We need to parse the SPS so the PPS can find it.
+ EXPECT_EQ(H265Parser::kOk, parser_.ParseSPS(&sps_id));
+ EXPECT_TRUE(ParseNalusUntilNut(&target_nalu, H265NALU::PPS_NUT));
+ int pps_id;
+ EXPECT_EQ(H265Parser::kOk, parser_.ParsePPS(target_nalu, &pps_id));
+ const H265PPS* pps = parser_.GetPPS(pps_id);
+ EXPECT_TRUE(!!pps);
+ EXPECT_EQ(pps->pps_pic_parameter_set_id, 0);
+ EXPECT_EQ(pps->pps_seq_parameter_set_id, 0);
+ EXPECT_FALSE(pps->dependent_slice_segments_enabled_flag);
+ EXPECT_FALSE(pps->output_flag_present_flag);
+ EXPECT_EQ(pps->num_extra_slice_header_bits, 0);
+ EXPECT_TRUE(pps->sign_data_hiding_enabled_flag);
+ EXPECT_FALSE(pps->cabac_init_present_flag);
+ EXPECT_EQ(pps->num_ref_idx_l0_default_active_minus1, 0);
+ EXPECT_EQ(pps->num_ref_idx_l1_default_active_minus1, 0);
+ EXPECT_EQ(pps->init_qp_minus26, 0);
+ EXPECT_FALSE(pps->constrained_intra_pred_flag);
+ EXPECT_FALSE(pps->transform_skip_enabled_flag);
+ EXPECT_TRUE(pps->cu_qp_delta_enabled_flag);
+ EXPECT_EQ(pps->diff_cu_qp_delta_depth, 0);
+ EXPECT_EQ(pps->pps_cb_qp_offset, 0);
+ EXPECT_EQ(pps->pps_cr_qp_offset, 0);
+ EXPECT_FALSE(pps->pps_slice_chroma_qp_offsets_present_flag);
+ EXPECT_TRUE(pps->weighted_pred_flag);
+ EXPECT_FALSE(pps->weighted_bipred_flag);
+ EXPECT_FALSE(pps->transquant_bypass_enabled_flag);
+ EXPECT_FALSE(pps->tiles_enabled_flag);
+ EXPECT_TRUE(pps->entropy_coding_sync_enabled_flag);
+ EXPECT_TRUE(pps->loop_filter_across_tiles_enabled_flag);
+ EXPECT_FALSE(pps->pps_scaling_list_data_present_flag);
+ EXPECT_FALSE(pps->lists_modification_present_flag);
+ EXPECT_EQ(pps->log2_parallel_merge_level_minus2, 0);
+ EXPECT_FALSE(pps->slice_segment_header_extension_present_flag);
+}
+
+TEST_F(H265ParserTest, SliceHeaderParsing) {
+ LoadParserFile("bear.hevc");
+ H265NALU target_nalu;
+ EXPECT_TRUE(ParseNalusUntilNut(&target_nalu, H265NALU::SPS_NUT));
+ int sps_id;
+ // We need to parse the SPS/PPS so the slice header can find them.
+ EXPECT_EQ(H265Parser::kOk, parser_.ParseSPS(&sps_id));
+ EXPECT_TRUE(ParseNalusUntilNut(&target_nalu, H265NALU::PPS_NUT));
+ int pps_id;
+ EXPECT_EQ(H265Parser::kOk, parser_.ParsePPS(target_nalu, &pps_id));
+
+ // Do an IDR slice header first.
+ EXPECT_TRUE(ParseNalusUntilNut(&target_nalu, H265NALU::IDR_W_RADL));
+ H265SliceHeader shdr;
+ EXPECT_EQ(H265Parser::kOk, parser_.ParseSliceHeader(target_nalu, &shdr));
+ EXPECT_TRUE(shdr.first_slice_segment_in_pic_flag);
+ EXPECT_FALSE(shdr.no_output_of_prior_pics_flag);
+ EXPECT_EQ(shdr.slice_pic_parameter_set_id, 0);
+ EXPECT_FALSE(shdr.dependent_slice_segment_flag);
+ EXPECT_EQ(shdr.slice_type, H265SliceHeader::kSliceTypeI);
+ EXPECT_TRUE(shdr.slice_sao_luma_flag);
+ EXPECT_TRUE(shdr.slice_sao_chroma_flag);
+ EXPECT_EQ(shdr.slice_qp_delta, 8);
+ EXPECT_TRUE(shdr.slice_loop_filter_across_slices_enabled_flag);
+
+ // Then do a non-IDR slice header.
+ EXPECT_TRUE(ParseNalusUntilNut(&target_nalu, H265NALU::TRAIL_R));
+ EXPECT_EQ(H265Parser::kOk, parser_.ParseSliceHeader(target_nalu, &shdr));
+ EXPECT_TRUE(shdr.first_slice_segment_in_pic_flag);
+ EXPECT_EQ(shdr.slice_pic_parameter_set_id, 0);
+ EXPECT_FALSE(shdr.dependent_slice_segment_flag);
+ EXPECT_EQ(shdr.slice_type, H265SliceHeader::kSliceTypeP);
+ EXPECT_EQ(shdr.slice_pic_order_cnt_lsb, 4);
+ EXPECT_FALSE(shdr.short_term_ref_pic_set_sps_flag);
+ EXPECT_EQ(shdr.st_ref_pic_set.num_negative_pics, 1);
+ EXPECT_EQ(shdr.st_ref_pic_set.num_positive_pics, 0);
+ EXPECT_EQ(shdr.st_ref_pic_set.delta_poc_s0[0], -4);
+ EXPECT_EQ(shdr.st_ref_pic_set.used_by_curr_pic_s0[0], 1);
+ EXPECT_TRUE(shdr.slice_temporal_mvp_enabled_flag);
+ EXPECT_TRUE(shdr.slice_sao_luma_flag);
+ EXPECT_TRUE(shdr.slice_sao_chroma_flag);
+ EXPECT_FALSE(shdr.num_ref_idx_active_override_flag);
+ EXPECT_EQ(shdr.pred_weight_table.luma_log2_weight_denom, 0);
+ EXPECT_EQ(shdr.pred_weight_table.delta_chroma_log2_weight_denom, 7);
+ EXPECT_EQ(shdr.pred_weight_table.delta_luma_weight_l0[0], 0);
+ EXPECT_EQ(shdr.pred_weight_table.luma_offset_l0[0], -2);
+ EXPECT_EQ(shdr.pred_weight_table.delta_chroma_weight_l0[0][0], -9);
+ EXPECT_EQ(shdr.pred_weight_table.delta_chroma_weight_l0[0][1], -9);
+ EXPECT_EQ(shdr.pred_weight_table.delta_chroma_offset_l0[0][0], 0);
+ EXPECT_EQ(shdr.pred_weight_table.delta_chroma_offset_l0[0][1], 0);
+ EXPECT_EQ(shdr.five_minus_max_num_merge_cand, 3);
+ EXPECT_EQ(shdr.slice_qp_delta, 8);
+ EXPECT_TRUE(shdr.slice_loop_filter_across_slices_enabled_flag);
+}
+
+// Verify that GetCurrentSubsamples works.
+TEST_F(H265ParserTest, GetCurrentSubsamplesNormal) {
+ constexpr uint8_t kStream[] = {
+ // First NALU.
+ // Clear bytes = 5.
+ 0x00, 0x00, 0x01, // start code.
+ 0x28, 0x00, // Nalu type = 20, IDR slice.
+ // Below is bogus data.
+ // Encrypted bytes = 15.
+ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x00, 0x01, 0x02, 0x03,
+ 0x04, 0x05, 0x06,
+ // Clear bytes = 5.
+ 0x07, 0x00, 0x01, 0x02, 0x03,
+ // Encrypted until next NALU. Encrypted bytes = 20.
+ 0x04, 0x05, 0x06, 0x07, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+ // Note that this is still in the encrypted region but looks like a start
+ // code.
+ 0x00, 0x00, 0x01, 0x03, 0x04, 0x05, 0x06, 0x07,
+ // Second NALU. Completely clear.
+ // Clear bytes = 11.
+ 0x00, 0x00, 0x01, // start code.
+ 0x42, 0x00, // nalu type = 33, SPS.
+ // Bogus data.
+ 0xff, 0xfe, 0xfd, 0xee, 0x12, 0x33,
+ };
+ std::vector<SubsampleEntry> subsamples;
+ subsamples.emplace_back(5u, 15u);
+ subsamples.emplace_back(5u, 20u);
+ subsamples.emplace_back(11u, 0u);
+ H265Parser parser;
+ parser.SetEncryptedStream(kStream, base::size(kStream), subsamples);
+
+ H265NALU nalu;
+ EXPECT_EQ(H265Parser::kOk, parser.AdvanceToNextNALU(&nalu));
+ EXPECT_EQ(H265NALU::IDR_N_LP, nalu.nal_unit_type);
+ auto nalu_subsamples = parser.GetCurrentSubsamples();
+ EXPECT_EQ(2u, nalu_subsamples.size());
+
+ // Note that nalu->data starts from the NALU header, i.e. does not include
+ // the start code.
+ EXPECT_EQ(2u, nalu_subsamples[0].clear_bytes);
+ EXPECT_EQ(15u, nalu_subsamples[0].cypher_bytes);
+ EXPECT_EQ(5u, nalu_subsamples[1].clear_bytes);
+ EXPECT_EQ(20u, nalu_subsamples[1].cypher_bytes);
+
+ // Make sure that it reached the next NALU.
+ EXPECT_EQ(H265Parser::kOk, parser.AdvanceToNextNALU(&nalu));
+ EXPECT_EQ(H265NALU::SPS_NUT, nalu.nal_unit_type);
+ nalu_subsamples = parser.GetCurrentSubsamples();
+ EXPECT_EQ(1u, nalu_subsamples.size());
+
+ EXPECT_EQ(8u, nalu_subsamples[0].clear_bytes);
+ EXPECT_EQ(0u, nalu_subsamples[0].cypher_bytes);
+}
+
+// Verify that subsamples starting at non-NALU boundary also works.
+TEST_F(H265ParserTest, GetCurrentSubsamplesSubsampleNotStartingAtNaluBoundary) {
+ constexpr uint8_t kStream[] = {
+ // First NALU.
+ // Clear bytes = 5.
+ 0x00, 0x00, 0x01, // start code.
+ 0x28, 0x00, // Nalu type = 20, IDR slice.
+ // Below is bogus data.
+ // Encrypted bytes = 24.
+ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x00, 0x01, 0x02, 0x03,
+ 0x04, 0x05, 0x06, 0x07, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+ // Clear bytes = 19. The rest is in the clear. Note that this is not at
+ // a NALU boundary and a NALU starts below.
+ 0xaa, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+ // Second NALU. Completely clear.
+ 0x00, 0x00, 0x01, // start code.
+ 0x42, 0x00, // nalu type = 33, SPS.
+ // Bogus data.
+ 0xff, 0xfe, 0xfd, 0xee, 0x12, 0x33,
+ };
+
+ std::vector<SubsampleEntry> subsamples;
+ subsamples.emplace_back(5u, 24u);
+ subsamples.emplace_back(19u, 0u);
+ H265Parser parser;
+ parser.SetEncryptedStream(kStream, base::size(kStream), subsamples);
+
+ H265NALU nalu;
+ EXPECT_EQ(H265Parser::kOk, parser.AdvanceToNextNALU(&nalu));
+ EXPECT_EQ(H265NALU::IDR_N_LP, nalu.nal_unit_type);
+ auto nalu_subsamples = parser.GetCurrentSubsamples();
+ EXPECT_EQ(2u, nalu_subsamples.size());
+
+ // Note that nalu->data starts from the NALU header, i.e. does not include
+ // the start code.
+ EXPECT_EQ(2u, nalu_subsamples[0].clear_bytes);
+ EXPECT_EQ(24u, nalu_subsamples[0].cypher_bytes);
+
+ // The nalu ends with 8 more clear bytes. The last 10 bytes should be
+ // associated with the next nalu.
+ EXPECT_EQ(8u, nalu_subsamples[1].clear_bytes);
+ EXPECT_EQ(0u, nalu_subsamples[1].cypher_bytes);
+
+ EXPECT_EQ(H265Parser::kOk, parser.AdvanceToNextNALU(&nalu));
+ EXPECT_EQ(H265NALU::SPS_NUT, nalu.nal_unit_type);
+ nalu_subsamples = parser.GetCurrentSubsamples();
+ EXPECT_EQ(1u, nalu_subsamples.size());
+
+ // Although the input had 10 more bytes, since nalu->data starts from the nalu
+ // header, there's only 7 more bytes left.
+ EXPECT_EQ(8u, nalu_subsamples[0].clear_bytes);
+ EXPECT_EQ(0u, nalu_subsamples[0].cypher_bytes);
}
} // namespace media
diff --git a/chromium/media/video/mock_gpu_video_accelerator_factories.cc b/chromium/media/video/mock_gpu_video_accelerator_factories.cc
index f6178a7f48b..6b7c4722bcd 100644
--- a/chromium/media/video/mock_gpu_video_accelerator_factories.cc
+++ b/chromium/media/video/mock_gpu_video_accelerator_factories.cc
@@ -28,6 +28,7 @@ class GpuMemoryBufferImpl : public gfx::GpuMemoryBuffer {
DCHECK(gfx::BufferFormat::R_8 == format_ ||
gfx::BufferFormat::RG_88 == format_ ||
gfx::BufferFormat::YUV_420_BIPLANAR == format_ ||
+ gfx::BufferFormat::P010 == format_ ||
gfx::BufferFormat::BGRA_1010102 == format_ ||
gfx::BufferFormat::RGBA_1010102 == format_ ||
gfx::BufferFormat::RGBA_8888 == format_ ||
diff --git a/chromium/media/video/mock_gpu_video_accelerator_factories.h b/chromium/media/video/mock_gpu_video_accelerator_factories.h
index 6d962f4b939..6fe68856aa2 100644
--- a/chromium/media/video/mock_gpu_video_accelerator_factories.h
+++ b/chromium/media/video/mock_gpu_video_accelerator_factories.h
@@ -13,7 +13,7 @@
#include "base/macros.h"
#include "base/memory/ref_counted.h"
-#include "base/single_thread_task_runner.h"
+#include "base/sequenced_task_runner.h"
#include "media/video/gpu_video_accelerator_factories.h"
#include "media/video/video_encode_accelerator.h"
#include "services/viz/public/cpp/gpu/context_provider_command_buffer.h"
@@ -47,7 +47,7 @@ class MockGpuVideoAcceleratorFactories : public GpuVideoAcceleratorFactories {
// framework does not want. Trampoline it.
MOCK_METHOD0(DoCreateVideoEncodeAccelerator, VideoEncodeAccelerator*());
- MOCK_METHOD0(GetTaskRunner, scoped_refptr<base::SingleThreadTaskRunner>());
+ MOCK_METHOD0(GetTaskRunner, scoped_refptr<base::SequencedTaskRunner>());
MOCK_METHOD0(GetMediaContextProvider, viz::RasterContextProvider*());
MOCK_METHOD1(SetRenderingColorSpace, void(const gfx::ColorSpace&));
diff --git a/chromium/media/video/openh264_video_encoder.cc b/chromium/media/video/openh264_video_encoder.cc
index 3a108af13ae..05abf97b4f4 100644
--- a/chromium/media/video/openh264_video_encoder.cc
+++ b/chromium/media/video/openh264_video_encoder.cc
@@ -27,9 +27,10 @@ Status SetUpOpenH264Params(const VideoEncoder::Options& options,
params->bEnableDenoise = false;
// Set to 1 due to https://crbug.com/583348
params->iMultipleThreadIdc = 1;
- params->fMaxFrameRate = options.framerate;
- params->iPicHeight = options.height;
- params->iPicWidth = options.width;
+ if (options.framerate.has_value())
+ params->fMaxFrameRate = options.framerate.value();
+ params->iPicHeight = options.frame_size.height();
+ params->iPicWidth = options.frame_size.width();
if (options.keyframe_interval.has_value())
params->uiIntraPeriod = options.keyframe_interval.value();
@@ -77,7 +78,7 @@ void OpenH264VideoEncoder::Initialize(VideoCodecProfile profile,
const Options& options,
OutputCB output_cb,
StatusCB done_cb) {
- done_cb = media::BindToCurrentLoop(std::move(done_cb));
+ done_cb = BindToCurrentLoop(std::move(done_cb));
if (codec_) {
std::move(done_cb).Run(StatusCode::kEncoderInitializeTwice);
return;
@@ -131,7 +132,7 @@ void OpenH264VideoEncoder::Initialize(VideoCodecProfile profile,
}
options_ = options;
- output_cb_ = media::BindToCurrentLoop(std::move(output_cb));
+ output_cb_ = BindToCurrentLoop(std::move(output_cb));
codec_ = std::move(codec);
std::move(done_cb).Run(Status());
}
@@ -140,7 +141,7 @@ void OpenH264VideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
bool key_frame,
StatusCB done_cb) {
Status status;
- done_cb = media::BindToCurrentLoop(std::move(done_cb));
+ done_cb = BindToCurrentLoop(std::move(done_cb));
if (!codec_) {
std::move(done_cb).Run(StatusCode::kEncoderInitializeNeverCompleted);
return;
@@ -151,7 +152,7 @@ void OpenH264VideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
"No frame provided for encoding."));
return;
}
- if (!frame->IsMappable() || frame->format() != media::PIXEL_FORMAT_I420) {
+ if (!frame->IsMappable() || frame->format() != PIXEL_FORMAT_I420) {
status =
Status(StatusCode::kEncoderFailedEncode, "Unexpected frame format.")
.WithData("IsMappable", frame->IsMappable())
@@ -245,20 +246,46 @@ void OpenH264VideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
}
void OpenH264VideoEncoder::ChangeOptions(const Options& options,
+ OutputCB output_cb,
StatusCB done_cb) {
- done_cb = media::BindToCurrentLoop(std::move(done_cb));
+ done_cb = BindToCurrentLoop(std::move(done_cb));
if (!codec_) {
std::move(done_cb).Run(StatusCode::kEncoderInitializeNeverCompleted);
return;
}
- // TODO(eugene): Not implemented yet.
+ Status status;
+ SEncParamExt params = {};
+ if (int err = codec_->GetDefaultParams(&params)) {
+ status = Status(StatusCode::kEncoderInitializationError,
+ "Failed to get default params.")
+ .WithData("error", err);
+ std::move(done_cb).Run(status);
+ return;
+ }
+
+ status = SetUpOpenH264Params(options, &params);
+ if (!status.is_ok()) {
+ std::move(done_cb).Run(status);
+ return;
+ }
+
+ if (int err =
+ codec_->SetOption(ENCODER_OPTION_SVC_ENCODE_PARAM_EXT, &params)) {
+ status = Status(StatusCode::kEncoderInitializationError,
+ "OpenH264 encoder failed to set new SEncParamExt.")
+ .WithData("error", err);
+ std::move(done_cb).Run(status);
+ return;
+ }
+ if (!output_cb.is_null())
+ output_cb_ = BindToCurrentLoop(std::move(output_cb));
std::move(done_cb).Run(Status());
}
void OpenH264VideoEncoder::Flush(StatusCB done_cb) {
- done_cb = media::BindToCurrentLoop(std::move(done_cb));
+ done_cb = BindToCurrentLoop(std::move(done_cb));
if (!codec_) {
std::move(done_cb).Run(StatusCode::kEncoderInitializeNeverCompleted);
return;
diff --git a/chromium/media/video/openh264_video_encoder.h b/chromium/media/video/openh264_video_encoder.h
index 34484a6ed0e..f7bf3b0216f 100644
--- a/chromium/media/video/openh264_video_encoder.h
+++ b/chromium/media/video/openh264_video_encoder.h
@@ -30,7 +30,9 @@ class MEDIA_EXPORT OpenH264VideoEncoder : public VideoEncoder {
void Encode(scoped_refptr<VideoFrame> frame,
bool key_frame,
StatusCB done_cb) override;
- void ChangeOptions(const Options& options, StatusCB done_cb) override;
+ void ChangeOptions(const Options& options,
+ OutputCB output_cb,
+ StatusCB done_cb) override;
void Flush(StatusCB done_cb) override;
private:
diff --git a/chromium/media/video/video_decode_accelerator.h b/chromium/media/video/video_decode_accelerator.h
index 22ba2520123..8f712d8b0a1 100644
--- a/chromium/media/video/video_decode_accelerator.h
+++ b/chromium/media/video/video_decode_accelerator.h
@@ -187,7 +187,7 @@ class MEDIA_EXPORT VideoDecodeAccelerator {
gfx::ColorSpace target_color_space;
// HDR metadata specified by the container.
- base::Optional<gl::HDRMetadata> hdr_metadata;
+ base::Optional<gfx::HDRMetadata> hdr_metadata;
};
// Interface for collaborating with picture interface to provide memory for
diff --git a/chromium/media/video/video_encode_accelerator.h b/chromium/media/video/video_encode_accelerator.h
index 03bfa8a199a..e7331f8846e 100644
--- a/chromium/media/video/video_encode_accelerator.h
+++ b/chromium/media/video/video_encode_accelerator.h
@@ -19,7 +19,7 @@
#include "media/base/bitstream_buffer.h"
#include "media/base/media_export.h"
#include "media/base/video_bitrate_allocation.h"
-#include "media/base/video_decoder_config.h"
+#include "media/base/video_codecs.h"
#include "media/base/video_frame.h"
#include "media/video/h264_parser.h"
#include "media/video/video_encoder_info.h"
diff --git a/chromium/media/video/video_encode_accelerator_adapter.cc b/chromium/media/video/video_encode_accelerator_adapter.cc
index 1a43bcc4303..bdfa80bd994 100644
--- a/chromium/media/video/video_encode_accelerator_adapter.cc
+++ b/chromium/media/video/video_encode_accelerator_adapter.cc
@@ -9,9 +9,11 @@
#include "base/logging.h"
#include "base/memory/ref_counted.h"
+#include "base/sequenced_task_runner.h"
#include "base/strings/stringprintf.h"
#include "base/synchronization/waitable_event.h"
#include "base/time/time.h"
+#include "build/build_config.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/video_frame.h"
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
@@ -28,18 +30,24 @@ namespace {
// to estimate bits per second for ~30 fps with ~1/16 compression rate.
constexpr int kVEADefaultBitratePerPixel = 2;
-Status SetUpVeaConfig(VideoCodecProfile profile,
- const VideoEncoder::Options& opts,
- VideoEncodeAccelerator::Config* config) {
- if (opts.width <= 0 || opts.height <= 0)
- return Status(StatusCode::kEncoderUnsupportedConfig,
- "Negative width or height values");
-
- *config = VideoEncodeAccelerator::Config(
- PIXEL_FORMAT_I420, gfx::Size(opts.width, opts.height), profile,
- opts.bitrate.value_or(opts.width * opts.height *
+VideoEncodeAccelerator::Config SetUpVeaConfig(
+ VideoCodecProfile profile,
+ const VideoEncoder::Options& opts,
+ VideoPixelFormat format,
+ VideoFrame::StorageType storage_type) {
+ auto config = VideoEncodeAccelerator::Config(
+ format, opts.frame_size, profile,
+ opts.bitrate.value_or(opts.frame_size.width() * opts.frame_size.height() *
kVEADefaultBitratePerPixel));
- return Status();
+
+#if defined(OS_LINUX) || defined(OS_CHROMEOS)
+ if (storage_type == VideoFrame::STORAGE_DMABUFS ||
+ storage_type == VideoFrame::STORAGE_GPU_MEMORY_BUFFER) {
+ config.storage_type = VideoEncodeAccelerator::Config::StorageType::kDmabuf;
+ }
+#endif
+
+ return config;
}
} // namespace
@@ -48,7 +56,7 @@ class VideoEncodeAcceleratorAdapter::SharedMemoryPool
: public base::RefCountedThreadSafe<
VideoEncodeAcceleratorAdapter::SharedMemoryPool> {
public:
- SharedMemoryPool(media::GpuVideoAcceleratorFactories* gpu_factories,
+ SharedMemoryPool(GpuVideoAcceleratorFactories* gpu_factories,
size_t region_size) {
DCHECK(gpu_factories);
gpu_factories_ = gpu_factories;
@@ -112,16 +120,20 @@ class VideoEncodeAcceleratorAdapter::SharedMemoryPool
VideoEncodeAcceleratorAdapter::PendingOp::PendingOp() = default;
VideoEncodeAcceleratorAdapter::PendingOp::~PendingOp() = default;
+VideoEncodeAcceleratorAdapter::PendingEncode::PendingEncode() = default;
+VideoEncodeAcceleratorAdapter::PendingEncode::~PendingEncode() = default;
VideoEncodeAcceleratorAdapter::VideoEncodeAcceleratorAdapter(
- media::GpuVideoAcceleratorFactories* gpu_factories,
- scoped_refptr<base::SingleThreadTaskRunner> callback_task_runner)
+ GpuVideoAcceleratorFactories* gpu_factories,
+ scoped_refptr<base::SequencedTaskRunner> callback_task_runner)
: gpu_factories_(gpu_factories),
accelerator_task_runner_(gpu_factories_->GetTaskRunner()),
- callback_task_runner_(std::move(callback_task_runner)) {}
+ callback_task_runner_(std::move(callback_task_runner)) {
+ DETACH_FROM_SEQUENCE(accelerator_sequence_checker_);
+}
VideoEncodeAcceleratorAdapter::~VideoEncodeAcceleratorAdapter() {
- DCHECK(accelerator_task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(accelerator_sequence_checker_);
}
void VideoEncodeAcceleratorAdapter::DestroyAsync(
@@ -129,7 +141,7 @@ void VideoEncodeAcceleratorAdapter::DestroyAsync(
DCHECK(self);
auto runner = self->accelerator_task_runner_;
DCHECK(runner);
- if (!runner->BelongsToCurrentThread())
+ if (!runner->RunsTasksInCurrentSequence())
runner->DeleteSoon(FROM_HERE, std::move(self));
}
@@ -137,7 +149,7 @@ void VideoEncodeAcceleratorAdapter::Initialize(VideoCodecProfile profile,
const Options& options,
OutputCB output_cb,
StatusCB done_cb) {
- DCHECK(!accelerator_task_runner_->BelongsToCurrentThread());
+ DCHECK(!accelerator_task_runner_->RunsTasksInCurrentSequence());
accelerator_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(
@@ -152,7 +164,7 @@ void VideoEncodeAcceleratorAdapter::InitializeOnAcceleratorThread(
const Options& options,
OutputCB output_cb,
StatusCB done_cb) {
- DCHECK(accelerator_task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(accelerator_sequence_checker_);
if (state_ != State::kNotInitialized) {
auto status = Status(StatusCode::kEncoderInitializeTwice,
"Encoder has already been initialized.");
@@ -168,35 +180,63 @@ void VideoEncodeAcceleratorAdapter::InitializeOnAcceleratorThread(
return;
}
- VideoEncodeAccelerator::Config vea_config;
- auto status = SetUpVeaConfig(profile, options, &vea_config);
- if (!status.is_ok()) {
+ if (options.frame_size.width() <= 0 || options.frame_size.height() <= 0) {
+ auto status = Status(StatusCode::kEncoderUnsupportedConfig,
+ "Negative width or height values.");
std::move(done_cb).Run(status);
return;
}
- if (!accelerator_->Initialize(vea_config, this)) {
- status = Status(StatusCode::kEncoderInitializationError,
- "Failed to initialize video encode accelerator.");
+ if (!options.frame_size.GetCheckedArea().IsValid()) {
+ auto status =
+ Status(StatusCode::kEncoderUnsupportedConfig, "Frame is too large.");
std::move(done_cb).Run(status);
return;
}
+ profile_ = profile;
+ options_ = options;
+ output_cb_ = std::move(output_cb);
+ state_ = State::kWaitingForFirstFrame;
+
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
- if (profile >= H264PROFILE_MIN && profile <= H264PROFILE_MAX)
+ if (profile_ >= H264PROFILE_MIN && profile_ <= H264PROFILE_MAX)
h264_converter_ = std::make_unique<H264AnnexBToAvcBitstreamConverter>();
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
- output_cb_ = std::move(output_cb);
+ std::move(done_cb).Run(Status());
+
+ // The accelerator will be initialized for real once we have the first frame.
+}
+
+void VideoEncodeAcceleratorAdapter::InitializeInternalOnAcceleratorThread() {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(accelerator_sequence_checker_);
+ DCHECK_EQ(state_, State::kWaitingForFirstFrame);
+ DCHECK(!pending_encodes_.empty());
+
+ // We use the first frame to setup the VEA config so that we can ensure that
+ // zero copy hardware encoding from the camera can be used.
+ const auto& first_frame = pending_encodes_.front()->frame;
+ auto vea_config = SetUpVeaConfig(profile_, options_, first_frame->format(),
+ first_frame->storage_type());
+
+ if (!accelerator_->Initialize(vea_config, this)) {
+ auto status = Status(StatusCode::kEncoderInitializationError,
+ "Failed to initialize video encode accelerator.");
+ InitCompleted(status);
+ return;
+ }
+
state_ = State::kInitializing;
- pending_init_ = std::make_unique<PendingOp>();
- pending_init_->done_callback = std::move(done_cb);
+ format_ = first_frame->format();
+ storage_type_ = first_frame->storage_type();
+ using_native_input_ = first_frame->HasGpuMemoryBuffer();
}
void VideoEncodeAcceleratorAdapter::Encode(scoped_refptr<VideoFrame> frame,
bool key_frame,
StatusCB done_cb) {
- DCHECK(!accelerator_task_runner_->BelongsToCurrentThread());
+ DCHECK(!accelerator_task_runner_->RunsTasksInCurrentSequence());
accelerator_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&VideoEncodeAcceleratorAdapter::EncodeOnAcceleratorThread,
@@ -208,7 +248,20 @@ void VideoEncodeAcceleratorAdapter::EncodeOnAcceleratorThread(
scoped_refptr<VideoFrame> frame,
bool key_frame,
StatusCB done_cb) {
- DCHECK(accelerator_task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(accelerator_sequence_checker_);
+
+ if (state_ == State::kWaitingForFirstFrame ||
+ state_ == State::kInitializing) {
+ auto pending_encode = std::make_unique<PendingEncode>();
+ pending_encode->done_callback = std::move(done_cb);
+ pending_encode->frame = std::move(frame);
+ pending_encode->key_frame = key_frame;
+ pending_encodes_.push_back(std::move(pending_encode));
+ if (state_ == State::kWaitingForFirstFrame)
+ InitializeInternalOnAcceleratorThread();
+ return;
+ }
+
if (state_ != State::kReadyToEncode) {
auto status =
Status(StatusCode::kEncoderFailedEncode, "Encoder can't encode now.");
@@ -216,16 +269,37 @@ void VideoEncodeAcceleratorAdapter::EncodeOnAcceleratorThread(
return;
}
- if (!frame->IsMappable() || frame->format() != media::PIXEL_FORMAT_I420) {
+#if defined(OS_LINUX) || defined(OS_CHROMEOS)
+ // Linux/ChromeOS require a special configuration to use dmabuf storage.
+ const bool is_same_storage_type = storage_type_ == frame->storage_type();
+#else
+ // Other platforms will happily mix GpuMemoryBuffer storage with regular
+ // storage, so we don't care about mismatches on other platforms.
+ const bool is_same_storage_type = true;
+#endif
+
+ if (format_ != frame->format() || !is_same_storage_type) {
+ auto status = Status(StatusCode::kEncoderFailedEncode,
+ "Unexpected frame format change.")
+ .WithData("current_format", format_)
+ .WithData("current_storage_type", storage_type_)
+ .WithData("new_frame", frame->AsHumanReadableString());
+ std::move(done_cb).Run(status);
+ return;
+ }
+
+ if (!frame->HasGpuMemoryBuffer() && !frame->IsMappable() &&
+ frame->format() != PIXEL_FORMAT_I420) {
auto status =
Status(StatusCode::kEncoderFailedEncode, "Unexpected frame format.")
- .WithData("IsMappable", frame->IsMappable())
- .WithData("format", frame->format());
+ .WithData("frame", frame->AsHumanReadableString());
std::move(done_cb).Run(std::move(status));
return;
}
- if (frame->storage_type() != media::VideoFrame::STORAGE_GPU_MEMORY_BUFFER) {
+ if (!frame->HasGpuMemoryBuffer()) {
+ DCHECK_EQ(format_, PIXEL_FORMAT_I420);
+
int32_t buffer_id;
if (!input_pool_->MaybeAllocateBuffer(&buffer_id)) {
auto status = Status(StatusCode::kEncoderFailedEncode,
@@ -239,7 +313,7 @@ void VideoEncodeAcceleratorAdapter::EncodeOnAcceleratorThread(
input_pool_->GetMapping(buffer_id);
auto shared_frame = VideoFrame::WrapExternalData(
- media::PIXEL_FORMAT_I420, frame->coded_size(), frame->visible_rect(),
+ format_, frame->coded_size(), frame->visible_rect(),
frame->natural_size(), mapping->GetMemoryAsSpan<uint8_t>().data(),
mapping->size(), frame->timestamp());
@@ -251,38 +325,80 @@ void VideoEncodeAcceleratorAdapter::EncodeOnAcceleratorThread(
}
shared_frame->BackWithSharedMemory(region);
- shared_frame->AddDestructionObserver(
- media::BindToCurrentLoop(base::BindOnce(
- &SharedMemoryPool::ReleaseBuffer, input_pool_, buffer_id)));
- libyuv::I420Copy(frame->visible_data(media::VideoFrame::kYPlane),
- frame->stride(media::VideoFrame::kYPlane),
- frame->visible_data(media::VideoFrame::kUPlane),
- frame->stride(media::VideoFrame::kUPlane),
- frame->visible_data(media::VideoFrame::kVPlane),
- frame->stride(media::VideoFrame::kVPlane),
- shared_frame->visible_data(media::VideoFrame::kYPlane),
- shared_frame->stride(media::VideoFrame::kYPlane),
- shared_frame->visible_data(media::VideoFrame::kUPlane),
- shared_frame->stride(media::VideoFrame::kUPlane),
- shared_frame->visible_data(media::VideoFrame::kVPlane),
- shared_frame->stride(media::VideoFrame::kVPlane),
+ shared_frame->AddDestructionObserver(BindToCurrentLoop(base::BindOnce(
+ &SharedMemoryPool::ReleaseBuffer, input_pool_, buffer_id)));
+ libyuv::I420Copy(frame->visible_data(VideoFrame::kYPlane),
+ frame->stride(VideoFrame::kYPlane),
+ frame->visible_data(VideoFrame::kUPlane),
+ frame->stride(VideoFrame::kUPlane),
+ frame->visible_data(VideoFrame::kVPlane),
+ frame->stride(VideoFrame::kVPlane),
+ shared_frame->visible_data(VideoFrame::kYPlane),
+ shared_frame->stride(VideoFrame::kYPlane),
+ shared_frame->visible_data(VideoFrame::kUPlane),
+ shared_frame->stride(VideoFrame::kUPlane),
+ shared_frame->visible_data(VideoFrame::kVPlane),
+ shared_frame->stride(VideoFrame::kVPlane),
frame->visible_rect().width(),
frame->visible_rect().height());
frame = std::move(shared_frame);
+ } else {
+ DCHECK_EQ(format_, PIXEL_FORMAT_NV12);
}
- auto pending_encode = std::make_unique<PendingOp>();
- pending_encode->done_callback = std::move(done_cb);
- pending_encode->timestamp = frame->timestamp();
- pending_encodes_.push_back(std::move(pending_encode));
+ auto active_encode = std::make_unique<PendingOp>();
+ active_encode->done_callback = std::move(done_cb);
+ active_encode->timestamp = frame->timestamp();
+ active_encodes_.push_back(std::move(active_encode));
accelerator_->Encode(frame, key_frame);
}
void VideoEncodeAcceleratorAdapter::ChangeOptions(const Options& options,
- StatusCB done_cb) {}
+ OutputCB output_cb,
+ StatusCB done_cb) {
+ DCHECK(!accelerator_task_runner_->RunsTasksInCurrentSequence());
+ accelerator_task_runner_->PostTask(
+ FROM_HERE,
+ base::BindOnce(
+ &VideoEncodeAcceleratorAdapter::ChangeOptionsOnAcceleratorThread,
+ base::Unretained(this), options, WrapCallback(std::move(output_cb)),
+ WrapCallback(std::move(done_cb))));
+}
+
+void VideoEncodeAcceleratorAdapter::ChangeOptionsOnAcceleratorThread(
+ const Options options,
+ OutputCB output_cb,
+ StatusCB done_cb) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(accelerator_sequence_checker_);
+ DCHECK(active_encodes_.empty());
+ DCHECK(pending_encodes_.empty());
+ DCHECK_EQ(state_, State::kReadyToEncode);
+
+ if (options.frame_size != options_.frame_size) {
+ auto status = Status(StatusCode::kEncoderInitializationError,
+ "Resolution change is not supported.");
+ std::move(done_cb).Run(status);
+ return;
+ }
+
+ uint32_t bitrate =
+ std::min(options.bitrate.value_or(options.frame_size.width() *
+ options.frame_size.height() *
+ kVEADefaultBitratePerPixel),
+ uint64_t{std::numeric_limits<uint32_t>::max()});
+
+ uint32_t framerate = uint32_t{std::round(
+ options.framerate.value_or(VideoEncodeAccelerator::kDefaultFramerate))};
+
+ accelerator_->RequestEncodingParametersChange(bitrate, framerate);
+ options_ = options;
+ if (!output_cb.is_null())
+ output_cb_ = BindToCurrentLoop(std::move(output_cb));
+ std::move(done_cb).Run(Status());
+}
void VideoEncodeAcceleratorAdapter::Flush(StatusCB done_cb) {
- DCHECK(!accelerator_task_runner_->BelongsToCurrentThread());
+ DCHECK(!accelerator_task_runner_->RunsTasksInCurrentSequence());
accelerator_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&VideoEncodeAcceleratorAdapter::FlushOnAcceleratorThread,
@@ -290,27 +406,38 @@ void VideoEncodeAcceleratorAdapter::Flush(StatusCB done_cb) {
}
void VideoEncodeAcceleratorAdapter::FlushOnAcceleratorThread(StatusCB done_cb) {
- DCHECK(accelerator_task_runner_->BelongsToCurrentThread());
- if (state_ != State::kReadyToEncode) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(accelerator_sequence_checker_);
+ if (state_ == State::kWaitingForFirstFrame) {
+ // Nothing to do since we haven't actually initialized yet.
+ std::move(done_cb).Run(Status());
+ return;
+ }
+
+ if (state_ != State::kReadyToEncode && state_ != State::kInitializing) {
auto status =
Status(StatusCode::kEncoderFailedFlush, "Encoder can't flush now");
std::move(done_cb).Run(status);
return;
}
- if (pending_encodes_.empty()) {
- // Not pending encodes, nothing to flush.
+ if (active_encodes_.empty() && pending_encodes_.empty()) {
+ // No active or pending encodes, nothing to flush.
std::move(done_cb).Run(Status());
return;
}
- state_ = State::kFlushing;
+ // When initializing the flush will be handled after pending encodes are sent.
+ if (state_ != State::kInitializing) {
+ DCHECK_EQ(state_, State::kReadyToEncode);
+ state_ = State::kFlushing;
+ }
+
pending_flush_ = std::make_unique<PendingOp>();
pending_flush_->done_callback = std::move(done_cb);
// If flush is not supported FlushCompleted() will be called by
- // BitstreamBufferReady() when |pending_encodes_| is empty.
- if (flush_support_) {
+ // BitstreamBufferReady() when |active_encodes_| is empty.
+ if (flush_support_ && state_ == State::kFlushing) {
accelerator_->Flush(
base::BindOnce(&VideoEncodeAcceleratorAdapter::FlushCompleted,
base::Unretained(this)));
@@ -321,13 +448,15 @@ void VideoEncodeAcceleratorAdapter::RequireBitstreamBuffers(
unsigned int input_count,
const gfx::Size& input_coded_size,
size_t output_buffer_size) {
+ DCHECK_CALLED_ON_VALID_SEQUENCE(accelerator_sequence_checker_);
output_pool_ = base::MakeRefCounted<SharedMemoryPool>(gpu_factories_,
output_buffer_size);
-
- size_t input_buffer_size = media::VideoFrame::AllocationSize(
- media::PIXEL_FORMAT_I420, input_coded_size);
- input_pool_ =
- base::MakeRefCounted<SharedMemoryPool>(gpu_factories_, input_buffer_size);
+ if (!using_native_input_) {
+ size_t input_buffer_size =
+ VideoFrame::AllocationSize(PIXEL_FORMAT_I420, input_coded_size);
+ input_pool_ = base::MakeRefCounted<SharedMemoryPool>(gpu_factories_,
+ input_buffer_size);
+ }
int32_t buffer_id;
if (!output_pool_->MaybeAllocateBuffer(&buffer_id)) {
@@ -367,6 +496,19 @@ void VideoEncodeAcceleratorAdapter::BitstreamBufferReady(
h264_converter_->ConvertChunk(base::span<uint8_t>(src, result.size),
base::span<uint8_t>(dst.get(), dst_size),
&config_changed, &actual_output_size);
+ if (status.code() == StatusCode::kH264BufferTooSmall) {
+ // Between AnnexB and AVCC bitstream formats, the start code length and
+ // the nal size length can be different. See H.264 specification at
+ // http://www.itu.int/rec/T-REC-H.264. Retry the conversion if the output
+ // buffer size is too small.
+ dst_size = actual_output_size;
+ dst.reset(new uint8_t[dst_size]);
+ status = h264_converter_->ConvertChunk(
+ base::span<uint8_t>(src, result.size),
+ base::span<uint8_t>(dst.get(), dst_size), &config_changed,
+ &actual_output_size);
+ }
+
if (!status.is_ok()) {
LOG(ERROR) << status.message();
NotifyError(VideoEncodeAccelerator::kPlatformFailureError);
@@ -396,15 +538,15 @@ void VideoEncodeAcceleratorAdapter::BitstreamBufferReady(
accelerator_->UseOutputBitstreamBuffer(
BitstreamBuffer(buffer_id, region->Duplicate(), region->GetSize()));
- for (auto it = pending_encodes_.begin(); it != pending_encodes_.end(); ++it) {
+ for (auto it = active_encodes_.begin(); it != active_encodes_.end(); ++it) {
if ((*it)->timestamp == result.timestamp) {
std::move((*it)->done_callback).Run(Status());
- pending_encodes_.erase(it);
+ active_encodes_.erase(it);
break;
}
}
output_cb_.Run(std::move(result), std::move(desc));
- if (pending_encodes_.empty() && !flush_support_) {
+ if (active_encodes_.empty() && !flush_support_) {
// Manually call FlushCompleted(), since |accelerator_| won't do it for us.
FlushCompleted(true);
}
@@ -424,14 +566,14 @@ void VideoEncodeAcceleratorAdapter::NotifyError(
FlushCompleted(false);
// Report the error to all encoding-done callbacks
- for (auto& encode : pending_encodes_) {
+ for (auto& encode : active_encodes_) {
auto status =
Status(StatusCode::kEncoderFailedEncode,
"VideoEncodeAccelerator encountered an error")
.WithData("VideoEncodeAccelerator::Error", int32_t{error});
std::move(encode->done_callback).Run(Status());
}
- pending_encodes_.clear();
+ active_encodes_.clear();
state_ = State::kNotInitialized;
}
@@ -439,17 +581,48 @@ void VideoEncodeAcceleratorAdapter::NotifyEncoderInfoChange(
const VideoEncoderInfo& info) {}
void VideoEncodeAcceleratorAdapter::InitCompleted(Status status) {
- DCHECK(accelerator_task_runner_->BelongsToCurrentThread());
- if (!pending_init_)
+ DCHECK_CALLED_ON_VALID_SEQUENCE(accelerator_sequence_checker_);
+
+ if (!status.is_ok()) {
+ // Report the error to all encoding-done callbacks
+ for (auto& encode : pending_encodes_) {
+ auto status = Status(StatusCode::kEncoderFailedEncode,
+ "VideoEncodeAccelerator encountered an error");
+ std::move(encode->done_callback).Run(Status());
+ }
+
+ if (pending_flush_)
+ FlushCompleted(false);
+
+ DCHECK(active_encodes_.empty());
+ pending_encodes_.clear();
+ state_ = State::kNotInitialized;
return;
+ }
+
+ state_ = State::kReadyToEncode;
- state_ = status.is_ok() ? State::kReadyToEncode : State::kNotInitialized;
- std::move(pending_init_->done_callback).Run(std::move(status));
- pending_init_.reset();
+ // Send off the encodes that came in while we were waiting for initialization.
+ for (auto& encode : pending_encodes_) {
+ EncodeOnAcceleratorThread(std::move(encode->frame), encode->key_frame,
+ std::move(encode->done_callback));
+ }
+ pending_encodes_.clear();
+
+ // If a Flush() came in during initialization, transition to flushing now that
+ // all the pending encodes have been sent.
+ if (pending_flush_) {
+ state_ = State::kFlushing;
+ if (flush_support_) {
+ accelerator_->Flush(
+ base::BindOnce(&VideoEncodeAcceleratorAdapter::FlushCompleted,
+ base::Unretained(this)));
+ }
+ }
}
void VideoEncodeAcceleratorAdapter::FlushCompleted(bool success) {
- DCHECK(accelerator_task_runner_->BelongsToCurrentThread());
+ DCHECK_CALLED_ON_VALID_SEQUENCE(accelerator_sequence_checker_);
if (!pending_flush_)
return;
@@ -462,7 +635,9 @@ void VideoEncodeAcceleratorAdapter::FlushCompleted(bool success) {
template <class T>
T VideoEncodeAcceleratorAdapter::WrapCallback(T cb) {
DCHECK(callback_task_runner_);
- return media::BindToLoop(callback_task_runner_.get(), std::move(cb));
+ if (cb.is_null())
+ return cb;
+ return BindToLoop(callback_task_runner_.get(), std::move(cb));
}
} // namespace media
diff --git a/chromium/media/video/video_encode_accelerator_adapter.h b/chromium/media/video/video_encode_accelerator_adapter.h
index 1c69ebc4a6e..bd43cf5ad38 100644
--- a/chromium/media/video/video_encode_accelerator_adapter.h
+++ b/chromium/media/video/video_encode_accelerator_adapter.h
@@ -17,6 +17,10 @@
#include "media/video/video_encode_accelerator.h"
#include "ui/gfx/geometry/size.h"
+namespace base {
+class SequencedTaskRunner;
+}
+
namespace media {
class GpuVideoAcceleratorFactories;
class H264AnnexBToAvcBitstreamConverter;
@@ -28,11 +32,11 @@ class H264AnnexBToAvcBitstreamConverter;
// - keeping track of the state machine. Forbiding encodes during flush etc.
class MEDIA_EXPORT VideoEncodeAcceleratorAdapter
: public VideoEncoder,
- public media::VideoEncodeAccelerator::Client {
+ public VideoEncodeAccelerator::Client {
public:
VideoEncodeAcceleratorAdapter(
- media::GpuVideoAcceleratorFactories* gpu_factories,
- scoped_refptr<base::SingleThreadTaskRunner> callback_task_runner);
+ GpuVideoAcceleratorFactories* gpu_factories,
+ scoped_refptr<base::SequencedTaskRunner> callback_task_runner);
~VideoEncodeAcceleratorAdapter() override;
// VideoEncoder implementation.
@@ -43,7 +47,9 @@ class MEDIA_EXPORT VideoEncodeAcceleratorAdapter
void Encode(scoped_refptr<VideoFrame> frame,
bool key_frame,
StatusCB done_cb) override;
- void ChangeOptions(const Options& options, StatusCB done_cb) override;
+ void ChangeOptions(const Options& options,
+ OutputCB output_cb,
+ StatusCB done_cb) override;
void Flush(StatusCB done_cb) override;
// VideoEncodeAccelerator::Client implementation
@@ -54,7 +60,7 @@ class MEDIA_EXPORT VideoEncodeAcceleratorAdapter
void BitstreamBufferReady(int32_t buffer_id,
const BitstreamBufferMetadata& metadata) override;
- void NotifyError(media::VideoEncodeAccelerator::Error error) override;
+ void NotifyError(VideoEncodeAccelerator::Error error) override;
void NotifyEncoderInfoChange(const VideoEncoderInfo& info) override;
@@ -65,6 +71,7 @@ class MEDIA_EXPORT VideoEncodeAcceleratorAdapter
class SharedMemoryPool;
enum class State {
kNotInitialized,
+ kWaitingForFirstFrame,
kInitializing,
kReadyToEncode,
kFlushing
@@ -83,10 +90,14 @@ class MEDIA_EXPORT VideoEncodeAcceleratorAdapter
const Options& options,
OutputCB output_cb,
StatusCB done_cb);
+ void InitializeInternalOnAcceleratorThread();
void EncodeOnAcceleratorThread(scoped_refptr<VideoFrame> frame,
bool key_frame,
StatusCB done_cb);
void FlushOnAcceleratorThread(StatusCB done_cb);
+ void ChangeOptionsOnAcceleratorThread(const Options options,
+ OutputCB output_cb,
+ StatusCB done_cb);
template <class T>
T WrapCallback(T cb);
@@ -94,25 +105,44 @@ class MEDIA_EXPORT VideoEncodeAcceleratorAdapter
scoped_refptr<SharedMemoryPool> output_pool_;
scoped_refptr<SharedMemoryPool> input_pool_;
std::unique_ptr<VideoEncodeAccelerator> accelerator_;
- media::GpuVideoAcceleratorFactories* gpu_factories_;
+ GpuVideoAcceleratorFactories* gpu_factories_;
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
std::unique_ptr<H264AnnexBToAvcBitstreamConverter> h264_converter_;
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
- base::circular_deque<std::unique_ptr<PendingOp>> pending_encodes_;
+ // These are encodes that have been sent to the accelerator but have not yet
+ // had their encoded data returned via BitstreamBufferReady().
+ base::circular_deque<std::unique_ptr<PendingOp>> active_encodes_;
+
std::unique_ptr<PendingOp> pending_flush_;
- std::unique_ptr<PendingOp> pending_init_;
// For calling accelerator_ methods
- scoped_refptr<base::SingleThreadTaskRunner> accelerator_task_runner_;
+ scoped_refptr<base::SequencedTaskRunner> accelerator_task_runner_;
+ SEQUENCE_CHECKER(accelerator_sequence_checker_);
// For calling user provided callbacks
- scoped_refptr<base::SingleThreadTaskRunner> callback_task_runner_;
+ scoped_refptr<base::SequencedTaskRunner> callback_task_runner_;
State state_ = State::kNotInitialized;
bool flush_support_ = false;
+ struct PendingEncode {
+ PendingEncode();
+ ~PendingEncode();
+ StatusCB done_callback;
+ scoped_refptr<VideoFrame> frame;
+ bool key_frame;
+ };
+
+ // These are encodes that have not been sent to the accelerator.
+ std::vector<std::unique_ptr<PendingEncode>> pending_encodes_;
+
+ bool using_native_input_;
+ VideoPixelFormat format_;
+ VideoFrame::StorageType storage_type_;
+
+ VideoCodecProfile profile_;
Options options_;
OutputCB output_cb_;
};
diff --git a/chromium/media/video/vpx_video_encoder.cc b/chromium/media/video/vpx_video_encoder.cc
index 31f5988a296..41a37eba393 100644
--- a/chromium/media/video/vpx_video_encoder.cc
+++ b/chromium/media/video/vpx_video_encoder.cc
@@ -4,6 +4,8 @@
#include "media/video/vpx_video_encoder.h"
+#include "base/numerics/checked_math.h"
+#include "base/numerics/ranges.h"
#include "base/strings/stringprintf.h"
#include "base/system/sys_info.h"
#include "base/time/time.h"
@@ -39,9 +41,12 @@ int GetNumberOfThreads(int width) {
Status SetUpVpxConfig(const VideoEncoder::Options& opts,
vpx_codec_enc_cfg_t* config) {
- if (opts.width <= 0 || opts.height <= 0)
+ if (opts.frame_size.width() <= 0 || opts.frame_size.height() <= 0)
return Status(StatusCode::kEncoderUnsupportedConfig,
- "Negative width or height values");
+ "Negative width or height values.");
+
+ if (!opts.frame_size.GetCheckedArea().IsValid())
+ return Status(StatusCode::kEncoderUnsupportedConfig, "Frame is too large.");
config->g_pass = VPX_RC_ONE_PASS;
config->g_lag_in_frames = 0;
@@ -51,7 +56,7 @@ Status SetUpVpxConfig(const VideoEncoder::Options& opts,
config->g_timebase.den = base::Time::kMicrosecondsPerSecond;
// Set the number of threads based on the image width and num of cores.
- config->g_threads = GetNumberOfThreads(opts.width);
+ config->g_threads = GetNumberOfThreads(opts.frame_size.width());
// Insert keyframes at will with a given max interval
if (opts.keyframe_interval.has_value()) {
@@ -65,13 +70,13 @@ Status SetUpVpxConfig(const VideoEncoder::Options& opts,
config->rc_target_bitrate = opts.bitrate.value() / 1000;
} else {
config->rc_end_usage = VPX_VBR;
- config->rc_target_bitrate = double{opts.width} * double{opts.height} /
- config->g_w / config->g_h *
- config->rc_target_bitrate;
+ config->rc_target_bitrate =
+ double{opts.frame_size.GetCheckedArea().ValueOrDie()} / config->g_w /
+ config->g_h * config->rc_target_bitrate;
}
- config->g_w = opts.width;
- config->g_h = opts.height;
+ config->g_w = opts.frame_size.width();
+ config->g_h = opts.frame_size.height();
return Status();
}
@@ -94,7 +99,7 @@ void VpxVideoEncoder::Initialize(VideoCodecProfile profile,
const Options& options,
OutputCB output_cb,
StatusCB done_cb) {
- done_cb = media::BindToCurrentLoop(std::move(done_cb));
+ done_cb = BindToCurrentLoop(std::move(done_cb));
if (codec_) {
std::move(done_cb).Run(StatusCode::kEncoderInitializeTwice);
return;
@@ -103,10 +108,9 @@ void VpxVideoEncoder::Initialize(VideoCodecProfile profile,
profile_ = profile;
vpx_codec_iface_t* iface = nullptr;
- if (profile == media::VP8PROFILE_ANY) {
+ if (profile == VP8PROFILE_ANY) {
iface = vpx_codec_vp8_cx();
- } else if (profile == media::VP9PROFILE_PROFILE0 ||
- profile == media::VP9PROFILE_PROFILE2) {
+ } else if (profile == VP9PROFILE_PROFILE0 || profile == VP9PROFILE_PROFILE2) {
// TODO(https://crbug.com/1116617): Consider support for profiles 1 and 3.
is_vp9_ = true;
iface = vpx_codec_vp9_cx();
@@ -129,17 +133,17 @@ void VpxVideoEncoder::Initialize(VideoCodecProfile profile,
vpx_img_fmt img_fmt = VPX_IMG_FMT_NONE;
unsigned int bits_for_storage = 8;
switch (profile) {
- case media::VP9PROFILE_PROFILE1:
+ case VP9PROFILE_PROFILE1:
codec_config_.g_profile = 1;
break;
- case media::VP9PROFILE_PROFILE2:
+ case VP9PROFILE_PROFILE2:
codec_config_.g_profile = 2;
img_fmt = VPX_IMG_FMT_I42016;
bits_for_storage = 16;
codec_config_.g_bit_depth = VPX_BITS_10;
codec_config_.g_input_bit_depth = 10;
break;
- case media::VP9PROFILE_PROFILE3:
+ case VP9PROFILE_PROFILE3:
codec_config_.g_profile = 3;
break;
default:
@@ -185,8 +189,9 @@ void VpxVideoEncoder::Initialize(VideoCodecProfile profile,
return;
}
- if (&vpx_image_ != vpx_img_wrap(&vpx_image_, img_fmt, options.width,
- options.height, 1, nullptr)) {
+ if (&vpx_image_ != vpx_img_wrap(&vpx_image_, img_fmt,
+ options.frame_size.width(),
+ options.frame_size.height(), 1, nullptr)) {
status = Status(StatusCode::kEncoderInitializationError,
"Invalid format or frame size.");
std::move(done_cb).Run(status);
@@ -207,7 +212,8 @@ void VpxVideoEncoder::Initialize(VideoCodecProfile profile,
}
options_ = options;
- output_cb_ = media::BindToCurrentLoop(std::move(output_cb));
+ originally_configured_size_ = options.frame_size;
+ output_cb_ = BindToCurrentLoop(std::move(output_cb));
codec_ = std::move(codec);
std::move(done_cb).Run(Status());
}
@@ -216,7 +222,7 @@ void VpxVideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
bool key_frame,
StatusCB done_cb) {
Status status;
- done_cb = media::BindToCurrentLoop(std::move(done_cb));
+ done_cb = BindToCurrentLoop(std::move(done_cb));
if (!codec_) {
std::move(done_cb).Run(StatusCode::kEncoderInitializeNeverCompleted);
return;
@@ -227,7 +233,7 @@ void VpxVideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
"No frame provided for encoding."));
return;
}
- if (!frame->IsMappable() || frame->format() != media::PIXEL_FORMAT_I420) {
+ if (!frame->IsMappable() || frame->format() != PIXEL_FORMAT_I420) {
status =
Status(StatusCode::kEncoderFailedEncode, "Unexpected frame format.")
.WithData("IsMappable", frame->IsMappable())
@@ -237,17 +243,17 @@ void VpxVideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
}
switch (profile_) {
- case media::VP9PROFILE_PROFILE1:
+ case VP9PROFILE_PROFILE1:
NOTREACHED();
break;
- case media::VP9PROFILE_PROFILE2:
+ case VP9PROFILE_PROFILE2:
libyuv::I420ToI010(
- frame->visible_data(media::VideoFrame::kYPlane),
- frame->stride(media::VideoFrame::kYPlane),
- frame->visible_data(media::VideoFrame::kUPlane),
- frame->stride(media::VideoFrame::kUPlane),
- frame->visible_data(media::VideoFrame::kVPlane),
- frame->stride(media::VideoFrame::kVPlane),
+ frame->visible_data(VideoFrame::kYPlane),
+ frame->stride(VideoFrame::kYPlane),
+ frame->visible_data(VideoFrame::kUPlane),
+ frame->stride(VideoFrame::kUPlane),
+ frame->visible_data(VideoFrame::kVPlane),
+ frame->stride(VideoFrame::kVPlane),
reinterpret_cast<uint16_t*>(vpx_image_.planes[VPX_PLANE_Y]),
vpx_image_.stride[VPX_PLANE_Y] / 2,
reinterpret_cast<uint16_t*>(vpx_image_.planes[VPX_PLANE_U]),
@@ -256,31 +262,29 @@ void VpxVideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
vpx_image_.stride[VPX_PLANE_V] / 2, frame->coded_size().width(),
frame->coded_size().height());
break;
- case media::VP9PROFILE_PROFILE3:
+ case VP9PROFILE_PROFILE3:
NOTREACHED();
break;
default:
vpx_image_.planes[VPX_PLANE_Y] =
- const_cast<uint8_t*>(frame->visible_data(media::VideoFrame::kYPlane));
+ const_cast<uint8_t*>(frame->visible_data(VideoFrame::kYPlane));
vpx_image_.planes[VPX_PLANE_U] =
- const_cast<uint8_t*>(frame->visible_data(media::VideoFrame::kUPlane));
+ const_cast<uint8_t*>(frame->visible_data(VideoFrame::kUPlane));
vpx_image_.planes[VPX_PLANE_V] =
- const_cast<uint8_t*>(frame->visible_data(media::VideoFrame::kVPlane));
- vpx_image_.stride[VPX_PLANE_Y] =
- frame->stride(media::VideoFrame::kYPlane);
- vpx_image_.stride[VPX_PLANE_U] =
- frame->stride(media::VideoFrame::kUPlane);
- vpx_image_.stride[VPX_PLANE_V] =
- frame->stride(media::VideoFrame::kVPlane);
+ const_cast<uint8_t*>(frame->visible_data(VideoFrame::kVPlane));
+ vpx_image_.stride[VPX_PLANE_Y] = frame->stride(VideoFrame::kYPlane);
+ vpx_image_.stride[VPX_PLANE_U] = frame->stride(VideoFrame::kUPlane);
+ vpx_image_.stride[VPX_PLANE_V] = frame->stride(VideoFrame::kVPlane);
break;
}
- auto timestamp = frame->timestamp().InMicroseconds();
- auto duration = GetFrameDuration(*frame);
+ auto duration_us = GetFrameDuration(*frame).InMicroseconds();
+ auto timestamp_us = frame->timestamp().InMicroseconds();
+ last_frame_timestamp_ = frame->timestamp();
auto deadline = VPX_DL_REALTIME;
vpx_codec_flags_t flags = key_frame ? VPX_EFLAG_FORCE_KF : 0;
- auto vpx_error = vpx_codec_encode(codec_.get(), &vpx_image_, timestamp,
- duration, flags, deadline);
+ auto vpx_error = vpx_codec_encode(codec_.get(), &vpx_image_, timestamp_us,
+ duration_us, flags, deadline);
if (vpx_error != VPX_CODEC_OK) {
std::string msg = base::StringPrintf("VPX encoding error: %s (%s)",
@@ -296,37 +300,106 @@ void VpxVideoEncoder::Encode(scoped_refptr<VideoFrame> frame,
std::move(done_cb).Run(Status());
}
-void VpxVideoEncoder::ChangeOptions(const Options& options, StatusCB done_cb) {
- done_cb = media::BindToCurrentLoop(std::move(done_cb));
+void VpxVideoEncoder::ChangeOptions(const Options& options,
+ OutputCB output_cb,
+ StatusCB done_cb) {
+ done_cb = BindToCurrentLoop(std::move(done_cb));
if (!codec_) {
std::move(done_cb).Run(StatusCode::kEncoderInitializeNeverCompleted);
return;
}
+ // Libvpx is very peculiar about encoded frame size changes,
+ // - VP8: As long as the frame area doesn't increase, internal codec
+ // structures don't need to be reallocated and codec can be simply
+ // reconfigured.
+ // - VP9: The codec cannot increase encoded width or height larger than their
+ // initial values.
+ //
+ // Mind the difference between old frame sizes:
+ // - |originally_configured_size_| is set only once when the vpx_codec_ctx_t
+ // is created.
+ // - |options_.frame_size| changes every time ChangeOptions() is called.
+ // More info can be found here:
+ // https://bugs.chromium.org/p/webm/issues/detail?id=1642
+ // https://bugs.chromium.org/p/webm/issues/detail?id=912
+ if (profile_ == VP8PROFILE_ANY) {
+ // VP8 resize restrictions
+ auto old_area = originally_configured_size_.GetCheckedArea();
+ auto new_area = options.frame_size.GetCheckedArea();
+ DCHECK(old_area.IsValid());
+ if (!new_area.IsValid() || new_area.ValueOrDie() > old_area.ValueOrDie()) {
+ auto status = Status(
+ StatusCode::kEncoderUnsupportedConfig,
+ "libvpx/VP8 doesn't support dynamically increasing frame area");
+ std::move(done_cb).Run(std::move(status));
+ return;
+ }
+ } else {
+ // VP9 resize restrictions
+ if (options.frame_size.width() > originally_configured_size_.width() ||
+ options.frame_size.height() > originally_configured_size_.height()) {
+ auto status = Status(
+ StatusCode::kEncoderUnsupportedConfig,
+ "libvpx/VP9 doesn't support dynamically increasing frame dimentions");
+ std::move(done_cb).Run(std::move(status));
+ return;
+ }
+ }
+
vpx_codec_enc_cfg_t new_config = codec_config_;
auto status = SetUpVpxConfig(options, &new_config);
- if (status.is_ok()) {
- auto vpx_error = vpx_codec_enc_config_set(codec_.get(), &new_config);
- if (vpx_error == VPX_CODEC_OK) {
- codec_config_ = new_config;
- options_ = options;
- } else {
- status = Status(StatusCode::kEncoderUnsupportedConfig,
- "Failed to set new VPX config")
- .WithData("vpx_error", vpx_error);
+ if (!status.is_ok()) {
+ std::move(done_cb).Run(status);
+ return;
+ }
+
+ if (options_.frame_size != options.frame_size) {
+ // Need to re-allocate |vpx_image_| because the size has changed.
+ auto img_fmt = vpx_image_.fmt;
+ auto bit_depth = vpx_image_.bit_depth;
+ vpx_img_free(&vpx_image_);
+ if (&vpx_image_ != vpx_img_wrap(&vpx_image_, img_fmt,
+ options.frame_size.width(),
+ options.frame_size.height(), 1, nullptr)) {
+ status = Status(StatusCode::kEncoderInitializationError,
+ "Invalid format or frame size.");
+ std::move(done_cb).Run(status);
+ return;
}
+ vpx_image_.bit_depth = bit_depth;
+ }
+
+ auto vpx_error = vpx_codec_enc_config_set(codec_.get(), &new_config);
+ if (vpx_error == VPX_CODEC_OK) {
+ codec_config_ = new_config;
+ options_ = options;
+ if (!output_cb.is_null())
+ output_cb_ = BindToCurrentLoop(std::move(output_cb));
+ } else {
+ status = Status(StatusCode::kEncoderUnsupportedConfig,
+ "Failed to set new VPX config")
+ .WithData("vpx_error", vpx_error);
}
std::move(done_cb).Run(std::move(status));
- return;
}
-uint64_t VpxVideoEncoder::GetFrameDuration(const VideoFrame& frame) {
- base::TimeDelta default_duration =
- base::TimeDelta::FromSecondsD(1.0 / options_.framerate);
- return frame.metadata()
- ->frame_duration.value_or(default_duration)
- .InMicroseconds();
+base::TimeDelta VpxVideoEncoder::GetFrameDuration(const VideoFrame& frame) {
+ // Frame has duration in metadata, use it.
+ if (frame.metadata()->frame_duration.has_value())
+ return frame.metadata()->frame_duration.value();
+
+ // Options have framerate specified, use it.
+ if (options_.framerate.has_value())
+ return base::TimeDelta::FromSecondsD(1.0 / options_.framerate.value());
+
+ // No real way to figure out duration, use time passed since the last frame
+ // as an educated guess, but clamp it within a reasonable limits.
+ constexpr auto min_duration = base::TimeDelta::FromSecondsD(1.0 / 60.0);
+ constexpr auto max_duration = base::TimeDelta::FromSecondsD(1.0 / 24.0);
+ auto duration = frame.timestamp() - last_frame_timestamp_;
+ return base::ClampToRange(duration, min_duration, max_duration);
}
VpxVideoEncoder::~VpxVideoEncoder() {
@@ -340,7 +413,7 @@ VpxVideoEncoder::~VpxVideoEncoder() {
}
void VpxVideoEncoder::Flush(StatusCB done_cb) {
- done_cb = media::BindToCurrentLoop(std::move(done_cb));
+ done_cb = BindToCurrentLoop(std::move(done_cb));
if (!codec_) {
std::move(done_cb).Run(StatusCode::kEncoderInitializeNeverCompleted);
return;
diff --git a/chromium/media/video/vpx_video_encoder.h b/chromium/media/video/vpx_video_encoder.h
index 57f20af1893..cd5a17bbcfb 100644
--- a/chromium/media/video/vpx_video_encoder.h
+++ b/chromium/media/video/vpx_video_encoder.h
@@ -28,11 +28,13 @@ class MEDIA_EXPORT VpxVideoEncoder : public VideoEncoder {
void Encode(scoped_refptr<VideoFrame> frame,
bool key_frame,
StatusCB done_cb) override;
- void ChangeOptions(const Options& options, StatusCB done_cb) override;
+ void ChangeOptions(const Options& options,
+ OutputCB output_cb,
+ StatusCB done_cb) override;
void Flush(StatusCB done_cb) override;
private:
- uint64_t GetFrameDuration(const VideoFrame& frame);
+ base::TimeDelta GetFrameDuration(const VideoFrame& frame);
void DrainOutputs();
using vpx_codec_unique_ptr =
@@ -42,6 +44,8 @@ class MEDIA_EXPORT VpxVideoEncoder : public VideoEncoder {
bool is_vp9_ = false;
vpx_codec_enc_cfg_t codec_config_ = {};
vpx_image_t vpx_image_ = {};
+ gfx::Size originally_configured_size_;
+ base::TimeDelta last_frame_timestamp_;
VideoCodecProfile profile_ = VIDEO_CODEC_PROFILE_UNKNOWN;
Options options_;
OutputCB output_cb_;
diff --git a/chromium/media/webrtc/DIR_METADATA b/chromium/media/webrtc/DIR_METADATA
new file mode 100644
index 00000000000..7ae006c85b7
--- /dev/null
+++ b/chromium/media/webrtc/DIR_METADATA
@@ -0,0 +1,11 @@
+# Metadata information for this directory.
+#
+# For more information on DIR_METADATA files, see:
+# https://source.chromium.org/chromium/infra/infra/+/master:go/src/infra/tools/dirmd/README.md
+#
+# For the schema of this file, see Metadata message:
+# https://source.chromium.org/chromium/infra/infra/+/master:go/src/infra/tools/dirmd/proto/dir_metadata.proto
+
+monorail {
+ component: "Blink>WebRTC>Audio"
+} \ No newline at end of file
diff --git a/chromium/media/webrtc/OWNERS b/chromium/media/webrtc/OWNERS
index 3e75ff446b5..094bddf3394 100644
--- a/chromium/media/webrtc/OWNERS
+++ b/chromium/media/webrtc/OWNERS
@@ -2,5 +2,3 @@ olka@chromium.org
dalecurtis@chromium.org
miu@chromium.org
ossu@chromium.org
-
-# COMPONENT: Blink>WebRTC>Audio